gcc/final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the final pass of the compiler.
23 It looks at the rtl code for a function and outputs assembler code.
24
25 Call `final_start_function' to output the assembler code for function entry,
26 `final' to output assembler code for some RTL code,
27 `final_end_function' to output assembler code for function exit.
28 If a function is compiled in several pieces, each piece is
29 output separately with `final'.
30
31 Some optimizations are also done at this level.
32 Move instructions that were made unnecessary by good register allocation
33 are detected and omitted from the output. (Though most of these
34 are removed by the last jump pass.)
35
36 Instructions to set the condition codes are omitted when it can be
37 seen that the condition codes already had the desired values.
38
39 In some cases it is sufficient if the inherited condition codes
40 have related values, but this may require the following insn
41 (the one that tests the condition codes) to be modified.
42
43 The code for the function prologue and epilogue is generated
44 directly in assembler by the target functions function_prologue and
45 function_epilogue. Those instructions never exist as rtl. */
46
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
51
52 #include "tree.h"
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "insn-attr.h"
58 #include "recog.h"
59 #include "conditions.h"
60 #include "flags.h"
61 #include "real.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "toplev.h"
67 #include "reload.h"
68 #include "intl.h"
69 #include "basic-block.h"
70 #include "target.h"
71 #include "debug.h"
72 #include "expr.h"
73 #include "cfglayout.h"
74
75 #ifdef XCOFF_DEBUGGING_INFO
76 #include "xcoffout.h" /* Needed for external data
77 declarations for e.g. AIX 4.x. */
78 #endif
79
80 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
81 #include "dwarf2out.h"
82 #endif
83
84 #ifdef DBX_DEBUGGING_INFO
85 #include "dbxout.h"
86 #endif
87
88 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
89 null default for it to save conditionalization later. */
90 #ifndef CC_STATUS_INIT
91 #define CC_STATUS_INIT
92 #endif
93
94 /* How to start an assembler comment. */
95 #ifndef ASM_COMMENT_START
96 #define ASM_COMMENT_START ";#"
97 #endif
98
99 /* Is the given character a logical line separator for the assembler? */
100 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
101 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
102 #endif
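/* With this default (';'), an inline asm string such as "nop; nop" is
   treated as two instructions by asm_insn_count below.  */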
103
104 #ifndef JUMP_TABLES_IN_TEXT_SECTION
105 #define JUMP_TABLES_IN_TEXT_SECTION 0
106 #endif
107
108 #if defined(READONLY_DATA_SECTION) || defined(READONLY_DATA_SECTION_ASM_OP)
109 #define HAVE_READONLY_DATA_SECTION 1
110 #else
111 #define HAVE_READONLY_DATA_SECTION 0
112 #endif
113
114 /* Last insn processed by final_scan_insn. */
115 static rtx debug_insn;
116 rtx current_output_insn;
117
118 /* Line number of last NOTE. */
119 static int last_linenum;
120
121 /* Highest line number in current block. */
122 static int high_block_linenum;
123
124 /* Likewise for function. */
125 static int high_function_linenum;
126
127 /* Filename of last NOTE. */
128 static const char *last_filename;
129
130 extern int length_unit_log; /* This is defined in insn-attrtab.c. */
131
132 /* Nonzero while outputting an `asm' with operands.
133 This means that inconsistencies are the user's fault, so don't abort.
134 The precise value is the insn being output, to pass to error_for_asm. */
135 rtx this_is_asm_operands;
136
137 /* Number of operands of this insn, for an `asm' with operands. */
138 static unsigned int insn_noperands;
139
140 /* Compare optimization flag. */
141
142 static rtx last_ignored_compare = 0;
143
144 /* Assign a unique number to each insn that is output.
145 This can be used to generate unique local labels. */
146
147 static int insn_counter = 0;
148
149 #ifdef HAVE_cc0
150 /* This variable contains machine-dependent flags (defined in tm.h)
151 set and examined by output routines
152 that describe how to interpret the condition codes properly. */
153
154 CC_STATUS cc_status;
155
156 /* During output of an insn, this contains a copy of cc_status
157 from before the insn. */
158
159 CC_STATUS cc_prev_status;
160 #endif
161
162 /* Indexed by hardware reg number, is 1 if that register is ever
163 used in the current function.
164
165 In life_analysis, or in stupid_life_analysis, this is set
166 up to record the hard regs used explicitly. Reload adds
167 in the hard regs used for holding pseudo regs. Final uses
168 it to generate the code in the function prologue and epilogue
169 to save and restore registers as needed. */
170
171 char regs_ever_live[FIRST_PSEUDO_REGISTER];
172
173 /* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
174 Unlike regs_ever_live, elements of this array corresponding to
175 eliminable regs like the frame pointer are set if an asm sets them. */
176
177 char regs_asm_clobbered[FIRST_PSEUDO_REGISTER];
178
179 /* Nonzero means current function must be given a frame pointer.
180 Initialized in function.c to 0. Set only in reload1.c as per
181 the needs of the function. */
182
183 int frame_pointer_needed;
184
185 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
186
187 static int block_depth;
188
189 /* Nonzero if have enabled APP processing of our assembler output. */
190
191 static int app_on;
192
193 /* If we are outputting an insn sequence, this contains the sequence rtx.
194 Zero otherwise. */
195
196 rtx final_sequence;
197
198 #ifdef ASSEMBLER_DIALECT
199
200 /* Number of the assembler dialect to use, starting at 0. */
201 static int dialect_number;
202 #endif
203
204 /* Indexed by line number, nonzero if there is a note for that line. */
205
206 static char *line_note_exists;
207
208 #ifdef HAVE_conditional_execution
209 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
210 rtx current_insn_predicate;
211 #endif
212
213 #ifdef HAVE_ATTR_length
214 static int asm_insn_count (rtx);
215 #endif
216 static void profile_function (FILE *);
217 static void profile_after_prologue (FILE *);
218 static bool notice_source_line (rtx);
219 static rtx walk_alter_subreg (rtx *);
220 static void output_asm_name (void);
221 static void output_alternate_entry_point (FILE *, rtx);
222 static tree get_mem_expr_from_op (rtx, int *);
223 static void output_asm_operand_names (rtx *, int *, int);
224 static void output_operand (rtx, int);
225 #ifdef LEAF_REGISTERS
226 static void leaf_renumber_regs (rtx);
227 #endif
228 #ifdef HAVE_cc0
229 static int alter_cond (rtx);
230 #endif
231 #ifndef ADDR_VEC_ALIGN
232 static int final_addr_vec_align (rtx);
233 #endif
234 #ifdef HAVE_ATTR_length
235 static int align_fuzz (rtx, rtx, int, unsigned);
236 #endif
237 \f
238 /* Initialize data in final at the beginning of a compilation. */
239
240 void
241 init_final (const char *filename ATTRIBUTE_UNUSED)
242 {
243 app_on = 0;
244 final_sequence = 0;
245
246 #ifdef ASSEMBLER_DIALECT
247 dialect_number = ASSEMBLER_DIALECT;
248 #endif
249 }
250
251 /* Default target function prologue and epilogue assembler output.
252
253 If not overridden for epilogue code, then the function body itself
254 contains return instructions wherever needed. */
255 void
256 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
257 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
258 {
259 }
260
261 /* Default target hook that outputs nothing to a stream. */
262 void
263 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
264 {
265 }
266
267 /* Enable APP processing of subsequent output.
268 Used before the output from an `asm' statement. */
269
270 void
271 app_enable (void)
272 {
273 if (! app_on)
274 {
275 fputs (ASM_APP_ON, asm_out_file);
276 app_on = 1;
277 }
278 }
279
280 /* Disable APP processing of subsequent output.
281 Called from varasm.c before most kinds of output. */
282
283 void
284 app_disable (void)
285 {
286 if (app_on)
287 {
288 fputs (ASM_APP_OFF, asm_out_file);
289 app_on = 0;
290 }
291 }
292 \f
293 /* Return the number of slots filled in the current
294 delayed branch sequence (we don't count the insn needing the
295 delay slot). Zero if not in a delayed branch sequence. */
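/* For example, while outputting a SEQUENCE consisting of a branch plus
   two delay-slot insns, final_sequence has three elements and this
   function returns 2.  */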
296
297 #ifdef DELAY_SLOTS
298 int
299 dbr_sequence_length (void)
300 {
301 if (final_sequence != 0)
302 return XVECLEN (final_sequence, 0) - 1;
303 else
304 return 0;
305 }
306 #endif
307 \f
308 /* The next two pages contain routines used to compute the length of an insn
309 and to shorten branches. */
310
311 /* Arrays for insn lengths, and addresses. The latter is referenced by
312 `insn_current_length'. */
313
314 static int *insn_lengths;
315
316 varray_type insn_addresses_;
317
318 /* Max uid for which the above arrays are valid. */
319 static int insn_lengths_max_uid;
320
321 /* Address of insn being processed. Used by `insn_current_length'. */
322 int insn_current_address;
323
324 /* Address of insn being processed in previous iteration. */
325 int insn_last_address;
326
327 /* Known invariant alignment of the insn being processed. */
328 int insn_current_align;
329
330 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
331 gives the next following alignment insn that increases the known
332 alignment, or NULL_RTX if there is no such insn.
333 For any alignment insn obtained this way, we can again index uid_align with
334 its uid to obtain the next following alignment insn that in turn increases
335 the alignment, until we reach NULL_RTX; in the following comments, we'll
336 call the sequence obtained this way for each insn the alignment chain
337 of that insn. */
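/* For instance (the labels are made up for illustration): suppose label L2
   is emitted with 2-byte alignment and is followed later by label L1 with
   8-byte alignment.  Then uid_align for L2 points at L1 -- the next insn
   that raises the known alignment -- and uid_align for L1 points at the
   next still-stronger alignment insn, or is NULL_RTX if there is none;
   walking these links yields the alignment chain described above.  */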
338
339 struct label_alignment
340 {
341 short alignment;
342 short max_skip;
343 };
344
345 static rtx *uid_align;
346 static int *uid_shuid;
347 static struct label_alignment *label_align;
348
349 /* Indicate that branch shortening hasn't yet been done. */
350
351 void
352 init_insn_lengths (void)
353 {
354 if (uid_shuid)
355 {
356 free (uid_shuid);
357 uid_shuid = 0;
358 }
359 if (insn_lengths)
360 {
361 free (insn_lengths);
362 insn_lengths = 0;
363 insn_lengths_max_uid = 0;
364 }
365 #ifdef HAVE_ATTR_length
366 INSN_ADDRESSES_FREE ();
367 #endif
368 if (uid_align)
369 {
370 free (uid_align);
371 uid_align = 0;
372 }
373 }
374
375 /* Obtain the current length of an insn. If branch shortening has been done,
376 get its actual length. Otherwise, get its maximum length. */
377
378 int
379 get_attr_length (rtx insn ATTRIBUTE_UNUSED)
380 {
381 #ifdef HAVE_ATTR_length
382 rtx body;
383 int i;
384 int length = 0;
385
386 if (insn_lengths_max_uid > INSN_UID (insn))
387 return insn_lengths[INSN_UID (insn)];
388 else
389 switch (GET_CODE (insn))
390 {
391 case NOTE:
392 case BARRIER:
393 case CODE_LABEL:
394 return 0;
395
396 case CALL_INSN:
397 length = insn_default_length (insn);
398 break;
399
400 case JUMP_INSN:
401 body = PATTERN (insn);
402 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
403 {
404 /* Alignment is machine-dependent and should be handled by
405 ADDR_VEC_ALIGN. */
406 }
407 else
408 length = insn_default_length (insn);
409 break;
410
411 case INSN:
412 body = PATTERN (insn);
413 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
414 return 0;
415
416 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
417 length = asm_insn_count (body) * insn_default_length (insn);
418 else if (GET_CODE (body) == SEQUENCE)
419 for (i = 0; i < XVECLEN (body, 0); i++)
420 length += get_attr_length (XVECEXP (body, 0, i));
421 else
422 length = insn_default_length (insn);
423 break;
424
425 default:
426 break;
427 }
428
429 #ifdef ADJUST_INSN_LENGTH
430 ADJUST_INSN_LENGTH (insn, length);
431 #endif
432 return length;
433 #else /* not HAVE_ATTR_length */
434 return 0;
435 #endif /* not HAVE_ATTR_length */
436 }
437 \f
438 /* Code to handle alignment inside shorten_branches. */
439
440 /* Here is an explanation of how the algorithm in align_fuzz can give
441 proper results:
442
443 Call a sequence of instructions beginning with alignment point X
444 and continuing until the next alignment point `block X'. When `X'
445 is used in an expression, it means the alignment value of the
446 alignment point.
447
448 Call the distance between the start of the first insn of block X, and
449 the end of the last insn of block X `IX', for the `inner size of X'.
450 This is clearly the sum of the instruction lengths.
451
452 Likewise with the next alignment-delimited block following X, which we
453 shall call block Y.
454
455 Call the distance between the start of the first insn of block X, and
456 the start of the first insn of block Y `OX', for the `outer size of X'.
457
458 The estimated padding is then OX - IX.
459
460 OX can be safely estimated as
461
462 if (X >= Y)
463 OX = round_up(IX, Y)
464 else
465 OX = round_up(IX, X) + Y - X
466
467 Clearly est(IX) >= real(IX), because that only depends on the
468 instruction lengths, and those being overestimated is a given.
469
470 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
471 we needn't worry about that when thinking about OX.
472
473 When X >= Y, the alignment provided by Y adds no uncertainty factor
474 for branch ranges starting before X, so we can just round what we have.
475 But when X < Y, we don't know anything about the, so to speak,
476 `middle bits', so we have to assume the worst when aligning up from an
477 address mod X to one mod Y, which is Y - X. */
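/* A worked example of the estimate above (the numbers are made up for
   illustration): suppose block X starts at an 8-byte alignment point
   (X = 8), the next alignment point requests 16-byte alignment (Y = 16),
   and the insn lengths inside X sum to IX = 10.  Since X < Y,

     OX = round_up (IX, X) + Y - X = round_up (10, 8) + 16 - 8 = 24,

   so OX - IX = 14 bytes of padding are assumed in the worst case.
   With X = 16 and Y = 8 instead, OX = round_up (10, 8) = 16 and only
   6 bytes of padding are assumed.  */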
478
479 #ifndef LABEL_ALIGN
480 #define LABEL_ALIGN(LABEL) align_labels_log
481 #endif
482
483 #ifndef LABEL_ALIGN_MAX_SKIP
484 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
485 #endif
486
487 #ifndef LOOP_ALIGN
488 #define LOOP_ALIGN(LABEL) align_loops_log
489 #endif
490
491 #ifndef LOOP_ALIGN_MAX_SKIP
492 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
493 #endif
494
495 #ifndef LABEL_ALIGN_AFTER_BARRIER
496 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
497 #endif
498
499 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
500 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
501 #endif
502
503 #ifndef JUMP_ALIGN
504 #define JUMP_ALIGN(LABEL) align_jumps_log
505 #endif
506
507 #ifndef JUMP_ALIGN_MAX_SKIP
508 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
509 #endif
510
511 #ifndef ADDR_VEC_ALIGN
512 static int
513 final_addr_vec_align (rtx addr_vec)
514 {
515 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
516
517 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
518 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
519 return exact_log2 (align);
520
521 }
522
523 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
524 #endif
525
526 #ifndef INSN_LENGTH_ALIGNMENT
527 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
528 #endif
529
530 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
531
532 static int min_labelno, max_labelno;
533
534 #define LABEL_TO_ALIGNMENT(LABEL) \
535 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
536
537 #define LABEL_TO_MAX_SKIP(LABEL) \
538 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
539
540 /* For the benefit of port-specific code, do this also as a function. */
541
542 int
543 label_to_alignment (rtx label)
544 {
545 return LABEL_TO_ALIGNMENT (label);
546 }
547
548 #ifdef HAVE_ATTR_length
549 /* The differences in addresses
550 between a branch and its target might grow or shrink depending on
551 the alignment the start insn of the range (the branch for a forward
552 branch or the label for a backward branch) starts out on; if these
553 differences are used naively, they can even oscillate infinitely.
554 We therefore want to compute a 'worst case' address difference that
555 is independent of the alignment the start insn of the range ends
556 up on, and that is at least as large as the actual difference.
557 The function align_fuzz calculates the amount we have to add to the
558 naively computed difference, by traversing the part of the alignment
559 chain of the start insn of the range that is in front of the end insn
560 of the range, and considering for each alignment the maximum amount
561 that it might contribute to a size increase.
562
563 For casesi tables, we also want to know worst case minimum amounts of
564 address difference, in case a machine description wants to introduce
565 some common offset that is added to all offsets in a table.
566 For this purpose, align_fuzz with a growth argument of 0 computes the
567 appropriate adjustment. */
568
569 /* Compute the maximum delta by which the difference of the addresses of
570 START and END might grow / shrink due to a different address for start
571 which changes the size of alignment insns between START and END.
572 KNOWN_ALIGN_LOG is the alignment known for START.
573 GROWTH should be ~0 if the objective is to compute potential code size
574 increase, and 0 if the objective is to compute potential shrink.
575 The return value is undefined for any other value of GROWTH. */
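/* For example, insn_current_reference_address below calls
   align_fuzz (seq, dest, length_unit_log, ~0) to obtain the worst-case
   growth of the distance between a forward branch and its target.  */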
576
577 static int
578 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
579 {
580 int uid = INSN_UID (start);
581 rtx align_label;
582 int known_align = 1 << known_align_log;
583 int end_shuid = INSN_SHUID (end);
584 int fuzz = 0;
585
586 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
587 {
588 int align_addr, new_align;
589
590 uid = INSN_UID (align_label);
591 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
592 if (uid_shuid[uid] > end_shuid)
593 break;
594 known_align_log = LABEL_TO_ALIGNMENT (align_label);
595 new_align = 1 << known_align_log;
596 if (new_align < known_align)
597 continue;
598 fuzz += (-align_addr ^ growth) & (new_align - known_align);
599 known_align = new_align;
600 }
601 return fuzz;
602 }
603
604 /* Compute a worst-case reference address of a branch so that it
605 can be safely used in the presence of aligned labels. Since the
606 size of the branch itself is unknown, it is not included in the range.
607 I.e. for a forward branch, the reference
608 address is the end address of the branch as known from the previous
609 branch shortening pass, minus a value to account for possible size
610 increase due to alignment. For a backward branch, it is the start
611 address of the branch as known from the current pass, plus a value
612 to account for possible size increase due to alignment.
613 NB.: Therefore, the maximum offset allowed for backward branches needs
614 to exclude the branch size. */
615
616 int
617 insn_current_reference_address (rtx branch)
618 {
619 rtx dest, seq;
620 int seq_uid;
621
622 if (! INSN_ADDRESSES_SET_P ())
623 return 0;
624
625 seq = NEXT_INSN (PREV_INSN (branch));
626 seq_uid = INSN_UID (seq);
627 if (GET_CODE (branch) != JUMP_INSN)
628 /* This can happen for example on the PA; the objective is to know the
629 offset to address something in front of the start of the function.
630 Thus, we can treat it like a backward branch.
631 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
632 any alignment we'd encounter, so we skip the call to align_fuzz. */
633 return insn_current_address;
634 dest = JUMP_LABEL (branch);
635
636 /* BRANCH has no proper alignment chain set, so use SEQ.
637 BRANCH also has no INSN_SHUID. */
638 if (INSN_SHUID (seq) < INSN_SHUID (dest))
639 {
640 /* Forward branch. */
641 return (insn_last_address + insn_lengths[seq_uid]
642 - align_fuzz (seq, dest, length_unit_log, ~0));
643 }
644 else
645 {
646 /* Backward branch. */
647 return (insn_current_address
648 + align_fuzz (dest, seq, length_unit_log, ~0));
649 }
650 }
651 #endif /* HAVE_ATTR_length */
652 \f
653 void
654 compute_alignments (void)
655 {
656 int log, max_skip, max_log;
657 basic_block bb;
658
659 if (label_align)
660 {
661 free (label_align);
662 label_align = 0;
663 }
664
665 max_labelno = max_label_num ();
666 min_labelno = get_first_label_num ();
667 label_align = xcalloc (max_labelno - min_labelno + 1,
668 sizeof (struct label_alignment));
669
670 /* If not optimizing or optimizing for size, don't assign any alignments. */
671 if (! optimize || optimize_size)
672 return;
673
674 FOR_EACH_BB (bb)
675 {
676 rtx label = bb->head;
677 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
678 edge e;
679
680 if (GET_CODE (label) != CODE_LABEL
681 || probably_never_executed_bb_p (bb))
682 continue;
683 max_log = LABEL_ALIGN (label);
684 max_skip = LABEL_ALIGN_MAX_SKIP;
685
686 for (e = bb->pred; e; e = e->pred_next)
687 {
688 if (e->flags & EDGE_FALLTHRU)
689 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
690 else
691 branch_frequency += EDGE_FREQUENCY (e);
692 }
693
694 /* There are two purposes for aligning a block with no fallthru incoming edge:
695 1) to avoid fetch stalls when the branch destination is near a cache boundary
696 2) to improve cache efficiency in case the previous block is not executed
697 (so it does not need to be in the cache).
698
699 To catch the first case, we align frequently executed blocks.
700 To catch the second, we align blocks that are executed more frequently
701 than the predecessor, and whose predecessor is likely not to be executed
702 when the function is called. */
703
704 if (!has_fallthru
705 && (branch_frequency > BB_FREQ_MAX / 10
706 || (bb->frequency > bb->prev_bb->frequency * 10
707 && (bb->prev_bb->frequency
708 <= ENTRY_BLOCK_PTR->frequency / 2))))
709 {
710 log = JUMP_ALIGN (label);
711 if (max_log < log)
712 {
713 max_log = log;
714 max_skip = JUMP_ALIGN_MAX_SKIP;
715 }
716 }
717 /* In case the block is frequent and is reached mostly by a non-fallthru edge,
718 align it. It is most likely the first block of a loop. */
719 if (has_fallthru
720 && maybe_hot_bb_p (bb)
721 && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
722 && branch_frequency > fallthru_frequency * 2)
723 {
724 log = LOOP_ALIGN (label);
725 if (max_log < log)
726 {
727 max_log = log;
728 max_skip = LOOP_ALIGN_MAX_SKIP;
729 }
730 }
731 LABEL_TO_ALIGNMENT (label) = max_log;
732 LABEL_TO_MAX_SKIP (label) = max_skip;
733 }
734 }
735 \f
736 /* Make a pass over all insns and compute their actual lengths by shortening
737 any branches of variable length if possible. */
738
739 /* Give a default value for the lowest address in a function. */
740
741 #ifndef FIRST_INSN_ADDRESS
742 #define FIRST_INSN_ADDRESS 0
743 #endif
744
745 /* shorten_branches might be called multiple times: for example, the SH
746 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
747 In order to do this, it needs proper length information, which it obtains
748 by calling shorten_branches. This cannot be collapsed with
749 shorten_branches itself into a single pass unless we also want to integrate
750 reorg.c, since the branch splitting exposes new instructions with delay
751 slots. */
752
753 void
754 shorten_branches (rtx first ATTRIBUTE_UNUSED)
755 {
756 rtx insn;
757 int max_uid;
758 int i;
759 int max_log;
760 int max_skip;
761 #ifdef HAVE_ATTR_length
762 #define MAX_CODE_ALIGN 16
763 rtx seq;
764 int something_changed = 1;
765 char *varying_length;
766 rtx body;
767 int uid;
768 rtx align_tab[MAX_CODE_ALIGN];
769
770 #endif
771
772 /* Compute maximum UID and allocate label_align / uid_shuid. */
773 max_uid = get_max_uid ();
774
775 uid_shuid = xmalloc (max_uid * sizeof *uid_shuid);
776
777 if (max_labelno != max_label_num ())
778 {
779 int old = max_labelno;
780 int n_labels;
781 int n_old_labels;
782
783 max_labelno = max_label_num ();
784
785 n_labels = max_labelno - min_labelno + 1;
786 n_old_labels = old - min_labelno + 1;
787
788 label_align = xrealloc (label_align,
789 n_labels * sizeof (struct label_alignment));
790
791 /* The range of labels grows monotonically in the function. An abort here
792 means that the initialization of the array got lost. */
793 if (n_old_labels > n_labels)
794 abort ();
795
796 memset (label_align + n_old_labels, 0,
797 (n_labels - n_old_labels) * sizeof (struct label_alignment));
798 }
799
800 /* Initialize label_align and set up uid_shuid to be strictly
801 monotonically rising with insn order. */
802 /* We use max_log here to keep track of the maximum alignment we want to
803 impose on the next CODE_LABEL (or the current one if we are processing
804 the CODE_LABEL itself). */
805
806 max_log = 0;
807 max_skip = 0;
808
809 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
810 {
811 int log;
812
813 INSN_SHUID (insn) = i++;
814 if (INSN_P (insn))
815 {
816 /* reorg might make the first insn of a loop be run only once,
817 and delete the label in front of it. Then we want to apply
818 the loop alignment to the new label created by reorg, which
819 is separated by the former loop start insn from the
820 NOTE_INSN_LOOP_BEG. */
821 }
822 else if (GET_CODE (insn) == CODE_LABEL)
823 {
824 rtx next;
825
826 /* Merge in alignments computed by compute_alignments. */
827 log = LABEL_TO_ALIGNMENT (insn);
828 if (max_log < log)
829 {
830 max_log = log;
831 max_skip = LABEL_TO_MAX_SKIP (insn);
832 }
833
834 log = LABEL_ALIGN (insn);
835 if (max_log < log)
836 {
837 max_log = log;
838 max_skip = LABEL_ALIGN_MAX_SKIP;
839 }
840 next = NEXT_INSN (insn);
841 /* ADDR_VECs only take room if read-only data goes into the text
842 section. */
843 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
844 if (next && GET_CODE (next) == JUMP_INSN)
845 {
846 rtx nextbody = PATTERN (next);
847 if (GET_CODE (nextbody) == ADDR_VEC
848 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
849 {
850 log = ADDR_VEC_ALIGN (next);
851 if (max_log < log)
852 {
853 max_log = log;
854 max_skip = LABEL_ALIGN_MAX_SKIP;
855 }
856 }
857 }
858 LABEL_TO_ALIGNMENT (insn) = max_log;
859 LABEL_TO_MAX_SKIP (insn) = max_skip;
860 max_log = 0;
861 max_skip = 0;
862 }
863 else if (GET_CODE (insn) == BARRIER)
864 {
865 rtx label;
866
867 for (label = insn; label && ! INSN_P (label);
868 label = NEXT_INSN (label))
869 if (GET_CODE (label) == CODE_LABEL)
870 {
871 log = LABEL_ALIGN_AFTER_BARRIER (insn);
872 if (max_log < log)
873 {
874 max_log = log;
875 max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
876 }
877 break;
878 }
879 }
880 }
881 #ifdef HAVE_ATTR_length
882
883 /* Allocate the rest of the arrays. */
884 insn_lengths = xmalloc (max_uid * sizeof (*insn_lengths));
885 insn_lengths_max_uid = max_uid;
886 /* Syntax errors can lead to labels being outside of the main insn stream.
887 Initialize insn_addresses, so that we get reproducible results. */
888 INSN_ADDRESSES_ALLOC (max_uid);
889
890 varying_length = xcalloc (max_uid, sizeof (char));
891
892 /* Initialize uid_align. We scan instructions
893 from end to start, and keep in align_tab[n] the last seen insn
894 that does an alignment of at least n+1, i.e. the successor
895 in the alignment chain for an insn that does / has a known
896 alignment of n. */
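/* For instance (labels made up for illustration): if label L2 requests
   2^1 alignment and a later label L1 requests 2^3 alignment, then by the
   time the backward scan reaches L2, align_tab[1] still holds L1, so
   uid_align[INSN_UID (L2)] = L1 -- the next insn that raises the known
   alignment above L2's own.  */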
897 uid_align = xcalloc (max_uid, sizeof *uid_align);
898
899 for (i = MAX_CODE_ALIGN; --i >= 0;)
900 align_tab[i] = NULL_RTX;
901 seq = get_last_insn ();
902 for (; seq; seq = PREV_INSN (seq))
903 {
904 int uid = INSN_UID (seq);
905 int log;
906 log = (GET_CODE (seq) == CODE_LABEL ? LABEL_TO_ALIGNMENT (seq) : 0);
907 uid_align[uid] = align_tab[0];
908 if (log)
909 {
910 /* Found an alignment label. */
911 uid_align[uid] = align_tab[log];
912 for (i = log - 1; i >= 0; i--)
913 align_tab[i] = seq;
914 }
915 }
916 #ifdef CASE_VECTOR_SHORTEN_MODE
917 if (optimize)
918 {
919 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
920 label fields. */
921
922 int min_shuid = INSN_SHUID (get_insns ()) - 1;
923 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
924 int rel;
925
926 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
927 {
928 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
929 int len, i, min, max, insn_shuid;
930 int min_align;
931 addr_diff_vec_flags flags;
932
933 if (GET_CODE (insn) != JUMP_INSN
934 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
935 continue;
936 pat = PATTERN (insn);
937 len = XVECLEN (pat, 1);
938 if (len <= 0)
939 abort ();
940 min_align = MAX_CODE_ALIGN;
941 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
942 {
943 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
944 int shuid = INSN_SHUID (lab);
945 if (shuid < min)
946 {
947 min = shuid;
948 min_lab = lab;
949 }
950 if (shuid > max)
951 {
952 max = shuid;
953 max_lab = lab;
954 }
955 if (min_align > LABEL_TO_ALIGNMENT (lab))
956 min_align = LABEL_TO_ALIGNMENT (lab);
957 }
958 XEXP (pat, 2) = gen_rtx_LABEL_REF (VOIDmode, min_lab);
959 XEXP (pat, 3) = gen_rtx_LABEL_REF (VOIDmode, max_lab);
960 insn_shuid = INSN_SHUID (insn);
961 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
962 flags.min_align = min_align;
963 flags.base_after_vec = rel > insn_shuid;
964 flags.min_after_vec = min > insn_shuid;
965 flags.max_after_vec = max > insn_shuid;
966 flags.min_after_base = min > rel;
967 flags.max_after_base = max > rel;
968 ADDR_DIFF_VEC_FLAGS (pat) = flags;
969 }
970 }
971 #endif /* CASE_VECTOR_SHORTEN_MODE */
972
973 /* Compute initial lengths, addresses, and varying flags for each insn. */
974 for (insn_current_address = FIRST_INSN_ADDRESS, insn = first;
975 insn != 0;
976 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
977 {
978 uid = INSN_UID (insn);
979
980 insn_lengths[uid] = 0;
981
982 if (GET_CODE (insn) == CODE_LABEL)
983 {
984 int log = LABEL_TO_ALIGNMENT (insn);
985 if (log)
986 {
987 int align = 1 << log;
988 int new_address = (insn_current_address + align - 1) & -align;
989 insn_lengths[uid] = new_address - insn_current_address;
990 }
991 }
992
993 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
994
995 if (GET_CODE (insn) == NOTE || GET_CODE (insn) == BARRIER
996 || GET_CODE (insn) == CODE_LABEL)
997 continue;
998 if (INSN_DELETED_P (insn))
999 continue;
1000
1001 body = PATTERN (insn);
1002 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1003 {
1004 /* This only takes room if read-only data goes into the text
1005 section. */
1006 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1007 insn_lengths[uid] = (XVECLEN (body,
1008 GET_CODE (body) == ADDR_DIFF_VEC)
1009 * GET_MODE_SIZE (GET_MODE (body)));
1010 /* Alignment is handled by ADDR_VEC_ALIGN. */
1011 }
1012 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1013 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1014 else if (GET_CODE (body) == SEQUENCE)
1015 {
1016 int i;
1017 int const_delay_slots;
1018 #ifdef DELAY_SLOTS
1019 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1020 #else
1021 const_delay_slots = 0;
1022 #endif
1023 /* Inside a delay slot sequence, we do not do any branch shortening
1024 if the shortening could change the number of delay slots
1025 of the branch. */
1026 for (i = 0; i < XVECLEN (body, 0); i++)
1027 {
1028 rtx inner_insn = XVECEXP (body, 0, i);
1029 int inner_uid = INSN_UID (inner_insn);
1030 int inner_length;
1031
1032 if (GET_CODE (body) == ASM_INPUT
1033 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1034 inner_length = (asm_insn_count (PATTERN (inner_insn))
1035 * insn_default_length (inner_insn));
1036 else
1037 inner_length = insn_default_length (inner_insn);
1038
1039 insn_lengths[inner_uid] = inner_length;
1040 if (const_delay_slots)
1041 {
1042 if ((varying_length[inner_uid]
1043 = insn_variable_length_p (inner_insn)) != 0)
1044 varying_length[uid] = 1;
1045 INSN_ADDRESSES (inner_uid) = (insn_current_address
1046 + insn_lengths[uid]);
1047 }
1048 else
1049 varying_length[inner_uid] = 0;
1050 insn_lengths[uid] += inner_length;
1051 }
1052 }
1053 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1054 {
1055 insn_lengths[uid] = insn_default_length (insn);
1056 varying_length[uid] = insn_variable_length_p (insn);
1057 }
1058
1059 /* If needed, do any adjustment. */
1060 #ifdef ADJUST_INSN_LENGTH
1061 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1062 if (insn_lengths[uid] < 0)
1063 fatal_insn ("negative insn length", insn);
1064 #endif
1065 }
1066
1067 /* Now loop over all the insns finding varying length insns. For each,
1068 get the current insn length. If it has changed, reflect the change.
1069 When nothing changes for a full pass, we are done. */
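/* For example, a conditional branch might have a short 2-byte form that
   only reaches nearby targets and a longer far form; as addresses settle,
   its length can flip between the two, which is why we iterate to a fixed
   point rather than making a single pass.  (The concrete sizes are only
   illustrative; the real lengths come from the insn-attr machinery.)  */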
1070
1071 while (something_changed)
1072 {
1073 something_changed = 0;
1074 insn_current_align = MAX_CODE_ALIGN - 1;
1075 for (insn_current_address = FIRST_INSN_ADDRESS, insn = first;
1076 insn != 0;
1077 insn = NEXT_INSN (insn))
1078 {
1079 int new_length;
1080 #ifdef ADJUST_INSN_LENGTH
1081 int tmp_length;
1082 #endif
1083 int length_align;
1084
1085 uid = INSN_UID (insn);
1086
1087 if (GET_CODE (insn) == CODE_LABEL)
1088 {
1089 int log = LABEL_TO_ALIGNMENT (insn);
1090 if (log > insn_current_align)
1091 {
1092 int align = 1 << log;
1093 int new_address= (insn_current_address + align - 1) & -align;
1094 insn_lengths[uid] = new_address - insn_current_address;
1095 insn_current_align = log;
1096 insn_current_address = new_address;
1097 }
1098 else
1099 insn_lengths[uid] = 0;
1100 INSN_ADDRESSES (uid) = insn_current_address;
1101 continue;
1102 }
1103
1104 length_align = INSN_LENGTH_ALIGNMENT (insn);
1105 if (length_align < insn_current_align)
1106 insn_current_align = length_align;
1107
1108 insn_last_address = INSN_ADDRESSES (uid);
1109 INSN_ADDRESSES (uid) = insn_current_address;
1110
1111 #ifdef CASE_VECTOR_SHORTEN_MODE
1112 if (optimize && GET_CODE (insn) == JUMP_INSN
1113 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1114 {
1115 rtx body = PATTERN (insn);
1116 int old_length = insn_lengths[uid];
1117 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1118 rtx min_lab = XEXP (XEXP (body, 2), 0);
1119 rtx max_lab = XEXP (XEXP (body, 3), 0);
1120 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1121 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1122 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1123 rtx prev;
1124 int rel_align = 0;
1125 addr_diff_vec_flags flags;
1126
1127 /* Avoid automatic aggregate initialization. */
1128 flags = ADDR_DIFF_VEC_FLAGS (body);
1129
1130 /* Try to find a known alignment for rel_lab. */
1131 for (prev = rel_lab;
1132 prev
1133 && ! insn_lengths[INSN_UID (prev)]
1134 && ! (varying_length[INSN_UID (prev)] & 1);
1135 prev = PREV_INSN (prev))
1136 if (varying_length[INSN_UID (prev)] & 2)
1137 {
1138 rel_align = LABEL_TO_ALIGNMENT (prev);
1139 break;
1140 }
1141
1142 /* See the comment on addr_diff_vec_flags in rtl.h for the
1143 meaning of the flags values. base: REL_LAB vec: INSN */
1144 /* Anything after INSN still has addresses from the last
1145 pass; adjust these so that they reflect our current
1146 estimate for this pass. */
1147 if (flags.base_after_vec)
1148 rel_addr += insn_current_address - insn_last_address;
1149 if (flags.min_after_vec)
1150 min_addr += insn_current_address - insn_last_address;
1151 if (flags.max_after_vec)
1152 max_addr += insn_current_address - insn_last_address;
1153 /* We want to know the worst case, i.e. lowest possible value
1154 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1155 its offset is positive, and we have to be wary of code shrink;
1156 otherwise, it is negative, and we have to be wary of code
1157 size increase. */
1158 if (flags.min_after_base)
1159 {
1160 /* If INSN is between REL_LAB and MIN_LAB, the size
1161 changes we are about to make can change the alignment
1162 within the observed offset, therefore we have to break
1163 it up into two parts that are independent. */
1164 if (! flags.base_after_vec && flags.min_after_vec)
1165 {
1166 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1167 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1168 }
1169 else
1170 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1171 }
1172 else
1173 {
1174 if (flags.base_after_vec && ! flags.min_after_vec)
1175 {
1176 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1177 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1178 }
1179 else
1180 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1181 }
1182 /* Likewise, determine the highest possible value
1183 for the offset of MAX_LAB. */
1184 if (flags.max_after_base)
1185 {
1186 if (! flags.base_after_vec && flags.max_after_vec)
1187 {
1188 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1189 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1190 }
1191 else
1192 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1193 }
1194 else
1195 {
1196 if (flags.base_after_vec && ! flags.max_after_vec)
1197 {
1198 max_addr += align_fuzz (max_lab, insn, 0, 0);
1199 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1200 }
1201 else
1202 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1203 }
1204 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1205 max_addr - rel_addr,
1206 body));
1207 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1208 {
1209 insn_lengths[uid]
1210 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1211 insn_current_address += insn_lengths[uid];
1212 if (insn_lengths[uid] != old_length)
1213 something_changed = 1;
1214 }
1215
1216 continue;
1217 }
1218 #endif /* CASE_VECTOR_SHORTEN_MODE */
1219
1220 if (! (varying_length[uid]))
1221 {
1222 if (GET_CODE (insn) == INSN
1223 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1224 {
1225 int i;
1226
1227 body = PATTERN (insn);
1228 for (i = 0; i < XVECLEN (body, 0); i++)
1229 {
1230 rtx inner_insn = XVECEXP (body, 0, i);
1231 int inner_uid = INSN_UID (inner_insn);
1232
1233 INSN_ADDRESSES (inner_uid) = insn_current_address;
1234
1235 insn_current_address += insn_lengths[inner_uid];
1236 }
1237 }
1238 else
1239 insn_current_address += insn_lengths[uid];
1240
1241 continue;
1242 }
1243
1244 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1245 {
1246 int i;
1247
1248 body = PATTERN (insn);
1249 new_length = 0;
1250 for (i = 0; i < XVECLEN (body, 0); i++)
1251 {
1252 rtx inner_insn = XVECEXP (body, 0, i);
1253 int inner_uid = INSN_UID (inner_insn);
1254 int inner_length;
1255
1256 INSN_ADDRESSES (inner_uid) = insn_current_address;
1257
1258 /* insn_current_length returns 0 for insns with a
1259 non-varying length. */
1260 if (! varying_length[inner_uid])
1261 inner_length = insn_lengths[inner_uid];
1262 else
1263 inner_length = insn_current_length (inner_insn);
1264
1265 if (inner_length != insn_lengths[inner_uid])
1266 {
1267 insn_lengths[inner_uid] = inner_length;
1268 something_changed = 1;
1269 }
1270 insn_current_address += insn_lengths[inner_uid];
1271 new_length += inner_length;
1272 }
1273 }
1274 else
1275 {
1276 new_length = insn_current_length (insn);
1277 insn_current_address += new_length;
1278 }
1279
1280 #ifdef ADJUST_INSN_LENGTH
1281 /* If needed, do any adjustment. */
1282 tmp_length = new_length;
1283 ADJUST_INSN_LENGTH (insn, new_length);
1284 insn_current_address += (new_length - tmp_length);
1285 #endif
1286
1287 if (new_length != insn_lengths[uid])
1288 {
1289 insn_lengths[uid] = new_length;
1290 something_changed = 1;
1291 }
1292 }
1293 /* For a non-optimizing compile, do only a single pass. */
1294 if (!optimize)
1295 break;
1296 }
1297
1298 free (varying_length);
1299
1300 #endif /* HAVE_ATTR_length */
1301 }
1302
1303 #ifdef HAVE_ATTR_length
1304 /* Given the body of an INSN known to be generated by an ASM statement, return
1305 the number of machine instructions likely to be generated for this insn.
1306 This is used to compute its length. */
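/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR (';'),
   an asm template such as "mov r0,r1; add r2,r3\n sub r4,r5" counts as
   three instructions: one to start with, plus one for each ';' or
   newline.  (The mnemonics are made up; only the separators matter.)  */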
1307
1308 static int
1309 asm_insn_count (rtx body)
1310 {
1311 const char *template;
1312 int count = 1;
1313
1314 if (GET_CODE (body) == ASM_INPUT)
1315 template = XSTR (body, 0);
1316 else
1317 template = decode_asm_operands (body, NULL, NULL, NULL, NULL);
1318
1319 for (; *template; template++)
1320 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
1321 count++;
1322
1323 return count;
1324 }
1325 #endif
1326 \f
1327 /* Output assembler code for the start of a function,
1328 and initialize some of the variables in this file
1329 for the new function. The label for the function and associated
1330 assembler pseudo-ops have already been output in `assemble_start_function'.
1331
1332 FIRST is the first insn of the rtl for the function being compiled.
1333 FILE is the file to write assembler code to.
1334 OPTIMIZE is nonzero if we should eliminate redundant
1335 test and compare insns. */
1336
1337 void
1338 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1339 int optimize ATTRIBUTE_UNUSED)
1340 {
1341 block_depth = 0;
1342
1343 this_is_asm_operands = 0;
1344
1345 last_filename = locator_file (prologue_locator);
1346 last_linenum = locator_line (prologue_locator);
1347
1348 high_block_linenum = high_function_linenum = last_linenum;
1349
1350 (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1351
1352 #if defined (DWARF2_UNWIND_INFO) || defined (IA64_UNWIND_INFO)
1353 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1354 dwarf2out_begin_prologue (0, NULL);
1355 #endif
1356
1357 #ifdef LEAF_REG_REMAP
1358 if (current_function_uses_only_leaf_regs)
1359 leaf_renumber_regs (first);
1360 #endif
1361
1362 /* The Sun386i and perhaps other machines don't work right
1363 if the profiling code comes after the prologue. */
1364 #ifdef PROFILE_BEFORE_PROLOGUE
1365 if (current_function_profile)
1366 profile_function (file);
1367 #endif /* PROFILE_BEFORE_PROLOGUE */
1368
1369 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1370 if (dwarf2out_do_frame ())
1371 dwarf2out_frame_debug (NULL_RTX);
1372 #endif
1373
1374 /* If debugging, assign block numbers to all of the blocks in this
1375 function. */
1376 if (write_symbols)
1377 {
1378 remove_unnecessary_notes ();
1379 reemit_insn_block_notes ();
1380 number_blocks (current_function_decl);
1381 /* We never actually put out begin/end notes for the top-level
1382 block in the function. But, conceptually, that block is
1383 always needed. */
1384 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1385 }
1386
1387 /* First output the function prologue: code to set up the stack frame. */
1388 (*targetm.asm_out.function_prologue) (file, get_frame_size ());
1389
1390 /* If the machine represents the prologue as RTL, the profiling code must
1391 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1392 #ifdef HAVE_prologue
1393 if (! HAVE_prologue)
1394 #endif
1395 profile_after_prologue (file);
1396 }
1397
1398 static void
1399 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1400 {
1401 #ifndef PROFILE_BEFORE_PROLOGUE
1402 if (current_function_profile)
1403 profile_function (file);
1404 #endif /* not PROFILE_BEFORE_PROLOGUE */
1405 }
1406
1407 static void
1408 profile_function (FILE *file ATTRIBUTE_UNUSED)
1409 {
1410 #ifndef NO_PROFILE_COUNTERS
1411 # define NO_PROFILE_COUNTERS 0
1412 #endif
1413 #if defined(ASM_OUTPUT_REG_PUSH)
1414 int sval = current_function_returns_struct;
1415 rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1416 #if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1417 int cxt = current_function_needs_context;
1418 #endif
1419 #endif /* ASM_OUTPUT_REG_PUSH */
1420
1421 if (! NO_PROFILE_COUNTERS)
1422 {
1423 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1424 data_section ();
1425 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1426 (*targetm.asm_out.internal_label) (file, "LP", current_function_funcdef_no);
1427 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1428 }
1429
1430 function_section (current_function_decl);
1431
1432 #if defined(ASM_OUTPUT_REG_PUSH)
1433 if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
1434 ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1435 #endif
1436
1437 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1438 if (cxt)
1439 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1440 #else
1441 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1442 if (cxt)
1443 {
1444 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1445 }
1446 #endif
1447 #endif
1448
1449 FUNCTION_PROFILER (file, current_function_funcdef_no);
1450
1451 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1452 if (cxt)
1453 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1454 #else
1455 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1456 if (cxt)
1457 {
1458 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1459 }
1460 #endif
1461 #endif
1462
1463 #if defined(ASM_OUTPUT_REG_PUSH)
1464 if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
1465 ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1466 #endif
1467 }
1468
1469 /* Output assembler code for the end of a function.
1470 For clarity, args are same as those of `final_start_function'
1471 even though not all of them are needed. */
1472
1473 void
1474 final_end_function (void)
1475 {
1476 app_disable ();
1477
1478 (*debug_hooks->end_function) (high_function_linenum);
1479
1480 /* Finally, output the function epilogue:
1481 code to restore the stack frame and return to the caller. */
1482 (*targetm.asm_out.function_epilogue) (asm_out_file, get_frame_size ());
1483
1484 /* And debug output. */
1485 (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1486
1487 #if defined (DWARF2_UNWIND_INFO)
1488 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1489 && dwarf2out_do_frame ())
1490 dwarf2out_end_epilogue (last_linenum, last_filename);
1491 #endif
1492 }
1493 \f
1494 /* Output assembler code for some insns: all or part of a function.
1495 For description of args, see `final_start_function', above.
1496
1497 PRESCAN is 1 if we are not really outputting,
1498 just scanning as if we were outputting.
1499 Prescanning deletes and rearranges insns just like ordinary output.
1500 PRESCAN is -2 if we are outputting after having prescanned.
1501 In this case, don't try to delete or rearrange insns
1502 because that has already been done.
1503 Prescanning is done only on certain machines. */
1504
1505 void
1506 final (rtx first, FILE *file, int optimize, int prescan)
1507 {
1508 rtx insn;
1509 int max_line = 0;
1510 int max_uid = 0;
1511
1512 last_ignored_compare = 0;
1513
1514 /* Make a map indicating which line numbers appear in this function.
1515 When producing SDB debugging info, delete troublesome line number
1516 notes from inlined functions in other files as well as duplicate
1517 line number notes. */
1518 #ifdef SDB_DEBUGGING_INFO
1519 if (write_symbols == SDB_DEBUG)
1520 {
1521 rtx last = 0;
1522 for (insn = first; insn; insn = NEXT_INSN (insn))
1523 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
1524 {
1525 if ((RTX_INTEGRATED_P (insn)
1526 && strcmp (NOTE_SOURCE_FILE (insn), main_input_filename) != 0)
1527 || (last != 0
1528 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
1529 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)))
1530 {
1531 delete_insn (insn); /* Use delete_note. */
1532 continue;
1533 }
1534 last = insn;
1535 if (NOTE_LINE_NUMBER (insn) > max_line)
1536 max_line = NOTE_LINE_NUMBER (insn);
1537 }
1538 }
1539 else
1540 #endif
1541 {
1542 for (insn = first; insn; insn = NEXT_INSN (insn))
1543 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > max_line)
1544 max_line = NOTE_LINE_NUMBER (insn);
1545 }
1546
1547 line_note_exists = xcalloc (max_line + 1, sizeof (char));
1548
1549 for (insn = first; insn; insn = NEXT_INSN (insn))
1550 {
1551 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1552 max_uid = INSN_UID (insn);
1553 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
1554 line_note_exists[NOTE_LINE_NUMBER (insn)] = 1;
1555 #ifdef HAVE_cc0
1556 /* If CC tracking across branches is enabled, record, for each label
1557 that is reached from only one place, the insn which jumps to it. */
1558 if (optimize && GET_CODE (insn) == JUMP_INSN)
1559 {
1560 rtx lab = JUMP_LABEL (insn);
1561 if (lab && LABEL_NUSES (lab) == 1)
1562 {
1563 LABEL_REFS (lab) = insn;
1564 }
1565 }
1566 #endif
1567 }
1568
1569 init_recog ();
1570
1571 CC_STATUS_INIT;
1572
1573 /* Output the insns. */
1574 for (insn = NEXT_INSN (first); insn;)
1575 {
1576 #ifdef HAVE_ATTR_length
1577 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1578 {
1579 /* This can be triggered by bugs elsewhere in the compiler if
1580 new insns are created after init_insn_lengths is called. */
1581 if (GET_CODE (insn) == NOTE)
1582 insn_current_address = -1;
1583 else
1584 abort ();
1585 }
1586 else
1587 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1588 #endif /* HAVE_ATTR_length */
1589
1590 insn = final_scan_insn (insn, file, optimize, prescan, 0);
1591 }
1592
1593 free (line_note_exists);
1594 line_note_exists = NULL;
1595 }
1596 \f
1597 const char *
1598 get_insn_template (int code, rtx insn)
1599 {
1600 const void *output = insn_data[code].output;
1601 switch (insn_data[code].output_format)
1602 {
1603 case INSN_OUTPUT_FORMAT_SINGLE:
1604 return (const char *) output;
1605 case INSN_OUTPUT_FORMAT_MULTI:
1606 return ((const char *const *) output)[which_alternative];
1607 case INSN_OUTPUT_FORMAT_FUNCTION:
1608 if (insn == NULL)
1609 abort ();
1610 return (*(insn_output_fn) output) (recog_data.operand, insn);
1611
1612 default:
1613 abort ();
1614 }
1615 }
1616
1617 /* Emit the appropriate declaration for an alternate-entry-point
1618 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1619 LABEL_KIND != LABEL_NORMAL.
1620
1621 The case fall-through in this function is intentional. */
1622 static void
1623 output_alternate_entry_point (FILE *file, rtx insn)
1624 {
1625 const char *name = LABEL_NAME (insn);
1626
1627 switch (LABEL_KIND (insn))
1628 {
1629 case LABEL_WEAK_ENTRY:
1630 #ifdef ASM_WEAKEN_LABEL
1631 ASM_WEAKEN_LABEL (file, name);
1632 #endif
1633 case LABEL_GLOBAL_ENTRY:
1634 (*targetm.asm_out.globalize_label) (file, name);
1635 case LABEL_STATIC_ENTRY:
1636 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1637 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1638 #endif
1639 ASM_OUTPUT_LABEL (file, name);
1640 break;
1641
1642 case LABEL_NORMAL:
1643 default:
1644 abort ();
1645 }
1646 }
1647
1648 /* The final scan for one insn, INSN.
1649 Args are same as in `final', except that INSN
1650 is the insn being scanned.
1651 Value returned is the next insn to be scanned.
1652
1653 NOPEEPHOLES is the flag to disallow peephole processing (currently
1654 used for within delayed branch sequence output). */
1655
1656 rtx
1657 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1658 int prescan, int nopeepholes ATTRIBUTE_UNUSED)
1659 {
1660 #ifdef HAVE_cc0
1661 rtx set;
1662 #endif
1663
1664 insn_counter++;
1665
1666 /* Ignore deleted insns. These can occur when we split insns (due to a
1667 template of "#") while not optimizing. */
1668 if (INSN_DELETED_P (insn))
1669 return NEXT_INSN (insn);
1670
1671 switch (GET_CODE (insn))
1672 {
1673 case NOTE:
1674 if (prescan > 0)
1675 break;
1676
1677 switch (NOTE_LINE_NUMBER (insn))
1678 {
1679 case NOTE_INSN_DELETED:
1680 case NOTE_INSN_LOOP_BEG:
1681 case NOTE_INSN_LOOP_END:
1682 case NOTE_INSN_LOOP_END_TOP_COND:
1683 case NOTE_INSN_LOOP_CONT:
1684 case NOTE_INSN_LOOP_VTOP:
1685 case NOTE_INSN_FUNCTION_END:
1686 case NOTE_INSN_REPEATED_LINE_NUMBER:
1687 case NOTE_INSN_EXPECTED_VALUE:
1688 break;
1689
1690 case NOTE_INSN_BASIC_BLOCK:
1691 #ifdef IA64_UNWIND_INFO
1692 IA64_UNWIND_EMIT (asm_out_file, insn);
1693 #endif
1694 if (flag_debug_asm)
1695 fprintf (asm_out_file, "\t%s basic block %d\n",
1696 ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1697 break;
1698
1699 case NOTE_INSN_EH_REGION_BEG:
1700 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1701 NOTE_EH_HANDLER (insn));
1702 break;
1703
1704 case NOTE_INSN_EH_REGION_END:
1705 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1706 NOTE_EH_HANDLER (insn));
1707 break;
1708
1709 case NOTE_INSN_PROLOGUE_END:
1710 (*targetm.asm_out.function_end_prologue) (file);
1711 profile_after_prologue (file);
1712 break;
1713
1714 case NOTE_INSN_EPILOGUE_BEG:
1715 (*targetm.asm_out.function_begin_epilogue) (file);
1716 break;
1717
1718 case NOTE_INSN_FUNCTION_BEG:
1719 app_disable ();
1720 (*debug_hooks->end_prologue) (last_linenum, last_filename);
1721 break;
1722
1723 case NOTE_INSN_BLOCK_BEG:
1724 if (debug_info_level == DINFO_LEVEL_NORMAL
1725 || debug_info_level == DINFO_LEVEL_VERBOSE
1726 || write_symbols == DWARF_DEBUG
1727 || write_symbols == DWARF2_DEBUG
1728 || write_symbols == VMS_AND_DWARF2_DEBUG
1729 || write_symbols == VMS_DEBUG)
1730 {
1731 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1732
1733 app_disable ();
1734 ++block_depth;
1735 high_block_linenum = last_linenum;
1736
1737 /* Output debugging info about the symbol-block beginning. */
1738 (*debug_hooks->begin_block) (last_linenum, n);
1739
1740 /* Mark this block as output. */
1741 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1742 }
1743 break;
1744
1745 case NOTE_INSN_BLOCK_END:
1746 if (debug_info_level == DINFO_LEVEL_NORMAL
1747 || debug_info_level == DINFO_LEVEL_VERBOSE
1748 || write_symbols == DWARF_DEBUG
1749 || write_symbols == DWARF2_DEBUG
1750 || write_symbols == VMS_AND_DWARF2_DEBUG
1751 || write_symbols == VMS_DEBUG)
1752 {
1753 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1754
1755 app_disable ();
1756
1757 /* End of a symbol-block. */
1758 --block_depth;
1759 if (block_depth < 0)
1760 abort ();
1761
1762 (*debug_hooks->end_block) (high_block_linenum, n);
1763 }
1764 break;
1765
1766 case NOTE_INSN_DELETED_LABEL:
1767 /* Emit the label. We may have deleted the CODE_LABEL because
1768 the label could be proved to be unreachable, though still
1769 referenced (in the form of having its address taken). */
1770 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1771 break;
1772
1773 case 0:
1774 break;
1775
1776 default:
1777 if (NOTE_LINE_NUMBER (insn) <= 0)
1778 abort ();
1779 break;
1780 }
1781 break;
1782
1783 case BARRIER:
1784 #if defined (DWARF2_UNWIND_INFO)
1785 if (dwarf2out_do_frame ())
1786 dwarf2out_frame_debug (insn);
1787 #endif
1788 break;
1789
1790 case CODE_LABEL:
1791 /* The target port might emit labels in the output function for
1792 some insn, e.g. sh.c output_branchy_insn. */
1793 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
1794 {
1795 int align = LABEL_TO_ALIGNMENT (insn);
1796 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1797 int max_skip = LABEL_TO_MAX_SKIP (insn);
1798 #endif
1799
1800 if (align && NEXT_INSN (insn))
1801 {
1802 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1803 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1804 #else
1805 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1806 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1807 #else
1808 ASM_OUTPUT_ALIGN (file, align);
1809 #endif
1810 #endif
1811 }
1812 }
1813 #ifdef HAVE_cc0
1814 CC_STATUS_INIT;
1815 /* If this label is reached from only one place, set the condition
1816 codes from the instruction just before the branch. */
1817
1818 /* Disabled because some insns set cc_status in the C output code
1819 and NOTICE_UPDATE_CC alone can set incorrect status. */
1820 if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1821 {
1822 rtx jump = LABEL_REFS (insn);
1823 rtx barrier = prev_nonnote_insn (insn);
1824 rtx prev;
1825 /* If the LABEL_REFS field of this label has been set to point
1826 at a branch, the predecessor of the branch is a regular
1827 insn, and that branch is the only way to reach this label,
1828 set the condition codes based on the branch and its
1829 predecessor. */
1830 if (barrier && GET_CODE (barrier) == BARRIER
1831 && jump && GET_CODE (jump) == JUMP_INSN
1832 && (prev = prev_nonnote_insn (jump))
1833 && GET_CODE (prev) == INSN)
1834 {
1835 NOTICE_UPDATE_CC (PATTERN (prev), prev);
1836 NOTICE_UPDATE_CC (PATTERN (jump), jump);
1837 }
1838 }
1839 #endif
1840 if (prescan > 0)
1841 break;
1842
1843 #ifdef FINAL_PRESCAN_LABEL
1844 FINAL_PRESCAN_INSN (insn, NULL, 0);
1845 #endif
1846
1847 if (LABEL_NAME (insn))
1848 (*debug_hooks->label) (insn);
1849
1850 if (app_on)
1851 {
1852 fputs (ASM_APP_OFF, file);
1853 app_on = 0;
1854 }
1855 if (NEXT_INSN (insn) != 0
1856 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN)
1857 {
1858 rtx nextbody = PATTERN (NEXT_INSN (insn));
1859
1860 /* If this label is followed by a jump-table,
1861 make sure we put the label in the read-only section. Also
1862 possibly write the label and jump table together. */
1863
1864 if (GET_CODE (nextbody) == ADDR_VEC
1865 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
1866 {
1867 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1868 /* In this case, the case vector is being moved by the
1869 target, so don't output the label at all. Leave that
1870 to the back end macros. */
1871 #else
1872 if (! JUMP_TABLES_IN_TEXT_SECTION)
1873 {
1874 int log_align;
1875
1876 readonly_data_section ();
1877
1878 #ifdef ADDR_VEC_ALIGN
1879 log_align = ADDR_VEC_ALIGN (NEXT_INSN (insn));
1880 #else
1881 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1882 #endif
1883 ASM_OUTPUT_ALIGN (file, log_align);
1884 }
1885 else
1886 function_section (current_function_decl);
1887
1888 #ifdef ASM_OUTPUT_CASE_LABEL
1889 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
1890 NEXT_INSN (insn));
1891 #else
1892 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
1893 #endif
1894 #endif
1895 break;
1896 }
1897 }
1898 if (LABEL_ALT_ENTRY_P (insn))
1899 output_alternate_entry_point (file, insn);
1900 else
1901 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
1902 break;
1903
1904 default:
1905 {
1906 rtx body = PATTERN (insn);
1907 int insn_code_number;
1908 const char *template;
1909 rtx note;
1910
1911 /* An INSN, JUMP_INSN or CALL_INSN.
1912 First check for special kinds that recog doesn't recognize. */
1913
1914 if (GET_CODE (body) == USE /* These are just declarations */
1915 || GET_CODE (body) == CLOBBER)
1916 break;
1917
1918 #ifdef HAVE_cc0
1919 /* If there is a REG_CC_SETTER note on this insn, it means that
1920 the setting of the condition code was done in the delay slot
1921 of the insn that branched here. So recover the cc status
1922 from the insn that set it. */
1923
1924 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1925 if (note)
1926 {
1927 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
1928 cc_prev_status = cc_status;
1929 }
1930 #endif
1931
1932 /* Detect insns that are really jump-tables
1933 and output them as such. */
1934
1935 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1936 {
1937 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
1938 int vlen, idx;
1939 #endif
1940
1941 if (prescan > 0)
1942 break;
1943
1944 if (app_on)
1945 {
1946 fputs (ASM_APP_OFF, file);
1947 app_on = 0;
1948 }
1949
1950 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1951 if (GET_CODE (body) == ADDR_VEC)
1952 {
1953 #ifdef ASM_OUTPUT_ADDR_VEC
1954 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
1955 #else
1956 abort ();
1957 #endif
1958 }
1959 else
1960 {
1961 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
1962 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
1963 #else
1964 abort ();
1965 #endif
1966 }
1967 #else
1968 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
1969 for (idx = 0; idx < vlen; idx++)
1970 {
1971 if (GET_CODE (body) == ADDR_VEC)
1972 {
1973 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
1974 ASM_OUTPUT_ADDR_VEC_ELT
1975 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
1976 #else
1977 abort ();
1978 #endif
1979 }
1980 else
1981 {
1982 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
1983 ASM_OUTPUT_ADDR_DIFF_ELT
1984 (file,
1985 body,
1986 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
1987 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
1988 #else
1989 abort ();
1990 #endif
1991 }
1992 }
1993 #ifdef ASM_OUTPUT_CASE_END
1994 ASM_OUTPUT_CASE_END (file,
1995 CODE_LABEL_NUMBER (PREV_INSN (insn)),
1996 insn);
1997 #endif
1998 #endif
1999
2000 function_section (current_function_decl);
2001
2002 break;
2003 }
2004 /* Output debugging info for this insn's source line, if its
2005 location differs from that of the previous insn. */
2006 if (notice_source_line (insn))
2007 {
2008 (*debug_hooks->source_line) (last_linenum, last_filename);
2009 }
2010
2011 if (GET_CODE (body) == ASM_INPUT)
2012 {
2013 const char *string = XSTR (body, 0);
2014
2015 /* There's no telling what that did to the condition codes. */
2016 CC_STATUS_INIT;
2017 if (prescan > 0)
2018 break;
2019
2020 if (string[0])
2021 {
2022 if (! app_on)
2023 {
2024 fputs (ASM_APP_ON, file);
2025 app_on = 1;
2026 }
2027 fprintf (asm_out_file, "\t%s\n", string);
2028 }
2029 break;
2030 }
2031
2032 /* Detect `asm' construct with operands. */
2033 if (asm_noperands (body) >= 0)
2034 {
2035 unsigned int noperands = asm_noperands (body);
2036 rtx *ops = alloca (noperands * sizeof (rtx));
2037 const char *string;
2038
2039 /* There's no telling what that did to the condition codes. */
2040 CC_STATUS_INIT;
2041 if (prescan > 0)
2042 break;
2043
2044 /* Get out the operand values. */
2045 string = decode_asm_operands (body, ops, NULL, NULL, NULL);
2046 /* Inhibit aborts on what would otherwise be compiler bugs. */
2047 insn_noperands = noperands;
2048 this_is_asm_operands = insn;
2049
2050 #ifdef FINAL_PRESCAN_INSN
2051 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2052 #endif
2053
2054 /* Output the insn using them. */
2055 if (string[0])
2056 {
2057 if (! app_on)
2058 {
2059 fputs (ASM_APP_ON, file);
2060 app_on = 1;
2061 }
2062 output_asm_insn (string, ops);
2063 }
2064
2065 this_is_asm_operands = 0;
2066 break;
2067 }
2068
2069 if (prescan <= 0 && app_on)
2070 {
2071 fputs (ASM_APP_OFF, file);
2072 app_on = 0;
2073 }
2074
2075 if (GET_CODE (body) == SEQUENCE)
2076 {
2077 /* A delayed-branch sequence */
2078 int i;
2079 rtx next;
2080
2081 if (prescan > 0)
2082 break;
2083 final_sequence = body;
2084
2085 /* Record the delay slots' frame information before the branch.
2086 This is needed for delayed calls: see execute_cfa_program(). */
2087 #if defined (DWARF2_UNWIND_INFO)
2088 if (dwarf2out_do_frame ())
2089 for (i = 1; i < XVECLEN (body, 0); i++)
2090 dwarf2out_frame_debug (XVECEXP (body, 0, i));
2091 #endif
2092
2093 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2094 force the restoration of a comparison that was previously
2095 thought unnecessary. If that happens, cancel this sequence
2096 and cause that insn to be restored. */
2097
2098 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, prescan, 1);
2099 if (next != XVECEXP (body, 0, 1))
2100 {
2101 final_sequence = 0;
2102 return next;
2103 }
2104
2105 for (i = 1; i < XVECLEN (body, 0); i++)
2106 {
2107 rtx insn = XVECEXP (body, 0, i);
2108 rtx next = NEXT_INSN (insn);
2109 /* We loop in case any instruction in a delay slot gets
2110 split. */
2111 do
2112 insn = final_scan_insn (insn, file, 0, prescan, 1);
2113 while (insn != next);
2114 }
2115 #ifdef DBR_OUTPUT_SEQEND
2116 DBR_OUTPUT_SEQEND (file);
2117 #endif
2118 final_sequence = 0;
2119
2120 /* If the insn requiring the delay slot was a CALL_INSN, the
2121 insns in the delay slot are actually executed before the
2122 called function. Hence we don't preserve any CC-setting
2123 actions in these insns and the CC must be marked as being
2124 clobbered by the function. */
2125 if (GET_CODE (XVECEXP (body, 0, 0)) == CALL_INSN)
2126 {
2127 CC_STATUS_INIT;
2128 }
2129 break;
2130 }
2131
2132 /* We have a real machine instruction as rtl. */
2133
2134 body = PATTERN (insn);
2135
2136 #ifdef HAVE_cc0
2137 set = single_set (insn);
2138
2139 /* Check for redundant test and compare instructions
2140 (when the condition codes are already set up as desired).
2141 This is done only when optimizing; if not optimizing,
2142 it should be possible for the user to alter a variable
2143 with the debugger in between statements
2144 and the next statement should reexamine the variable
2145 to compute the condition codes. */
2146
2147 if (optimize)
2148 {
2149 if (set
2150 && GET_CODE (SET_DEST (set)) == CC0
2151 && insn != last_ignored_compare)
2152 {
2153 if (GET_CODE (SET_SRC (set)) == SUBREG)
2154 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2155 else if (GET_CODE (SET_SRC (set)) == COMPARE)
2156 {
2157 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2158 XEXP (SET_SRC (set), 0)
2159 = alter_subreg (&XEXP (SET_SRC (set), 0));
2160 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2161 XEXP (SET_SRC (set), 1)
2162 = alter_subreg (&XEXP (SET_SRC (set), 1));
2163 }
2164 if ((cc_status.value1 != 0
2165 && rtx_equal_p (SET_SRC (set), cc_status.value1))
2166 || (cc_status.value2 != 0
2167 && rtx_equal_p (SET_SRC (set), cc_status.value2)))
2168 {
2169 /* Don't delete insn if it has an addressing side-effect. */
2170 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2171 /* or if anything in it is volatile. */
2172 && ! volatile_refs_p (PATTERN (insn)))
2173 {
2174 /* We don't really delete the insn; just ignore it. */
2175 last_ignored_compare = insn;
2176 break;
2177 }
2178 }
2179 }
2180 }
2181 #endif
2182
2183 #ifndef STACK_REGS
2184 /* Don't bother outputting obvious no-ops, even without -O.
2185 This optimization is fast and doesn't interfere with debugging.
2186 Don't do this if the insn is in a delay slot, since this
2187 will cause an improper number of delay insns to be written. */
2188 if (final_sequence == 0
2189 && prescan >= 0
2190 && GET_CODE (insn) == INSN && GET_CODE (body) == SET
2191 && GET_CODE (SET_SRC (body)) == REG
2192 && GET_CODE (SET_DEST (body)) == REG
2193 && REGNO (SET_SRC (body)) == REGNO (SET_DEST (body)))
2194 break;
2195 #endif
2196
2197 #ifdef HAVE_cc0
2198 /* If this is a conditional branch, maybe modify it
2199 if the cc's are in a nonstandard state
2200 so that it accomplishes the same thing that it would
2201 do straightforwardly if the cc's were set up normally. */
2202
2203 if (cc_status.flags != 0
2204 && GET_CODE (insn) == JUMP_INSN
2205 && GET_CODE (body) == SET
2206 && SET_DEST (body) == pc_rtx
2207 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2208 && GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (body), 0))) == '<'
2209 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx
2210 /* This is done during prescan; it is not done again
2211 in final scan when prescan has been done. */
2212 && prescan >= 0)
2213 {
2214 /* This function may alter the contents of its argument
2215 and clear some of the cc_status.flags bits.
2216 It may also return 1 meaning condition now always true
2217 or -1 meaning condition now always false
2218 or 2 meaning condition nontrivial but altered. */
2219 int result = alter_cond (XEXP (SET_SRC (body), 0));
2220 /* If condition now has fixed value, replace the IF_THEN_ELSE
2221 with its then-operand or its else-operand. */
2222 if (result == 1)
2223 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2224 if (result == -1)
2225 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2226
2227 /* The jump is now either unconditional or a no-op.
2228 If it has become a no-op, don't try to output it.
2229 (It would not be recognized.) */
2230 if (SET_SRC (body) == pc_rtx)
2231 {
2232 delete_insn (insn);
2233 break;
2234 }
2235 else if (GET_CODE (SET_SRC (body)) == RETURN)
2236 /* Replace (set (pc) (return)) with (return). */
2237 PATTERN (insn) = body = SET_SRC (body);
2238
2239 /* Rerecognize the instruction if it has changed. */
2240 if (result != 0)
2241 INSN_CODE (insn) = -1;
2242 }
2243
2244 /* Make same adjustments to instructions that examine the
2245 condition codes without jumping and instructions that
2246 handle conditional moves (if this machine has either one). */
2247
2248 if (cc_status.flags != 0
2249 && set != 0)
2250 {
2251 rtx cond_rtx, then_rtx, else_rtx;
2252
2253 if (GET_CODE (insn) != JUMP_INSN
2254 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2255 {
2256 cond_rtx = XEXP (SET_SRC (set), 0);
2257 then_rtx = XEXP (SET_SRC (set), 1);
2258 else_rtx = XEXP (SET_SRC (set), 2);
2259 }
2260 else
2261 {
2262 cond_rtx = SET_SRC (set);
2263 then_rtx = const_true_rtx;
2264 else_rtx = const0_rtx;
2265 }
2266
2267 switch (GET_CODE (cond_rtx))
2268 {
2269 case GTU:
2270 case GT:
2271 case LTU:
2272 case LT:
2273 case GEU:
2274 case GE:
2275 case LEU:
2276 case LE:
2277 case EQ:
2278 case NE:
2279 {
2280 int result;
2281 if (XEXP (cond_rtx, 0) != cc0_rtx)
2282 break;
2283 result = alter_cond (cond_rtx);
2284 if (result == 1)
2285 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2286 else if (result == -1)
2287 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2288 else if (result == 2)
2289 INSN_CODE (insn) = -1;
2290 if (SET_DEST (set) == SET_SRC (set))
2291 delete_insn (insn);
2292 }
2293 break;
2294
2295 default:
2296 break;
2297 }
2298 }
2299
2300 #endif
2301
2302 #ifdef HAVE_peephole
2303 /* Do machine-specific peephole optimizations if desired. */
2304
2305 if (optimize && !flag_no_peephole && !nopeepholes)
2306 {
2307 rtx next = peephole (insn);
2308 /* When peepholing, if there were notes within the peephole,
2309 emit them before the peephole. */
2310 if (next != 0 && next != NEXT_INSN (insn))
2311 {
2312 rtx prev = PREV_INSN (insn);
2313
2314 for (note = NEXT_INSN (insn); note != next;
2315 note = NEXT_INSN (note))
2316 final_scan_insn (note, file, optimize, prescan, nopeepholes);
2317
2318 /* In case this is prescan, put the notes
2319 in proper position for later rescan. */
2320 note = NEXT_INSN (insn);
2321 PREV_INSN (note) = prev;
2322 NEXT_INSN (prev) = note;
2323 NEXT_INSN (PREV_INSN (next)) = insn;
2324 PREV_INSN (insn) = PREV_INSN (next);
2325 NEXT_INSN (insn) = next;
2326 PREV_INSN (next) = insn;
2327 }
2328
2329 /* PEEPHOLE might have changed this. */
2330 body = PATTERN (insn);
2331 }
2332 #endif
2333
2334 /* Try to recognize the instruction.
2335 If successful, verify that the operands satisfy the
2336 constraints for the instruction. Crash if they don't,
2337 since `reload' should have changed them so that they do. */
2338
2339 insn_code_number = recog_memoized (insn);
2340 cleanup_subreg_operands (insn);
2341
2342 /* Dump the insn in the assembly for debugging. */
2343 if (flag_dump_rtl_in_asm)
2344 {
2345 print_rtx_head = ASM_COMMENT_START;
2346 print_rtl_single (asm_out_file, insn);
2347 print_rtx_head = "";
2348 }
2349
2350 if (! constrain_operands_cached (1))
2351 fatal_insn_not_found (insn);
2352
2353 /* Some target machines need to prescan each insn before
2354 it is output. */
2355
2356 #ifdef FINAL_PRESCAN_INSN
2357 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2358 #endif
2359
2360 #ifdef HAVE_conditional_execution
2361 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2362 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2363 else
2364 current_insn_predicate = NULL_RTX;
2365 #endif
2366
2367 #ifdef HAVE_cc0
2368 cc_prev_status = cc_status;
2369
2370 /* Update `cc_status' for this instruction.
2371 The instruction's output routine may change it further.
2372 If the output routine for a jump insn needs to depend
2373 on the cc status, it should look at cc_prev_status. */
2374
2375 NOTICE_UPDATE_CC (body, insn);
2376 #endif
2377
2378 current_output_insn = debug_insn = insn;
2379
2380 #if defined (DWARF2_UNWIND_INFO)
2381 if (GET_CODE (insn) == CALL_INSN && dwarf2out_do_frame ())
2382 dwarf2out_frame_debug (insn);
2383 #endif
2384
2385 /* Find the proper template for this insn. */
2386 template = get_insn_template (insn_code_number, insn);
2387
2388 /* If the C code returns 0, it means that it is a jump insn
2389 which follows a deleted test insn, and that test insn
2390 needs to be reinserted. */
2391 if (template == 0)
2392 {
2393 rtx prev;
2394
2395 if (prev_nonnote_insn (insn) != last_ignored_compare)
2396 abort ();
2397
2398 /* We have already processed the notes between the setter and
2399 the user. Make sure we don't process them again, this is
2400 particularly important if one of the notes is a block
2401 scope note or an EH note. */
2402 for (prev = insn;
2403 prev != last_ignored_compare;
2404 prev = PREV_INSN (prev))
2405 {
2406 if (GET_CODE (prev) == NOTE)
2407 delete_insn (prev); /* Use delete_note. */
2408 }
2409
2410 return prev;
2411 }
2412
2413 /* If the template is the string "#", it means that this insn must
2414 be split. */
2415 if (template[0] == '#' && template[1] == '\0')
2416 {
2417 rtx new = try_split (body, insn, 0);
2418
2419 /* If we didn't split the insn, go away. */
2420 if (new == insn && PATTERN (new) == body)
2421 fatal_insn ("could not split insn", insn);
2422
2423 #ifdef HAVE_ATTR_length
2424 /* This instruction should have been split in shorten_branches,
2425 to ensure that we would have valid length info for the
2426 splitees. */
2427 abort ();
2428 #endif
2429
2430 return new;
2431 }
2432
2433 if (prescan > 0)
2434 break;
2435
2436 #ifdef IA64_UNWIND_INFO
2437 IA64_UNWIND_EMIT (asm_out_file, insn);
2438 #endif
2439 /* Output assembler code from the template. */
2440
2441 output_asm_insn (template, recog_data.operand);
2442
2443 /* If necessary, report the effect that the instruction has on
2444 the unwind info. We've already done this for delay slots
2445 and call instructions. */
2446 #if defined (DWARF2_UNWIND_INFO)
2447 if (GET_CODE (insn) == INSN
2448 #if !defined (HAVE_prologue)
2449 && !ACCUMULATE_OUTGOING_ARGS
2450 #endif
2451 && final_sequence == 0
2452 && dwarf2out_do_frame ())
2453 dwarf2out_frame_debug (insn);
2454 #endif
2455
2456 #if 0
2457 /* It's not at all clear why we did this and doing so used to
2458 interfere with tests that used REG_WAS_0 notes, which are
2459 now gone, so let's try leaving this out. */
2460
2461 /* Mark this insn as having been output. */
2462 INSN_DELETED_P (insn) = 1;
2463 #endif
2464
2465 /* Emit information for vtable gc. */
2466 note = find_reg_note (insn, REG_VTABLE_REF, NULL_RTX);
2467
2468 current_output_insn = debug_insn = 0;
2469 }
2470 }
2471 return NEXT_INSN (insn);
2472 }
2473 \f
2474 /* Output debugging info to the assembler file FILE
2475 based on the NOTE-insn INSN, assumed to be a line number. */
2476
2477 static bool
2478 notice_source_line (rtx insn)
2479 {
2480 const char *filename = insn_file (insn);
2481 int linenum = insn_line (insn);
2482
2483 if (filename && (filename != last_filename || last_linenum != linenum))
2484 {
2485 last_filename = filename;
2486 last_linenum = linenum;
2487 high_block_linenum = MAX (last_linenum, high_block_linenum);
2488 high_function_linenum = MAX (last_linenum, high_function_linenum);
2489 return true;
2490 }
2491 return false;
2492 }
2493 \f
2494 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2495 directly to the desired hard register. */
2496
2497 void
2498 cleanup_subreg_operands (rtx insn)
2499 {
2500 int i;
2501 extract_insn_cached (insn);
2502 for (i = 0; i < recog_data.n_operands; i++)
2503 {
2504 /* The following test cannot use recog_data.operand when testing
2505 for a SUBREG: the underlying object might have been changed
2506 already if we are inside a match_operator expression that
2507 matches the else clause. Instead we test the underlying
2508 expression directly. */
2509 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2510 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2511 else if (GET_CODE (recog_data.operand[i]) == PLUS
2512 || GET_CODE (recog_data.operand[i]) == MULT
2513 || GET_CODE (recog_data.operand[i]) == MEM)
2514 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
2515 }
2516
2517 for (i = 0; i < recog_data.n_dups; i++)
2518 {
2519 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2520 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2521 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2522 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2523 || GET_CODE (*recog_data.dup_loc[i]) == MEM)
2524 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
2525 }
2526 }
2527
2528 /* If X is a SUBREG, replace it with a REG or a MEM,
2529 based on the thing it is a subreg of. */
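/* For illustration (a sketch, not normative): a SUBREG of a MEM such
   as (subreg:SI (mem:DI X) 4) is rewritten via adjust_address as an
   SImode MEM at X plus 4 bytes, while (subreg:SI (reg:DI R) 0) of a
   hard register typically simplifies to (reg:SI R) on a little-endian
   target.  */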
2530
2531 rtx
2532 alter_subreg (rtx *xp)
2533 {
2534 rtx x = *xp;
2535 rtx y = SUBREG_REG (x);
2536
2537 /* simplify_subreg does not remove subreg from volatile references.
2538 We are required to. */
2539 if (GET_CODE (y) == MEM)
2540 *xp = adjust_address (y, GET_MODE (x), SUBREG_BYTE (x));
2541 else
2542 {
2543 rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2544 SUBREG_BYTE (x));
2545
2546 if (new != 0)
2547 *xp = new;
2548 /* Simplify_subreg can't handle some REG cases, but we have to. */
2549 else if (GET_CODE (y) == REG)
2550 {
2551 unsigned int regno = subreg_hard_regno (x, 1);
2552 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2553 }
2554 else
2555 abort ();
2556 }
2557
2558 return *xp;
2559 }
2560
2561 /* Do alter_subreg on all the SUBREGs contained in X. */
2562
2563 static rtx
2564 walk_alter_subreg (rtx *xp)
2565 {
2566 rtx x = *xp;
2567 switch (GET_CODE (x))
2568 {
2569 case PLUS:
2570 case MULT:
2571 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2572 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));
2573 break;
2574
2575 case MEM:
2576 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2577 break;
2578
2579 case SUBREG:
2580 return alter_subreg (xp);
2581
2582 default:
2583 break;
2584 }
2585
2586 return *xp;
2587 }
2588 \f
2589 #ifdef HAVE_cc0
2590
2591 /* Given BODY, the body of a jump instruction, alter the jump condition
2592 as required by the bits that are set in cc_status.flags.
2593 Not all of the bits there can be handled at this level in all cases.
2594
2595 The value is normally 0.
2596 1 means that the condition has become always true.
2597 -1 means that the condition has become always false.
2598 2 means that COND has been altered. */
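/* Worked example (illustrative): if cc_status.flags has CC_REVERSED
   set, a condition (gt (cc0) (const_int 0)) is rewritten to
   (lt (cc0) (const_int 0)) and 2 is returned; if CC_NOT_NEGATIVE is
   set and the condition is GE, the jump is always taken and 1 is
   returned.  */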
2599
2600 static int
2601 alter_cond (rtx cond)
2602 {
2603 int value = 0;
2604
2605 if (cc_status.flags & CC_REVERSED)
2606 {
2607 value = 2;
2608 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2609 }
2610
2611 if (cc_status.flags & CC_INVERTED)
2612 {
2613 value = 2;
2614 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2615 }
2616
2617 if (cc_status.flags & CC_NOT_POSITIVE)
2618 switch (GET_CODE (cond))
2619 {
2620 case LE:
2621 case LEU:
2622 case GEU:
2623 /* Jump becomes unconditional. */
2624 return 1;
2625
2626 case GT:
2627 case GTU:
2628 case LTU:
2629 /* Jump becomes no-op. */
2630 return -1;
2631
2632 case GE:
2633 PUT_CODE (cond, EQ);
2634 value = 2;
2635 break;
2636
2637 case LT:
2638 PUT_CODE (cond, NE);
2639 value = 2;
2640 break;
2641
2642 default:
2643 break;
2644 }
2645
2646 if (cc_status.flags & CC_NOT_NEGATIVE)
2647 switch (GET_CODE (cond))
2648 {
2649 case GE:
2650 case GEU:
2651 /* Jump becomes unconditional. */
2652 return 1;
2653
2654 case LT:
2655 case LTU:
2656 /* Jump becomes no-op. */
2657 return -1;
2658
2659 case LE:
2660 case LEU:
2661 PUT_CODE (cond, EQ);
2662 value = 2;
2663 break;
2664
2665 case GT:
2666 case GTU:
2667 PUT_CODE (cond, NE);
2668 value = 2;
2669 break;
2670
2671 default:
2672 break;
2673 }
2674
2675 if (cc_status.flags & CC_NO_OVERFLOW)
2676 switch (GET_CODE (cond))
2677 {
2678 case GEU:
2679 /* Jump becomes unconditional. */
2680 return 1;
2681
2682 case LEU:
2683 PUT_CODE (cond, EQ);
2684 value = 2;
2685 break;
2686
2687 case GTU:
2688 PUT_CODE (cond, NE);
2689 value = 2;
2690 break;
2691
2692 case LTU:
2693 /* Jump becomes no-op. */
2694 return -1;
2695
2696 default:
2697 break;
2698 }
2699
2700 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2701 switch (GET_CODE (cond))
2702 {
2703 default:
2704 abort ();
2705
2706 case NE:
2707 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2708 value = 2;
2709 break;
2710
2711 case EQ:
2712 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2713 value = 2;
2714 break;
2715 }
2716
2717 if (cc_status.flags & CC_NOT_SIGNED)
2718 /* The flags are valid if signed condition operators are converted
2719 to unsigned. */
2720 switch (GET_CODE (cond))
2721 {
2722 case LE:
2723 PUT_CODE (cond, LEU);
2724 value = 2;
2725 break;
2726
2727 case LT:
2728 PUT_CODE (cond, LTU);
2729 value = 2;
2730 break;
2731
2732 case GT:
2733 PUT_CODE (cond, GTU);
2734 value = 2;
2735 break;
2736
2737 case GE:
2738 PUT_CODE (cond, GEU);
2739 value = 2;
2740 break;
2741
2742 default:
2743 break;
2744 }
2745
2746 return value;
2747 }
2748 #endif
2749 \f
2750 /* Report inconsistency between the assembler template and the operands.
2751 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
2752
2753 void
2754 output_operand_lossage (const char *msgid, ...)
2755 {
2756 char *fmt_string;
2757 char *new_message;
2758 const char *pfx_str;
2759 va_list ap;
2760
2761 va_start (ap, msgid);
2762
2763 pfx_str = this_is_asm_operands ? _("invalid `asm': ") : "output_operand: ";
2764 asprintf (&fmt_string, "%s%s", pfx_str, _(msgid));
2765 vasprintf (&new_message, fmt_string, ap);
2766
2767 if (this_is_asm_operands)
2768 error_for_asm (this_is_asm_operands, "%s", new_message);
2769 else
2770 internal_error ("%s", new_message);
2771
2772 free (fmt_string);
2773 free (new_message);
2774 va_end (ap);
2775 }
2776 \f
2777 /* Output of assembler code from a template, and its subroutines. */
2778
2779 /* Annotate the assembly with a comment describing the pattern and
2780 alternative used. */
2781
2782 static void
2783 output_asm_name (void)
2784 {
2785 if (debug_insn)
2786 {
2787 int num = INSN_CODE (debug_insn);
2788 fprintf (asm_out_file, "\t%s %d\t%s",
2789 ASM_COMMENT_START, INSN_UID (debug_insn),
2790 insn_data[num].name);
2791 if (insn_data[num].n_alternatives > 1)
2792 fprintf (asm_out_file, "/%d", which_alternative + 1);
2793 #ifdef HAVE_ATTR_length
2794 fprintf (asm_out_file, "\t[length = %d]",
2795 get_attr_length (debug_insn));
2796 #endif
2797 /* Clear this so only the first assembler insn
2798 of any rtl insn will get the special comment for -dp. */
2799 debug_insn = 0;
2800 }
2801 }
2802
2803 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
2804 or its address, return that expr. Set *PADDRESSP to 1 if the expr
2805 corresponds to the address of the object and 0 if to the object. */
2806
2807 static tree
2808 get_mem_expr_from_op (rtx op, int *paddressp)
2809 {
2810 tree expr;
2811 int inner_addressp;
2812
2813 *paddressp = 0;
2814
2815 if (GET_CODE (op) == REG)
2816 return REG_EXPR (op);
2817 else if (GET_CODE (op) != MEM)
2818 return 0;
2819
2820 if (MEM_EXPR (op) != 0)
2821 return MEM_EXPR (op);
2822
2823 /* Otherwise we have an address, so indicate it and look at the address. */
2824 *paddressp = 1;
2825 op = XEXP (op, 0);
2826
2827 /* First check if we have a decl for the address, then look at the right side
2828 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
2829 But don't allow the address to itself be indirect. */
2830 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
2831 return expr;
2832 else if (GET_CODE (op) == PLUS
2833 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
2834 return expr;
2835
2836 while (GET_RTX_CLASS (GET_CODE (op)) == '1'
2837 || GET_RTX_CLASS (GET_CODE (op)) == '2')
2838 op = XEXP (op, 0);
2839
2840 expr = get_mem_expr_from_op (op, &inner_addressp);
2841 return inner_addressp ? 0 : expr;
2842 }
2843
2844 /* Output operand names for assembler instructions. OPERANDS is the
2845 operand vector, OPORDER is the order to write the operands, and NOPS
2846 is the number of operands to write. */
2847
2848 static void
2849 output_asm_operand_names (rtx *operands, int *oporder, int nops)
2850 {
2851 int wrote = 0;
2852 int i;
2853
2854 for (i = 0; i < nops; i++)
2855 {
2856 int addressp;
2857 rtx op = operands[oporder[i]];
2858 tree expr = get_mem_expr_from_op (op, &addressp);
2859
2860 fprintf (asm_out_file, "%c%s",
2861 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
2862 wrote = 1;
2863 if (expr)
2864 {
2865 fprintf (asm_out_file, "%s",
2866 addressp ? "*" : "");
2867 print_mem_expr (asm_out_file, expr);
2868 wrote = 1;
2869 }
2870 else if (REG_P (op) && ORIGINAL_REGNO (op)
2871 && ORIGINAL_REGNO (op) != REGNO (op))
2872 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
2873 }
2874 }
2875
2876 /* Output text from TEMPLATE to the assembler output file,
2877 obeying %-directions to substitute operands taken from
2878 the vector OPERANDS.
2879
2880 %N (for N a digit) means print operand N in usual manner.
2881 %lN means require operand N to be a CODE_LABEL or LABEL_REF
2882 and print the label name with no punctuation.
2883 %cN means require operand N to be a constant
2884 and print the constant expression with no punctuation.
2885 %aN means expect operand N to be a memory address
2886 (not a memory reference!) and print a reference
2887 to that address.
2888 %nN means expect operand N to be a constant
2889 and print a constant expression for minus the value
2890 of the operand, with no other punctuation. */
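#if 0
/* Illustrative sketch, never compiled: a hypothetical call showing
   how %-directives are expanded from the operand vector.  The hard
   register numbers and the resulting text are made up; actual output
   depends on the target's PRINT_OPERAND.  */
static void
example_output_asm_insn_use (void)
{
  rtx ops[2];

  ops[0] = gen_rtx_REG (SImode, 0);
  ops[1] = gen_rtx_REG (SImode, 1);

  /* Emits something like "\tadd r0,r1" to asm_out_file.  */
  output_asm_insn ("add %0,%1", ops);
}
#endif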
2891
2892 void
2893 output_asm_insn (const char *template, rtx *operands)
2894 {
2895 const char *p;
2896 int c;
2897 #ifdef ASSEMBLER_DIALECT
2898 int dialect = 0;
2899 #endif
2900 int oporder[MAX_RECOG_OPERANDS];
2901 char opoutput[MAX_RECOG_OPERANDS];
2902 int ops = 0;
2903
2904 /* An insn may return a null string template
2905 in a case where no assembler code is needed. */
2906 if (*template == 0)
2907 return;
2908
2909 memset (opoutput, 0, sizeof opoutput);
2910 p = template;
2911 putc ('\t', asm_out_file);
2912
2913 #ifdef ASM_OUTPUT_OPCODE
2914 ASM_OUTPUT_OPCODE (asm_out_file, p);
2915 #endif
2916
2917 while ((c = *p++))
2918 switch (c)
2919 {
2920 case '\n':
2921 if (flag_verbose_asm)
2922 output_asm_operand_names (operands, oporder, ops);
2923 if (flag_print_asm_name)
2924 output_asm_name ();
2925
2926 ops = 0;
2927 memset (opoutput, 0, sizeof opoutput);
2928
2929 putc (c, asm_out_file);
2930 #ifdef ASM_OUTPUT_OPCODE
2931 while ((c = *p) == '\t')
2932 {
2933 putc (c, asm_out_file);
2934 p++;
2935 }
2936 ASM_OUTPUT_OPCODE (asm_out_file, p);
2937 #endif
2938 break;
2939
2940 #ifdef ASSEMBLER_DIALECT
2941 case '{':
2942 {
2943 int i;
2944
2945 if (dialect)
2946 output_operand_lossage ("nested assembly dialect alternatives");
2947 else
2948 dialect = 1;
2949
2950 /* If we want the first dialect, do nothing. Otherwise, skip
2951 DIALECT_NUMBER of strings ending with '|'. */
2952 for (i = 0; i < dialect_number; i++)
2953 {
2954 while (*p && *p != '}' && *p++ != '|')
2955 ;
2956 if (*p == '}')
2957 break;
2958 if (*p == '|')
2959 p++;
2960 }
2961
2962 if (*p == '\0')
2963 output_operand_lossage ("unterminated assembly dialect alternative");
2964 }
2965 break;
2966
2967 case '|':
2968 if (dialect)
2969 {
2970 /* Skip to close brace. */
2971 do
2972 {
2973 if (*p == '\0')
2974 {
2975 output_operand_lossage ("unterminated assembly dialect alternative");
2976 break;
2977 }
2978 }
2979 while (*p++ != '}');
2980 dialect = 0;
2981 }
2982 else
2983 putc (c, asm_out_file);
2984 break;
2985
2986 case '}':
2987 if (! dialect)
2988 putc (c, asm_out_file);
2989 dialect = 0;
2990 break;
2991 #endif
2992
2993 case '%':
2994 /* %% outputs a single %. */
2995 if (*p == '%')
2996 {
2997 p++;
2998 putc (c, asm_out_file);
2999 }
3000 /* %= outputs a number which is unique to each insn in the entire
3001 compilation. This is useful for making local labels that are
3002 referred to more than once in a given insn. */
3003 else if (*p == '=')
3004 {
3005 p++;
3006 fprintf (asm_out_file, "%d", insn_counter);
3007 }
3008 /* % followed by a letter and some digits
3009 outputs an operand in a special way depending on the letter.
3010 Letters `acln' are implemented directly.
3011 Other letters are passed to `output_operand' so that
3012 the PRINT_OPERAND macro can define them. */
3013 else if (ISALPHA (*p))
3014 {
3015 int letter = *p++;
3016 c = atoi (p);
3017
3018 if (! ISDIGIT (*p))
3019 output_operand_lossage ("operand number missing after %%-letter");
3020 else if (this_is_asm_operands
3021 && (c < 0 || (unsigned int) c >= insn_noperands))
3022 output_operand_lossage ("operand number out of range");
3023 else if (letter == 'l')
3024 output_asm_label (operands[c]);
3025 else if (letter == 'a')
3026 output_address (operands[c]);
3027 else if (letter == 'c')
3028 {
3029 if (CONSTANT_ADDRESS_P (operands[c]))
3030 output_addr_const (asm_out_file, operands[c]);
3031 else
3032 output_operand (operands[c], 'c');
3033 }
3034 else if (letter == 'n')
3035 {
3036 if (GET_CODE (operands[c]) == CONST_INT)
3037 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3038 - INTVAL (operands[c]));
3039 else
3040 {
3041 putc ('-', asm_out_file);
3042 output_addr_const (asm_out_file, operands[c]);
3043 }
3044 }
3045 else
3046 output_operand (operands[c], letter);
3047
3048 if (!opoutput[c])
3049 oporder[ops++] = c;
3050 opoutput[c] = 1;
3051
3052 while (ISDIGIT (c = *p))
3053 p++;
3054 }
3055 /* % followed by a digit outputs an operand the default way. */
3056 else if (ISDIGIT (*p))
3057 {
3058 c = atoi (p);
3059 if (this_is_asm_operands
3060 && (c < 0 || (unsigned int) c >= insn_noperands))
3061 output_operand_lossage ("operand number out of range");
3062 else
3063 output_operand (operands[c], 0);
3064
3065 if (!opoutput[c])
3066 oporder[ops++] = c;
3067 opoutput[c] = 1;
3068
3069 while (ISDIGIT (c = *p))
3070 p++;
3071 }
3072 /* % followed by punctuation: output something for that
3073 punctuation character alone, with no operand.
3074 The PRINT_OPERAND macro decides what is actually done. */
3075 #ifdef PRINT_OPERAND_PUNCT_VALID_P
3076 else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3077 output_operand (NULL_RTX, *p++);
3078 #endif
3079 else
3080 output_operand_lossage ("invalid %%-code");
3081 break;
3082
3083 default:
3084 putc (c, asm_out_file);
3085 }
3086
3087 /* Write out the variable names for operands, if we know them. */
3088 if (flag_verbose_asm)
3089 output_asm_operand_names (operands, oporder, ops);
3090 if (flag_print_asm_name)
3091 output_asm_name ();
3092
3093 putc ('\n', asm_out_file);
3094 }
3095 \f
3096 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3097
3098 void
3099 output_asm_label (rtx x)
3100 {
3101 char buf[256];
3102
3103 if (GET_CODE (x) == LABEL_REF)
3104 x = XEXP (x, 0);
3105 if (GET_CODE (x) == CODE_LABEL
3106 || (GET_CODE (x) == NOTE
3107 && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
3108 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3109 else
3110 output_operand_lossage ("`%%l' operand isn't a label");
3111
3112 assemble_name (asm_out_file, buf);
3113 }
3114
3115 /* Print operand X using machine-dependent assembler syntax.
3116 The macro PRINT_OPERAND is defined just to control this function.
3117 CODE is a non-digit that preceded the operand-number in the % spec,
3118 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3119 between the % and the digits.
3120 When CODE is a non-letter, X is 0.
3121
3122 The meanings of the letters are machine-dependent and controlled
3123 by PRINT_OPERAND. */
3124
3125 static void
3126 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3127 {
3128 if (x && GET_CODE (x) == SUBREG)
3129 x = alter_subreg (&x);
3130
3131 /* If X is a pseudo-register, abort now rather than writing trash to the
3132 assembler file. */
3133
3134 if (x && GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
3135 abort ();
3136
3137 PRINT_OPERAND (asm_out_file, x, code);
3138 }
3139
3140 /* Print a memory reference operand for address X
3141 using machine-dependent assembler syntax.
3142 The macro PRINT_OPERAND_ADDRESS exists just to control this function. */
3143
3144 void
3145 output_address (rtx x)
3146 {
3147 walk_alter_subreg (&x);
3148 PRINT_OPERAND_ADDRESS (asm_out_file, x);
3149 }
3150 \f
3151 /* Print an integer constant expression in assembler syntax.
3152 Addition and subtraction are the only arithmetic
3153 that may appear in these expressions. */
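/* For instance (illustrative; assembler syntax details vary by
   target): (const (plus (symbol_ref "foo") (const_int 12))) is
   printed as "foo+12", and a VOIDmode CONST_INT simply as its
   decimal value.  */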
3154
3155 void
3156 output_addr_const (FILE *file, rtx x)
3157 {
3158 char buf[256];
3159
3160 restart:
3161 switch (GET_CODE (x))
3162 {
3163 case PC:
3164 putc ('.', file);
3165 break;
3166
3167 case SYMBOL_REF:
3168 #ifdef ASM_OUTPUT_SYMBOL_REF
3169 ASM_OUTPUT_SYMBOL_REF (file, x);
3170 #else
3171 assemble_name (file, XSTR (x, 0));
3172 #endif
3173 break;
3174
3175 case LABEL_REF:
3176 x = XEXP (x, 0);
3177 /* Fall through. */
3178 case CODE_LABEL:
3179 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3180 #ifdef ASM_OUTPUT_LABEL_REF
3181 ASM_OUTPUT_LABEL_REF (file, buf);
3182 #else
3183 assemble_name (file, buf);
3184 #endif
3185 break;
3186
3187 case CONST_INT:
3188 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3189 break;
3190
3191 case CONST:
3192 /* This used to output parentheses around the expression,
3193 but that does not work on the 386 (either ATT or BSD assembler). */
3194 output_addr_const (file, XEXP (x, 0));
3195 break;
3196
3197 case CONST_DOUBLE:
3198 if (GET_MODE (x) == VOIDmode)
3199 {
3200 /* We can use %d if the number is one word and positive. */
3201 if (CONST_DOUBLE_HIGH (x))
3202 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3203 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3204 else if (CONST_DOUBLE_LOW (x) < 0)
3205 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3206 else
3207 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3208 }
3209 else
3210 /* We can't handle floating point constants;
3211 PRINT_OPERAND must handle them. */
3212 output_operand_lossage ("floating constant misused");
3213 break;
3214
3215 case PLUS:
3216 /* Some assemblers need integer constants to appear last (eg masm). */
3217 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3218 {
3219 output_addr_const (file, XEXP (x, 1));
3220 if (INTVAL (XEXP (x, 0)) >= 0)
3221 fprintf (file, "+");
3222 output_addr_const (file, XEXP (x, 0));
3223 }
3224 else
3225 {
3226 output_addr_const (file, XEXP (x, 0));
3227 if (GET_CODE (XEXP (x, 1)) != CONST_INT
3228 || INTVAL (XEXP (x, 1)) >= 0)
3229 fprintf (file, "+");
3230 output_addr_const (file, XEXP (x, 1));
3231 }
3232 break;
3233
3234 case MINUS:
3235 /* Avoid outputting things like x-x or x+5-x,
3236 since some assemblers can't handle that. */
3237 x = simplify_subtraction (x);
3238 if (GET_CODE (x) != MINUS)
3239 goto restart;
3240
3241 output_addr_const (file, XEXP (x, 0));
3242 fprintf (file, "-");
3243 if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3244 || GET_CODE (XEXP (x, 1)) == PC
3245 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3246 output_addr_const (file, XEXP (x, 1));
3247 else
3248 {
3249 fputs (targetm.asm_out.open_paren, file);
3250 output_addr_const (file, XEXP (x, 1));
3251 fputs (targetm.asm_out.close_paren, file);
3252 }
3253 break;
3254
3255 case ZERO_EXTEND:
3256 case SIGN_EXTEND:
3257 case SUBREG:
3258 output_addr_const (file, XEXP (x, 0));
3259 break;
3260
3261 default:
3262 #ifdef OUTPUT_ADDR_CONST_EXTRA
3263 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3264 break;
3265
3266 fail:
3267 #endif
3268 output_operand_lossage ("invalid expression as operand");
3269 }
3270 }
3271 \f
3272 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3273 %R prints the value of REGISTER_PREFIX.
3274 %L prints the value of LOCAL_LABEL_PREFIX.
3275 %U prints the value of USER_LABEL_PREFIX.
3276 %I prints the value of IMMEDIATE_PREFIX.
3277 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3278 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3279
3280 We handle alternate assembler dialects here, just like output_asm_insn. */
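#if 0
/* Illustrative sketch, never compiled: assuming REGISTER_PREFIX is
   "%" and LOCAL_LABEL_PREFIX is "." (both target-dependent), this
   call would emit "\tmov %r1, .L5".  */
static void
example_asm_fprintf_use (void)
{
  asm_fprintf (asm_out_file, "\tmov %Rr1, %LL%d\n", 5);
}
#endif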
3281
3282 void
3283 asm_fprintf (FILE *file, const char *p, ...)
3284 {
3285 char buf[10];
3286 char *q, c;
3287 va_list argptr;
3288
3289 va_start (argptr, p);
3290
3291 buf[0] = '%';
3292
3293 while ((c = *p++))
3294 switch (c)
3295 {
3296 #ifdef ASSEMBLER_DIALECT
3297 case '{':
3298 {
3299 int i;
3300
3301 /* If we want the first dialect, do nothing. Otherwise, skip
3302 DIALECT_NUMBER of strings ending with '|'. */
3303 for (i = 0; i < dialect_number; i++)
3304 {
3305 while (*p && *p++ != '|')
3306 ;
3307
3308 if (*p == '|')
3309 p++;
3310 }
3311 }
3312 break;
3313
3314 case '|':
3315 /* Skip to close brace. */
3316 while (*p && *p++ != '}')
3317 ;
3318 break;
3319
3320 case '}':
3321 break;
3322 #endif
3323
3324 case '%':
3325 c = *p++;
3326 q = &buf[1];
3327 while (strchr ("-+ #0", c))
3328 {
3329 *q++ = c;
3330 c = *p++;
3331 }
3332 while (ISDIGIT (c) || c == '.')
3333 {
3334 *q++ = c;
3335 c = *p++;
3336 }
3337 switch (c)
3338 {
3339 case '%':
3340 putc ('%', file);
3341 break;
3342
3343 case 'd': case 'i': case 'u':
3344 case 'x': case 'X': case 'o':
3345 case 'c':
3346 *q++ = c;
3347 *q = 0;
3348 fprintf (file, buf, va_arg (argptr, int));
3349 break;
3350
3351 case 'w':
3352 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3353 'o' cases, but we do not check for those cases. It
3354 means that the value is a HOST_WIDE_INT, which may be
3355 either `long' or `long long'. */
3356 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3357 q += strlen (HOST_WIDE_INT_PRINT);
3358 *q++ = *p++;
3359 *q = 0;
3360 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3361 break;
3362
3363 case 'l':
3364 *q++ = c;
3365 #ifdef HAVE_LONG_LONG
3366 if (*p == 'l')
3367 {
3368 *q++ = *p++;
3369 *q++ = *p++;
3370 *q = 0;
3371 fprintf (file, buf, va_arg (argptr, long long));
3372 }
3373 else
3374 #endif
3375 {
3376 *q++ = *p++;
3377 *q = 0;
3378 fprintf (file, buf, va_arg (argptr, long));
3379 }
3380
3381 break;
3382
3383 case 's':
3384 *q++ = c;
3385 *q = 0;
3386 fprintf (file, buf, va_arg (argptr, char *));
3387 break;
3388
3389 case 'O':
3390 #ifdef ASM_OUTPUT_OPCODE
3391 ASM_OUTPUT_OPCODE (asm_out_file, p);
3392 #endif
3393 break;
3394
3395 case 'R':
3396 #ifdef REGISTER_PREFIX
3397 fprintf (file, "%s", REGISTER_PREFIX);
3398 #endif
3399 break;
3400
3401 case 'I':
3402 #ifdef IMMEDIATE_PREFIX
3403 fprintf (file, "%s", IMMEDIATE_PREFIX);
3404 #endif
3405 break;
3406
3407 case 'L':
3408 #ifdef LOCAL_LABEL_PREFIX
3409 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3410 #endif
3411 break;
3412
3413 case 'U':
3414 fputs (user_label_prefix, file);
3415 break;
3416
3417 #ifdef ASM_FPRINTF_EXTENSIONS
3418 /* Uppercase letters are reserved for general use by asm_fprintf
3419 and so are not available to target specific code. In order to
3420 prevent the ASM_FPRINTF_EXTENSIONS macro from using them,
3421 they are defined here. As they get turned into real extensions
3422 to asm_fprintf they should be removed from this list. */
3423 case 'A': case 'B': case 'C': case 'D': case 'E':
3424 case 'F': case 'G': case 'H': case 'J': case 'K':
3425 case 'M': case 'N': case 'P': case 'Q': case 'S':
3426 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3427 break;
3428
3429 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3430 #endif
3431 default:
3432 abort ();
3433 }
3434 break;
3435
3436 default:
3437 putc (c, file);
3438 }
3439 va_end (argptr);
3440 }
3441 \f
3442 /* Split up a CONST_DOUBLE or integer constant rtx
3443 into two rtx's for single words,
3444 storing in *FIRST the word that comes first in memory in the target
3445 and in *SECOND the other. */
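/* Worked example (illustrative): with 32-bit words, a 64-bit
   HOST_WIDE_INT and !WORDS_BIG_ENDIAN, the CONST_INT 0x100000002
   splits into *FIRST = (const_int 2), the low word that comes first
   in memory, and *SECOND = (const_int 1).  */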
3446
3447 void
3448 split_double (rtx value, rtx *first, rtx *second)
3449 {
3450 if (GET_CODE (value) == CONST_INT)
3451 {
3452 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3453 {
3454 /* In this case the CONST_INT holds both target words.
3455 Extract the bits from it into two word-sized pieces.
3456 Sign extend each half to HOST_WIDE_INT. */
3457 unsigned HOST_WIDE_INT low, high;
3458 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3459
3460 /* Set sign_bit to the most significant bit of a word. */
3461 sign_bit = 1;
3462 sign_bit <<= BITS_PER_WORD - 1;
3463
3464 /* Set mask so that all bits of the word are set. We could
3465 have used 1 << BITS_PER_WORD instead of basing the
3466 calculation on sign_bit. However, on machines where
3467 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3468 compiler warning, even though the code would never be
3469 executed. */
3470 mask = sign_bit << 1;
3471 mask--;
3472
3473 /* Set sign_extend as any remaining bits. */
3474 sign_extend = ~mask;
3475
3476 /* Pick the lower word and sign-extend it. */
3477 low = INTVAL (value);
3478 low &= mask;
3479 if (low & sign_bit)
3480 low |= sign_extend;
3481
3482 /* Pick the higher word, shifted to the least significant
3483 bits, and sign-extend it. */
3484 high = INTVAL (value);
3485 high >>= BITS_PER_WORD - 1;
3486 high >>= 1;
3487 high &= mask;
3488 if (high & sign_bit)
3489 high |= sign_extend;
3490
3491 /* Store the words in the target machine order. */
3492 if (WORDS_BIG_ENDIAN)
3493 {
3494 *first = GEN_INT (high);
3495 *second = GEN_INT (low);
3496 }
3497 else
3498 {
3499 *first = GEN_INT (low);
3500 *second = GEN_INT (high);
3501 }
3502 }
3503 else
3504 {
3505 /* The rule for using CONST_INT for a wider mode
3506 is that we regard the value as signed.
3507 So sign-extend it. */
3508 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3509 if (WORDS_BIG_ENDIAN)
3510 {
3511 *first = high;
3512 *second = value;
3513 }
3514 else
3515 {
3516 *first = value;
3517 *second = high;
3518 }
3519 }
3520 }
3521 else if (GET_CODE (value) != CONST_DOUBLE)
3522 {
3523 if (WORDS_BIG_ENDIAN)
3524 {
3525 *first = const0_rtx;
3526 *second = value;
3527 }
3528 else
3529 {
3530 *first = value;
3531 *second = const0_rtx;
3532 }
3533 }
3534 else if (GET_MODE (value) == VOIDmode
3535 /* This is the old way we did CONST_DOUBLE integers. */
3536 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3537 {
3538 /* In an integer, the words are defined as most and least significant.
3539 So order them by the target's convention. */
3540 if (WORDS_BIG_ENDIAN)
3541 {
3542 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3543 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3544 }
3545 else
3546 {
3547 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3548 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3549 }
3550 }
3551 else
3552 {
3553 REAL_VALUE_TYPE r;
3554 long l[2];
3555 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3556
3557 /* Note, this converts the REAL_VALUE_TYPE to the target's
3558 format, splits up the floating point double and outputs
3559 exactly 32 bits of it into each of l[0] and l[1] --
3560 not necessarily BITS_PER_WORD bits. */
3561 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3562
3563 /* If 32 bits is an entire word for the target, but not for the host,
3564 then sign-extend on the host so that the number will look the same
3565 way on the host that it would on the target. See for instance
3566 simplify_unary_operation. The #if is needed to avoid compiler
3567 warnings. */
3568
3569 #if HOST_BITS_PER_LONG > 32
3570 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3571 {
3572 if (l[0] & ((long) 1 << 31))
3573 l[0] |= ((long) (-1) << 32);
3574 if (l[1] & ((long) 1 << 31))
3575 l[1] |= ((long) (-1) << 32);
3576 }
3577 #endif
3578
3579 *first = GEN_INT ((HOST_WIDE_INT) l[0]);
3580 *second = GEN_INT ((HOST_WIDE_INT) l[1]);
3581 }
3582 }
3583 \f
3584 /* Return nonzero if this function has no function calls. */
3585
3586 int
3587 leaf_function_p (void)
3588 {
3589 rtx insn;
3590 rtx link;
3591
3592 if (current_function_profile || profile_arc_flag)
3593 return 0;
3594
3595 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3596 {
3597 if (GET_CODE (insn) == CALL_INSN
3598 && ! SIBLING_CALL_P (insn))
3599 return 0;
3600 if (GET_CODE (insn) == INSN
3601 && GET_CODE (PATTERN (insn)) == SEQUENCE
3602 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
3603 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3604 return 0;
3605 }
3606 for (link = current_function_epilogue_delay_list;
3607 link;
3608 link = XEXP (link, 1))
3609 {
3610 insn = XEXP (link, 0);
3611
3612 if (GET_CODE (insn) == CALL_INSN
3613 && ! SIBLING_CALL_P (insn))
3614 return 0;
3615 if (GET_CODE (insn) == INSN
3616 && GET_CODE (PATTERN (insn)) == SEQUENCE
3617 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
3618 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3619 return 0;
3620 }
3621
3622 return 1;
3623 }
3624
3625 /* Return 1 if branch is a forward branch.
3626 Uses insn_shuid array, so it works only in the final pass. May be used by
3627 output templates to add customary branch prediction hints.
3628 */
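/* Illustrative use in a back end's output routine (the opcode
   strings below are made up): choose between prediction-hint
   variants of a branch depending on its direction, e.g.

	return final_forward_branch_p (insn) ? "br.fwd %l0" : "br.bwd %l0";  */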
3629 int
3630 final_forward_branch_p (rtx insn)
3631 {
3632 int insn_id, label_id;
3633 if (!uid_shuid)
3634 abort ();
3635 insn_id = INSN_SHUID (insn);
3636 label_id = INSN_SHUID (JUMP_LABEL (insn));
3637 /* We've hit some insns that do not have id information available. */
3638 if (!insn_id || !label_id)
3639 abort ();
3640 return insn_id < label_id;
3641 }
3642
3643 /* On some machines, a function with no call insns
3644 can run faster if it doesn't create its own register window.
3645 When output, the leaf function should use only the "output"
3646 registers. Ordinarily, the function would be compiled to use
3647 the "input" registers to find its arguments; it is a candidate
3648 for leaf treatment if it uses only the "input" registers.
3649 Leaf function treatment means renumbering so the function
3650 uses the "output" registers instead. */
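/* On SPARC, for instance, LEAF_REG_REMAP renames the %i ("input")
   registers to the corresponding %o ("output") registers, so a leaf
   function can run in its caller's register window without a save
   instruction.  */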
3651
3652 #ifdef LEAF_REGISTERS
3653
3654 /* Return 1 if this function uses only the registers that can be
3655 safely renumbered. */
3656
3657 int
3658 only_leaf_regs_used (void)
3659 {
3660 int i;
3661 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3662
3663 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3664 if ((regs_ever_live[i] || global_regs[i])
3665 && ! permitted_reg_in_leaf_functions[i])
3666 return 0;
3667
3668 if (current_function_uses_pic_offset_table
3669 && pic_offset_table_rtx != 0
3670 && GET_CODE (pic_offset_table_rtx) == REG
3671 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
3672 return 0;
3673
3674 return 1;
3675 }
3676
3677 /* Scan all instructions and renumber all registers into those
3678 available in leaf functions. */
3679
3680 static void
3681 leaf_renumber_regs (rtx first)
3682 {
3683 rtx insn;
3684
3685 /* Renumber only the actual patterns.
3686 The reg-notes can contain frame pointer refs,
3687 and renumbering them could crash, and should not be needed. */
3688 for (insn = first; insn; insn = NEXT_INSN (insn))
3689 if (INSN_P (insn))
3690 leaf_renumber_regs_insn (PATTERN (insn));
3691 for (insn = current_function_epilogue_delay_list;
3692 insn;
3693 insn = XEXP (insn, 1))
3694 if (INSN_P (XEXP (insn, 0)))
3695 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
3696 }
3697
3698 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
3699 available in leaf functions. */
3700
3701 void
3702 leaf_renumber_regs_insn (rtx in_rtx)
3703 {
3704 int i, j;
3705 const char *format_ptr;
3706
3707 if (in_rtx == 0)
3708 return;
3709
3710 /* Renumber all input-registers into output-registers.
3711 renumbered_regs would be 1 for an output-register;
3712 they */
3713
3714 if (GET_CODE (in_rtx) == REG)
3715 {
3716 int newreg;
3717
3718 /* Don't renumber the same reg twice. */
3719 if (in_rtx->used)
3720 return;
3721
3722 newreg = REGNO (in_rtx);
3723 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
3724 to reach here as part of a REG_NOTE. */
3725 if (newreg >= FIRST_PSEUDO_REGISTER)
3726 {
3727 in_rtx->used = 1;
3728 return;
3729 }
3730 newreg = LEAF_REG_REMAP (newreg);
3731 if (newreg < 0)
3732 abort ();
3733 regs_ever_live[REGNO (in_rtx)] = 0;
3734 regs_ever_live[newreg] = 1;
3735 REGNO (in_rtx) = newreg;
3736 in_rtx->used = 1;
3737 }
3738
3739 if (INSN_P (in_rtx))
3740 {
3741 /* Inside a SEQUENCE, we find insns.
3742 Renumber just the patterns of these insns,
3743 just as we do for the top-level insns. */
3744 leaf_renumber_regs_insn (PATTERN (in_rtx));
3745 return;
3746 }
3747
3748 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
3749
3750 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
3751 switch (*format_ptr++)
3752 {
3753 case 'e':
3754 leaf_renumber_regs_insn (XEXP (in_rtx, i));
3755 break;
3756
3757 case 'E':
3758 if (NULL != XVEC (in_rtx, i))
3759 {
3760 for (j = 0; j < XVECLEN (in_rtx, i); j++)
3761 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
3762 }
3763 break;
3764
3765 case 'S':
3766 case 's':
3767 case '0':
3768 case 'i':
3769 case 'w':
3770 case 'n':
3771 case 'u':
3772 break;
3773
3774 default:
3775 abort ();
3776 }
3777 }
3778 #endif
3779
3780
3781 /* When -gused is used, emit debug info for only used symbols. But in
3782 addition to the standard intercepted debug_hooks there are some direct
3783 calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
3784 Those routines may also be called from a higher level intercepted routine. So
3785 to prevent recording data for an inner call to one of these for an intercept,
3786 we maintain an intercept nesting counter (debug_nesting). We only save the
3787 intercepted arguments if the nesting is 1. */
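/* Sketch of the intended call pattern (illustrative; the real
   wrappers live in the debug back ends):

	debug_nesting++;
	dbxout_symbol (decl, 0);
	if (--debug_nesting == 0)
	  debug_flush_symbol_queue ();  */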
3788 int debug_nesting = 0;
3789
3790 static tree *symbol_queue;
3791 int symbol_queue_index = 0;
3792 static int symbol_queue_size = 0;
3793
3794 /* Generate the symbols for any queued up type symbols we encountered
3795 while generating the type info for some originally used symbol.
3796 This might generate additional entries in the queue. Only when
3797 the nesting depth goes to 0 is this routine called. */
3798
3799 void
3800 debug_flush_symbol_queue (void)
3801 {
3802 int i;
3803
3804 /* Make sure that additionally queued items are not flushed
3805 prematurely. */
3806
3807 ++debug_nesting;
3808
3809 for (i = 0; i < symbol_queue_index; ++i)
3810 {
3811 /* If we pushed queued symbols then such symbols must be
3812 output no matter what anyone else says. Specifically,
3813 we need to make sure dbxout_symbol() thinks the symbol was
3814 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
3815 which may be set for outside reasons. */
3816 int saved_tree_used = TREE_USED (symbol_queue[i]);
3817 int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
3818 TREE_USED (symbol_queue[i]) = 1;
3819 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
3820
3821 #ifdef DBX_DEBUGGING_INFO
3822 dbxout_symbol (symbol_queue[i], 0);
3823 #endif
3824
3825 TREE_USED (symbol_queue[i]) = saved_tree_used;
3826 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
3827 }
3828
3829 symbol_queue_index = 0;
3830 --debug_nesting;
3831 }
3832
3833 /* Queue a type symbol needed as part of the definition of a decl
3834 symbol. These symbols are generated when debug_flush_symbol_queue()
3835 is called. */
3836
3837 void
3838 debug_queue_symbol (tree decl)
3839 {
3840 if (symbol_queue_index >= symbol_queue_size)
3841 {
3842 symbol_queue_size += 10;
3843 symbol_queue = xrealloc (symbol_queue,
3844 symbol_queue_size * sizeof (tree));
3845 }
3846
3847 symbol_queue[symbol_queue_index++] = decl;
3848 }
3849
3850 /* Free symbol queue. */
3851 void
3852 debug_free_queue (void)
3853 {
3854 if (symbol_queue)
3855 {
3856 free (symbol_queue);
3857 symbol_queue = NULL;
3858 symbol_queue_size = 0;
3859 }
3860 }