/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
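
/* For example, CEIL (10, 4) == 3: splitting 10 bytes into 4-byte
   pieces takes three pieces, the last one only partly filled.  */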

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif
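
/* Note that `defined (A) != defined (B)' acts as an exclusive-or,
   since defined yields 0 or 1: the condition above holds exactly when
   one of the two macros is defined and the other is not, i.e. when
   the stack and the argument list grow in opposite directions.  */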

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
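
/* For instance, with a STACK_BOUNDARY of 64 bits and 8-bit units,
   STACK_BYTES is 8.  */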

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;			/* Destination block (a BLKmode MEM).  */
  rtx to_addr;			/* Address of the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR auto-increments.  */
  int explicit_inc_to;		/* -1/+1 if we emit explicit dec/inc insns.  */
  int to_struct;		/* MEM_IN_STRUCT_P of TO.  */
  rtx from;			/* Source block (a BLKmode MEM).  */
  rtx from_addr;		/* Address of the source.  */
  int autinc_from;		/* Nonzero if FROM_ADDR auto-increments.  */
  int explicit_inc_from;	/* -1/+1 if we emit explicit dec/inc insns.  */
  int from_struct;		/* MEM_IN_STRUCT_P of FROM.  */
  int len;			/* Number of bytes still to be moved.  */
  int offset;			/* Offset of the next piece to be moved.  */
  int reverse;			/* Nonzero to move from high to low address.  */
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;			/* Destination block (a BLKmode MEM).  */
  rtx to_addr;			/* Address of the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR auto-increments.  */
  int explicit_inc_to;		/* -1/+1 if we emit explicit dec/inc insns.  */
  int to_struct;		/* MEM_IN_STRUCT_P of TO.  */
  int len;			/* Number of bytes still to be cleared.  */
  int offset;			/* Offset of the next piece to be cleared.  */
  int reverse;			/* Nonzero to clear from high to low address.  */
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
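
/* As an illustration: with a MOVE_RATIO of 15, emit_block_move below
   expands a constant-size copy inline as individual move insns only
   while move_by_pieces_ninsns predicts fewer than 15 of them; larger
   copies fall back to a block-move insn or a library call.  */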

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
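
/* On a machine with register windows (the SPARC, for example), the
   register in which an argument arrives differs from the register the
   caller loaded it into, and the target defines these macros to map
   one numbering onto the other; the identity defaults above cover
   everything else.  */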
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
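
/* A typical call pattern, as used throughout this file (see
   convert_move below): operands are filtered just before they go into
   an insn, with MODIFY nonzero only for the operand being stored:

	to = protect_from_queue (to, 1);
	from = protect_from_queue (from, 0);  */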

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
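
/* For example, to widen an SImode value X to DImode as an unsigned
   quantity one would write

	rtx wide = convert_to_mode (DImode, x, 1);

   which folds X directly if it is a constant, and otherwise copies
   it, zero-extended, into a fresh pseudo.  */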

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
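
/* A worked example of the CONST_INT widening above: converting
   (const_int 255) from QImode (width 8) to a wider signed mode first
   masks VAL to 0xff; then, since bit 7 is set and UNSIGNEDP is zero,
   it ors in all the higher bits, yielding (const_int -1) -- the
   correct sign-extension of the QImode value 0xff.  Converted
   unsigned instead, VAL stays 255.  */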
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
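
/* A worked example, assuming a MOVE_MAX of 4 and full alignment: for
   L == 10 the loop first counts 10 / 4 == 2 SImode moves (leaving
   L == 2), then 2 / 2 == 1 HImode move, so the result is 3 insns.  */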

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
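
/* To illustrate the shape such a PARALLEL takes (a sketch, not output
   from any particular target): a value passed partly in an integer
   and partly in a float register might be described as

	(parallel [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:DF 32) (const_int 4))])

   where each EXPR_LIST pairs a register with the byte offset of the
   piece it carries.  */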
1824
1825 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1826 registers represented by a PARALLEL. */
1827
1828 void
1829 emit_group_store (x, y)
1830 rtx x, y;
1831 {
1832 rtx source_reg, target;
1833 int i;
1834
1835 if (GET_CODE (y) != PARALLEL)
1836 abort ();
1837
1838 /* Check for a NULL entry, used to indicate that the parameter goes
1839 both on the stack and in registers. */
1840 if (XEXP (XVECEXP (y, 0, 0), 0))
1841 i = 0;
1842 else
1843 i = 1;
1844
1845 for (; i < XVECLEN (y, 0); i++)
1846 {
1847 rtx element = XVECEXP (y, 0, i);
1848
1849 source_reg = XEXP (element, 0);
1850
1851 if (GET_CODE (x) == MEM)
1852 target = change_address (x, GET_MODE (source_reg),
1853 plus_constant (XEXP (x, 0),
1854 INTVAL (XEXP (element, 1))));
1855 else if (XEXP (element, 1) == const0_rtx)
1856 {
1857 target = x;
1858 if (GET_MODE (target) != GET_MODE (source_reg))
1859 target = gen_lowpart (GET_MODE (source_reg), target);
1860 }
1861 else
1862 abort ();
1863
1864 emit_move_insn (target, source_reg);
1865 }
1866 }
1867
1868 /* Add a USE expression for REG to the (possibly empty) list pointed
1869 to by CALL_FUSAGE. REG must denote a hard register. */
1870
1871 void
1872 use_reg (call_fusage, reg)
1873 rtx *call_fusage, reg;
1874 {
1875 if (GET_CODE (reg) != REG
1876 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1877 abort();
1878
1879 *call_fusage
1880 = gen_rtx_EXPR_LIST (VOIDmode,
1881 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1882 }
1883
1884 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1885 starting at REGNO. All of these registers must be hard registers. */
1886
1887 void
1888 use_regs (call_fusage, regno, nregs)
1889 rtx *call_fusage;
1890 int regno;
1891 int nregs;
1892 {
1893 int i;
1894
1895 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1896 abort ();
1897
1898 for (i = 0; i < nregs; i++)
1899 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
1900 }
1901
1902 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1903 PARALLEL REGS. This is for calls that pass values in multiple
1904 non-contiguous locations. The Irix 6 ABI has examples of this. */
1905
1906 void
1907 use_group_regs (call_fusage, regs)
1908 rtx *call_fusage;
1909 rtx regs;
1910 {
1911 int i;
1912
1913 for (i = 0; i < XVECLEN (regs, 0); i++)
1914 {
1915 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1916
1917 /* A NULL entry means the parameter goes both on the stack and in
1918 registers. This can also be a MEM for targets that pass values
1919 partially on the stack and partially in registers. */
1920 if (reg != 0 && GET_CODE (reg) == REG)
1921 use_reg (call_fusage, reg);
1922 }
1923 }
1924 \f
1925 /* Generate several move instructions to clear LEN bytes of block TO.
1926 (A MEM rtx with BLKmode). The caller must pass TO through
1927 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1928 we can assume. */
1929
1930 static void
1931 clear_by_pieces (to, len, align)
1932 rtx to;
1933 int len, align;
1934 {
1935 struct clear_by_pieces data;
1936 rtx to_addr = XEXP (to, 0);
1937 int max_size = MOVE_MAX + 1;
1938
1939 data.offset = 0;
1940 data.to_addr = to_addr;
1941 data.to = to;
1942 data.autinc_to
1943 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1944 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1945
1946 data.explicit_inc_to = 0;
1947 data.reverse
1948 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1949 if (data.reverse) data.offset = len;
1950 data.len = len;
1951
1952 data.to_struct = MEM_IN_STRUCT_P (to);
1953
1954 /* If clearing requires more than two move insns,
1955 copy addresses to registers (to make displacements shorter)
1956 and use post-increment if available. */
1957 if (!data.autinc_to
1958 && move_by_pieces_ninsns (len, align) > 2)
1959 {
1960 #ifdef HAVE_PRE_DECREMENT
1961 if (data.reverse && ! data.autinc_to)
1962 {
1963 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1964 data.autinc_to = 1;
1965 data.explicit_inc_to = -1;
1966 }
1967 #endif
1968 #ifdef HAVE_POST_INCREMENT
1969 if (! data.reverse && ! data.autinc_to)
1970 {
1971 data.to_addr = copy_addr_to_reg (to_addr);
1972 data.autinc_to = 1;
1973 data.explicit_inc_to = 1;
1974 }
1975 #endif
1976 if (!data.autinc_to && CONSTANT_P (to_addr))
1977 data.to_addr = copy_addr_to_reg (to_addr);
1978 }
1979
1980 if (! SLOW_UNALIGNED_ACCESS
1981 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1982 align = MOVE_MAX;
1983
1984 /* First move what we can in the largest integer mode, then go to
1985 successively smaller modes. */
1986
1987 while (max_size > 1)
1988 {
1989 enum machine_mode mode = VOIDmode, tmode;
1990 enum insn_code icode;
1991
1992 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1993 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1994 if (GET_MODE_SIZE (tmode) < max_size)
1995 mode = tmode;
1996
1997 if (mode == VOIDmode)
1998 break;
1999
2000 icode = mov_optab->handlers[(int) mode].insn_code;
2001 if (icode != CODE_FOR_nothing
2002 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2003 GET_MODE_SIZE (mode)))
2004 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2005
2006 max_size = GET_MODE_SIZE (mode);
2007 }
2008
2009 /* The code above should have handled everything. */
2010 if (data.len != 0)
2011 abort ();
2012 }
2013
2014 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2015 with move instructions for mode MODE. GENFUN is the gen_... function
2016 to make a move insn for that mode. DATA has all the other info. */
2017
2018 static void
2019 clear_by_pieces_1 (genfun, mode, data)
2020 rtx (*genfun) PROTO ((rtx, ...));
2021 enum machine_mode mode;
2022 struct clear_by_pieces *data;
2023 {
2024 register int size = GET_MODE_SIZE (mode);
2025 register rtx to1;
2026
2027 while (data->len >= size)
2028 {
2029 if (data->reverse) data->offset -= size;
2030
2031 to1 = (data->autinc_to
2032 ? gen_rtx_MEM (mode, data->to_addr)
2033 : copy_rtx (change_address (data->to, mode,
2034 plus_constant (data->to_addr,
2035 data->offset))));
2036 MEM_IN_STRUCT_P (to1) = data->to_struct;
2037
2038 #ifdef HAVE_PRE_DECREMENT
2039 if (data->explicit_inc_to < 0)
2040 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2041 #endif
2042
2043 emit_insn ((*genfun) (to1, const0_rtx));
2044 #ifdef HAVE_POST_INCREMENT
2045 if (data->explicit_inc_to > 0)
2046 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2047 #endif
2048
2049 if (! data->reverse) data->offset += size;
2050
2051 data->len -= size;
2052 }
2053 }
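
/* A worked example of the two routines above, assuming MOVE_MAX is 4
   and the target has SImode, HImode and QImode move patterns:
   clearing 7 well-aligned bytes stores zero once in SImode (4 bytes),
   once in HImode (2 bytes) and once in QImode (1 byte), at which
   point data.len reaches 0 as the abort check demands.  */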
2054 \f
2055 /* Write zeros through the storage of OBJECT.
2056 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2057 the maximum alignment we can assume, measured in bytes.
2058
2059 If we call a function that returns the length of the block, return it. */
2060
2061 rtx
2062 clear_storage (object, size, align)
2063 rtx object;
2064 rtx size;
2065 int align;
2066 {
2067 rtx retval = 0;
2068
2069 if (GET_MODE (object) == BLKmode)
2070 {
2071 object = protect_from_queue (object, 1);
2072 size = protect_from_queue (size, 0);
2073
2074 if (GET_CODE (size) == CONST_INT
2075 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2076 clear_by_pieces (object, INTVAL (size), align);
2077
2078 else
2079 {
2080 /* Try the most limited insn first, because there's no point
2081 including more than one in the machine description unless
2082 the more limited one has some advantage. */
2083
2084 rtx opalign = GEN_INT (align);
2085 enum machine_mode mode;
2086
2087 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2088 mode = GET_MODE_WIDER_MODE (mode))
2089 {
2090 enum insn_code code = clrstr_optab[(int) mode];
2091
2092 if (code != CODE_FOR_nothing
2093 /* We don't need MODE to be narrower than
2094 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2095 the mode mask, as it is returned by the macro, it will
2096 definitely be less than the actual mode mask. */
2097 && ((GET_CODE (size) == CONST_INT
2098 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2099 <= GET_MODE_MASK (mode)))
2100 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2101 && (insn_operand_predicate[(int) code][0] == 0
2102 || (*insn_operand_predicate[(int) code][0]) (object,
2103 BLKmode))
2104 && (insn_operand_predicate[(int) code][2] == 0
2105 || (*insn_operand_predicate[(int) code][2]) (opalign,
2106 VOIDmode)))
2107 {
2108 rtx op1;
2109 rtx last = get_last_insn ();
2110 rtx pat;
2111
2112 op1 = convert_to_mode (mode, size, 1);
2113 if (insn_operand_predicate[(int) code][1] != 0
2114 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2115 mode))
2116 op1 = copy_to_mode_reg (mode, op1);
2117
2118 pat = GEN_FCN ((int) code) (object, op1, opalign);
2119 if (pat)
2120 {
2121 emit_insn (pat);
2122 return 0;
2123 }
2124 else
2125 delete_insns_since (last);
2126 }
2127 }
2128
2129
2130 #ifdef TARGET_MEM_FUNCTIONS
2131 retval
2132 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2133 ptr_mode, 3,
2134 XEXP (object, 0), Pmode,
2135 const0_rtx,
2136 TYPE_MODE (integer_type_node),
2137 convert_to_mode
2138 (TYPE_MODE (sizetype), size,
2139 TREE_UNSIGNED (sizetype)),
2140 TYPE_MODE (sizetype));
2141 #else
2142 emit_library_call (bzero_libfunc, 0,
2143 VOIDmode, 2,
2144 XEXP (object, 0), Pmode,
2145 convert_to_mode
2146 (TYPE_MODE (integer_type_node), size,
2147 TREE_UNSIGNED (integer_type_node)),
2148 TYPE_MODE (integer_type_node));
2149 #endif
2150 }
2151 }
2152 else
2153 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2154
2155 return retval;
2156 }
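
/* Usage sketch (hypothetical sizes): a caller clearing a 32-byte
   BLKmode object of known 4-byte alignment writes

       clear_storage (object, GEN_INT (32), 4);

   clear_by_pieces handles it if move_by_pieces_ninsns stays under
   MOVE_RATIO; otherwise a clrstr pattern is tried, and failing that
   the memset (or bzero) library call is emitted.  */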
2157
2158 /* Generate code to copy Y into X.
2159 Both Y and X must have the same mode, except that
2160 Y can be a constant with VOIDmode.
2161 This mode cannot be BLKmode; use emit_block_move for that.
2162
2163 Return the last instruction emitted. */
2164
2165 rtx
2166 emit_move_insn (x, y)
2167 rtx x, y;
2168 {
2169 enum machine_mode mode = GET_MODE (x);
2170
2171 x = protect_from_queue (x, 1);
2172 y = protect_from_queue (y, 0);
2173
2174 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2175 abort ();
2176
2177 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2178 y = force_const_mem (mode, y);
2179
2180 /* If X or Y are memory references, verify that their addresses are valid
2181 for the machine. */
2182 if (GET_CODE (x) == MEM
2183 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2184 && ! push_operand (x, GET_MODE (x)))
2185 || (flag_force_addr
2186 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2187 x = change_address (x, VOIDmode, XEXP (x, 0));
2188
2189 if (GET_CODE (y) == MEM
2190 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2191 || (flag_force_addr
2192 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2193 y = change_address (y, VOIDmode, XEXP (y, 0));
2194
2195 if (mode == BLKmode)
2196 abort ();
2197
2198 return emit_move_insn_1 (x, y);
2199 }
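
/* A minimal usage sketch (hypothetical pseudo-register):

       rtx r0 = gen_reg_rtx (SImode);
       emit_move_insn (r0, GEN_INT (42));

   CONST_INT carries no mode, which is why Y may have VOIDmode while
   X's mode selects the move pattern.  */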
2200
2201 /* Low level part of emit_move_insn.
2202 Called just like emit_move_insn, but assumes X and Y
2203 are basically valid. */
2204
2205 rtx
2206 emit_move_insn_1 (x, y)
2207 rtx x, y;
2208 {
2209 enum machine_mode mode = GET_MODE (x);
2210 enum machine_mode submode;
2211 enum mode_class class = GET_MODE_CLASS (mode);
2212 int i;
2213
2214 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2215 return
2216 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2217
2218 /* Expand complex moves by moving real part and imag part, if possible. */
2219 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2220 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2221 * BITS_PER_UNIT),
2222 (class == MODE_COMPLEX_INT
2223 ? MODE_INT : MODE_FLOAT),
2224 0))
2225 && (mov_optab->handlers[(int) submode].insn_code
2226 != CODE_FOR_nothing))
2227 {
2228 /* Don't split destination if it is a stack push. */
2229 int stack = push_operand (x, GET_MODE (x));
2230
2231 /* If this is a stack push, push the highpart first, so it
2232 will be in the argument order.
2233
2234 In that case, change_address is used only to convert
2235 the mode, not to change the address. */
2236 if (stack)
2237 {
2238 /* Note that the real part always precedes the imag part in memory
2239 regardless of machine's endianness. */
2240 #ifdef STACK_GROWS_DOWNWARD
2241 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2242 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2243 gen_imagpart (submode, y)));
2244 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2245 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2246 gen_realpart (submode, y)));
2247 #else
2248 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2249 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2250 gen_realpart (submode, y)));
2251 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2252 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2253 gen_imagpart (submode, y)));
2254 #endif
2255 }
2256 else
2257 {
2258 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2259 (gen_realpart (submode, x), gen_realpart (submode, y)));
2260 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2261 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2262 }
2263
2264 return get_last_insn ();
2265 }
2266
2267 /* This will handle any multi-word mode that lacks a move_insn pattern.
2268 However, you will get better code if you define such patterns,
2269 even if they must turn into multiple assembler instructions. */
2270 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2271 {
2272 rtx last_insn = 0;
2273
2274 #ifdef PUSH_ROUNDING
2275
2276 /* If X is a push on the stack, do the push now and replace
2277 X with a reference to the stack pointer. */
2278 if (push_operand (x, GET_MODE (x)))
2279 {
2280 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2281 x = change_address (x, VOIDmode, stack_pointer_rtx);
2282 }
2283 #endif
2284
2285 /* Show the output dies here. */
2286 if (x != y)
2287 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2288
2289 for (i = 0;
2290 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2291 i++)
2292 {
2293 rtx xpart = operand_subword (x, i, 1, mode);
2294 rtx ypart = operand_subword (y, i, 1, mode);
2295
2296 /* If we can't get a part of Y, put Y into memory if it is a
2297 constant. Otherwise, force it into a register. If we still
2298 can't get a part of Y, abort. */
2299 if (ypart == 0 && CONSTANT_P (y))
2300 {
2301 y = force_const_mem (mode, y);
2302 ypart = operand_subword (y, i, 1, mode);
2303 }
2304 else if (ypart == 0)
2305 ypart = operand_subword_force (y, i, mode);
2306
2307 if (xpart == 0 || ypart == 0)
2308 abort ();
2309
2310 last_insn = emit_move_insn (xpart, ypart);
2311 }
2312
2313 return last_insn;
2314 }
2315 else
2316 abort ();
2317 }
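
/* Example of the multi-word fallback above: moving a DImode value on
   a 4-byte-word target that lacks a movdi pattern emits (clobber x)
   followed by two word_mode subword moves, and the second of those
   moves is what gets returned.  */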
2318 \f
2319 /* Pushing data onto the stack. */
2320
2321 /* Push a block of length SIZE (perhaps variable)
2322 and return an rtx to address the beginning of the block.
2323 Note that it is not possible for the value returned to be a QUEUED.
2324 The value may be virtual_outgoing_args_rtx.
2325
2326 EXTRA is the number of bytes of padding to push in addition to SIZE.
2327 BELOW nonzero means this padding comes at low addresses;
2328 otherwise, the padding comes at high addresses. */
2329
2330 rtx
2331 push_block (size, extra, below)
2332 rtx size;
2333 int extra, below;
2334 {
2335 register rtx temp;
2336
2337 size = convert_modes (Pmode, ptr_mode, size, 1);
2338 if (CONSTANT_P (size))
2339 anti_adjust_stack (plus_constant (size, extra));
2340 else if (GET_CODE (size) == REG && extra == 0)
2341 anti_adjust_stack (size);
2342 else
2343 {
2344 rtx temp = copy_to_mode_reg (Pmode, size);
2345 if (extra != 0)
2346 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2347 temp, 0, OPTAB_LIB_WIDEN);
2348 anti_adjust_stack (temp);
2349 }
2350
2351 #ifdef STACK_GROWS_DOWNWARD
2352 temp = virtual_outgoing_args_rtx;
2353 if (extra != 0 && below)
2354 temp = plus_constant (temp, extra);
2355 #else
2356 if (GET_CODE (size) == CONST_INT)
2357 temp = plus_constant (virtual_outgoing_args_rtx,
2358 - INTVAL (size) - (below ? 0 : extra));
2359 else if (extra != 0 && !below)
2360 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2361 negate_rtx (Pmode, plus_constant (size, extra)));
2362 else
2363 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2364 negate_rtx (Pmode, size));
2365 #endif
2366
2367 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2368 }
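
/* Example: on a downward-growing stack, push_block (GEN_INT (16), 0, 0)
   adjusts the stack pointer by 16 bytes and returns an address based
   on virtual_outgoing_args_rtx that designates the new block.  */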
2369
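/* Return an rtx for use as a push address: STACK_PUSH_CODE (PRE_DEC,
   PRE_INC, etc.) applied to the stack pointer; wrapping it in a MEM
   yields a push operand.  */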
2370 rtx
2371 gen_push_operand ()
2372 {
2373 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2374 }
2375
2376 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2377 block of SIZE bytes. */
2378
2379 static rtx
2380 get_push_address (size)
2381 int size;
2382 {
2383 register rtx temp;
2384
2385 if (STACK_PUSH_CODE == POST_DEC)
2386 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2387 else if (STACK_PUSH_CODE == POST_INC)
2388 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2389 else
2390 temp = stack_pointer_rtx;
2391
2392 return copy_to_reg (temp);
2393 }
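
/* For example, with the usual PRE_DEC push the block just pushed
   begins at the updated stack pointer itself; the POST_DEC and
   POST_INC cases compensate by SIZE because there the stack pointer
   moves only after the data is written.  */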
2394
2395 /* Generate code to push X onto the stack, assuming it has mode MODE and
2396 type TYPE.
2397 MODE is redundant except when X is a CONST_INT (since they don't
2398 carry mode info).
2399 SIZE is an rtx for the size of data to be copied (in bytes),
2400 needed only if X is BLKmode.
2401
2402 ALIGN (in bytes) is the maximum alignment we can assume.
2403
2404 If PARTIAL and REG are both nonzero, then copy that many of the first
2405 words of X into registers starting with REG, and push the rest of X.
2406 The amount of space pushed is decreased by PARTIAL words,
2407 rounded *down* to a multiple of PARM_BOUNDARY.
2408 REG must be a hard register in this case.
2409 If REG is zero but PARTIAL is not, take all other actions for an
2410 argument partially in registers, but do not actually load any
2411 registers.
2412
2413 EXTRA is the amount in bytes of extra space to leave next to this arg.
2414 This is ignored if an argument block has already been allocated.
2415
2416 On a machine that lacks real push insns, ARGS_ADDR is the address of
2417 the bottom of the argument block for this call. We use indexing off there
2418 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2419 argument block has not been preallocated.
2420
2421 ARGS_SO_FAR is the size of args previously pushed for this call. */
2422
2423 void
2424 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2425 args_addr, args_so_far)
2426 register rtx x;
2427 enum machine_mode mode;
2428 tree type;
2429 rtx size;
2430 int align;
2431 int partial;
2432 rtx reg;
2433 int extra;
2434 rtx args_addr;
2435 rtx args_so_far;
2436 {
2437 rtx xinner;
2438 enum direction stack_direction
2439 #ifdef STACK_GROWS_DOWNWARD
2440 = downward;
2441 #else
2442 = upward;
2443 #endif
2444
2445 /* Decide where to pad the argument: `downward' for below,
2446 `upward' for above, or `none' for don't pad it.
2447 Default is below for small data on big-endian machines; else above. */
2448 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2449
2450 /* Invert direction if stack is post-update. */
2451 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2452 if (where_pad != none)
2453 where_pad = (where_pad == downward ? upward : downward);
2454
2455 xinner = x = protect_from_queue (x, 0);
2456
2457 if (mode == BLKmode)
2458 {
2459 /* Copy a block into the stack, entirely or partially. */
2460
2461 register rtx temp;
2462 int used = partial * UNITS_PER_WORD;
2463 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2464 int skip;
2465
2466 if (size == 0)
2467 abort ();
2468
2469 used -= offset;
2470
2471 /* USED is now the # of bytes we need not copy to the stack
2472 because registers will take care of them. */
2473
2474 if (partial != 0)
2475 xinner = change_address (xinner, BLKmode,
2476 plus_constant (XEXP (xinner, 0), used));
2477
2478 /* If the partial register-part of the arg counts in its stack size,
2479 skip the part of stack space corresponding to the registers.
2480 Otherwise, start copying to the beginning of the stack space,
2481 by setting SKIP to 0. */
2482 #ifndef REG_PARM_STACK_SPACE
2483 skip = 0;
2484 #else
2485 skip = used;
2486 #endif
2487
2488 #ifdef PUSH_ROUNDING
2489 /* Do it with several push insns if that doesn't take lots of insns
2490 and if there is no difficulty with push insns that skip bytes
2491 on the stack for alignment purposes. */
2492 if (args_addr == 0
2493 && GET_CODE (size) == CONST_INT
2494 && skip == 0
2495 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2496 < MOVE_RATIO)
2497 /* Here we avoid the case of a structure whose weak alignment
2498 forces many pushes of a small amount of data,
2499 and such small pushes do rounding that causes trouble. */
2500 && ((! SLOW_UNALIGNED_ACCESS)
2501 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2502 || PUSH_ROUNDING (align) == align)
2503 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2504 {
2505 /* Push padding now if padding above and stack grows down,
2506 or if padding below and stack grows up.
2507 But if space already allocated, this has already been done. */
2508 if (extra && args_addr == 0
2509 && where_pad != none && where_pad != stack_direction)
2510 anti_adjust_stack (GEN_INT (extra));
2511
2512 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2513 INTVAL (size) - used, align);
2514
2515 if (flag_check_memory_usage && ! in_check_memory_usage)
2516 {
2517 rtx temp;
2518
2519 in_check_memory_usage = 1;
2520 temp = get_push_address (INTVAL(size) - used);
2521 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2522 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2523 temp, ptr_mode,
2524 XEXP (xinner, 0), ptr_mode,
2525 GEN_INT (INTVAL(size) - used),
2526 TYPE_MODE (sizetype));
2527 else
2528 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2529 temp, ptr_mode,
2530 GEN_INT (INTVAL(size) - used),
2531 TYPE_MODE (sizetype),
2532 GEN_INT (MEMORY_USE_RW),
2533 TYPE_MODE (integer_type_node));
2534 in_check_memory_usage = 0;
2535 }
2536 }
2537 else
2538 #endif /* PUSH_ROUNDING */
2539 {
2540 /* Otherwise make space on the stack and copy the data
2541 to the address of that space. */
2542
2543 /* Deduct words put into registers from the size we must copy. */
2544 if (partial != 0)
2545 {
2546 if (GET_CODE (size) == CONST_INT)
2547 size = GEN_INT (INTVAL (size) - used);
2548 else
2549 size = expand_binop (GET_MODE (size), sub_optab, size,
2550 GEN_INT (used), NULL_RTX, 0,
2551 OPTAB_LIB_WIDEN);
2552 }
2553
2554 /* Get the address of the stack space.
2555 In this case, we do not deal with EXTRA separately.
2556 A single stack adjust will do. */
2557 if (! args_addr)
2558 {
2559 temp = push_block (size, extra, where_pad == downward);
2560 extra = 0;
2561 }
2562 else if (GET_CODE (args_so_far) == CONST_INT)
2563 temp = memory_address (BLKmode,
2564 plus_constant (args_addr,
2565 skip + INTVAL (args_so_far)));
2566 else
2567 temp = memory_address (BLKmode,
2568 plus_constant (gen_rtx_PLUS (Pmode,
2569 args_addr,
2570 args_so_far),
2571 skip));
2572 if (flag_check_memory_usage && ! in_check_memory_usage)
2573 {
2574 rtx target;
2575
2576 in_check_memory_usage = 1;
2577 target = copy_to_reg (temp);
2578 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2579 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2580 target, ptr_mode,
2581 XEXP (xinner, 0), ptr_mode,
2582 size, TYPE_MODE (sizetype));
2583 else
2584 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2585 target, ptr_mode,
2586 size, TYPE_MODE (sizetype),
2587 GEN_INT (MEMORY_USE_RW),
2588 TYPE_MODE (integer_type_node));
2589 in_check_memory_usage = 0;
2590 }
2591
2592 /* TEMP is the address of the block. Copy the data there. */
2593 if (GET_CODE (size) == CONST_INT
2594 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2595 < MOVE_RATIO))
2596 {
2597 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2598 INTVAL (size), align);
2599 goto ret;
2600 }
2601 /* Try the most limited insn first, because there's no point
2602 including more than one in the machine description unless
2603 the more limited one has some advantage. */
2604 #ifdef HAVE_movstrqi
2605 if (HAVE_movstrqi
2606 && GET_CODE (size) == CONST_INT
2607 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2608 <= GET_MODE_MASK (QImode)))
2609 {
2610 rtx pat = gen_movstrqi (gen_rtx_MEM (BLKmode, temp),
2611 xinner, size, GEN_INT (align));
2612 if (pat != 0)
2613 {
2614 emit_insn (pat);
2615 goto ret;
2616 }
2617 }
2618 #endif
2619 #ifdef HAVE_movstrhi
2620 if (HAVE_movstrhi
2621 && GET_CODE (size) == CONST_INT
2622 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2623 <= GET_MODE_MASK (HImode)))
2624 {
2625 rtx pat = gen_movstrhi (gen_rtx_MEM (BLKmode, temp),
2626 xinner, size, GEN_INT (align));
2627 if (pat != 0)
2628 {
2629 emit_insn (pat);
2630 goto ret;
2631 }
2632 }
2633 #endif
2634 #ifdef HAVE_movstrsi
2635 if (HAVE_movstrsi)
2636 {
2637 rtx pat = gen_movstrsi (gen_rtx_MEM (BLKmode, temp),
2638 xinner, size, GEN_INT (align));
2639 if (pat != 0)
2640 {
2641 emit_insn (pat);
2642 goto ret;
2643 }
2644 }
2645 #endif
2646 #ifdef HAVE_movstrdi
2647 if (HAVE_movstrdi)
2648 {
2649 rtx pat = gen_movstrdi (gen_rtx_MEM (BLKmode, temp),
2650 xinner, size, GEN_INT (align));
2651 if (pat != 0)
2652 {
2653 emit_insn (pat);
2654 goto ret;
2655 }
2656 }
2657 #endif
2658 #ifdef HAVE_movstrti
2659 if (HAVE_movstrti)
2660 {
2661 rtx pat = gen_movstrti (gen_rtx_MEM (BLKmode, temp),
2662 xinner, size, GEN_INT (align));
2663 if (pat != 0)
2664 {
2665 emit_insn (pat);
2666 goto ret;
2667 }
2668 }
2669 #endif
2670
2671 #ifndef ACCUMULATE_OUTGOING_ARGS
2672 /* If the source is referenced relative to the stack pointer,
2673 copy it to another register to stabilize it. We do not need
2674 to do this if we know that we won't be changing sp. */
2675
2676 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2677 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2678 temp = copy_to_reg (temp);
2679 #endif
2680
2681 /* Make inhibit_defer_pop nonzero around the library call
2682 to force it to pop the bcopy-arguments right away. */
2683 NO_DEFER_POP;
2684 #ifdef TARGET_MEM_FUNCTIONS
2685 emit_library_call (memcpy_libfunc, 0,
2686 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2687 convert_to_mode (TYPE_MODE (sizetype),
2688 size, TREE_UNSIGNED (sizetype)),
2689 TYPE_MODE (sizetype));
2690 #else
2691 emit_library_call (bcopy_libfunc, 0,
2692 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2693 convert_to_mode (TYPE_MODE (integer_type_node),
2694 size,
2695 TREE_UNSIGNED (integer_type_node)),
2696 TYPE_MODE (integer_type_node));
2697 #endif
2698 OK_DEFER_POP;
2699 }
2700 }
2701 else if (partial > 0)
2702 {
2703 /* Scalar partly in registers. */
2704
2705 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2706 int i;
2707 int not_stack;
2708 /* # words of start of argument
2709 that we must make space for but need not store. */
2710 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2711 int args_offset = INTVAL (args_so_far);
2712 int skip;
2713
2714 /* Push padding now if padding above and stack grows down,
2715 or if padding below and stack grows up.
2716 But if space already allocated, this has already been done. */
2717 if (extra && args_addr == 0
2718 && where_pad != none && where_pad != stack_direction)
2719 anti_adjust_stack (GEN_INT (extra));
2720
2721 /* If we make space by pushing it, we might as well push
2722 the real data. Otherwise, we can leave OFFSET nonzero
2723 and leave the space uninitialized. */
2724 if (args_addr == 0)
2725 offset = 0;
2726
2727 /* Now NOT_STACK gets the number of words that we don't need to
2728 allocate on the stack. */
2729 not_stack = partial - offset;
2730
2731 /* If the partial register-part of the arg counts in its stack size,
2732 skip the part of stack space corresponding to the registers.
2733 Otherwise, start copying to the beginning of the stack space,
2734 by setting SKIP to 0. */
2735 #ifndef REG_PARM_STACK_SPACE
2736 skip = 0;
2737 #else
2738 skip = not_stack;
2739 #endif
2740
2741 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2742 x = validize_mem (force_const_mem (mode, x));
2743
2744 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2745 SUBREGs of such registers are not allowed. */
2746 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2747 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2748 x = copy_to_reg (x);
2749
2750 /* Loop over all the words allocated on the stack for this arg. */
2751 /* We can do it by words, because any scalar bigger than a word
2752 has a size that is a multiple of a word. */
2753 #ifndef PUSH_ARGS_REVERSED
2754 for (i = not_stack; i < size; i++)
2755 #else
2756 for (i = size - 1; i >= not_stack; i--)
2757 #endif
2758 if (i >= not_stack + offset)
2759 emit_push_insn (operand_subword_force (x, i, mode),
2760 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2761 0, args_addr,
2762 GEN_INT (args_offset + ((i - not_stack + skip)
2763 * UNITS_PER_WORD)));
2764 }
2765 else
2766 {
2767 rtx addr;
2768 rtx target = NULL_RTX;
2769
2770 /* Push padding now if padding above and stack grows down,
2771 or if padding below and stack grows up.
2772 But if space already allocated, this has already been done. */
2773 if (extra && args_addr == 0
2774 && where_pad != none && where_pad != stack_direction)
2775 anti_adjust_stack (GEN_INT (extra));
2776
2777 #ifdef PUSH_ROUNDING
2778 if (args_addr == 0)
2779 addr = gen_push_operand ();
2780 else
2781 #endif
2782 {
2783 if (GET_CODE (args_so_far) == CONST_INT)
2784 addr
2785 = memory_address (mode,
2786 plus_constant (args_addr,
2787 INTVAL (args_so_far)));
2788 else
2789 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
2790 args_so_far));
2791 target = addr;
2792 }
2793
2794 emit_move_insn (gen_rtx_MEM (mode, addr), x);
2795
2796 if (flag_check_memory_usage && ! in_check_memory_usage)
2797 {
2798 in_check_memory_usage = 1;
2799 if (target == 0)
2800 target = get_push_address (GET_MODE_SIZE (mode));
2801
2802 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2803 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2804 target, ptr_mode,
2805 XEXP (x, 0), ptr_mode,
2806 GEN_INT (GET_MODE_SIZE (mode)),
2807 TYPE_MODE (sizetype));
2808 else
2809 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2810 target, ptr_mode,
2811 GEN_INT (GET_MODE_SIZE (mode)),
2812 TYPE_MODE (sizetype),
2813 GEN_INT (MEMORY_USE_RW),
2814 TYPE_MODE (integer_type_node));
2815 in_check_memory_usage = 0;
2816 }
2817 }
2818
2819 ret:
2820 /* If part should go in registers, copy that part
2821 into the appropriate registers. Do this now, at the end,
2822 since mem-to-mem copies above may do function calls. */
2823 if (partial > 0 && reg != 0)
2824 {
2825 /* Handle calls that pass values in multiple non-contiguous locations.
2826 The Irix 6 ABI has examples of this. */
2827 if (GET_CODE (reg) == PARALLEL)
2828 emit_group_load (reg, x);
2829 else
2830 move_block_to_reg (REGNO (reg), x, partial, mode);
2831 }
2832
2833 if (extra && args_addr == 0 && where_pad == stack_direction)
2834 anti_adjust_stack (GEN_INT (extra));
2835 }
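
/* A sketch of the partial-in-registers case (hypothetical numbers):
   pushing a 12-byte BLKmode argument with PARTIAL == 1 and
   UNITS_PER_WORD == 4 sends the first 4 bytes through REG, copies
   only the remaining 8 bytes to the stack, and loads the register
   part at "ret:" above, after any mem-to-mem copies that might call
   functions.  */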
2836 \f
2837 /* Expand an assignment that stores the value of FROM into TO.
2838 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2839 (This may contain a QUEUED rtx;
2840 if the value is constant, this rtx is a constant.)
2841 Otherwise, the returned value is NULL_RTX.
2842
2843 SUGGEST_REG is no longer actually used.
2844 It used to mean, copy the value through a register
2845 and return that register, if that is possible.
2846 We now use WANT_VALUE to decide whether to do this. */
2847
2848 rtx
2849 expand_assignment (to, from, want_value, suggest_reg)
2850 tree to, from;
2851 int want_value;
2852 int suggest_reg;
2853 {
2854 register rtx to_rtx = 0;
2855 rtx result;
2856
2857 /* Don't crash if the lhs of the assignment was erroneous. */
2858
2859 if (TREE_CODE (to) == ERROR_MARK)
2860 {
2861 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2862 return want_value ? result : NULL_RTX;
2863 }
2864
2865 /* Assignment of a structure component needs special treatment
2866 if the structure component's rtx is not simply a MEM.
2867 Assignment of an array element at a constant index, and assignment of
2868 an array element in an unaligned packed structure field, has the same
2869 problem. */
2870
2871 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2872 || TREE_CODE (to) == ARRAY_REF)
2873 {
2874 enum machine_mode mode1;
2875 int bitsize;
2876 int bitpos;
2877 tree offset;
2878 int unsignedp;
2879 int volatilep = 0;
2880 tree tem;
2881 int alignment;
2882
2883 push_temp_slots ();
2884 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2885 &unsignedp, &volatilep, &alignment);
2886
2887 /* If we are going to use store_bit_field and extract_bit_field,
2888 make sure to_rtx will be safe for multiple use. */
2889
2890 if (mode1 == VOIDmode && want_value)
2891 tem = stabilize_reference (tem);
2892
2893 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2894 if (offset != 0)
2895 {
2896 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2897
2898 if (GET_CODE (to_rtx) != MEM)
2899 abort ();
2900 to_rtx = change_address (to_rtx, VOIDmode,
2901 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
2902 force_reg (ptr_mode, offset_rtx)));
2903 }
2904 if (volatilep)
2905 {
2906 if (GET_CODE (to_rtx) == MEM)
2907 {
2908 /* When the offset is zero, to_rtx is the address of the
2909 structure we are storing into, and hence may be shared.
2910 We must make a new MEM before setting the volatile bit. */
2911 if (offset == 0)
2912 to_rtx = copy_rtx (to_rtx);
2913
2914 MEM_VOLATILE_P (to_rtx) = 1;
2915 }
2916 #if 0 /* This was turned off because, when a field is volatile
2917 in an object which is not volatile, the object may be in a register,
2918 and then we would abort over here. */
2919 else
2920 abort ();
2921 #endif
2922 }
2923
2924 if (TREE_CODE (to) == COMPONENT_REF
2925 && TREE_READONLY (TREE_OPERAND (to, 1)))
2926 {
2927 if (offset == 0)
2928 to_rtx = copy_rtx (to_rtx);
2929
2930 RTX_UNCHANGING_P (to_rtx) = 1;
2931 }
2932
2933 /* Check the access. */
2934 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2935 {
2936 rtx to_addr;
2937 int size;
2938 int best_mode_size;
2939 enum machine_mode best_mode;
2940
2941 best_mode = get_best_mode (bitsize, bitpos,
2942 TYPE_ALIGN (TREE_TYPE (tem)),
2943 mode1, volatilep);
2944 if (best_mode == VOIDmode)
2945 best_mode = QImode;
2946
2947 best_mode_size = GET_MODE_BITSIZE (best_mode);
2948 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2949 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2950 size *= GET_MODE_SIZE (best_mode);
2951
2952 /* Check the access right of the pointer. */
2953 if (size)
2954 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
2955 to_addr, ptr_mode,
2956 GEN_INT (size), TYPE_MODE (sizetype),
2957 GEN_INT (MEMORY_USE_WO),
2958 TYPE_MODE (integer_type_node));
2959 }
2960
2961 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2962 (want_value
2963 /* Spurious cast makes HPUX compiler happy. */
2964 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2965 : VOIDmode),
2966 unsignedp,
2967 /* Required alignment of containing datum. */
2968 alignment,
2969 int_size_in_bytes (TREE_TYPE (tem)));
2970 preserve_temp_slots (result);
2971 free_temp_slots ();
2972 pop_temp_slots ();
2973
2974 /* If the value is meaningful, convert RESULT to the proper mode.
2975 Otherwise, return nothing. */
2976 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2977 TYPE_MODE (TREE_TYPE (from)),
2978 result,
2979 TREE_UNSIGNED (TREE_TYPE (to)))
2980 : NULL_RTX);
2981 }
2982
2983 /* If the rhs is a function call and its value is not an aggregate,
2984 call the function before we start to compute the lhs.
2985 This is needed for correct code for cases such as
2986 val = setjmp (buf) on machines where reference to val
2987 requires loading up part of an address in a separate insn.
2988
2989 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2990 a promoted variable where the zero- or sign- extension needs to be done.
2991 Handling this in the normal way is safe because no computation is done
2992 before the call. */
2993 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2994 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2995 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2996 {
2997 rtx value;
2998
2999 push_temp_slots ();
3000 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3001 if (to_rtx == 0)
3002 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3003
3004 /* Handle calls that return values in multiple non-contiguous locations.
3005 The Irix 6 ABI has examples of this. */
3006 if (GET_CODE (to_rtx) == PARALLEL)
3007 emit_group_load (to_rtx, value);
3008 else if (GET_MODE (to_rtx) == BLKmode)
3009 emit_block_move (to_rtx, value, expr_size (from),
3010 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3011 else
3012 emit_move_insn (to_rtx, value);
3013 preserve_temp_slots (to_rtx);
3014 free_temp_slots ();
3015 pop_temp_slots ();
3016 return want_value ? to_rtx : NULL_RTX;
3017 }
3018
3019 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3020 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3021
3022 if (to_rtx == 0)
3023 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3024
3025 /* Don't move directly into a return register. */
3026 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3027 {
3028 rtx temp;
3029
3030 push_temp_slots ();
3031 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3032 emit_move_insn (to_rtx, temp);
3033 preserve_temp_slots (to_rtx);
3034 free_temp_slots ();
3035 pop_temp_slots ();
3036 return want_value ? to_rtx : NULL_RTX;
3037 }
3038
3039 /* In case we are returning the contents of an object which overlaps
3040 the place the value is being stored, use a safe function when copying
3041 a value through a pointer into a structure value return block. */
3042 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3043 && current_function_returns_struct
3044 && !current_function_returns_pcc_struct)
3045 {
3046 rtx from_rtx, size;
3047
3048 push_temp_slots ();
3049 size = expr_size (from);
3050 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3051 EXPAND_MEMORY_USE_DONT);
3052
3053 /* Copy the rights of the bitmap. */
3054 if (flag_check_memory_usage)
3055 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3056 XEXP (to_rtx, 0), ptr_mode,
3057 XEXP (from_rtx, 0), ptr_mode,
3058 convert_to_mode (TYPE_MODE (sizetype),
3059 size, TREE_UNSIGNED (sizetype)),
3060 TYPE_MODE (sizetype));
3061
3062 #ifdef TARGET_MEM_FUNCTIONS
3063 emit_library_call (memcpy_libfunc, 0,
3064 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3065 XEXP (from_rtx, 0), Pmode,
3066 convert_to_mode (TYPE_MODE (sizetype),
3067 size, TREE_UNSIGNED (sizetype)),
3068 TYPE_MODE (sizetype));
3069 #else
3070 emit_library_call (bcopy_libfunc, 0,
3071 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3072 XEXP (to_rtx, 0), Pmode,
3073 convert_to_mode (TYPE_MODE (integer_type_node),
3074 size, TREE_UNSIGNED (integer_type_node)),
3075 TYPE_MODE (integer_type_node));
3076 #endif
3077
3078 preserve_temp_slots (to_rtx);
3079 free_temp_slots ();
3080 pop_temp_slots ();
3081 return want_value ? to_rtx : NULL_RTX;
3082 }
3083
3084 /* Compute FROM and store the value in the rtx we got. */
3085
3086 push_temp_slots ();
3087 result = store_expr (from, to_rtx, want_value);
3088 preserve_temp_slots (result);
3089 free_temp_slots ();
3090 pop_temp_slots ();
3091 return want_value ? result : NULL_RTX;
3092 }
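
/* Usage sketch: for a C assignment "s.f = x" where s.f is a
   bit-field, the COMPONENT_REF branch above finds the field with
   get_inner_reference and stores through store_field; a plain
   "v = x" expands V to a REG or MEM and lets store_expr finish.  */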
3093
3094 /* Generate code for computing expression EXP,
3095 and storing the value into TARGET.
3096 TARGET may contain a QUEUED rtx.
3097
3098 If WANT_VALUE is nonzero, return a copy of the value
3099 not in TARGET, so that we can be sure to use the proper
3100 value in a containing expression even if TARGET has something
3101 else stored in it. If possible, we copy the value through a pseudo
3102 and return that pseudo. Or, if the value is constant, we try to
3103 return the constant. In some cases, we return a pseudo
3104 copied *from* TARGET.
3105
3106 If the mode is BLKmode then we may return TARGET itself.
3107 It turns out that in BLKmode it doesn't cause a problem,
3108 because C has no operators that could combine two different
3109 assignments into the same BLKmode object with different values
3110 with no sequence point. Will other languages need this to
3111 be more thorough?
3112
3113 If WANT_VALUE is 0, we return NULL, to make sure
3114 to catch quickly any cases where the caller uses the value
3115 and fails to set WANT_VALUE. */
3116
3117 rtx
3118 store_expr (exp, target, want_value)
3119 register tree exp;
3120 register rtx target;
3121 int want_value;
3122 {
3123 register rtx temp;
3124 int dont_return_target = 0;
3125
3126 if (TREE_CODE (exp) == COMPOUND_EXPR)
3127 {
3128 /* Perform first part of compound expression, then assign from second
3129 part. */
3130 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3131 emit_queue ();
3132 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3133 }
3134 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3135 {
3136 /* For conditional expression, get safe form of the target. Then
3137 test the condition, doing the appropriate assignment on either
3138 side. This avoids the creation of unnecessary temporaries.
3139 For non-BLKmode, it is more efficient not to do this. */
3140
3141 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3142
3143 emit_queue ();
3144 target = protect_from_queue (target, 1);
3145
3146 do_pending_stack_adjust ();
3147 NO_DEFER_POP;
3148 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3149 start_cleanup_deferral ();
3150 store_expr (TREE_OPERAND (exp, 1), target, 0);
3151 end_cleanup_deferral ();
3152 emit_queue ();
3153 emit_jump_insn (gen_jump (lab2));
3154 emit_barrier ();
3155 emit_label (lab1);
3156 start_cleanup_deferral ();
3157 store_expr (TREE_OPERAND (exp, 2), target, 0);
3158 end_cleanup_deferral ();
3159 emit_queue ();
3160 emit_label (lab2);
3161 OK_DEFER_POP;
3162
3163 return want_value ? target : NULL_RTX;
3164 }
3165 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3166 && GET_MODE (target) != BLKmode)
3167 /* If target is in memory and caller wants value in a register instead,
3168 arrange that. Pass TARGET as target for expand_expr so that,
3169 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3170 We know expand_expr will not use the target in that case.
3171 Don't do this if TARGET is volatile because we are supposed
3172 to write it and then read it. */
3173 {
3174 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3175 GET_MODE (target), 0);
3176 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3177 temp = copy_to_reg (temp);
3178 dont_return_target = 1;
3179 }
3180 else if (queued_subexp_p (target))
3181 /* If target contains a postincrement, let's not risk
3182 using it as the place to generate the rhs. */
3183 {
3184 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3185 {
3186 /* Expand EXP into a new pseudo. */
3187 temp = gen_reg_rtx (GET_MODE (target));
3188 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3189 }
3190 else
3191 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3192
3193 /* If target is volatile, ANSI requires accessing the value
3194 *from* the target, if it is accessed. So make that happen.
3195 In no case return the target itself. */
3196 if (! MEM_VOLATILE_P (target) && want_value)
3197 dont_return_target = 1;
3198 }
3199 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3200 /* If this is a scalar in a register that is stored in a wider mode
3201 than the declared mode, compute the result into its declared mode
3202 and then convert to the wider mode. Our value is the computed
3203 expression. */
3204 {
3205 /* If we don't want a value, we can do the conversion inside EXP,
3206 which will often result in some optimizations. Do the conversion
3207 in two steps: first change the signedness, if needed, then
3208 the extend. But don't do this if the type of EXP is a subtype
3209 of something else since then the conversion might involve
3210 more than just converting modes. */
3211 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3212 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3213 {
3214 if (TREE_UNSIGNED (TREE_TYPE (exp))
3215 != SUBREG_PROMOTED_UNSIGNED_P (target))
3216 exp
3217 = convert
3218 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3219 TREE_TYPE (exp)),
3220 exp);
3221
3222 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3223 SUBREG_PROMOTED_UNSIGNED_P (target)),
3224 exp);
3225 }
3226
3227 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3228
3229 /* If TEMP is a volatile MEM and we want a result value, make
3230 the access now so it gets done only once. Likewise if
3231 it contains TARGET. */
3232 if (GET_CODE (temp) == MEM && want_value
3233 && (MEM_VOLATILE_P (temp)
3234 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3235 temp = copy_to_reg (temp);
3236
3237 /* If TEMP is a VOIDmode constant, use convert_modes to make
3238 sure that we properly convert it. */
3239 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3240 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3241 TYPE_MODE (TREE_TYPE (exp)), temp,
3242 SUBREG_PROMOTED_UNSIGNED_P (target));
3243
3244 convert_move (SUBREG_REG (target), temp,
3245 SUBREG_PROMOTED_UNSIGNED_P (target));
3246 return want_value ? temp : NULL_RTX;
3247 }
3248 else
3249 {
3250 temp = expand_expr (exp, target, GET_MODE (target), 0);
3251 /* Return TARGET if it's a specified hardware register.
3252 If TARGET is a volatile mem ref, either return TARGET
3253 or return a reg copied *from* TARGET; ANSI requires this.
3254
3255 Otherwise, if TEMP is not TARGET, return TEMP
3256 if it is constant (for efficiency),
3257 or if we really want the correct value. */
3258 if (!(target && GET_CODE (target) == REG
3259 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3260 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3261 && ! rtx_equal_p (temp, target)
3262 && (CONSTANT_P (temp) || want_value))
3263 dont_return_target = 1;
3264 }
3265
3266 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3267 the same as that of TARGET, adjust the constant. This is needed, for
3268 example, in case it is a CONST_DOUBLE and we want only a word-sized
3269 value. */
3270 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3271 && TREE_CODE (exp) != ERROR_MARK
3272 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3273 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3274 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3275
3276 if (flag_check_memory_usage
3277 && GET_CODE (target) == MEM
3278 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3279 {
3280 if (GET_CODE (temp) == MEM)
3281 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3282 XEXP (target, 0), ptr_mode,
3283 XEXP (temp, 0), ptr_mode,
3284 expr_size (exp), TYPE_MODE (sizetype));
3285 else
3286 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3287 XEXP (target, 0), ptr_mode,
3288 expr_size (exp), TYPE_MODE (sizetype),
3289 GEN_INT (MEMORY_USE_WO),
3290 TYPE_MODE (integer_type_node));
3291 }
3292
3293 /* If value was not generated in the target, store it there.
3294 Convert the value to TARGET's type first if necessary. */
3295
3296 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3297 {
3298 target = protect_from_queue (target, 1);
3299 if (GET_MODE (temp) != GET_MODE (target)
3300 && GET_MODE (temp) != VOIDmode)
3301 {
3302 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3303 if (dont_return_target)
3304 {
3305 /* In this case, we will return TEMP,
3306 so make sure it has the proper mode.
3307 But don't forget to store the value into TARGET. */
3308 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3309 emit_move_insn (target, temp);
3310 }
3311 else
3312 convert_move (target, temp, unsignedp);
3313 }
3314
3315 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3316 {
3317 /* Handle copying a string constant into an array.
3318 The string constant may be shorter than the array.
3319 So copy just the string's actual length, and clear the rest. */
3320 rtx size;
3321 rtx addr;
3322
3323 /* Get the size of the data type of the string,
3324 which is actually the size of the target. */
3325 size = expr_size (exp);
3326 if (GET_CODE (size) == CONST_INT
3327 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3328 emit_block_move (target, temp, size,
3329 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3330 else
3331 {
3332 /* Compute the size of the data to copy from the string. */
3333 tree copy_size
3334 = size_binop (MIN_EXPR,
3335 make_tree (sizetype, size),
3336 convert (sizetype,
3337 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3338 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3339 VOIDmode, 0);
3340 rtx label = 0;
3341
3342 /* Copy that much. */
3343 emit_block_move (target, temp, copy_size_rtx,
3344 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3345
3346 /* Figure out how much is left in TARGET that we have to clear.
3347 Do all calculations in ptr_mode. */
3348
3349 addr = XEXP (target, 0);
3350 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3351
3352 if (GET_CODE (copy_size_rtx) == CONST_INT)
3353 {
3354 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3355 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3356 }
3357 else
3358 {
3359 addr = force_reg (ptr_mode, addr);
3360 addr = expand_binop (ptr_mode, add_optab, addr,
3361 copy_size_rtx, NULL_RTX, 0,
3362 OPTAB_LIB_WIDEN);
3363
3364 size = expand_binop (ptr_mode, sub_optab, size,
3365 copy_size_rtx, NULL_RTX, 0,
3366 OPTAB_LIB_WIDEN);
3367
3368 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3369 GET_MODE (size), 0, 0);
3370 label = gen_label_rtx ();
3371 emit_jump_insn (gen_blt (label));
3372 }
3373
3374 if (size != const0_rtx)
3375 {
3376 /* Be sure we can write on ADDR. */
3377 if (flag_check_memory_usage)
3378 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3379 addr, ptr_mode,
3380 size, TYPE_MODE (sizetype),
3381 GEN_INT (MEMORY_USE_WO),
3382 TYPE_MODE (integer_type_node));
3383 #ifdef TARGET_MEM_FUNCTIONS
3384 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3385 addr, ptr_mode,
3386 const0_rtx, TYPE_MODE (integer_type_node),
3387 convert_to_mode (TYPE_MODE (sizetype),
3388 size,
3389 TREE_UNSIGNED (sizetype)),
3390 TYPE_MODE (sizetype));
3391 #else
3392 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3393 addr, ptr_mode,
3394 convert_to_mode (TYPE_MODE (integer_type_node),
3395 size,
3396 TREE_UNSIGNED (integer_type_node)),
3397 TYPE_MODE (integer_type_node));
3398 #endif
3399 }
3400
3401 if (label)
3402 emit_label (label);
3403 }
3404 }
3405 /* Handle calls that return values in multiple non-contiguous locations.
3406 The Irix 6 ABI has examples of this. */
3407 else if (GET_CODE (target) == PARALLEL)
3408 emit_group_load (target, temp);
3409 else if (GET_MODE (temp) == BLKmode)
3410 emit_block_move (target, temp, expr_size (exp),
3411 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3412 else
3413 emit_move_insn (target, temp);
3414 }
3415
3416 /* If we don't want a value, return NULL_RTX. */
3417 if (! want_value)
3418 return NULL_RTX;
3419
3420 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3421 ??? The latter test doesn't seem to make sense. */
3422 else if (dont_return_target && GET_CODE (temp) != MEM)
3423 return temp;
3424
3425 /* Return TARGET itself if it is a hard register. */
3426 else if (want_value && GET_MODE (target) != BLKmode
3427 && ! (GET_CODE (target) == REG
3428 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3429 return copy_to_reg (target);
3430
3431 else
3432 return target;
3433 }
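
/* Usage note: expanding "a = b = 0" stores into B with a nonzero
   WANT_VALUE so the inner assignment's value can feed the outer one;
   a bare statement "b = 0;" passes WANT_VALUE == 0 and gets NULL_RTX,
   which quickly exposes callers that use a value without asking for
   one.  */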
3434 \f
3435 /* Return 1 if EXP just contains zeros. */
3436
3437 static int
3438 is_zeros_p (exp)
3439 tree exp;
3440 {
3441 tree elt;
3442
3443 switch (TREE_CODE (exp))
3444 {
3445 case CONVERT_EXPR:
3446 case NOP_EXPR:
3447 case NON_LVALUE_EXPR:
3448 return is_zeros_p (TREE_OPERAND (exp, 0));
3449
3450 case INTEGER_CST:
3451 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3452
3453 case COMPLEX_CST:
3454 return
3455 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3456
3457 case REAL_CST:
3458 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3459
3460 case CONSTRUCTOR:
3461 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3462 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3463 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3464 if (! is_zeros_p (TREE_VALUE (elt)))
3465 return 0;
3466
3467 return 1;
3468
3469 default:
3470 return 0;
3471 }
3472 }
3473
3474 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3475
3476 static int
3477 mostly_zeros_p (exp)
3478 tree exp;
3479 {
3480 if (TREE_CODE (exp) == CONSTRUCTOR)
3481 {
3482 int elts = 0, zeros = 0;
3483 tree elt = CONSTRUCTOR_ELTS (exp);
3484 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3485 {
3486 /* If there are no ranges of true bits, it is all zero. */
3487 return elt == NULL_TREE;
3488 }
3489 for (; elt; elt = TREE_CHAIN (elt))
3490 {
3491 /* We do not handle the case where the index is a RANGE_EXPR,
3492 so the statistic will be somewhat inaccurate.
3493 We do make a more accurate count in store_constructor itself,
3494 so since this function is only used for nested array elements,
3495 this should be close enough. */
3496 if (mostly_zeros_p (TREE_VALUE (elt)))
3497 zeros++;
3498 elts++;
3499 }
3500
3501 return 4 * zeros >= 3 * elts;
3502 }
3503
3504 return is_zeros_p (exp);
3505 }
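
/* Worked example: for the constructor { 0, 0, 0, 5 } the loop above
   counts elts == 4 and zeros == 3; 4 * 3 >= 3 * 4 holds, so the
   constructor counts as mostly zero and callers such as
   store_constructor may clear the whole object first.  */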
3506 \f
3507 /* Helper function for store_constructor.
3508 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3509 TYPE is the type of the CONSTRUCTOR, not the element type.
3510 CLEARED is as for store_constructor.
3511
3512 This provides a recursive shortcut back to store_constructor when it isn't
3513 necessary to go through store_field. This is so that we can pass through
3514 the cleared field to let store_constructor know that we may not have to
3515 clear a substructure if the outer structure has already been cleared. */
3516
3517 static void
3518 store_constructor_field (target, bitsize, bitpos,
3519 mode, exp, type, cleared)
3520 rtx target;
3521 int bitsize, bitpos;
3522 enum machine_mode mode;
3523 tree exp, type;
3524 int cleared;
3525 {
3526 if (TREE_CODE (exp) == CONSTRUCTOR
3527 && bitpos % BITS_PER_UNIT == 0
3528 /* If we have a non-zero bitpos for a register target, then we just
3529 let store_field do the bitfield handling. This is unlikely to
3530 generate unnecessary clear instructions anyway. */
3531 && (bitpos == 0 || GET_CODE (target) == MEM))
3532 {
3533 if (bitpos != 0)
3534 target = change_address (target, VOIDmode,
3535 plus_constant (XEXP (target, 0),
3536 bitpos / BITS_PER_UNIT));
3537 store_constructor (exp, target, cleared);
3538 }
3539 else
3540 store_field (target, bitsize, bitpos, mode, exp,
3541 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3542 int_size_in_bytes (type));
3543 }
3544
3545 /* Store the value of constructor EXP into the rtx TARGET.
3546 TARGET is either a REG or a MEM.
3547 CLEARED is true if TARGET is known to have been zero'd. */
3548
3549 static void
3550 store_constructor (exp, target, cleared)
3551 tree exp;
3552 rtx target;
3553 int cleared;
3554 {
3555 tree type = TREE_TYPE (exp);
3556
3557 /* We know our target cannot conflict, since safe_from_p has been called. */
3558 #if 0
3559 /* Don't try copying piece by piece into a hard register
3560 since that is vulnerable to being clobbered by EXP.
3561 Instead, construct in a pseudo register and then copy it all. */
3562 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3563 {
3564 rtx temp = gen_reg_rtx (GET_MODE (target));
3565 store_constructor (exp, temp, 0);
3566 emit_move_insn (target, temp);
3567 return;
3568 }
3569 #endif
3570
3571 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3572 || TREE_CODE (type) == QUAL_UNION_TYPE)
3573 {
3574 register tree elt;
3575
3576 /* Inform later passes that the whole union value is dead. */
3577 if (TREE_CODE (type) == UNION_TYPE
3578 || TREE_CODE (type) == QUAL_UNION_TYPE)
3579 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3580
3581 /* If we are building a static constructor into a register,
3582 set the initial value as zero so we can fold the value into
3583 a constant. But if more than one register is involved,
3584 this probably loses. */
3585 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3586 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3587 {
3588 if (! cleared)
3589 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3590
3591 cleared = 1;
3592 }
3593
3594 /* If the constructor has fewer fields than the structure
3595 or if we are initializing the structure to mostly zeros,
3596 clear the whole structure first. */
3597 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3598 != list_length (TYPE_FIELDS (type)))
3599 || mostly_zeros_p (exp))
3600 {
3601 if (! cleared)
3602 clear_storage (target, expr_size (exp),
3603 TYPE_ALIGN (type) / BITS_PER_UNIT);
3604
3605 cleared = 1;
3606 }
3607 else
3608 /* Inform later passes that the old value is dead. */
3609 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3610
3611 /* Store each element of the constructor into
3612 the corresponding field of TARGET. */
3613
3614 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3615 {
3616 register tree field = TREE_PURPOSE (elt);
3617 register enum machine_mode mode;
3618 int bitsize;
3619 int bitpos = 0;
3620 int unsignedp;
3621 tree pos, constant = 0, offset = 0;
3622 rtx to_rtx = target;
3623
3624 /* Just ignore missing fields.
3625 We cleared the whole structure, above,
3626 if any fields are missing. */
3627 if (field == 0)
3628 continue;
3629
3630 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3631 continue;
3632
3633 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3634 unsignedp = TREE_UNSIGNED (field);
3635 mode = DECL_MODE (field);
3636 if (DECL_BIT_FIELD (field))
3637 mode = VOIDmode;
3638
3639 pos = DECL_FIELD_BITPOS (field);
3640 if (TREE_CODE (pos) == INTEGER_CST)
3641 constant = pos;
3642 else if (TREE_CODE (pos) == PLUS_EXPR
3643 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3644 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3645 else
3646 offset = pos;
3647
3648 if (constant)
3649 bitpos = TREE_INT_CST_LOW (constant);
3650
3651 if (offset)
3652 {
3653 rtx offset_rtx;
3654
3655 if (contains_placeholder_p (offset))
3656 offset = build (WITH_RECORD_EXPR, sizetype,
3657 offset, make_tree (TREE_TYPE (exp), target));
3658
3659 offset = size_binop (FLOOR_DIV_EXPR, offset,
3660 size_int (BITS_PER_UNIT));
3661
3662 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3663 if (GET_CODE (to_rtx) != MEM)
3664 abort ();
3665
3666 to_rtx
3667 = change_address (to_rtx, VOIDmode,
3668 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3669 force_reg (ptr_mode, offset_rtx)));
3670 }
3671 if (TREE_READONLY (field))
3672 {
3673 if (GET_CODE (to_rtx) == MEM)
3674 to_rtx = copy_rtx (to_rtx);
3675
3676 RTX_UNCHANGING_P (to_rtx) = 1;
3677 }
3678
3679 store_constructor_field (to_rtx, bitsize, bitpos,
3680 mode, TREE_VALUE (elt), type, cleared);
3681 }
3682 }
3683 else if (TREE_CODE (type) == ARRAY_TYPE)
3684 {
3685 register tree elt;
3686 register int i;
3687 int need_to_clear;
3688 tree domain = TYPE_DOMAIN (type);
3689 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3690 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3691 tree elttype = TREE_TYPE (type);
3692
3693 /* If the constructor has fewer elements than the array,
3694 clear the whole array first. Similarly if this is a
3695 static constructor of a non-BLKmode object. */
3696 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3697 need_to_clear = 1;
3698 else
3699 {
3700 HOST_WIDE_INT count = 0, zero_count = 0;
3701 need_to_clear = 0;
3702 /* This loop is a more accurate version of the loop in
3703 mostly_zeros_p (it handles RANGE_EXPR in an index).
3704 It is also needed to check for missing elements. */
3705 for (elt = CONSTRUCTOR_ELTS (exp);
3706 elt != NULL_TREE;
3707 elt = TREE_CHAIN (elt))
3708 {
3709 tree index = TREE_PURPOSE (elt);
3710 HOST_WIDE_INT this_node_count;
3711 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3712 {
3713 tree lo_index = TREE_OPERAND (index, 0);
3714 tree hi_index = TREE_OPERAND (index, 1);
3715 if (TREE_CODE (lo_index) != INTEGER_CST
3716 || TREE_CODE (hi_index) != INTEGER_CST)
3717 {
3718 need_to_clear = 1;
3719 break;
3720 }
3721 this_node_count = TREE_INT_CST_LOW (hi_index)
3722 - TREE_INT_CST_LOW (lo_index) + 1;
3723 }
3724 else
3725 this_node_count = 1;
3726 count += this_node_count;
3727 if (mostly_zeros_p (TREE_VALUE (elt)))
3728 zero_count += this_node_count;
3729 }
3730 /* Clear the entire array first if there are any missing elements,
3731 or if the incidence of zero elements is >= 75%. */
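/* For instance, with count == 8 and zero_count == 6,
4 * 6 >= 3 * 8 holds, so the whole array is cleared first. */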
3732 if (count < maxelt - minelt + 1
3733 || 4 * zero_count >= 3 * count)
3734 need_to_clear = 1;
3735 }
3736 if (need_to_clear)
3737 {
3738 if (! cleared)
3739 clear_storage (target, expr_size (exp),
3740 TYPE_ALIGN (type) / BITS_PER_UNIT);
3741 cleared = 1;
3742 }
3743 else
3744 /* Inform later passes that the old value is dead. */
3745 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3746
3747 /* Store each element of the constructor into
3748 the corresponding element of TARGET, determined
3749 by counting the elements. */
3750 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3751 elt;
3752 elt = TREE_CHAIN (elt), i++)
3753 {
3754 register enum machine_mode mode;
3755 int bitsize;
3756 int bitpos;
3757 int unsignedp;
3758 tree value = TREE_VALUE (elt);
3759 tree index = TREE_PURPOSE (elt);
3760 rtx xtarget = target;
3761
3762 if (cleared && is_zeros_p (value))
3763 continue;
3764
3765 mode = TYPE_MODE (elttype);
3766 bitsize = GET_MODE_BITSIZE (mode);
3767 unsignedp = TREE_UNSIGNED (elttype);
3768
3769 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3770 {
3771 tree lo_index = TREE_OPERAND (index, 0);
3772 tree hi_index = TREE_OPERAND (index, 1);
3773 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3774 struct nesting *loop;
3775 HOST_WIDE_INT lo, hi, count;
3776 tree position;
3777
3778 /* If the range is constant and "small", unroll the loop. */
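/* ("Small" here: the target is not in memory, or there are at
most two elements, or the total element data is known and is
at most 40 bytes, i.e. 40 * 8 bits.) */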
3779 if (TREE_CODE (lo_index) == INTEGER_CST
3780 && TREE_CODE (hi_index) == INTEGER_CST
3781 && (lo = TREE_INT_CST_LOW (lo_index),
3782 hi = TREE_INT_CST_LOW (hi_index),
3783 count = hi - lo + 1,
3784 (GET_CODE (target) != MEM
3785 || count <= 2
3786 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3787 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3788 <= 40 * 8))))
3789 {
3790 lo -= minelt; hi -= minelt;
3791 for (; lo <= hi; lo++)
3792 {
3793 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3794 store_constructor_field (target, bitsize, bitpos,
3795 mode, value, type, cleared);
3796 }
3797 }
3798 else
3799 {
3800 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3801 loop_top = gen_label_rtx ();
3802 loop_end = gen_label_rtx ();
3803
3804 unsignedp = TREE_UNSIGNED (domain);
3805
3806 index = build_decl (VAR_DECL, NULL_TREE, domain);
3807
3808 DECL_RTL (index) = index_r
3809 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3810 &unsignedp, 0));
3811
3812 if (TREE_CODE (value) == SAVE_EXPR
3813 && SAVE_EXPR_RTL (value) == 0)
3814 {
3815 /* Make sure value gets expanded once before the
3816 loop. */
3817 expand_expr (value, const0_rtx, VOIDmode, 0);
3818 emit_queue ();
3819 }
3820 store_expr (lo_index, index_r, 0);
3821 loop = expand_start_loop (0);
3822
3823 /* Assign value to element index. */
3824 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3825 size_int (BITS_PER_UNIT));
3826 position = size_binop (MULT_EXPR,
3827 size_binop (MINUS_EXPR, index,
3828 TYPE_MIN_VALUE (domain)),
3829 position);
3830 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3831 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3832 xtarget = change_address (target, mode, addr);
3833 if (TREE_CODE (value) == CONSTRUCTOR)
3834 store_constructor (value, xtarget, cleared);
3835 else
3836 store_expr (value, xtarget, 0);
3837
3838 expand_exit_loop_if_false (loop,
3839 build (LT_EXPR, integer_type_node,
3840 index, hi_index));
3841
3842 expand_increment (build (PREINCREMENT_EXPR,
3843 TREE_TYPE (index),
3844 index, integer_one_node), 0, 0);
3845 expand_end_loop ();
3846 emit_label (loop_end);
3847
3848 /* Needed by stupid register allocation, to extend the
3849 lifetime of pseudo-regs used by target past the end
3850 of the loop. */
3851 emit_insn (gen_rtx_USE (GET_MODE (target), target));
3852 }
3853 }
3854 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3855 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3856 {
3857 rtx pos_rtx, addr;
3858 tree position;
3859
3860 if (index == 0)
3861 index = size_int (i);
3862
3863 if (minelt)
3864 index = size_binop (MINUS_EXPR, index,
3865 TYPE_MIN_VALUE (domain));
3866 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3867 size_int (BITS_PER_UNIT));
3868 position = size_binop (MULT_EXPR, index, position);
3869 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3870 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3871 xtarget = change_address (target, mode, addr);
3872 store_expr (value, xtarget, 0);
3873 }
3874 else
3875 {
3876 if (index != 0)
3877 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3878 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3879 else
3880 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3881 store_constructor_field (target, bitsize, bitpos,
3882 mode, value, type, cleared);
3883 }
3884 }
3885 }
3886 /* Set constructor assignments. */
3887 else if (TREE_CODE (type) == SET_TYPE)
3888 {
3889 tree elt = CONSTRUCTOR_ELTS (exp);
3890 int nbytes = int_size_in_bytes (type), nbits;
3891 tree domain = TYPE_DOMAIN (type);
3892 tree domain_min, domain_max, bitlength;
3893
3894 /* The default implementation strategy is to extract the constant
3895 parts of the constructor, use that to initialize the target,
3896 and then "or" in whatever non-constant ranges we need in addition.
3897
3898 If a large set is all zero or all ones, it is
3899 probably better to set it using memset (if available) or bzero.
3900 Also, if a large set has just a single range, it may be
3901 better to first clear the whole set (using bzero/memset),
3902 and then set the bits we want. */
3903
3904 /* Check for all zeros. */
3905 if (elt == NULL_TREE)
3906 {
3907 if (!cleared)
3908 clear_storage (target, expr_size (exp),
3909 TYPE_ALIGN (type) / BITS_PER_UNIT);
3910 return;
3911 }
3912
3913 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3914 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3915 bitlength = size_binop (PLUS_EXPR,
3916 size_binop (MINUS_EXPR, domain_max, domain_min),
3917 size_one_node);
3918
3919 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3920 abort ();
3921 nbits = TREE_INT_CST_LOW (bitlength);
3922
3923 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3924 are "complicated" (more than one range), initialize (the
3925 constant parts) by copying from a constant. */
3926 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3927 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3928 {
3929 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3930 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3931 char *bit_buffer = (char *) alloca (nbits);
3932 HOST_WIDE_INT word = 0;
3933 int bit_pos = 0;
3934 int ibit = 0;
3935 int offset = 0; /* In bytes from beginning of set. */
3936 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3937 for (;;)
3938 {
3939 if (bit_buffer[ibit])
3940 {
3941 if (BYTES_BIG_ENDIAN)
3942 word |= (1 << (set_word_size - 1 - bit_pos));
3943 else
3944 word |= 1 << bit_pos;
3945 }
3946 bit_pos++; ibit++;
3947 if (bit_pos >= set_word_size || ibit == nbits)
3948 {
3949 if (word != 0 || ! cleared)
3950 {
3951 rtx datum = GEN_INT (word);
3952 rtx to_rtx;
3953 /* The assumption here is that it is safe to use
3954 XEXP if the set is multi-word, but not if
3955 it's single-word. */
3956 if (GET_CODE (target) == MEM)
3957 {
3958 to_rtx = plus_constant (XEXP (target, 0), offset);
3959 to_rtx = change_address (target, mode, to_rtx);
3960 }
3961 else if (offset == 0)
3962 to_rtx = target;
3963 else
3964 abort ();
3965 emit_move_insn (to_rtx, datum);
3966 }
3967 if (ibit == nbits)
3968 break;
3969 word = 0;
3970 bit_pos = 0;
3971 offset += set_word_size / BITS_PER_UNIT;
3972 }
3973 }
3974 }
3975 else if (!cleared)
3976 {
3977 /* Don't bother clearing storage if the set is all ones. */
3978 if (TREE_CHAIN (elt) != NULL_TREE
3979 || (TREE_PURPOSE (elt) == NULL_TREE
3980 ? nbits != 1
3981 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3982 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3983 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3984 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3985 != nbits))))
3986 clear_storage (target, expr_size (exp),
3987 TYPE_ALIGN (type) / BITS_PER_UNIT);
3988 }
3989
3990 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3991 {
3992 /* Start of range of element, or NULL. */
3993 tree startbit = TREE_PURPOSE (elt);
3994 /* End of range of element, or element value. */
3995 tree endbit = TREE_VALUE (elt);
3996 #ifdef TARGET_MEM_FUNCTIONS
3997 HOST_WIDE_INT startb, endb;
3998 #endif
3999 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4000
4001 bitlength_rtx = expand_expr (bitlength,
4002 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4003
4004 /* Handle a non-range tuple element like [ expr ]. */
4005 if (startbit == NULL_TREE)
4006 {
4007 startbit = save_expr (endbit);
4008 endbit = startbit;
4009 }
4010 startbit = convert (sizetype, startbit);
4011 endbit = convert (sizetype, endbit);
4012 if (! integer_zerop (domain_min))
4013 {
4014 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4015 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4016 }
4017 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4018 EXPAND_CONST_ADDRESS);
4019 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4020 EXPAND_CONST_ADDRESS);
4021
4022 if (REG_P (target))
4023 {
4024 targetx = assign_stack_temp (GET_MODE (target),
4025 GET_MODE_SIZE (GET_MODE (target)),
4026 0);
4027 emit_move_insn (targetx, target);
4028 }
4029 else if (GET_CODE (target) == MEM)
4030 targetx = target;
4031 else
4032 abort ();
4033
4034 #ifdef TARGET_MEM_FUNCTIONS
4035 /* Optimization: If startbit and endbit are
4036 constants divisible by BITS_PER_UNIT,
4037 call memset instead. */
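/* (E.g. with 8-bit units, the bit range [8, 23] becomes
memset (addr + 1, -1, 2).) */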
4038 if (TREE_CODE (startbit) == INTEGER_CST
4039 && TREE_CODE (endbit) == INTEGER_CST
4040 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4041 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4042 {
4043 emit_library_call (memset_libfunc, 0,
4044 VOIDmode, 3,
4045 plus_constant (XEXP (targetx, 0),
4046 startb / BITS_PER_UNIT),
4047 Pmode,
4048 constm1_rtx, TYPE_MODE (integer_type_node),
4049 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4050 TYPE_MODE (sizetype));
4051 }
4052 else
4053 #endif
4054 {
4055 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4056 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4057 bitlength_rtx, TYPE_MODE (sizetype),
4058 startbit_rtx, TYPE_MODE (sizetype),
4059 endbit_rtx, TYPE_MODE (sizetype));
4060 }
4061 if (REG_P (target))
4062 emit_move_insn (target, targetx);
4063 }
4064 }
4065
4066 else
4067 abort ();
4068 }
4069
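/* An illustrative, standalone sketch of the word-packing loop in the
   SET_TYPE case above; it is not part of the compiler, and the
   function name and the EMIT callback are hypothetical. It packs
   NBITS bits from BIT_BUFFER into words of WORD_SIZE bits, filling
   from the high-order bit first when BIG_ENDIAN is nonzero, and
   hands each completed word and its byte offset to EMIT. The real
   loop above additionally skips all-zero words when the target has
   already been cleared. */
#if 0
static void
pack_set_bits (bit_buffer, nbits, word_size, big_endian, emit)
     char *bit_buffer;
     int nbits, word_size, big_endian;
     void (*emit) ();
{
  long word = 0;
  int bit_pos = 0, ibit = 0, offset = 0;

  for (;;)
    {
      if (bit_buffer[ibit])
	word |= (long) 1 << (big_endian
			     ? word_size - 1 - bit_pos
			     : bit_pos);
      bit_pos++, ibit++;
      if (bit_pos >= word_size || ibit == nbits)
	{
	  (*emit) (word, offset);
	  if (ibit == nbits)
	    break;
	  word = 0, bit_pos = 0;
	  offset += word_size / BITS_PER_UNIT;
	}
    }
}
#endif
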
4070 /* Store the value of EXP (an expression tree)
4071 into a subfield of TARGET which has mode MODE and occupies
4072 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4073 If MODE is VOIDmode, it means that we are storing into a bit-field.
4074
4075 If VALUE_MODE is VOIDmode, return nothing in particular.
4076 UNSIGNEDP is not used in this case.
4077
4078 Otherwise, return an rtx for the value stored. This rtx
4079 has mode VALUE_MODE if that is convenient to do.
4080 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4081
4082 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4083 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4084
4085 static rtx
4086 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4087 unsignedp, align, total_size)
4088 rtx target;
4089 int bitsize, bitpos;
4090 enum machine_mode mode;
4091 tree exp;
4092 enum machine_mode value_mode;
4093 int unsignedp;
4094 int align;
4095 int total_size;
4096 {
4097 HOST_WIDE_INT width_mask = 0;
4098
4099 if (TREE_CODE (exp) == ERROR_MARK)
4100 return const0_rtx;
4101
4102 if (bitsize < HOST_BITS_PER_WIDE_INT)
4103 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
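/* E.g. bitsize == 3 gives width_mask == 7; it is used below to
re-extract the low-order bits of the stored value without
re-reading the field. */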
4104
4105 /* If we are storing into an unaligned field of an aligned union that is
4106 in a register, we may have the mode of TARGET being an integer mode but
4107 MODE == BLKmode. In that case, get an aligned object whose size and
4108 alignment are the same as TARGET and store TARGET into it (we can avoid
4109 the store if the field being stored is the entire width of TARGET). Then
4110 call ourselves recursively to store the field into a BLKmode version of
4111 that object. Finally, load from the object into TARGET. This is not
4112 very efficient in general, but should only be slightly more expensive
4113 than the otherwise-required unaligned accesses. Perhaps this can be
4114 cleaned up later. */
4115
4116 if (mode == BLKmode
4117 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4118 {
4119 rtx object = assign_stack_temp (GET_MODE (target),
4120 GET_MODE_SIZE (GET_MODE (target)), 0);
4121 rtx blk_object = copy_rtx (object);
4122
4123 MEM_IN_STRUCT_P (object) = 1;
4124 MEM_IN_STRUCT_P (blk_object) = 1;
4125 PUT_MODE (blk_object, BLKmode);
4126
4127 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4128 emit_move_insn (object, target);
4129
4130 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4131 align, total_size);
4132
4133 /* Even though we aren't returning target, we need to
4134 give it the updated value. */
4135 emit_move_insn (target, object);
4136
4137 return blk_object;
4138 }
4139
4140 /* If the structure is in a register or if the component
4141 is a bit field, we cannot use addressing to access it.
4142 Use bit-field techniques or SUBREG to store in it. */
4143
4144 if (mode == VOIDmode
4145 || (mode != BLKmode && ! direct_store[(int) mode])
4146 || GET_CODE (target) == REG
4147 || GET_CODE (target) == SUBREG
4148 /* If the field isn't aligned enough to store as an ordinary memref,
4149 store it as a bit field. */
4150 || (SLOW_UNALIGNED_ACCESS
4151 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4152 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4153 {
4154 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4155
4156 /* If BITSIZE is narrower than the size of the type of EXP
4157 we will be narrowing TEMP. Normally, what's wanted are the
4158 low-order bits. However, if EXP's type is a record and this is a
4159 big-endian machine, we want the upper BITSIZE bits. */
4160 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4161 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4162 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4163 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4164 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4165 - bitsize),
4166 temp, 1);
4167
4168 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4169 MODE. */
4170 if (mode != VOIDmode && mode != BLKmode
4171 && mode != TYPE_MODE (TREE_TYPE (exp)))
4172 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4173
4174 /* If the modes of TARGET and TEMP are both BLKmode, both
4175 must be in memory and BITPOS must be aligned on a byte
4176 boundary. If so, we simply do a block copy. */
4177 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4178 {
4179 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4180 || bitpos % BITS_PER_UNIT != 0)
4181 abort ();
4182
4183 target = change_address (target, VOIDmode,
4184 plus_constant (XEXP (target, 0),
4185 bitpos / BITS_PER_UNIT));
4186
4187 emit_block_move (target, temp,
4188 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4189 / BITS_PER_UNIT),
4190 1);
4191
4192 return value_mode == VOIDmode ? const0_rtx : target;
4193 }
4194
4195 /* Store the value in the bitfield. */
4196 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4197 if (value_mode != VOIDmode)
4198 {
4199 /* The caller wants an rtx for the value. */
4200 /* If possible, avoid refetching from the bitfield itself. */
4201 if (width_mask != 0
4202 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4203 {
4204 tree count;
4205 enum machine_mode tmode;
4206
4207 if (unsignedp)
4208 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4209 tmode = GET_MODE (temp);
4210 if (tmode == VOIDmode)
4211 tmode = value_mode;
4212 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4213 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4214 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4215 }
4216 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4217 NULL_RTX, value_mode, 0, align,
4218 total_size);
4219 }
4220 return const0_rtx;
4221 }
4222 else
4223 {
4224 rtx addr = XEXP (target, 0);
4225 rtx to_rtx;
4226
4227 /* If a value is wanted, it must be the lhs;
4228 so make the address stable for multiple use. */
4229
4230 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4231 && ! CONSTANT_ADDRESS_P (addr)
4232 /* A frame-pointer reference is already stable. */
4233 && ! (GET_CODE (addr) == PLUS
4234 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4235 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4236 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4237 addr = copy_to_reg (addr);
4238
4239 /* Now build a reference to just the desired component. */
4240
4241 to_rtx = copy_rtx (change_address (target, mode,
4242 plus_constant (addr,
4243 (bitpos
4244 / BITS_PER_UNIT))));
4245 MEM_IN_STRUCT_P (to_rtx) = 1;
4246
4247 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4248 }
4249 }
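
/* An illustrative, standalone sketch (not part of the compiler) of
   the two re-extraction idioms store_field uses above when the
   caller wants the stored value back without refetching the
   bit-field: mask for the unsigned case, shift up and
   arithmetically back down for the signed case. The helper names
   are hypothetical; this assumes BITS is smaller than the width of
   int and that >> on a negative int is an arithmetic shift, which
   expand_shift guarantees at the rtl level. */
#if 0
static unsigned int
zero_extend_field (value, bits)
     unsigned int value;
     int bits;
{
  return value & (((unsigned int) 1 << bits) - 1);	/* width_mask */
}

static int
sign_extend_field (value, bits)
     int value, bits;
{
  int count = (int) sizeof (int) * 8 - bits;
  return (int) ((unsigned int) value << count) >> count;	/* e.g. bits == 3: 7 -> -1 */
}
#endif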
4250 \f
4251 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4252 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4253 ARRAY_REFs and find the ultimate containing object, which we return.
4254
4255 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4256 bit position, and *PUNSIGNEDP to the signedness of the field.
4257 If the position of the field is variable, we store a tree
4258 giving the variable offset (in units) in *POFFSET.
4259 This offset is in addition to the bit position.
4260 If the position is not variable, we store 0 in *POFFSET.
4261 We set *PALIGNMENT to the alignment in bytes of the address that will be
4262 computed. This is the alignment of the thing we return if *POFFSET
4263 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4264
4265 If any of the extraction expressions is volatile,
4266 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4267
4268 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4269 is a mode that can be used to access the field. In that case, *PBITSIZE
4270 is redundant.
4271
4272 If the field describes a variable-sized object, *PMODE is set to
4273 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4274 this case, but the address of the object can be found. */
4275
4276 tree
4277 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4278 punsignedp, pvolatilep, palignment)
4279 tree exp;
4280 int *pbitsize;
4281 int *pbitpos;
4282 tree *poffset;
4283 enum machine_mode *pmode;
4284 int *punsignedp;
4285 int *pvolatilep;
4286 int *palignment;
4287 {
4288 tree orig_exp = exp;
4289 tree size_tree = 0;
4290 enum machine_mode mode = VOIDmode;
4291 tree offset = integer_zero_node;
4292 int alignment = BIGGEST_ALIGNMENT;
4293
4294 if (TREE_CODE (exp) == COMPONENT_REF)
4295 {
4296 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4297 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4298 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4299 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4300 }
4301 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4302 {
4303 size_tree = TREE_OPERAND (exp, 1);
4304 *punsignedp = TREE_UNSIGNED (exp);
4305 }
4306 else
4307 {
4308 mode = TYPE_MODE (TREE_TYPE (exp));
4309 *pbitsize = GET_MODE_BITSIZE (mode);
4310 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4311 }
4312
4313 if (size_tree)
4314 {
4315 if (TREE_CODE (size_tree) != INTEGER_CST)
4316 mode = BLKmode, *pbitsize = -1;
4317 else
4318 *pbitsize = TREE_INT_CST_LOW (size_tree);
4319 }
4320
4321 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4322 and find the ultimate containing object. */
4323
4324 *pbitpos = 0;
4325
4326 while (1)
4327 {
4328 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4329 {
4330 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4331 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4332 : TREE_OPERAND (exp, 2));
4333 tree constant = integer_zero_node, var = pos;
4334
4335 /* If this field hasn't been filled in yet, don't go
4336 past it. This should only happen when folding expressions
4337 made during type construction. */
4338 if (pos == 0)
4339 break;
4340
4341 /* Assume here that the offset is a multiple of a unit.
4342 If not, there should be an explicitly added constant. */
4343 if (TREE_CODE (pos) == PLUS_EXPR
4344 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4345 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4346 else if (TREE_CODE (pos) == INTEGER_CST)
4347 constant = pos, var = integer_zero_node;
4348
4349 *pbitpos += TREE_INT_CST_LOW (constant);
4350 offset = size_binop (PLUS_EXPR, offset,
4351 size_binop (EXACT_DIV_EXPR, var,
4352 size_int (BITS_PER_UNIT)));
4353 }
4354
4355 else if (TREE_CODE (exp) == ARRAY_REF)
4356 {
4357 /* This code is based on the code in case ARRAY_REF in expand_expr
4358 below. We assume here that the size of an array element is
4359 always an integral multiple of BITS_PER_UNIT. */
4360
4361 tree index = TREE_OPERAND (exp, 1);
4362 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4363 tree low_bound
4364 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4365 tree index_type = TREE_TYPE (index);
4366
4367 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4368 {
4369 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4370 index);
4371 index_type = TREE_TYPE (index);
4372 }
4373
4374 if (! integer_zerop (low_bound))
4375 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4376
4377 if (TREE_CODE (index) == INTEGER_CST)
4378 {
4379 index = convert (sbitsizetype, index);
4380 index_type = TREE_TYPE (index);
4381 }
4382
4383 index = fold (build (MULT_EXPR, sbitsizetype, index,
4384 convert (sbitsizetype,
4385 TYPE_SIZE (TREE_TYPE (exp)))));
4386
4387 if (TREE_CODE (index) == INTEGER_CST
4388 && TREE_INT_CST_HIGH (index) == 0)
4389 *pbitpos += TREE_INT_CST_LOW (index);
4390 else
4391 {
4392 offset = size_binop (PLUS_EXPR, offset,
4393 convert (sizetype,
4394 size_binop (FLOOR_DIV_EXPR, index,
4395 size_int (BITS_PER_UNIT))));
4396 if (contains_placeholder_p (offset))
4397 offset = build (WITH_RECORD_EXPR, sizetype, offset, exp);
4398 }
4399 }
4400 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4401 && ! ((TREE_CODE (exp) == NOP_EXPR
4402 || TREE_CODE (exp) == CONVERT_EXPR)
4403 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4404 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4405 != UNION_TYPE))
4406 && (TYPE_MODE (TREE_TYPE (exp))
4407 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4408 break;
4409
4410 /* If any reference in the chain is volatile, the effect is volatile. */
4411 if (TREE_THIS_VOLATILE (exp))
4412 *pvolatilep = 1;
4413
4414 /* If the offset is non-constant already, then we can't assume any
4415 alignment more than the alignment here. */
4416 if (! integer_zerop (offset))
4417 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4418
4419 exp = TREE_OPERAND (exp, 0);
4420 }
4421
4422 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4423 alignment = MIN (alignment, DECL_ALIGN (exp));
4424 else if (TREE_TYPE (exp) != 0)
4425 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4426
4427 if (integer_zerop (offset))
4428 offset = 0;
4429
4430 if (offset != 0 && contains_placeholder_p (offset))
4431 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4432
4433 *pmode = mode;
4434 *poffset = offset;
4435 *palignment = alignment / BITS_PER_UNIT;
4436 return exp;
4437 }
4438
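/* An illustrative sketch (not part of the compiler) of the usual
   calling sequence for get_inner_reference above; all locals here
   are hypothetical. */
#if 0
{
  int bitsize, bitpos, unsignedp, volatilep = 0, alignment;
  tree offset;
  enum machine_mode mode1;
  tree inner
    = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
			   &unsignedp, &volatilep, &alignment);

  /* INNER is the ultimate containing object; the reference starts
     BITPOS bits (plus OFFSET units, if OFFSET is nonzero) into it
     and covers BITSIZE bits. */
}
#endif
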
4439 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4440 static enum memory_use_mode
4441 get_memory_usage_from_modifier (modifier)
4442 enum expand_modifier modifier;
4443 {
4444 switch (modifier)
4445 {
4446 case EXPAND_NORMAL:
4447 return MEMORY_USE_RO;
4448 break;
4449 case EXPAND_MEMORY_USE_WO:
4450 return MEMORY_USE_WO;
4451 break;
4452 case EXPAND_MEMORY_USE_RW:
4453 return MEMORY_USE_RW;
4454 break;
4455 case EXPAND_INITIALIZER:
4456 case EXPAND_MEMORY_USE_DONT:
4457 case EXPAND_SUM:
4458 case EXPAND_CONST_ADDRESS:
4459 return MEMORY_USE_DONT;
4460 case EXPAND_MEMORY_USE_BAD:
4461 default:
4462 abort ();
4463 }
4464 }
4465 \f
4466 /* Given an rtx VALUE that may contain additions and multiplications,
4467 return an equivalent value that just refers to a register or memory.
4468 This is done by generating instructions to perform the arithmetic
4469 and returning a pseudo-register containing the value.
4470
4471 The returned value may be a REG, SUBREG, MEM or constant. */
4472
4473 rtx
4474 force_operand (value, target)
4475 rtx value, target;
4476 {
4477 register optab binoptab = 0;
4478 /* Use a temporary to force order of execution of calls to
4479 `force_operand'. */
4480 rtx tmp;
4481 register rtx op2;
4482 /* Use subtarget as the target for operand 0 of a binary operation. */
4483 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4484
4485 if (GET_CODE (value) == PLUS)
4486 binoptab = add_optab;
4487 else if (GET_CODE (value) == MINUS)
4488 binoptab = sub_optab;
4489 else if (GET_CODE (value) == MULT)
4490 {
4491 op2 = XEXP (value, 1);
4492 if (!CONSTANT_P (op2)
4493 && !(GET_CODE (op2) == REG && op2 != subtarget))
4494 subtarget = 0;
4495 tmp = force_operand (XEXP (value, 0), subtarget);
4496 return expand_mult (GET_MODE (value), tmp,
4497 force_operand (op2, NULL_RTX),
4498 target, 0);
4499 }
4500
4501 if (binoptab)
4502 {
4503 op2 = XEXP (value, 1);
4504 if (!CONSTANT_P (op2)
4505 && !(GET_CODE (op2) == REG && op2 != subtarget))
4506 subtarget = 0;
4507 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4508 {
4509 binoptab = add_optab;
4510 op2 = negate_rtx (GET_MODE (value), op2);
4511 }
4512
4513 /* Check for an addition with OP2 a constant integer and our first
4514 operand a PLUS of a virtual register and something else. In that
4515 case, we want to emit the sum of the virtual register and the
4516 constant first and then add the other value. This allows virtual
4517 register instantiation to simply modify the constant rather than
4518 creating another one around this addition. */
4519 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4520 && GET_CODE (XEXP (value, 0)) == PLUS
4521 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4522 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4523 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4524 {
4525 rtx temp = expand_binop (GET_MODE (value), binoptab,
4526 XEXP (XEXP (value, 0), 0), op2,
4527 subtarget, 0, OPTAB_LIB_WIDEN);
4528 return expand_binop (GET_MODE (value), binoptab, temp,
4529 force_operand (XEXP (XEXP (value, 0), 1), 0),
4530 target, 0, OPTAB_LIB_WIDEN);
4531 }
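/* For example, given
   (plus (plus (reg virtual-stack-vars) (const_int 8)) (reg 117))
   the code above first forms virtual-stack-vars + 8, which
   instantiation can later rewrite as a single frame offset, and
   only then adds the other term. */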
4532
4533 tmp = force_operand (XEXP (value, 0), subtarget);
4534 return expand_binop (GET_MODE (value), binoptab, tmp,
4535 force_operand (op2, NULL_RTX),
4536 target, 0, OPTAB_LIB_WIDEN);
4537 /* We give UNSIGNEDP = 0 to expand_binop
4538 because the only operations we are expanding here are signed ones. */
4539 }
4540 return value;
4541 }
4542 \f
4543 /* Subroutine of expand_expr:
4544 save the non-copied parts (LIST) of an expr (LHS), and return a list
4545 which can restore these values to their previous values,
4546 should something modify their storage. */
4547
4548 static tree
4549 save_noncopied_parts (lhs, list)
4550 tree lhs;
4551 tree list;
4552 {
4553 tree tail;
4554 tree parts = 0;
4555
4556 for (tail = list; tail; tail = TREE_CHAIN (tail))
4557 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4558 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4559 else
4560 {
4561 tree part = TREE_VALUE (tail);
4562 tree part_type = TREE_TYPE (part);
4563 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4564 rtx target = assign_temp (part_type, 0, 1, 1);
4565 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4566 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4567 parts = tree_cons (to_be_saved,
4568 build (RTL_EXPR, part_type, NULL_TREE,
4569 (tree) target),
4570 parts);
4571 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4572 }
4573 return parts;
4574 }
4575
4576 /* Subroutine of expand_expr:
4577 record the non-copied parts (LIST) of an expr (LHS), and return a list
4578 which specifies the initial values of these parts. */
4579
4580 static tree
4581 init_noncopied_parts (lhs, list)
4582 tree lhs;
4583 tree list;
4584 {
4585 tree tail;
4586 tree parts = 0;
4587
4588 for (tail = list; tail; tail = TREE_CHAIN (tail))
4589 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4590 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4591 else
4592 {
4593 tree part = TREE_VALUE (tail);
4594 tree part_type = TREE_TYPE (part);
4595 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4596 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4597 }
4598 return parts;
4599 }
4600
4601 /* Subroutine of expand_expr: return nonzero iff there is no way that
4602 EXP can reference X, which is being modified. */
4603
4604 static int
4605 safe_from_p (x, exp)
4606 rtx x;
4607 tree exp;
4608 {
4609 rtx exp_rtl = 0;
4610 int i, nops;
4611
4612 if (x == 0
4613 /* If EXP has varying size, we MUST use a target since we currently
4614 have no way of allocating temporaries of variable size
4615 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4616 So we assume here that something at a higher level has prevented a
4617 clash. This is somewhat bogus, but the best we can do. Only
4618 do this when X is BLKmode. */
4619 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4620 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4621 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4622 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4623 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4624 != INTEGER_CST)
4625 && GET_MODE (x) == BLKmode))
4626 return 1;
4627
4628 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4629 find the underlying pseudo. */
4630 if (GET_CODE (x) == SUBREG)
4631 {
4632 x = SUBREG_REG (x);
4633 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4634 return 0;
4635 }
4636
4637 /* If X is a location in the outgoing argument area, it is always safe. */
4638 if (GET_CODE (x) == MEM
4639 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4640 || (GET_CODE (XEXP (x, 0)) == PLUS
4641 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4642 return 1;
4643
4644 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4645 {
4646 case 'd':
4647 exp_rtl = DECL_RTL (exp);
4648 break;
4649
4650 case 'c':
4651 return 1;
4652
4653 case 'x':
4654 if (TREE_CODE (exp) == TREE_LIST)
4655 return ((TREE_VALUE (exp) == 0
4656 || safe_from_p (x, TREE_VALUE (exp)))
4657 && (TREE_CHAIN (exp) == 0
4658 || safe_from_p (x, TREE_CHAIN (exp))));
4659 else
4660 return 0;
4661
4662 case '1':
4663 return safe_from_p (x, TREE_OPERAND (exp, 0));
4664
4665 case '2':
4666 case '<':
4667 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4668 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4669
4670 case 'e':
4671 case 'r':
4672 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4673 the expression. If it is set, we conflict iff we are that rtx or
4674 both are in memory. Otherwise, we check all operands of the
4675 expression recursively. */
4676
4677 switch (TREE_CODE (exp))
4678 {
4679 case ADDR_EXPR:
4680 return (staticp (TREE_OPERAND (exp, 0))
4681 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4682
4683 case INDIRECT_REF:
4684 if (GET_CODE (x) == MEM)
4685 return 0;
4686 break;
4687
4688 case CALL_EXPR:
4689 exp_rtl = CALL_EXPR_RTL (exp);
4690 if (exp_rtl == 0)
4691 {
4692 /* Assume that the call will clobber all hard registers and
4693 all of memory. */
4694 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4695 || GET_CODE (x) == MEM)
4696 return 0;
4697 }
4698
4699 break;
4700
4701 case RTL_EXPR:
4702 /* If a sequence exists, we would have to scan every instruction
4703 in the sequence to see if it was safe. This is probably not
4704 worthwhile. */
4705 if (RTL_EXPR_SEQUENCE (exp))
4706 return 0;
4707
4708 exp_rtl = RTL_EXPR_RTL (exp);
4709 break;
4710
4711 case WITH_CLEANUP_EXPR:
4712 exp_rtl = RTL_EXPR_RTL (exp);
4713 break;
4714
4715 case CLEANUP_POINT_EXPR:
4716 return safe_from_p (x, TREE_OPERAND (exp, 0));
4717
4718 case SAVE_EXPR:
4719 exp_rtl = SAVE_EXPR_RTL (exp);
4720 break;
4721
4722 case BIND_EXPR:
4723 /* The only operand we look at is operand 1. The rest aren't
4724 part of the expression. */
4725 return safe_from_p (x, TREE_OPERAND (exp, 1));
4726
4727 case METHOD_CALL_EXPR:
4728 /* This takes an rtx argument, but shouldn't appear here. */
4729 abort ();
4730
4731 default:
4732 break;
4733 }
4734
4735 /* If we have an rtx, we do not need to scan our operands. */
4736 if (exp_rtl)
4737 break;
4738
4739 nops = tree_code_length[(int) TREE_CODE (exp)];
4740 for (i = 0; i < nops; i++)
4741 if (TREE_OPERAND (exp, i) != 0
4742 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4743 return 0;
4744 }
4745
4746 /* If we have an rtl, find any enclosed object. Then see if we conflict
4747 with it. */
4748 if (exp_rtl)
4749 {
4750 if (GET_CODE (exp_rtl) == SUBREG)
4751 {
4752 exp_rtl = SUBREG_REG (exp_rtl);
4753 if (GET_CODE (exp_rtl) == REG
4754 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4755 return 0;
4756 }
4757
4758 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4759 are memory and EXP is not readonly. */
4760 return ! (rtx_equal_p (x, exp_rtl)
4761 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4762 && ! TREE_READONLY (exp)));
4763 }
4764
4765 /* If we reach here, it is safe. */
4766 return 1;
4767 }
4768
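/* A typical use of safe_from_p, as in the CONSTRUCTOR case of
   expand_expr below: before storing into TARGET piece by piece,
   check that evaluating EXP cannot reference TARGET, and grab a
   fresh temporary otherwise:

	if (target == 0 || ! safe_from_p (target, exp))
	  target = assign_temp (type, 0, 1, 1);  */
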
4769 /* Subroutine of expand_expr: return nonzero iff EXP is an
4770 expression whose type is statically determinable. */
4771
4772 static int
4773 fixed_type_p (exp)
4774 tree exp;
4775 {
4776 if (TREE_CODE (exp) == PARM_DECL
4777 || TREE_CODE (exp) == VAR_DECL
4778 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4779 || TREE_CODE (exp) == COMPONENT_REF
4780 || TREE_CODE (exp) == ARRAY_REF)
4781 return 1;
4782 return 0;
4783 }
4784
4785 /* Subroutine of expand_expr: return rtx if EXP is a
4786 variable or parameter; else return 0. */
4787
4788 static rtx
4789 var_rtx (exp)
4790 tree exp;
4791 {
4792 STRIP_NOPS (exp);
4793 switch (TREE_CODE (exp))
4794 {
4795 case PARM_DECL:
4796 case VAR_DECL:
4797 return DECL_RTL (exp);
4798 default:
4799 return 0;
4800 }
4801 }
4802 \f
4803 /* expand_expr: generate code for computing expression EXP.
4804 An rtx for the computed value is returned. The value is never null.
4805 In the case of a void EXP, const0_rtx is returned.
4806
4807 The value may be stored in TARGET if TARGET is nonzero.
4808 TARGET is just a suggestion; callers must assume that
4809 the rtx returned may not be the same as TARGET.
4810
4811 If TARGET is CONST0_RTX, it means that the value will be ignored.
4812
4813 If TMODE is not VOIDmode, it suggests generating the
4814 result in mode TMODE. But this is done only when convenient.
4815 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4816 TMODE is just a suggestion; callers must assume that
4817 the rtx returned may not have mode TMODE.
4818
4819 Note that TARGET may have neither TMODE nor MODE. In that case, it
4820 probably will not be used.
4821
4822 If MODIFIER is EXPAND_SUM then when EXP is an addition
4823 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4824 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4825 products as above, or REG or MEM, or constant.
4826 Ordinarily in such cases we would output mul or add instructions
4827 and then return a pseudo reg containing the sum.
4828
4829 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4830 it also marks a label as absolutely required (it can't be dead).
4831 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4832 This is used for outputting expressions used in initializers.
4833
4834 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4835 with a constant address even if that address is not normally legitimate.
4836 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4837
4838 rtx
4839 expand_expr (exp, target, tmode, modifier)
4840 register tree exp;
4841 rtx target;
4842 enum machine_mode tmode;
4843 enum expand_modifier modifier;
4844 {
4845 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4846 This is static so it will be accessible to our recursive callees. */
4847 static tree placeholder_list = 0;
4848 register rtx op0, op1, temp;
4849 tree type = TREE_TYPE (exp);
4850 int unsignedp = TREE_UNSIGNED (type);
4851 register enum machine_mode mode = TYPE_MODE (type);
4852 register enum tree_code code = TREE_CODE (exp);
4853 optab this_optab;
4854 /* Use subtarget as the target for operand 0 of a binary operation. */
4855 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4856 rtx original_target = target;
4857 int ignore = (target == const0_rtx
4858 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4859 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4860 || code == COND_EXPR)
4861 && TREE_CODE (type) == VOID_TYPE));
4862 tree context;
4863 /* Used by check-memory-usage to make modifier read only. */
4864 enum expand_modifier ro_modifier;
4865
4866 /* Make a read-only version of the modifier. */
4867 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4868 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4869 ro_modifier = modifier;
4870 else
4871 ro_modifier = EXPAND_NORMAL;
4872
4873 /* Don't use hard regs as subtargets, because the combiner
4874 can only handle pseudo regs. */
4875 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4876 subtarget = 0;
4877 /* Avoid subtargets inside loops,
4878 since they hide some invariant expressions. */
4879 if (preserve_subexpressions_p ())
4880 subtarget = 0;
4881
4882 /* If we are going to ignore this result, we need only do something
4883 if there is a side-effect somewhere in the expression. If there
4884 is, short-circuit the most common cases here. Note that we must
4885 not call expand_expr with anything but const0_rtx in case this
4886 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4887
4888 if (ignore)
4889 {
4890 if (! TREE_SIDE_EFFECTS (exp))
4891 return const0_rtx;
4892
4893 /* Ensure we reference a volatile object even if value is ignored. */
4894 if (TREE_THIS_VOLATILE (exp)
4895 && TREE_CODE (exp) != FUNCTION_DECL
4896 && mode != VOIDmode && mode != BLKmode)
4897 {
4898 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
4899 if (GET_CODE (temp) == MEM)
4900 temp = copy_to_reg (temp);
4901 return const0_rtx;
4902 }
4903
4904 if (TREE_CODE_CLASS (code) == '1')
4905 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4906 VOIDmode, ro_modifier);
4907 else if (TREE_CODE_CLASS (code) == '2'
4908 || TREE_CODE_CLASS (code) == '<')
4909 {
4910 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4911 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
4912 return const0_rtx;
4913 }
4914 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4915 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4916 /* If the second operand has no side effects, just evaluate
4917 the first. */
4918 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4919 VOIDmode, ro_modifier);
4920
4921 target = 0;
4922 }
4923
4924 /* If we will do cse, generate all results into pseudo registers
4925 since 1) that allows cse to find more things
4926 and 2) otherwise cse could produce an insn the machine
4927 cannot support. */
4928
4929 if (! cse_not_expected && mode != BLKmode && target
4930 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4931 target = subtarget;
4932
4933 switch (code)
4934 {
4935 case LABEL_DECL:
4936 {
4937 tree function = decl_function_context (exp);
4938 /* Handle using a label in a containing function. */
4939 if (function != current_function_decl
4940 && function != inline_function_decl && function != 0)
4941 {
4942 struct function *p = find_function_data (function);
4943 /* Allocate in the memory associated with the function
4944 that the label is in. */
4945 push_obstacks (p->function_obstack,
4946 p->function_maybepermanent_obstack);
4947
4948 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4949 label_rtx (exp),
4950 p->forced_labels);
4951 pop_obstacks ();
4952 }
4953 else if (modifier == EXPAND_INITIALIZER)
4954 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4955 label_rtx (exp), forced_labels);
4956 temp = gen_rtx_MEM (FUNCTION_MODE,
4957 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
4958 if (function != current_function_decl
4959 && function != inline_function_decl && function != 0)
4960 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4961 return temp;
4962 }
4963
4964 case PARM_DECL:
4965 if (DECL_RTL (exp) == 0)
4966 {
4967 error_with_decl (exp, "prior parameter's size depends on `%s'");
4968 return CONST0_RTX (mode);
4969 }
4970
4971 /* ... fall through ... */
4972
4973 case VAR_DECL:
4974 /* If a static var's type was incomplete when the decl was written,
4975 but the type is complete now, lay out the decl now. */
4976 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4977 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4978 {
4979 push_obstacks_nochange ();
4980 end_temporary_allocation ();
4981 layout_decl (exp, 0);
4982 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4983 pop_obstacks ();
4984 }
4985
4986 /* Only check automatic variables. Currently, function arguments are
4987 not checked (this can be done at compile-time with prototypes).
4988 Aggregates are not checked. */
4989 if (flag_check_memory_usage && code == VAR_DECL
4990 && GET_CODE (DECL_RTL (exp)) == MEM
4991 && DECL_CONTEXT (exp) != NULL_TREE
4992 && ! TREE_STATIC (exp)
4993 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4994 {
4995 enum memory_use_mode memory_usage;
4996 memory_usage = get_memory_usage_from_modifier (modifier);
4997
4998 if (memory_usage != MEMORY_USE_DONT)
4999 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5000 XEXP (DECL_RTL (exp), 0), ptr_mode,
5001 GEN_INT (int_size_in_bytes (type)),
5002 TYPE_MODE (sizetype),
5003 GEN_INT (memory_usage),
5004 TYPE_MODE (integer_type_node));
5005 }
5006
5007 /* ... fall through ... */
5008
5009 case FUNCTION_DECL:
5010 case RESULT_DECL:
5011 if (DECL_RTL (exp) == 0)
5012 abort ();
5013
5014 /* Ensure the variable is marked as used even if it doesn't go
5015 through a parser. If it hasn't been used yet, write out an
5016 external definition. */
5017 if (! TREE_USED (exp))
5018 {
5019 assemble_external (exp);
5020 TREE_USED (exp) = 1;
5021 }
5022
5023 /* Show we haven't gotten RTL for this yet. */
5024 temp = 0;
5025
5026 /* Handle variables inherited from containing functions. */
5027 context = decl_function_context (exp);
5028
5029 /* We treat inline_function_decl as an alias for the current function
5030 because that is the inline function whose vars, types, etc.
5031 are being merged into the current function.
5032 See expand_inline_function. */
5033
5034 if (context != 0 && context != current_function_decl
5035 && context != inline_function_decl
5036 /* If var is static, we don't need a static chain to access it. */
5037 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5038 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5039 {
5040 rtx addr;
5041
5042 /* Mark as non-local and addressable. */
5043 DECL_NONLOCAL (exp) = 1;
5044 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5045 abort ();
5046 mark_addressable (exp);
5047 if (GET_CODE (DECL_RTL (exp)) != MEM)
5048 abort ();
5049 addr = XEXP (DECL_RTL (exp), 0);
5050 if (GET_CODE (addr) == MEM)
5051 addr = gen_rtx_MEM (Pmode,
5052 fix_lexical_addr (XEXP (addr, 0), exp));
5053 else
5054 addr = fix_lexical_addr (addr, exp);
5055 temp = change_address (DECL_RTL (exp), mode, addr);
5056 }
5057
5058 /* This is the case of an array whose size is to be determined
5059 from its initializer, while the initializer is still being parsed.
5060 See expand_decl. */
5061
5062 else if (GET_CODE (DECL_RTL (exp)) == MEM
5063 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5064 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5065 XEXP (DECL_RTL (exp), 0));
5066
5067 /* If DECL_RTL is memory, we are in the normal case. If either
5068 the address is not valid, or it is not a register and -fforce-addr
5069 is specified, get the address into a register. */
5070
5071 else if (GET_CODE (DECL_RTL (exp)) == MEM
5072 && modifier != EXPAND_CONST_ADDRESS
5073 && modifier != EXPAND_SUM
5074 && modifier != EXPAND_INITIALIZER
5075 && (! memory_address_p (DECL_MODE (exp),
5076 XEXP (DECL_RTL (exp), 0))
5077 || (flag_force_addr
5078 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5079 temp = change_address (DECL_RTL (exp), VOIDmode,
5080 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5081
5082 /* If we got something, return it. But first, set the alignment
5083 if the address is a register. */
5084 if (temp != 0)
5085 {
5086 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5087 mark_reg_pointer (XEXP (temp, 0),
5088 DECL_ALIGN (exp) / BITS_PER_UNIT);
5089
5090 return temp;
5091 }
5092
5093 /* If the mode of DECL_RTL does not match that of the decl, it
5094 must be a promoted value. We return a SUBREG of the wanted mode,
5095 but mark it so that we know that it was already extended. */
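/* (E.g. on a target that promotes QImode locals to full SImode
registers, DECL_RTL is a REG in SImode and we return
(subreg:QI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set.) */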
5096
5097 if (GET_CODE (DECL_RTL (exp)) == REG
5098 && GET_MODE (DECL_RTL (exp)) != mode)
5099 {
5100 /* Get the signedness used for this variable. Ensure we get the
5101 same mode we got when the variable was declared. */
5102 if (GET_MODE (DECL_RTL (exp))
5103 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5104 abort ();
5105
5106 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5107 SUBREG_PROMOTED_VAR_P (temp) = 1;
5108 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5109 return temp;
5110 }
5111
5112 return DECL_RTL (exp);
5113
5114 case INTEGER_CST:
5115 return immed_double_const (TREE_INT_CST_LOW (exp),
5116 TREE_INT_CST_HIGH (exp),
5117 mode);
5118
5119 case CONST_DECL:
5120 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5121 EXPAND_MEMORY_USE_BAD);
5122
5123 case REAL_CST:
5124 /* If optimized, generate immediate CONST_DOUBLE
5125 which will be turned into memory by reload if necessary.
5126
5127 We used to force a register so that loop.c could see it. But
5128 this does not allow gen_* patterns to perform optimizations with
5129 the constants. It also produces two insns in cases like "x = 1.0;".
5130 On most machines, floating-point constants are not permitted in
5131 many insns, so we'd end up copying it to a register in any case.
5132
5133 Now, we do the copying in expand_binop, if appropriate. */
5134 return immed_real_const (exp);
5135
5136 case COMPLEX_CST:
5137 case STRING_CST:
5138 if (! TREE_CST_RTL (exp))
5139 output_constant_def (exp);
5140
5141 /* TREE_CST_RTL probably contains a constant address.
5142 On RISC machines where a constant address isn't valid,
5143 make some insns to get that address into a register. */
5144 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5145 && modifier != EXPAND_CONST_ADDRESS
5146 && modifier != EXPAND_INITIALIZER
5147 && modifier != EXPAND_SUM
5148 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5149 || (flag_force_addr
5150 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5151 return change_address (TREE_CST_RTL (exp), VOIDmode,
5152 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5153 return TREE_CST_RTL (exp);
5154
5155 case SAVE_EXPR:
5156 context = decl_function_context (exp);
5157
5158 /* If this SAVE_EXPR was at global context, assume we are an
5159 initialization function and move it into our context. */
5160 if (context == 0)
5161 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5162
5163 /* We treat inline_function_decl as an alias for the current function
5164 because that is the inline function whose vars, types, etc.
5165 are being merged into the current function.
5166 See expand_inline_function. */
5167 if (context == current_function_decl || context == inline_function_decl)
5168 context = 0;
5169
5170 /* If this is non-local, handle it. */
5171 if (context)
5172 {
5173 /* The following call just exists to abort if the context is
5174 not of a containing function. */
5175 find_function_data (context);
5176
5177 temp = SAVE_EXPR_RTL (exp);
5178 if (temp && GET_CODE (temp) == REG)
5179 {
5180 put_var_into_stack (exp);
5181 temp = SAVE_EXPR_RTL (exp);
5182 }
5183 if (temp == 0 || GET_CODE (temp) != MEM)
5184 abort ();
5185 return change_address (temp, mode,
5186 fix_lexical_addr (XEXP (temp, 0), exp));
5187 }
5188 if (SAVE_EXPR_RTL (exp) == 0)
5189 {
5190 if (mode == VOIDmode)
5191 temp = const0_rtx;
5192 else
5193 temp = assign_temp (type, 0, 0, 0);
5194
5195 SAVE_EXPR_RTL (exp) = temp;
5196 if (!optimize && GET_CODE (temp) == REG)
5197 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5198 save_expr_regs);
5199
5200 /* If the mode of TEMP does not match that of the expression, it
5201 must be a promoted value. We pass store_expr a SUBREG of the
5202 wanted mode but mark it so that we know that it was already
5203 extended. Note that `unsignedp' was modified above in
5204 this case. */
5205
5206 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5207 {
5208 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5209 SUBREG_PROMOTED_VAR_P (temp) = 1;
5210 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5211 }
5212
5213 if (temp == const0_rtx)
5214 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5215 EXPAND_MEMORY_USE_BAD);
5216 else
5217 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5218 }
5219
5220 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5221 must be a promoted value. We return a SUBREG of the wanted mode,
5222 but mark it so that we know that it was already extended. */
5223
5224 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5225 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5226 {
5227 /* Compute the signedness and make the proper SUBREG. */
5228 promote_mode (type, mode, &unsignedp, 0);
5229 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5230 SUBREG_PROMOTED_VAR_P (temp) = 1;
5231 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5232 return temp;
5233 }
5234
5235 return SAVE_EXPR_RTL (exp);
5236
5237 case UNSAVE_EXPR:
5238 {
5239 rtx temp;
5240 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5241 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5242 return temp;
5243 }
5244
5245 case PLACEHOLDER_EXPR:
5246 {
5247 tree placeholder_expr;
5248
5249 /* If there is an object on the head of the placeholder list,
5250 see if some object in its references is of type TYPE. For
5251 further information, see tree.def. */
5252 for (placeholder_expr = placeholder_list;
5253 placeholder_expr != 0;
5254 placeholder_expr = TREE_CHAIN (placeholder_expr))
5255 {
5256 tree need_type = TYPE_MAIN_VARIANT (type);
5257 tree object = 0;
5258 tree old_list = placeholder_list;
5259 tree elt;
5260
5261 /* See if the object is the type that we want. */
5262 if ((TYPE_MAIN_VARIANT (TREE_TYPE
5263 (TREE_PURPOSE (placeholder_expr)))
5264 == need_type))
5265 object = TREE_PURPOSE (placeholder_expr);
5266
5267 /* Find the outermost reference that is of the type we want. */
5268 for (elt = TREE_PURPOSE (placeholder_expr);
5269 elt != 0 && object == 0
5270 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5271 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5272 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5273 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5274 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5275 || TREE_CODE (elt) == COND_EXPR)
5276 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5277 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5278 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5279 == need_type))
5280 object = TREE_OPERAND (elt, 0);
5281
5282 if (object != 0)
5283 {
5284 /* Expand this object skipping the list entries before
5285 it was found in case it is also a PLACEHOLDER_EXPR.
5286 In that case, we want to translate it using subsequent
5287 entries. */
5288 placeholder_list = TREE_CHAIN (placeholder_expr);
5289 temp = expand_expr (object, original_target, tmode,
5290 ro_modifier);
5291 placeholder_list = old_list;
5292 return temp;
5293 }
5294 }
5295 }
5296
5297 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5298 abort ();
5299
5300 case WITH_RECORD_EXPR:
5301 /* Put the object on the placeholder list, expand our first operand,
5302 and pop the list. */
5303 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5304 placeholder_list);
5305 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5306 tmode, ro_modifier);
5307 placeholder_list = TREE_CHAIN (placeholder_list);
5308 return target;
5309
5310 case EXIT_EXPR:
5311 expand_exit_loop_if_false (NULL_PTR,
5312 invert_truthvalue (TREE_OPERAND (exp, 0)));
5313 return const0_rtx;
5314
5315 case LOOP_EXPR:
5316 push_temp_slots ();
5317 expand_start_loop (1);
5318 expand_expr_stmt (TREE_OPERAND (exp, 0));
5319 expand_end_loop ();
5320 pop_temp_slots ();
5321
5322 return const0_rtx;
5323
5324 case BIND_EXPR:
5325 {
5326 tree vars = TREE_OPERAND (exp, 0);
5327 int vars_need_expansion = 0;
5328
5329 /* Need to open a binding contour here because
5330 if there are any cleanups they must be contained here. */
5331 expand_start_bindings (0);
5332
5333 /* Mark the corresponding BLOCK for output in its proper place. */
5334 if (TREE_OPERAND (exp, 2) != 0
5335 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5336 insert_block (TREE_OPERAND (exp, 2));
5337
5338 /* If VARS have not yet been expanded, expand them now. */
5339 while (vars)
5340 {
5341 if (DECL_RTL (vars) == 0)
5342 {
5343 vars_need_expansion = 1;
5344 expand_decl (vars);
5345 }
5346 expand_decl_init (vars);
5347 vars = TREE_CHAIN (vars);
5348 }
5349
5350 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5351
5352 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5353
5354 return temp;
5355 }
5356
5357 case RTL_EXPR:
5358 if (RTL_EXPR_SEQUENCE (exp))
5359 {
5360 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5361 abort ();
5362 emit_insns (RTL_EXPR_SEQUENCE (exp));
5363 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5364 }
5365 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5366 free_temps_for_rtl_expr (exp);
5367 return RTL_EXPR_RTL (exp);
5368
5369 case CONSTRUCTOR:
5370 /* If we don't need the result, just ensure we evaluate any
5371 subexpressions. */
5372 if (ignore)
5373 {
5374 tree elt;
5375 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5376 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5377 EXPAND_MEMORY_USE_BAD);
5378 return const0_rtx;
5379 }
5380
5381 /* All elts simple constants => refer to a constant in memory. But
5382 if this is a non-BLKmode mode, let it store a field at a time
5383 since that should make a CONST_INT or CONST_DOUBLE when we
5384 fold. Likewise, if we have a target we can use, it is best to
5385 store directly into the target unless the type is large enough
5386 that memcpy will be used. If we are making an initializer and
5387 all operands are constant, put it in memory as well. */
5388 else if ((TREE_STATIC (exp)
5389 && ((mode == BLKmode
5390 && ! (target != 0 && safe_from_p (target, exp)))
5391 || TREE_ADDRESSABLE (exp)
5392 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5393 && (move_by_pieces_ninsns
5394 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5395 TYPE_ALIGN (type) / BITS_PER_UNIT)
5396 > MOVE_RATIO)
5397 && ! mostly_zeros_p (exp))))
5398 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5399 {
5400 rtx constructor = output_constant_def (exp);
5401 if (modifier != EXPAND_CONST_ADDRESS
5402 && modifier != EXPAND_INITIALIZER
5403 && modifier != EXPAND_SUM
5404 && (! memory_address_p (GET_MODE (constructor),
5405 XEXP (constructor, 0))
5406 || (flag_force_addr
5407 && GET_CODE (XEXP (constructor, 0)) != REG)))
5408 constructor = change_address (constructor, VOIDmode,
5409 XEXP (constructor, 0));
5410 return constructor;
5411 }
5412
5413 else
5414 {
5415 /* Handle calls that pass values in multiple non-contiguous
5416 locations. The Irix 6 ABI has examples of this. */
5417 if (target == 0 || ! safe_from_p (target, exp)
5418 || GET_CODE (target) == PARALLEL)
5419 {
5420 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5421 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5422 else
5423 target = assign_temp (type, 0, 1, 1);
5424 }
5425
5426 if (TREE_READONLY (exp))
5427 {
5428 if (GET_CODE (target) == MEM)
5429 target = copy_rtx (target);
5430
5431 RTX_UNCHANGING_P (target) = 1;
5432 }
5433
5434 store_constructor (exp, target, 0);
5435 return target;
5436 }
5437
5438 case INDIRECT_REF:
5439 {
5440 tree exp1 = TREE_OPERAND (exp, 0);
5441 tree exp2;
5442 tree index;
5443 tree string = string_constant (exp1, &index);
5444 int i;
5445
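/* Added illustration: for *("hello" + 1), string_constant returns the
STRING_CST "hello" with INDEX 1, so the tests below succeed and we
return the character 'e' as a CONST_INT without any memory
reference. */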
5446 if (string
5447 && TREE_CODE (string) == STRING_CST
5448 && TREE_CODE (index) == INTEGER_CST
5449 && !TREE_INT_CST_HIGH (index)
5450 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5451 && GET_MODE_CLASS (mode) == MODE_INT
5452 && GET_MODE_SIZE (mode) == 1)
5453 return GEN_INT (TREE_STRING_POINTER (string)[i]);
5454
5455 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5456 op0 = memory_address (mode, op0);
5457
5458 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5459 {
5460 enum memory_use_mode memory_usage;
5461 memory_usage = get_memory_usage_from_modifier (modifier);
5462
5463 if (memory_usage != MEMORY_USE_DONT)
5464 {
5465 in_check_memory_usage = 1;
5466 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5467 op0, ptr_mode,
5468 GEN_INT (int_size_in_bytes (type)),
5469 TYPE_MODE (sizetype),
5470 GEN_INT (memory_usage),
5471 TYPE_MODE (integer_type_node));
5472 in_check_memory_usage = 0;
5473 }
5474 }
5475
5476 temp = gen_rtx_MEM (mode, op0);
5477 /* If address was computed by addition,
5478 mark this as an element of an aggregate. */
5479 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5480 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5481 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5482 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5483 || (TREE_CODE (exp1) == ADDR_EXPR
5484 && (exp2 = TREE_OPERAND (exp1, 0))
5485 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5486 MEM_IN_STRUCT_P (temp) = 1;
5487 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5488
5489 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5490 here, because, in C and C++, the fact that a location is accessed
5491 through a pointer to const does not mean that the value there can
5492 never change. Languages where it can never change should
5493 also set TREE_STATIC. */
5494 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5495 return temp;
5496 }
5497
5498 case ARRAY_REF:
5499 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5500 abort ();
5501
5502 {
5503 tree array = TREE_OPERAND (exp, 0);
5504 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5505 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5506 tree index = TREE_OPERAND (exp, 1);
5507 tree index_type = TREE_TYPE (index);
5508 HOST_WIDE_INT i;
5509
5510 /* Optimize the special-case of a zero lower bound.
5511
5512 We convert the low_bound to sizetype to avoid some problems
5513 with constant folding. (E.g. suppose the lower bound is 1,
5514 and its mode is QI. Without the conversion, (ARRAY
5515 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5516 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5517
5518 But sizetype isn't quite right either (especially if
5519 the low bound is negative). FIXME */
5520
5521 if (! integer_zerop (low_bound))
5522 index = fold (build (MINUS_EXPR, index_type, index,
5523 convert (sizetype, low_bound)));
5524
5525 /* Fold an expression like: "foo"[2].
5526 This is not done in fold so it won't happen inside &.
5527 Don't fold if this is for wide characters since it's too
5528 difficult to do correctly and this is a very rare case. */
5529
5530 if (TREE_CODE (array) == STRING_CST
5531 && TREE_CODE (index) == INTEGER_CST
5532 && !TREE_INT_CST_HIGH (index)
5533 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5534 && GET_MODE_CLASS (mode) == MODE_INT
5535 && GET_MODE_SIZE (mode) == 1)
5536 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5537
5538 /* If this is a constant index into a constant array,
5539 just get the value from the array. Handle both the cases when
5540 we have an explicit constructor and when our operand is a variable
5541 that was declared const. */
5542
5543 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5544 {
5545 if (TREE_CODE (index) == INTEGER_CST
5546 && TREE_INT_CST_HIGH (index) == 0)
5547 {
5548 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5549
5550 i = TREE_INT_CST_LOW (index);
5551 while (elem && i--)
5552 elem = TREE_CHAIN (elem);
5553 if (elem)
5554 return expand_expr (fold (TREE_VALUE (elem)), target,
5555 tmode, ro_modifier);
5556 }
5557 }
5558
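/* Added illustration: given "static const int tbl[] = {1, 2, 3};",
a reference tbl[1] passes the tests below when optimizing and
folds to the constant 2 without touching memory. */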
5559 else if (optimize >= 1
5560 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5561 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5562 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5563 {
5564 if (TREE_CODE (index) == INTEGER_CST)
5565 {
5566 tree init = DECL_INITIAL (array);
5567
5568 i = TREE_INT_CST_LOW (index);
5569 if (TREE_CODE (init) == CONSTRUCTOR)
5570 {
5571 tree elem = CONSTRUCTOR_ELTS (init);
5572
5573 while (elem
5574 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5575 elem = TREE_CHAIN (elem);
5576 if (elem)
5577 return expand_expr (fold (TREE_VALUE (elem)), target,
5578 tmode, ro_modifier);
5579 }
5580 else if (TREE_CODE (init) == STRING_CST
5581 && TREE_INT_CST_HIGH (index) == 0
5582 && (TREE_INT_CST_LOW (index)
5583 < TREE_STRING_LENGTH (init)))
5584 return (GEN_INT
5585 (TREE_STRING_POINTER
5586 (init)[TREE_INT_CST_LOW (index)]));
5587 }
5588 }
5589 }
5590
5591 /* ... fall through ... */
5592
5593 case COMPONENT_REF:
5594 case BIT_FIELD_REF:
5595 /* If the operand is a CONSTRUCTOR, we can just extract the
5596 appropriate field if it is present. Don't do this if we have
5597 already written the data since we want to refer to that copy
5598 and varasm.c assumes that's what we'll do. */
5599 if (code != ARRAY_REF
5600 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5601 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5602 {
5603 tree elt;
5604
5605 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5606 elt = TREE_CHAIN (elt))
5607 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5608 /* We can normally use the value of the field in the
5609 CONSTRUCTOR. However, if this is a bitfield in
5610 an integral mode that we can fit in a HOST_WIDE_INT,
5611 we must mask only the number of bits in the bitfield,
5612 since this is done implicitly by the constructor. If
5613 the bitfield does not meet either of those conditions,
5614 we can't do this optimization. */
5615 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5616 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5617 == MODE_INT)
5618 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5619 <= HOST_BITS_PER_WIDE_INT))))
5620 {
5621 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5622 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5623 {
5624 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5625 enum machine_mode imode
5626 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5627
5628 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5629 {
5630 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5631 op0 = expand_and (op0, op1, target);
5632 }
5633 else
5634 {
5635 tree count
5636 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
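/* Added illustration: for a signed 3-bit field in 32-bit SImode
holding the bits 101, COUNT is 29; shifting left 29 bits and then
arithmetically right 29 bits sign-extends the value, yielding -3. */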
5637
5638 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5639 target, 0);
5640 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5641 target, 0);
5642 }
5643 }
5644
5645 return op0;
5646 }
5647 }
5648
5649 {
5650 enum machine_mode mode1;
5651 int bitsize;
5652 int bitpos;
5653 tree offset;
5654 int volatilep = 0;
5655 int alignment;
5656 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5657 &mode1, &unsignedp, &volatilep,
5658 &alignment);
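/* Added illustration: for a COMPONENT_REF X.F where F is a 16-bit
field starting 40 bits into X, get_inner_reference returns the
tree for X and sets BITSIZE = 16 and BITPOS = 40; OFFSET is
nonzero only when the position has a variable part. */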
5659
5660 /* If we got back the original object, something is wrong. Perhaps
5661 we are evaluating an expression too early. In any event, don't
5662 infinitely recurse. */
5663 if (tem == exp)
5664 abort ();
5665
5666 /* If TEM's type is a union of variable size, pass TARGET to the inner
5667 computation, since it will need a temporary and TARGET is known
5668 to suffice. This occurs in unchecked conversion in Ada. */
5669
5670 op0 = expand_expr (tem,
5671 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5672 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5673 != INTEGER_CST)
5674 ? target : NULL_RTX),
5675 VOIDmode,
5676 modifier == EXPAND_INITIALIZER ? modifier : 0);
5677
5678 /* If this is a constant, put it into a register if it is a
5679 legitimate constant and memory if it isn't. */
5680 if (CONSTANT_P (op0))
5681 {
5682 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5683 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5684 op0 = force_reg (mode, op0);
5685 else
5686 op0 = validize_mem (force_const_mem (mode, op0));
5687 }
5688
5689 if (offset != 0)
5690 {
5691 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5692
5693 if (GET_CODE (op0) != MEM)
5694 abort ();
5695
5696 if (GET_MODE (offset_rtx) != ptr_mode)
5697 #ifdef POINTERS_EXTEND_UNSIGNED
5698 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
5699 #else
5700 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5701 #endif
5702
5703 op0 = change_address (op0, VOIDmode,
5704 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
5705 force_reg (ptr_mode, offset_rtx)));
5706 }
5707
5708 /* Don't forget about volatility even if this is a bitfield. */
5709 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5710 {
5711 op0 = copy_rtx (op0);
5712 MEM_VOLATILE_P (op0) = 1;
5713 }
5714
5715 /* Check the access. */
5716 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5717 {
5718 enum memory_use_mode memory_usage;
5719 memory_usage = get_memory_usage_from_modifier (modifier);
5720
5721 if (memory_usage != MEMORY_USE_DONT)
5722 {
5723 rtx to;
5724 int size;
5725
5726 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5727 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5728
5729 /* Check the access right of the pointer. */
5730 if (size > BITS_PER_UNIT)
5731 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5732 to, ptr_mode,
5733 GEN_INT (size / BITS_PER_UNIT),
5734 TYPE_MODE (sizetype),
5735 GEN_INT (memory_usage),
5736 TYPE_MODE (integer_type_node));
5737 }
5738 }
5739
5740 /* In cases where an aligned union has an unaligned object
5741 as a field, we might be extracting a BLKmode value from
5742 an integer-mode (e.g., SImode) object. Handle this case
5743 by doing the extract into an object as wide as the field
5744 (which we know to be the width of a basic mode), then
5745 storing into memory, and changing the mode to BLKmode.
5746 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5747 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5748 if (mode1 == VOIDmode
5749 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5750 || (modifier != EXPAND_CONST_ADDRESS
5751 && modifier != EXPAND_INITIALIZER
5752 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5753 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5754 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5755 /* If the field isn't aligned enough to fetch as a memref,
5756 fetch it as a bit field. */
5757 || (SLOW_UNALIGNED_ACCESS
5758 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5759 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5760 {
5761 enum machine_mode ext_mode = mode;
5762
5763 if (ext_mode == BLKmode)
5764 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5765
5766 if (ext_mode == BLKmode)
5767 {
5768 /* In this case, BITPOS must start at a byte boundary and
5769 TARGET, if specified, must be a MEM. */
5770 if (GET_CODE (op0) != MEM
5771 || (target != 0 && GET_CODE (target) != MEM)
5772 || bitpos % BITS_PER_UNIT != 0)
5773 abort ();
5774
5775 op0 = change_address (op0, VOIDmode,
5776 plus_constant (XEXP (op0, 0),
5777 bitpos / BITS_PER_UNIT));
5778 if (target == 0)
5779 target = assign_temp (type, 0, 1, 1);
5780
5781 emit_block_move (target, op0,
5782 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5783 / BITS_PER_UNIT),
5784 1);
5785
5786 return target;
5787 }
5788
5789 op0 = validize_mem (op0);
5790
5791 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5792 mark_reg_pointer (XEXP (op0, 0), alignment);
5793
5794 op0 = extract_bit_field (op0, bitsize, bitpos,
5795 unsignedp, target, ext_mode, ext_mode,
5796 alignment,
5797 int_size_in_bytes (TREE_TYPE (tem)));
5798
5799 /* If the result is a record type and BITSIZE is narrower than
5800 the mode of OP0, an integral mode, and this is a big endian
5801 machine, we must put the field into the high-order bits. */
5802 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5803 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5804 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5805 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5806 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5807 - bitsize),
5808 op0, 1);
5809
5810 if (mode == BLKmode)
5811 {
5812 rtx new_rtx = assign_stack_temp (ext_mode,
5813 bitsize / BITS_PER_UNIT, 0);
5814
5815 emit_move_insn (new_rtx, op0);
5816 op0 = copy_rtx (new_rtx);
5817 PUT_MODE (op0, BLKmode);
5818 MEM_IN_STRUCT_P (op0) = 1;
5819 }
5820
5821 return op0;
5822 }
5823
5824 /* If the result is BLKmode, use that to access the object
5825 now as well. */
5826 if (mode == BLKmode)
5827 mode1 = BLKmode;
5828
5829 /* Get a reference to just this component. */
5830 if (modifier == EXPAND_CONST_ADDRESS
5831 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5832 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
5833 (bitpos / BITS_PER_UNIT)));
5834 else
5835 op0 = change_address (op0, mode1,
5836 plus_constant (XEXP (op0, 0),
5837 (bitpos / BITS_PER_UNIT)));
5838 if (GET_CODE (XEXP (op0, 0)) == REG)
5839 mark_reg_pointer (XEXP (op0, 0), alignment);
5840
5841 MEM_IN_STRUCT_P (op0) = 1;
5842 MEM_VOLATILE_P (op0) |= volatilep;
5843 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5844 || modifier == EXPAND_CONST_ADDRESS
5845 || modifier == EXPAND_INITIALIZER)
5846 return op0;
5847 else if (target == 0)
5848 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5849
5850 convert_move (target, op0, unsignedp);
5851 return target;
5852 }
5853
5854 /* Intended for a reference to a buffer of a file-object in Pascal.
5855 But it's not certain that a special tree code will really be
5856 necessary for these. INDIRECT_REF might work for them. */
5857 case BUFFER_REF:
5858 abort ();
5859
5860 case IN_EXPR:
5861 {
5862 /* Pascal set IN expression.
5863
5864 Algorithm:
5865 rlo = set_low - (set_low%bits_per_word);
5866 the_word = set [ (index - rlo)/bits_per_word ];
5867 bit_index = index % bits_per_word;
5868 bitmask = 1 << bit_index;
5869 return !!(the_word & bitmask); */
5870
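/* Worked example (added; assumes BITS_PER_UNIT == 8): for set_low = 3
and index = 10 we get rlo = 0, the_word = set[(10 - 0) / 8] = the
byte at setaddr + 1, bit_index = 10 % 8 = 2, and bitmask = 4. */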
5871 tree set = TREE_OPERAND (exp, 0);
5872 tree index = TREE_OPERAND (exp, 1);
5873 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5874 tree set_type = TREE_TYPE (set);
5875 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5876 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5877 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5878 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5879 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5880 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5881 rtx setaddr = XEXP (setval, 0);
5882 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5883 rtx rlow;
5884 rtx diff, quo, rem, addr, bit, result;
5885
5886 preexpand_calls (exp);
5887
5888 /* If domain is empty, answer is no. Likewise if index is constant
5889 and out of bounds. */
5890 if (((TREE_CODE (set_high_bound) == INTEGER_CST
5891 && TREE_CODE (set_low_bound) == INTEGER_CST
5892 && tree_int_cst_lt (set_high_bound, set_low_bound))
5893 || (TREE_CODE (index) == INTEGER_CST
5894 && TREE_CODE (set_low_bound) == INTEGER_CST
5895 && tree_int_cst_lt (index, set_low_bound))
5896 || (TREE_CODE (set_high_bound) == INTEGER_CST
5897 && TREE_CODE (index) == INTEGER_CST
5898 && tree_int_cst_lt (set_high_bound, index))))
5899 return const0_rtx;
5900
5901 if (target == 0)
5902 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5903
5904 /* If we get here, we have to generate the code for both cases
5905 (in range and out of range). */
5906
5907 op0 = gen_label_rtx ();
5908 op1 = gen_label_rtx ();
5909
5910 if (! (GET_CODE (index_val) == CONST_INT
5911 && GET_CODE (lo_r) == CONST_INT))
5912 {
5913 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5914 GET_MODE (index_val), iunsignedp, 0);
5915 emit_jump_insn (gen_blt (op1));
5916 }
5917
5918 if (! (GET_CODE (index_val) == CONST_INT
5919 && GET_CODE (hi_r) == CONST_INT))
5920 {
5921 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5922 GET_MODE (index_val), iunsignedp, 0);
5923 emit_jump_insn (gen_bgt (op1));
5924 }
5925
5926 /* Calculate the element number of bit zero in the first word
5927 of the set. */
5928 if (GET_CODE (lo_r) == CONST_INT)
5929 rlow = GEN_INT (INTVAL (lo_r)
5930 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
5931 else
5932 rlow = expand_binop (index_mode, and_optab, lo_r,
5933 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
5934 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5935
5936 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5937 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5938
5939 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5940 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5941 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5942 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5943
5944 addr = memory_address (byte_mode,
5945 expand_binop (index_mode, add_optab, quo,
5946 setaddr, NULL_RTX, iunsignedp,
5947 OPTAB_LIB_WIDEN));
5948
5949 /* Extract the bit we want to examine.  */
5950 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5951 gen_rtx_MEM (byte_mode, addr),
5952 make_tree (TREE_TYPE (index), rem),
5953 NULL_RTX, 1);
5954 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5955 GET_MODE (target) == byte_mode ? target : 0,
5956 1, OPTAB_LIB_WIDEN);
5957
5958 if (result != target)
5959 convert_move (target, result, 1);
5960
5961 /* Output the code to handle the out-of-range case. */
5962 emit_jump (op0);
5963 emit_label (op1);
5964 emit_move_insn (target, const0_rtx);
5965 emit_label (op0);
5966 return target;
5967 }
5968
5969 case WITH_CLEANUP_EXPR:
5970 if (RTL_EXPR_RTL (exp) == 0)
5971 {
5972 RTL_EXPR_RTL (exp)
5973 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
5974 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
5975
5976 /* That's it for this cleanup. */
5977 TREE_OPERAND (exp, 2) = 0;
5978 }
5979 return RTL_EXPR_RTL (exp);
5980
5981 case CLEANUP_POINT_EXPR:
5982 {
5983 extern int temp_slot_level;
5984 /* Start a new binding layer that will keep track of all cleanup
5985 actions to be performed. */
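/* For instance (added note): in C++ a CLEANUP_POINT_EXPR marks the
end of a full-expression, so destructors for temporaries created
while evaluating operand 0 run when the bindings are closed again
below. */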
5986 expand_start_bindings (0);
5987
5988 target_temp_slot_level = temp_slot_level;
5989
5990 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
5991 /* If we're going to use this value, load it up now. */
5992 if (! ignore)
5993 op0 = force_not_mem (op0);
5994 preserve_temp_slots (op0);
5995 expand_end_bindings (NULL_TREE, 0, 0);
5996 }
5997 return op0;
5998
5999 case CALL_EXPR:
6000 /* Check for a built-in function. */
6001 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6002 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6003 == FUNCTION_DECL)
6004 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6005 return expand_builtin (exp, target, subtarget, tmode, ignore);
6006
6007 /* If this call was expanded already by preexpand_calls,
6008 just return the result we got. */
6009 if (CALL_EXPR_RTL (exp) != 0)
6010 return CALL_EXPR_RTL (exp);
6011
6012 return expand_call (exp, target, ignore);
6013
6014 case NON_LVALUE_EXPR:
6015 case NOP_EXPR:
6016 case CONVERT_EXPR:
6017 case REFERENCE_EXPR:
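/* Added note: a conversion to a union type -- e.g. the GNU C
cast-to-union extension, assuming the front end represents it as
one of these conversion codes -- is done by storing the operand
into a union object of the proper type and returning the whole
union. */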
6018 if (TREE_CODE (type) == UNION_TYPE)
6019 {
6020 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6021 if (target == 0)
6022 {
6023 if (mode != BLKmode)
6024 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6025 else
6026 target = assign_temp (type, 0, 1, 1);
6027 }
6028
6029 if (GET_CODE (target) == MEM)
6030 /* Store data into beginning of memory target. */
6031 store_expr (TREE_OPERAND (exp, 0),
6032 change_address (target, TYPE_MODE (valtype), 0), 0);
6033
6034 else if (GET_CODE (target) == REG)
6035 /* Store this field into a union of the proper type. */
6036 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6037 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6038 VOIDmode, 0, 1,
6039 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6040 else
6041 abort ();
6042
6043 /* Return the entire union. */
6044 return target;
6045 }
6046
6047 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6048 {
6049 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6050 ro_modifier);
6051
6052 /* If the signedness of the conversion differs and OP0 is
6053 a promoted SUBREG, clear that indication since we now
6054 have to do the proper extension. */
6055 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6056 && GET_CODE (op0) == SUBREG)
6057 SUBREG_PROMOTED_VAR_P (op0) = 0;
6058
6059 return op0;
6060 }
6061
6062 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6063 if (GET_MODE (op0) == mode)
6064 return op0;
6065
6066 /* If OP0 is a constant, just convert it into the proper mode. */
6067 if (CONSTANT_P (op0))
6068 return
6069 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6070 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6071
6072 if (modifier == EXPAND_INITIALIZER)
6073 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6074
6075 if (target == 0)
6076 return
6077 convert_to_mode (mode, op0,
6078 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6079 else
6080 convert_move (target, op0,
6081 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6082 return target;
6083
6084 case PLUS_EXPR:
6085 /* We come here from MINUS_EXPR when the second operand is a
6086 constant. */
6087 plus_expr:
6088 this_optab = add_optab;
6089
6090 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6091 something else, make sure we add the register to the constant and
6092 then to the other thing. This case can occur during strength
6093 reduction and doing it this way will produce better code if the
6094 frame pointer or argument pointer is eliminated.
6095
6096 fold-const.c will ensure that the constant is always in the inner
6097 PLUS_EXPR, so the only case we need to do anything about is if
6098 sp, ap, or fp is our second argument, in which case we must swap
6099 the innermost first argument and our second argument. */
6100
6101 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6102 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6103 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6104 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6105 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6106 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6107 {
6108 tree t = TREE_OPERAND (exp, 1);
6109
6110 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6111 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6112 }
6113
6114 /* If the result is to be ptr_mode and we are adding an integer to
6115 something, we might be forming a constant. So try to use
6116 plus_constant. If it produces a sum and we can't accept it,
6117 use force_operand. This allows P = &ARR[const] to generate
6118 efficient code on machines where a SYMBOL_REF is not a valid
6119 address.
6120
6121 If this is an EXPAND_SUM call, always return the sum. */
6122 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6123 || mode == ptr_mode)
6124 {
6125 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6126 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6127 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6128 {
6129 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6130 EXPAND_SUM);
6131 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6132 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6133 op1 = force_operand (op1, target);
6134 return op1;
6135 }
6136
6137 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6138 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6139 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6140 {
6141 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6142 EXPAND_SUM);
6143 if (! CONSTANT_P (op0))
6144 {
6145 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6146 VOIDmode, modifier);
6147 /* Don't go to both_summands if modifier
6148 says it's not right to return a PLUS. */
6149 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6150 goto binop2;
6151 goto both_summands;
6152 }
6153 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6154 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6155 op0 = force_operand (op0, target);
6156 return op0;
6157 }
6158 }
6159
6160 /* No sense saving up arithmetic to be done
6161 if it's all in the wrong mode to form part of an address.
6162 And force_operand won't know whether to sign-extend or
6163 zero-extend. */
6164 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6165 || mode != ptr_mode)
6166 goto binop;
6167
6168 preexpand_calls (exp);
6169 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6170 subtarget = 0;
6171
6172 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6173 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6174
6175 both_summands:
6176 /* Make sure any term that's a sum with a constant comes last. */
6177 if (GET_CODE (op0) == PLUS
6178 && CONSTANT_P (XEXP (op0, 1)))
6179 {
6180 temp = op0;
6181 op0 = op1;
6182 op1 = temp;
6183 }
6184 /* If adding to a sum including a constant,
6185 associate it to put the constant outside. */
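/* For example (added illustration): with OP0 = (reg A) and
OP1 = (plus (reg B) (const_int 3)), the code below leaves the
constant outermost: (plus (plus (reg B) (reg A)) (const_int 3)). */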
6186 if (GET_CODE (op1) == PLUS
6187 && CONSTANT_P (XEXP (op1, 1)))
6188 {
6189 rtx constant_term = const0_rtx;
6190
6191 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6192 if (temp != 0)
6193 op0 = temp;
6194 /* Ensure that MULT comes first if there is one. */
6195 else if (GET_CODE (op0) == MULT)
6196 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6197 else
6198 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6199
6200 /* Let's also eliminate constants from op0 if possible. */
6201 op0 = eliminate_constant_term (op0, &constant_term);
6202
6203 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6204 their sum should be a constant. Form it into OP1, since the
6205 result we want will then be OP0 + OP1. */
6206
6207 temp = simplify_binary_operation (PLUS, mode, constant_term,
6208 XEXP (op1, 1));
6209 if (temp != 0)
6210 op1 = temp;
6211 else
6212 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6213 }
6214
6215 /* Put a constant term last and put a multiplication first. */
6216 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6217 temp = op1, op1 = op0, op0 = temp;
6218
6219 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6220 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6221
6222 case MINUS_EXPR:
6223 /* For initializers, we are allowed to return a MINUS of two
6224 symbolic constants. Here we handle all cases when both operands
6225 are constant. */
6226 /* Handle difference of two symbolic constants,
6227 for the sake of an initializer. */
6228 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6229 && really_constant_p (TREE_OPERAND (exp, 0))
6230 && really_constant_p (TREE_OPERAND (exp, 1)))
6231 {
6232 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6233 VOIDmode, ro_modifier);
6234 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6235 VOIDmode, ro_modifier);
6236
6237 /* If the last operand is a CONST_INT, use plus_constant of
6238 the negated constant. Else make the MINUS. */
6239 if (GET_CODE (op1) == CONST_INT)
6240 return plus_constant (op0, - INTVAL (op1));
6241 else
6242 return gen_rtx_MINUS (mode, op0, op1);
6243 }
6244 /* Convert A - const to A + (-const). */
6245 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6246 {
6247 tree negated = fold (build1 (NEGATE_EXPR, type,
6248 TREE_OPERAND (exp, 1)));
6249
6250 /* Deal with the case where we can't negate the constant
6251 in TYPE. */
6252 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6253 {
6254 tree newtype = signed_type (type);
6255 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6256 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6257 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6258
6259 if (! TREE_OVERFLOW (newneg))
6260 return expand_expr (convert (type,
6261 build (PLUS_EXPR, newtype,
6262 newop0, newneg)),
6263 target, tmode, ro_modifier);
6264 }
6265 else
6266 {
6267 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6268 goto plus_expr;
6269 }
6270 }
6271 this_optab = sub_optab;
6272 goto binop;
6273
6274 case MULT_EXPR:
6275 preexpand_calls (exp);
6276 /* If first operand is constant, swap them.
6277 Thus the following special case checks need only
6278 check the second operand. */
6279 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6280 {
6281 register tree t1 = TREE_OPERAND (exp, 0);
6282 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6283 TREE_OPERAND (exp, 1) = t1;
6284 }
6285
6286 /* Attempt to return something suitable for generating an
6287 indexed address, for machines that support that. */
6288
6289 if (modifier == EXPAND_SUM && mode == ptr_mode
6290 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6291 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6292 {
6293 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6294 EXPAND_SUM);
6295
6296 /* Apply distributive law if OP0 is x+c. */
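/* E.g. (added illustration): (plus X (const_int 4)) times 3 becomes
(plus (mult X (const_int 3)) (const_int 12)). */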
6297 if (GET_CODE (op0) == PLUS
6298 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6299 return gen_rtx_PLUS (mode,
6300 gen_rtx_MULT (mode, XEXP (op0, 0),
6301 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6302 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6303 * INTVAL (XEXP (op0, 1))));
6304
6305 if (GET_CODE (op0) != REG)
6306 op0 = force_operand (op0, NULL_RTX);
6307 if (GET_CODE (op0) != REG)
6308 op0 = copy_to_mode_reg (mode, op0);
6309
6310 return gen_rtx_MULT (mode, op0,
6311 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6312 }
6313
6314 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6315 subtarget = 0;
6316
6317 /* Check for multiplying things that have been extended
6318 from a narrower type. If this machine supports multiplying
6319 in that narrower type with a result in the desired type,
6320 do it that way, and avoid the explicit type-conversion. */
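/* For instance (added illustration; pattern names vary by target):
with 16-bit HImode and 32-bit SImode, (int) ((short) a * (short) b)
can use a mulhisi3-style widening-multiply pattern directly instead
of extending both operands to SImode and doing a full SImode
multiply. */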
6321 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6322 && TREE_CODE (type) == INTEGER_TYPE
6323 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6324 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6325 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6326 && int_fits_type_p (TREE_OPERAND (exp, 1),
6327 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6328 /* Don't use a widening multiply if a shift will do. */
6329 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6330 > HOST_BITS_PER_WIDE_INT)
6331 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6332 ||
6333 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6334 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6335 ==
6336 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6337 /* If both operands are extended, they must either both
6338 be zero-extended or both be sign-extended. */
6339 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6340 ==
6341 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6342 {
6343 enum machine_mode innermode
6344 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6345 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6346 ? smul_widen_optab : umul_widen_optab);
6347 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6348 ? umul_widen_optab : smul_widen_optab);
6349 if (mode == GET_MODE_WIDER_MODE (innermode))
6350 {
6351 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6352 {
6353 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6354 NULL_RTX, VOIDmode, 0);
6355 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6356 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6357 VOIDmode, 0);
6358 else
6359 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6360 NULL_RTX, VOIDmode, 0);
6361 goto binop2;
6362 }
6363 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6364 && innermode == word_mode)
6365 {
6366 rtx htem;
6367 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6368 NULL_RTX, VOIDmode, 0);
6369 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6370 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6371 VOIDmode, 0);
6372 else
6373 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6374 NULL_RTX, VOIDmode, 0);
6375 temp = expand_binop (mode, other_optab, op0, op1, target,
6376 unsignedp, OPTAB_LIB_WIDEN);
6377 htem = expand_mult_highpart_adjust (innermode,
6378 gen_highpart (innermode, temp),
6379 op0, op1,
6380 gen_highpart (innermode, temp),
6381 unsignedp);
6382 emit_move_insn (gen_highpart (innermode, temp), htem);
6383 return temp;
6384 }
6385 }
6386 }
6387 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6388 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6389 return expand_mult (mode, op0, op1, target, unsignedp);
6390
6391 case TRUNC_DIV_EXPR:
6392 case FLOOR_DIV_EXPR:
6393 case CEIL_DIV_EXPR:
6394 case ROUND_DIV_EXPR:
6395 case EXACT_DIV_EXPR:
6396 preexpand_calls (exp);
6397 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6398 subtarget = 0;
6399 /* Possible optimization: compute the dividend with EXPAND_SUM
6400 then if the divisor is constant can optimize the case
6401 where some terms of the dividend have coeffs divisible by it. */
6402 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6403 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6404 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6405
6406 case RDIV_EXPR:
6407 this_optab = flodiv_optab;
6408 goto binop;
6409
6410 case TRUNC_MOD_EXPR:
6411 case FLOOR_MOD_EXPR:
6412 case CEIL_MOD_EXPR:
6413 case ROUND_MOD_EXPR:
6414 preexpand_calls (exp);
6415 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6416 subtarget = 0;
6417 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6418 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6419 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6420
6421 case FIX_ROUND_EXPR:
6422 case FIX_FLOOR_EXPR:
6423 case FIX_CEIL_EXPR:
6424 abort (); /* Not used for C. */
6425
6426 case FIX_TRUNC_EXPR:
6427 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6428 if (target == 0)
6429 target = gen_reg_rtx (mode);
6430 expand_fix (target, op0, unsignedp);
6431 return target;
6432
6433 case FLOAT_EXPR:
6434 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6435 if (target == 0)
6436 target = gen_reg_rtx (mode);
6437 /* expand_float can't figure out what to do if FROM has VOIDmode.
6438 So give it the correct mode. With -O, cse will optimize this. */
6439 if (GET_MODE (op0) == VOIDmode)
6440 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6441 op0);
6442 expand_float (target, op0,
6443 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6444 return target;
6445
6446 case NEGATE_EXPR:
6447 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6448 temp = expand_unop (mode, neg_optab, op0, target, 0);
6449 if (temp == 0)
6450 abort ();
6451 return temp;
6452
6453 case ABS_EXPR:
6454 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6455
6456 /* Handle complex values specially. */
6457 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6458 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6459 return expand_complex_abs (mode, op0, target, unsignedp);
6460
6461 /* Unsigned abs is simply the operand. Testing here means we don't
6462 risk generating incorrect code below. */
6463 if (TREE_UNSIGNED (type))
6464 return op0;
6465
6466 return expand_abs (mode, op0, target, unsignedp,
6467 safe_from_p (target, TREE_OPERAND (exp, 0)));
6468
6469 case MAX_EXPR:
6470 case MIN_EXPR:
6471 target = original_target;
6472 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6473 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6474 || GET_MODE (target) != mode
6475 || (GET_CODE (target) == REG
6476 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6477 target = gen_reg_rtx (mode);
6478 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6479 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6480
6481 /* First try to do it with a special MIN or MAX instruction.
6482 If that does not win, use a conditional jump to select the proper
6483 value. */
6484 this_optab = (TREE_UNSIGNED (type)
6485 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6486 : (code == MIN_EXPR ? smin_optab : smax_optab));
6487
6488 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6489 OPTAB_WIDEN);
6490 if (temp != 0)
6491 return temp;
6492
6493 /* At this point, a MEM target is no longer useful; we will get better
6494 code without it. */
6495
6496 if (GET_CODE (target) == MEM)
6497 target = gen_reg_rtx (mode);
6498
6499 if (target != op0)
6500 emit_move_insn (target, op0);
6501
6502 op0 = gen_label_rtx ();
6503
6504 /* If this mode is an integer too wide to compare properly,
6505 compare word by word. Rely on cse to optimize constant cases. */
6506 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6507 {
6508 if (code == MAX_EXPR)
6509 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6510 target, op1, NULL_RTX, op0);
6511 else
6512 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6513 op1, target, NULL_RTX, op0);
6514 emit_move_insn (target, op1);
6515 }
6516 else
6517 {
6518 if (code == MAX_EXPR)
6519 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6520 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6521 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6522 else
6523 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6524 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6525 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6526 if (temp == const0_rtx)
6527 emit_move_insn (target, op1);
6528 else if (temp != const_true_rtx)
6529 {
6530 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6531 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6532 else
6533 abort ();
6534 emit_move_insn (target, op1);
6535 }
6536 }
6537 emit_label (op0);
6538 return target;
6539
6540 case BIT_NOT_EXPR:
6541 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6542 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6543 if (temp == 0)
6544 abort ();
6545 return temp;
6546
6547 case FFS_EXPR:
6548 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6549 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6550 if (temp == 0)
6551 abort ();
6552 return temp;
6553
6554 /* ??? Can optimize bitwise operations with one arg constant.
6555 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6556 and (a bitwise1 b) bitwise2 b (etc)
6557 but that is probably not worthwhile. */
6558
6559 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6560 boolean values when we want in all cases to compute both of them. In
6561 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6562 as actual zero-or-1 values and then bitwise anding. In cases where
6563 there cannot be any side effects, better code would be made by
6564 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6565 how to recognize those cases. */
6566
6567 case TRUTH_AND_EXPR:
6568 case BIT_AND_EXPR:
6569 this_optab = and_optab;
6570 goto binop;
6571
6572 case TRUTH_OR_EXPR:
6573 case BIT_IOR_EXPR:
6574 this_optab = ior_optab;
6575 goto binop;
6576
6577 case TRUTH_XOR_EXPR:
6578 case BIT_XOR_EXPR:
6579 this_optab = xor_optab;
6580 goto binop;
6581
6582 case LSHIFT_EXPR:
6583 case RSHIFT_EXPR:
6584 case LROTATE_EXPR:
6585 case RROTATE_EXPR:
6586 preexpand_calls (exp);
6587 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6588 subtarget = 0;
6589 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6590 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6591 unsignedp);
6592
6593 /* Could determine the answer when only additive constants differ. Also,
6594 the addition of one can be handled by changing the condition. */
6595 case LT_EXPR:
6596 case LE_EXPR:
6597 case GT_EXPR:
6598 case GE_EXPR:
6599 case EQ_EXPR:
6600 case NE_EXPR:
6601 preexpand_calls (exp);
6602 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6603 if (temp != 0)
6604 return temp;
6605
6606 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6607 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6608 && original_target
6609 && GET_CODE (original_target) == REG
6610 && (GET_MODE (original_target)
6611 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6612 {
6613 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6614 VOIDmode, 0);
6615
6616 if (temp != original_target)
6617 temp = copy_to_reg (temp);
6618
6619 op1 = gen_label_rtx ();
6620 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6621 GET_MODE (temp), unsignedp, 0);
6622 emit_jump_insn (gen_beq (op1));
6623 emit_move_insn (temp, const1_rtx);
6624 emit_label (op1);
6625 return temp;
6626 }
6627
6628 /* If no set-flag instruction, must generate a conditional
6629 store into a temporary variable. Drop through
6630 and handle this like && and ||. */
6631
6632 case TRUTH_ANDIF_EXPR:
6633 case TRUTH_ORIF_EXPR:
6634 if (! ignore
6635 && (target == 0 || ! safe_from_p (target, exp)
6636 /* Make sure we don't have a hard reg (such as function's return
6637 value) live across basic blocks, if not optimizing. */
6638 || (!optimize && GET_CODE (target) == REG
6639 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6640 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6641
6642 if (target)
6643 emit_clr_insn (target);
6644
6645 op1 = gen_label_rtx ();
6646 jumpifnot (exp, op1);
6647
6648 if (target)
6649 emit_0_to_1_insn (target);
6650
6651 emit_label (op1);
6652 return ignore ? const0_rtx : target;
6653
6654 case TRUTH_NOT_EXPR:
6655 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6656 /* The parser is careful to generate TRUTH_NOT_EXPR
6657 only with operands that are always zero or one. */
6658 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6659 target, 1, OPTAB_LIB_WIDEN);
6660 if (temp == 0)
6661 abort ();
6662 return temp;
6663
6664 case COMPOUND_EXPR:
6665 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6666 emit_queue ();
6667 return expand_expr (TREE_OPERAND (exp, 1),
6668 (ignore ? const0_rtx : target),
6669 VOIDmode, 0);
6670
6671 case COND_EXPR:
6672 /* If we would have a "singleton" (see below) were it not for a
6673 conversion in each arm, bring that conversion back out. */
6674 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6675 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6676 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6677 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6678 {
6679 tree true_value = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6680 tree false_value = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6681
6682 if ((TREE_CODE_CLASS (TREE_CODE (true_value)) == '2'
6683 && operand_equal_p (false_value, TREE_OPERAND (true_value, 0), 0))
6684 || (TREE_CODE_CLASS (TREE_CODE (false_value)) == '2'
6685 && operand_equal_p (true_value, TREE_OPERAND (false_value, 0), 0))
6686 || (TREE_CODE_CLASS (TREE_CODE (true_value)) == '1'
6687 && operand_equal_p (false_value, TREE_OPERAND (true_value, 0), 0))
6688 || (TREE_CODE_CLASS (TREE_CODE (false_value)) == '1'
6689 && operand_equal_p (true_value, TREE_OPERAND (false_value, 0), 0)))
6690 return expand_expr (build1 (NOP_EXPR, type,
6691 build (COND_EXPR, TREE_TYPE (true_value),
6692 TREE_OPERAND (exp, 0),
6693 true_value, false_value)),
6694 target, tmode, modifier);
6695 }
6696
6697 {
6698 /* Note that COND_EXPRs whose type is a structure or union
6699 are required to be constructed to contain assignments of
6700 a temporary variable, so that we can evaluate them here
6701 for side effect only. If type is void, we must do likewise. */
6702
6703 /* If an arm of the branch requires a cleanup,
6704 only that cleanup is performed. */
6705
6706 tree singleton = 0;
6707 tree binary_op = 0, unary_op = 0;
6708
6709 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6710 convert it to our mode, if necessary. */
6711 if (integer_onep (TREE_OPERAND (exp, 1))
6712 && integer_zerop (TREE_OPERAND (exp, 2))
6713 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6714 {
6715 if (ignore)
6716 {
6717 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6718 ro_modifier);
6719 return const0_rtx;
6720 }
6721
6722 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6723 if (GET_MODE (op0) == mode)
6724 return op0;
6725
6726 if (target == 0)
6727 target = gen_reg_rtx (mode);
6728 convert_move (target, op0, unsignedp);
6729 return target;
6730 }
6731
6732 /* Check for X ? A + B : A. If we have this, we can copy A to the
6733 output and conditionally add B. Similarly for unary operations.
6734 Don't do this if X has side-effects because those side effects
6735 might affect A or B and the "?" operation is a sequence point in
6736 ANSI. (operand_equal_p tests for side effects.) */
6737
6738 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6739 && operand_equal_p (TREE_OPERAND (exp, 2),
6740 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6741 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6742 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6743 && operand_equal_p (TREE_OPERAND (exp, 1),
6744 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6745 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6746 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6747 && operand_equal_p (TREE_OPERAND (exp, 2),
6748 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6749 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6750 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6751 && operand_equal_p (TREE_OPERAND (exp, 1),
6752 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6753 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6754
6755 /* If we are not to produce a result, we have no target. Otherwise,
6756 if a target was specified use it; it will not be used as an
6757 intermediate target unless it is safe. If no target, use a
6758 temporary. */
6759
6760 if (ignore)
6761 temp = 0;
6762 else if (original_target
6763 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6764 || (singleton && GET_CODE (original_target) == REG
6765 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6766 && original_target == var_rtx (singleton)))
6767 && GET_MODE (original_target) == mode
6768 #ifdef HAVE_conditional_move
6769 && (! can_conditionally_move_p (mode)
6770 || GET_CODE (original_target) == REG
6771 || TREE_ADDRESSABLE (type))
6772 #endif
6773 && ! (GET_CODE (original_target) == MEM
6774 && MEM_VOLATILE_P (original_target)))
6775 temp = original_target;
6776 else if (TREE_ADDRESSABLE (type))
6777 abort ();
6778 else
6779 temp = assign_temp (type, 0, 0, 1);
6780
6781 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6782 do the test of X as a store-flag operation, do this as
6783 A + ((X != 0) << log C). Similarly for other simple binary
6784 operators. Only do for C == 1 if BRANCH_COST is low. */
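/* E.g. (added illustration): "x < y ? a + 4 : a" can become
"a + ((x < y) << 2)", trading the branch for a store-flag
and a shift. */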
6785 if (temp && singleton && binary_op
6786 && (TREE_CODE (binary_op) == PLUS_EXPR
6787 || TREE_CODE (binary_op) == MINUS_EXPR
6788 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6789 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6790 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6791 : integer_onep (TREE_OPERAND (binary_op, 1)))
6792 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6793 {
6794 rtx result;
6795 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6796 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6797 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6798 : xor_optab);
6799
6800 /* If we had X ? A : A + 1, do this as A + (X == 0).
6801
6802 We have to invert the truth value here and then put it
6803 back later if do_store_flag fails. We cannot simply copy
6804 TREE_OPERAND (exp, 0) to another variable and modify that
6805 because invert_truthvalue can modify the tree pointed to
6806 by its argument. */
6807 if (singleton == TREE_OPERAND (exp, 1))
6808 TREE_OPERAND (exp, 0)
6809 = invert_truthvalue (TREE_OPERAND (exp, 0));
6810
6811 result = do_store_flag (TREE_OPERAND (exp, 0),
6812 (safe_from_p (temp, singleton)
6813 ? temp : NULL_RTX),
6814 mode, BRANCH_COST <= 1);
6815
6816 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6817 result = expand_shift (LSHIFT_EXPR, mode, result,
6818 build_int_2 (tree_log2
6819 (TREE_OPERAND
6820 (binary_op, 1)),
6821 0),
6822 (safe_from_p (temp, singleton)
6823 ? temp : NULL_RTX), 0);
6824
6825 if (result)
6826 {
6827 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6828 return expand_binop (mode, boptab, op1, result, temp,
6829 unsignedp, OPTAB_LIB_WIDEN);
6830 }
6831 else if (singleton == TREE_OPERAND (exp, 1))
6832 TREE_OPERAND (exp, 0)
6833 = invert_truthvalue (TREE_OPERAND (exp, 0));
6834 }
6835
6836 do_pending_stack_adjust ();
6837 NO_DEFER_POP;
6838 op0 = gen_label_rtx ();
6839
6840 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6841 {
6842 if (temp != 0)
6843 {
6844 /* If the target conflicts with the other operand of the
6845 binary op, we can't use it. Also, we can't use the target
6846 if it is a hard register, because evaluating the condition
6847 might clobber it. */
6848 if ((binary_op
6849 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6850 || (GET_CODE (temp) == REG
6851 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6852 temp = gen_reg_rtx (mode);
6853 store_expr (singleton, temp, 0);
6854 }
6855 else
6856 expand_expr (singleton,
6857 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6858 if (singleton == TREE_OPERAND (exp, 1))
6859 jumpif (TREE_OPERAND (exp, 0), op0);
6860 else
6861 jumpifnot (TREE_OPERAND (exp, 0), op0);
6862
6863 start_cleanup_deferral ();
6864 if (binary_op && temp == 0)
6865 /* Just touch the other operand. */
6866 expand_expr (TREE_OPERAND (binary_op, 1),
6867 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6868 else if (binary_op)
6869 store_expr (build (TREE_CODE (binary_op), type,
6870 make_tree (type, temp),
6871 TREE_OPERAND (binary_op, 1)),
6872 temp, 0);
6873 else
6874 store_expr (build1 (TREE_CODE (unary_op), type,
6875 make_tree (type, temp)),
6876 temp, 0);
6877 op1 = op0;
6878 }
6879 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6880 comparison operator. If we have one of these cases, set the
6881 output to A, branch on A (cse will merge these two references),
6882 then set the output to FOO. */
6883 else if (temp
6884 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6885 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6886 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6887 TREE_OPERAND (exp, 1), 0)
6888 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6889 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
6890 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6891 {
6892 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6893 temp = gen_reg_rtx (mode);
6894 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6895 jumpif (TREE_OPERAND (exp, 0), op0);
6896
6897 start_cleanup_deferral ();
6898 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6899 op1 = op0;
6900 }
6901 else if (temp
6902 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6903 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6904 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6905 TREE_OPERAND (exp, 2), 0)
6906 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6907 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
6908 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6909 {
6910 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6911 temp = gen_reg_rtx (mode);
6912 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6913 jumpifnot (TREE_OPERAND (exp, 0), op0);
6914
6915 start_cleanup_deferral ();
6916 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6917 op1 = op0;
6918 }
6919 else
6920 {
6921 op1 = gen_label_rtx ();
6922 jumpifnot (TREE_OPERAND (exp, 0), op0);
6923
6924 start_cleanup_deferral ();
6925 if (temp != 0)
6926 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6927 else
6928 expand_expr (TREE_OPERAND (exp, 1),
6929 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6930 end_cleanup_deferral ();
6931 emit_queue ();
6932 emit_jump_insn (gen_jump (op1));
6933 emit_barrier ();
6934 emit_label (op0);
6935 start_cleanup_deferral ();
6936 if (temp != 0)
6937 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6938 else
6939 expand_expr (TREE_OPERAND (exp, 2),
6940 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6941 }
6942
6943 end_cleanup_deferral ();
6944
6945 emit_queue ();
6946 emit_label (op1);
6947 OK_DEFER_POP;
6948
6949 return temp;
6950 }
6951
6952 case TARGET_EXPR:
6953 {
6954 /* Something needs to be initialized, but we didn't know
6955 where that thing was when building the tree. For example,
6956 it could be the return value of a function, or a parameter
6957 to a function which is laid down in the stack, or a temporary
6958 variable which must be passed by reference.
6959
6960 We guarantee that the expression will either be constructed
6961 or copied into our original target. */
6962
6963 tree slot = TREE_OPERAND (exp, 0);
6964 tree cleanups = NULL_TREE;
6965 tree exp1;
6966
6967 if (TREE_CODE (slot) != VAR_DECL)
6968 abort ();
6969
6970 if (! ignore)
6971 target = original_target;
6972
6973 if (target == 0)
6974 {
6975 if (DECL_RTL (slot) != 0)
6976 {
6977 target = DECL_RTL (slot);
6978 /* We have already expanded the slot, so don't do
6979 it again. (mrs) */
6980 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6981 return target;
6982 }
6983 else
6984 {
6985 target = assign_temp (type, 2, 0, 1);
6986 /* All temp slots at this level must not conflict. */
6987 preserve_temp_slots (target);
6988 DECL_RTL (slot) = target;
6989 if (TREE_ADDRESSABLE (slot))
6990 {
6991 TREE_ADDRESSABLE (slot) = 0;
6992 mark_addressable (slot);
6993 }
6994
6995 /* Since SLOT is not known to the called function
6996 to belong to its stack frame, we must build an explicit
6997 cleanup. This case occurs when we must build up a reference
6998 to pass as an argument. In this case,
6999 it is very likely that such a reference need not be
7000 built here. */
7001
7002 if (TREE_OPERAND (exp, 2) == 0)
7003 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7004 cleanups = TREE_OPERAND (exp, 2);
7005 }
7006 }
7007 else
7008 {
7009 /* This case does occur when expanding a parameter which
7010 needs to be constructed on the stack. The target
7011 is the actual stack address that we want to initialize.
7012 The function we call will perform the cleanup in this case. */
7013
7014 /* If we have already assigned it space, use that space,
7015 not the target that we were passed in, as our target
7016 parameter is only a hint. */
7017 if (DECL_RTL (slot) != 0)
7018 {
7019 target = DECL_RTL (slot);
7020 /* If we have already expanded the slot, don't do
7021 it again. (mrs) */
7022 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7023 return target;
7024 }
7025 else
7026 {
7027 DECL_RTL (slot) = target;
7028 /* If we must have an addressable slot, then make sure that
7029 the RTL that we just stored in slot is OK. */
7030 if (TREE_ADDRESSABLE (slot))
7031 {
7032 TREE_ADDRESSABLE (slot) = 0;
7033 mark_addressable (slot);
7034 }
7035 }
7036 }
7037
7038 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7039 /* Mark it as expanded. */
7040 TREE_OPERAND (exp, 1) = NULL_TREE;
7041
7042 store_expr (exp1, target, 0);
7043
7044 expand_decl_cleanup (NULL_TREE, cleanups);
7045
7046 return target;
7047 }
7048
7049 case INIT_EXPR:
7050 {
7051 tree lhs = TREE_OPERAND (exp, 0);
7052 tree rhs = TREE_OPERAND (exp, 1);
7053 tree noncopied_parts = 0;
7054 tree lhs_type = TREE_TYPE (lhs);
7055
7056 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7057 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7058 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7059 TYPE_NONCOPIED_PARTS (lhs_type));
7060 while (noncopied_parts != 0)
7061 {
7062 expand_assignment (TREE_VALUE (noncopied_parts),
7063 TREE_PURPOSE (noncopied_parts), 0, 0);
7064 noncopied_parts = TREE_CHAIN (noncopied_parts);
7065 }
7066 return temp;
7067 }
7068
7069 case MODIFY_EXPR:
7070 {
7071 /* If lhs is complex, expand calls in rhs before computing it.
7072 That's so we don't compute a pointer and save it over a call.
7073 If lhs is simple, compute it first so we can give it as a
7074 target if the rhs is just a call. This avoids an extra temp and copy
7075 and that prevents a partial-subsumption which makes bad code.
7076 Actually we could treat component_ref's of vars like vars. */
7077
7078 tree lhs = TREE_OPERAND (exp, 0);
7079 tree rhs = TREE_OPERAND (exp, 1);
7080 tree noncopied_parts = 0;
7081 tree lhs_type = TREE_TYPE (lhs);
7082
7083 temp = 0;
7084
7085 if (TREE_CODE (lhs) != VAR_DECL
7086 && TREE_CODE (lhs) != RESULT_DECL
7087 && TREE_CODE (lhs) != PARM_DECL
7088 && ! (TREE_CODE (lhs) == INDIRECT_REF
7089 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7090 preexpand_calls (exp);
7091
7092 /* Check for |= or &= of a bitfield of size one into another bitfield
7093 of size 1. In this case, (unless we need the result of the
7094 assignment) we can do this more efficiently with a
7095 test followed by an assignment, if necessary.
7096
7097 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7098 things change so we do, this code should be enhanced to
7099 support it. */
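/* Concretely, an illustrative sketch: with one-bit fields b and c,
`x.b |= y.c' whose value is unused becomes, in effect,
`if (y.c) x.b = 1;', and `x.b &= y.c' becomes
`if (! y.c) x.b = 0;'. */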
7100 if (ignore
7101 && TREE_CODE (lhs) == COMPONENT_REF
7102 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7103 || TREE_CODE (rhs) == BIT_AND_EXPR)
7104 && TREE_OPERAND (rhs, 0) == lhs
7105 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7106 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7107 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7108 {
7109 rtx label = gen_label_rtx ();
7110
7111 do_jump (TREE_OPERAND (rhs, 1),
7112 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7113 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7114 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7115 (TREE_CODE (rhs) == BIT_IOR_EXPR
7116 ? integer_one_node
7117 : integer_zero_node)),
7118 0, 0);
7119 do_pending_stack_adjust ();
7120 emit_label (label);
7121 return const0_rtx;
7122 }
7123
7124 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7125 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7126 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7127 TYPE_NONCOPIED_PARTS (lhs_type));
7128
7129 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7130 while (noncopied_parts != 0)
7131 {
7132 expand_assignment (TREE_PURPOSE (noncopied_parts),
7133 TREE_VALUE (noncopied_parts), 0, 0);
7134 noncopied_parts = TREE_CHAIN (noncopied_parts);
7135 }
7136 return temp;
7137 }
7138
7139 case PREINCREMENT_EXPR:
7140 case PREDECREMENT_EXPR:
7141 return expand_increment (exp, 0, ignore);
7142
7143 case POSTINCREMENT_EXPR:
7144 case POSTDECREMENT_EXPR:
7145 /* Faster to treat as pre-increment if result is not used. */
7146 return expand_increment (exp, ! ignore, ignore);
7147
7148 case ADDR_EXPR:
7149 /* If nonzero, TEMP will be set to the address of something that might
7150 be a MEM corresponding to a stack slot. */
7151 temp = 0;
7152
7153 /* Are we taking the address of a nested function? */
7154 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7155 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7156 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7157 {
7158 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7159 op0 = force_operand (op0, target);
7160 }
7161 /* If we are taking the address of something erroneous, just
7162 return a zero. */
7163 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7164 return const0_rtx;
7165 else
7166 {
7167 /* We make sure to pass const0_rtx down if we came in with
7168 ignore set, to avoid expanding the cleanups twice. */
7169 op0 = expand_expr (TREE_OPERAND (exp, 0),
7170 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7171 (modifier == EXPAND_INITIALIZER
7172 ? modifier : EXPAND_CONST_ADDRESS));
7173
7174 /* If we are going to ignore the result, OP0 will have been set
7175 to const0_rtx, so just return it. Don't get confused and
7176 think we are taking the address of the constant. */
7177 if (ignore)
7178 return op0;
7179
7180 op0 = protect_from_queue (op0, 0);
7181
7182 /* We would like the object in memory. If it is a constant,
7183 we can have it be statically allocated into memory. For
7184 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7185 memory and store the value into it. */
7186
7187 if (CONSTANT_P (op0))
7188 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7189 op0);
7190 else if (GET_CODE (op0) == MEM)
7191 {
7192 mark_temp_addr_taken (op0);
7193 temp = XEXP (op0, 0);
7194 }
7195
7196 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7197 || GET_CODE (op0) == CONCAT)
7198 {
7199 /* If this object is in a register, it must not
7200 be BLKmode. */
7201 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7202 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7203
7204 mark_temp_addr_taken (memloc);
7205 emit_move_insn (memloc, op0);
7206 op0 = memloc;
7207 }
7208
7209 if (GET_CODE (op0) != MEM)
7210 abort ();
7211
7212 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7213 {
7214 temp = XEXP (op0, 0);
7215 #ifdef POINTERS_EXTEND_UNSIGNED
7216 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7217 && mode == ptr_mode)
7218 temp = convert_memory_address (ptr_mode, temp);
7219 #endif
7220 return temp;
7221 }
7222
7223 op0 = force_operand (XEXP (op0, 0), target);
7224 }
7225
7226 if (flag_force_addr && GET_CODE (op0) != REG)
7227 op0 = force_reg (Pmode, op0);
7228
7229 if (GET_CODE (op0) == REG
7230 && ! REG_USERVAR_P (op0))
7231 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7232
7233 /* If we might have had a temp slot, add an equivalent address
7234 for it. */
7235 if (temp != 0)
7236 update_temp_slot_address (temp, op0);
7237
7238 #ifdef POINTERS_EXTEND_UNSIGNED
7239 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7240 && mode == ptr_mode)
7241 op0 = convert_memory_address (ptr_mode, op0);
7242 #endif
7243
7244 return op0;
7245
7246 case ENTRY_VALUE_EXPR:
7247 abort ();
7248
7249 /* COMPLEX type for Extended Pascal & Fortran */
7250 case COMPLEX_EXPR:
7251 {
7252 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7253 rtx insns;
7254
7255 /* Get the rtx code of the operands. */
7256 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7257 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7258
7259 if (! target)
7260 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7261
7262 start_sequence ();
7263
7264 /* Move the real (op0) and imaginary (op1) parts to their location. */
7265 emit_move_insn (gen_realpart (mode, target), op0);
7266 emit_move_insn (gen_imagpart (mode, target), op1);
7267
7268 insns = get_insns ();
7269 end_sequence ();
7270
7271 /* Complex construction should appear as a single unit. */
7272 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7273 each with a separate pseudo as destination.
7274 It's not correct for flow to treat them as a unit. */
7275 if (GET_CODE (target) != CONCAT)
7276 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7277 else
7278 emit_insns (insns);
7279
7280 return target;
7281 }
7282
7283 case REALPART_EXPR:
7284 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7285 return gen_realpart (mode, op0);
7286
7287 case IMAGPART_EXPR:
7288 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7289 return gen_imagpart (mode, op0);
7290
7291 case CONJ_EXPR:
7292 {
7293 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7294 rtx imag_t;
7295 rtx insns;
7296
7297 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7298
7299 if (! target)
7300 target = gen_reg_rtx (mode);
7301
7302 start_sequence ();
7303
7304 /* Store the realpart and the negated imagpart to target. */
7305 emit_move_insn (gen_realpart (partmode, target),
7306 gen_realpart (partmode, op0));
7307
7308 imag_t = gen_imagpart (partmode, target);
7309 temp = expand_unop (partmode, neg_optab,
7310 gen_imagpart (partmode, op0), imag_t, 0);
7311 if (temp != imag_t)
7312 emit_move_insn (imag_t, temp);
7313
7314 insns = get_insns ();
7315 end_sequence ();
7316
7317 /* Conjugate should appear as a single unit.
7318 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7319 each with a separate pseudo as destination.
7320 It's not correct for flow to treat them as a unit. */
7321 if (GET_CODE (target) != CONCAT)
7322 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7323 else
7324 emit_insns (insns);
7325
7326 return target;
7327 }
7328
7329 case TRY_CATCH_EXPR:
7330 {
7331 tree handler = TREE_OPERAND (exp, 1);
7332
7333 expand_eh_region_start ();
7334
7335 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7336
7337 expand_eh_region_end (handler);
7338
7339 return op0;
7340 }
7341
7342 case POPDCC_EXPR:
7343 {
7344 rtx dcc = get_dynamic_cleanup_chain ();
7345 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
7346 return const0_rtx;
7347 }
7348
7349 case POPDHC_EXPR:
7350 {
7351 rtx dhc = get_dynamic_handler_chain ();
7352 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
7353 return const0_rtx;
7354 }
7355
7356 case ERROR_MARK:
7357 op0 = CONST0_RTX (tmode);
7358 if (op0 != 0)
7359 return op0;
7360 return const0_rtx;
7361
7362 default:
7363 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7364 }
7365
7366 /* Here to do an ordinary binary operator, generating an instruction
7367 from the optab already placed in `this_optab'. */
7368 binop:
7369 preexpand_calls (exp);
7370 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7371 subtarget = 0;
7372 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7373 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7374 binop2:
7375 temp = expand_binop (mode, this_optab, op0, op1, target,
7376 unsignedp, OPTAB_LIB_WIDEN);
7377 if (temp == 0)
7378 abort ();
7379 return temp;
7380 }
7381
7382
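/* Illustrative sketch (not compiled in): the `binop' tail above reduces
a simple binary tree node to a single optab call. E.g. for a
hypothetical SImode BIT_AND_EXPR, with `this_optab' already set to
and_optab by the case that jumped here, it amounts to roughly this: */
#if 0
static rtx
expand_binop_sketch (exp, target)
     tree exp;
     rtx target;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  return expand_binop (SImode, and_optab, op0, op1, target,
                       0, OPTAB_LIB_WIDEN);
}
#endif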
7383 \f
7384 /* Return the alignment in bits of EXP, a pointer valued expression.
7385 But don't return more than MAX_ALIGN no matter what.
7386 The alignment returned is, by default, the alignment of the thing that
7387 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7388
7389 Otherwise, look at the expression to see if we can do better, i.e., if the
7390 expression is actually pointing at an object whose alignment is tighter. */
7391
7392 static int
7393 get_pointer_alignment (exp, max_align)
7394 tree exp;
7395 unsigned max_align;
7396 {
7397 unsigned align, inner;
7398
7399 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7400 return 0;
7401
7402 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7403 align = MIN (align, max_align);
7404
7405 while (1)
7406 {
7407 switch (TREE_CODE (exp))
7408 {
7409 case NOP_EXPR:
7410 case CONVERT_EXPR:
7411 case NON_LVALUE_EXPR:
7412 exp = TREE_OPERAND (exp, 0);
7413 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7414 return align;
7415 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7416 align = MIN (inner, max_align);
7417 break;
7418
7419 case PLUS_EXPR:
7420 /* If sum of pointer + int, restrict our maximum alignment to that
7421 imposed by the integer. If not, we can't do any better than
7422 ALIGN. */
7423 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7424 return align;
7425
7426 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7427 & (max_align - 1))
7428 != 0)
7429 max_align >>= 1;
7430
7431 exp = TREE_OPERAND (exp, 0);
7432 break;
7433
7434 case ADDR_EXPR:
7435 /* See what we are pointing at and look at its alignment. */
7436 exp = TREE_OPERAND (exp, 0);
7437 if (TREE_CODE (exp) == FUNCTION_DECL)
7438 align = FUNCTION_BOUNDARY;
7439 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7440 align = DECL_ALIGN (exp);
7441 #ifdef CONSTANT_ALIGNMENT
7442 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7443 align = CONSTANT_ALIGNMENT (exp, align);
7444 #endif
7445 return MIN (align, max_align);
7446
7447 default:
7448 return align;
7449 }
7450 }
7451 }
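/* Hypothetical use, in the style of the strlen expander later in this
file: the result is in bits, so callers divide by BITS_PER_UNIT to get
bytes, and a zero result means SRC was not pointer-valued:

     align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;  */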
7452 \f
7453 /* Return the tree node and offset if a given argument corresponds to
7454 a string constant. */
7455
7456 static tree
7457 string_constant (arg, ptr_offset)
7458 tree arg;
7459 tree *ptr_offset;
7460 {
7461 STRIP_NOPS (arg);
7462
7463 if (TREE_CODE (arg) == ADDR_EXPR
7464 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7465 {
7466 *ptr_offset = integer_zero_node;
7467 return TREE_OPERAND (arg, 0);
7468 }
7469 else if (TREE_CODE (arg) == PLUS_EXPR)
7470 {
7471 tree arg0 = TREE_OPERAND (arg, 0);
7472 tree arg1 = TREE_OPERAND (arg, 1);
7473
7474 STRIP_NOPS (arg0);
7475 STRIP_NOPS (arg1);
7476
7477 if (TREE_CODE (arg0) == ADDR_EXPR
7478 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7479 {
7480 *ptr_offset = arg1;
7481 return TREE_OPERAND (arg0, 0);
7482 }
7483 else if (TREE_CODE (arg1) == ADDR_EXPR
7484 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7485 {
7486 *ptr_offset = arg0;
7487 return TREE_OPERAND (arg1, 0);
7488 }
7489 }
7490
7491 return 0;
7492 }
7493
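/* The two shapes recognized above, illustrated with hypothetical
arguments:

     &"hello"            => STRING_CST "hello", *PTR_OFFSET = 0
     &"hello"[0] + i     => STRING_CST "hello", *PTR_OFFSET = i

Any other form yields 0. */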
7494 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7495 answer, because the string could contain a zero byte in the middle.
7496 TREE_STRING_LENGTH is the size of the character array, not the string.
7497
7498 Unfortunately, string_constant can't access the values of const char
7499 arrays with initializers, so neither can we do so here. */
7500
7501 static tree
7502 c_strlen (src)
7503 tree src;
7504 {
7505 tree offset_node;
7506 int offset, max;
7507 char *ptr;
7508
7509 src = string_constant (src, &offset_node);
7510 if (src == 0)
7511 return 0;
7512 max = TREE_STRING_LENGTH (src);
7513 ptr = TREE_STRING_POINTER (src);
7514 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7515 {
7516 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7517 compute the offset to the following null if we don't know where to
7518 start searching for it. */
7519 int i;
7520 for (i = 0; i < max; i++)
7521 if (ptr[i] == 0)
7522 return 0;
7523 /* We don't know the starting offset, but we do know that the string
7524 has no internal zero bytes. We can assume that the offset falls
7525 within the bounds of the string; otherwise, the programmer deserves
7526 what he gets. Subtract the offset from the length of the string,
7527 and return that. */
7528 /* This would perhaps not be valid if we were dealing with named
7529 arrays in addition to literal string constants. */
7530 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7531 }
7532
7533 /* We have a known offset into the string. Start searching there for
7534 a null character. */
7535 if (offset_node == 0)
7536 offset = 0;
7537 else
7538 {
7539 /* Did we get a long long offset? If so, punt. */
7540 if (TREE_INT_CST_HIGH (offset_node) != 0)
7541 return 0;
7542 offset = TREE_INT_CST_LOW (offset_node);
7543 }
7544 /* If the offset is known to be out of bounds, warn, and call strlen at
7545 runtime. */
7546 if (offset < 0 || offset > max)
7547 {
7548 warning ("offset outside bounds of constant string");
7549 return 0;
7550 }
7551 /* Use strlen to search for the first zero byte. Since any strings
7552 constructed with build_string will have nulls appended, we win even
7553 if we get handed something like (char[4])"abcd".
7554
7555 Since OFFSET is our starting index into the string, no further
7556 calculation is needed. */
7557 return size_int (strlen (ptr + offset));
7558 }
7559
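/* Illustrative results, for hypothetical arguments:

     c_strlen ("hello")          => size_int (5)
     c_strlen ("hello" + 2)      => size_int (3)
     c_strlen ("hi\0there" + i)  => 0  (internal null, variable offset)

A zero result means the length must be computed at run time. */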
7560 rtx
7561 expand_builtin_return_addr (fndecl_code, count, tem)
7562 enum built_in_function fndecl_code;
7563 int count;
7564 rtx tem;
7565 {
7566 int i;
7567
7568 /* Some machines need special handling before we can access
7569 arbitrary frames. For example, on the sparc, we must first flush
7570 all register windows to the stack. */
7571 #ifdef SETUP_FRAME_ADDRESSES
7572 if (count > 0)
7573 SETUP_FRAME_ADDRESSES ();
7574 #endif
7575
7576 /* On the sparc, the return address is not in the frame, it is in a
7577 register. There is no way to access it off of the current frame
7578 pointer, but it can be accessed off the previous frame pointer by
7579 reading the value from the register window save area. */
7580 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7581 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7582 count--;
7583 #endif
7584
7585 /* Scan back COUNT frames to the specified frame. */
7586 for (i = 0; i < count; i++)
7587 {
7588 /* Assume the dynamic chain pointer is in the word that the
7589 frame address points to, unless otherwise specified. */
7590 #ifdef DYNAMIC_CHAIN_ADDRESS
7591 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7592 #endif
7593 tem = memory_address (Pmode, tem);
7594 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
7595 }
7596
7597 /* For __builtin_frame_address, return what we've got. */
7598 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7599 return tem;
7600
7601 /* For __builtin_return_address, get the return address from that
7602 frame. */
7603 #ifdef RETURN_ADDR_RTX
7604 tem = RETURN_ADDR_RTX (count, tem);
7605 #else
7606 tem = memory_address (Pmode,
7607 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7608 tem = gen_rtx_MEM (Pmode, tem);
7609 #endif
7610 return tem;
7611 }
7612
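/* A sketch of what the code above produces, assuming a machine with
neither DYNAMIC_CHAIN_ADDRESS nor RETURN_ADDR_RTX defined:
__builtin_return_address (1) expands to roughly this. */
#if 0
static rtx
return_addr_sketch ()
{
  rtx tem = hard_frame_pointer_rtx;

  /* One trip around the loop above: chase the saved frame pointer.  */
  tem = copy_to_reg (gen_rtx_MEM (Pmode, memory_address (Pmode, tem)));
  /* Then read the word that follows it.  */
  return gen_rtx_MEM (Pmode,
                      memory_address (Pmode,
                                      plus_constant (tem,
                                                     GET_MODE_SIZE (Pmode))));
}
#endif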
7613 /* __builtin_setjmp is passed a pointer to an array of five words (not
7614 all will be used on all machines). It operates similarly to the C
7615 library function of the same name, but is more efficient. Much of
7616 the code below (and for longjmp) is copied from the handling of
7617 non-local gotos.
7618
7619 NOTE: This is intended for use by GNAT and the exception handling
7620 scheme in the compiler and will only work when used in the
7621 way they use it. */
7622
7623 rtx
7624 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
7625 rtx buf_addr;
7626 rtx target;
7627 rtx first_label, next_label;
7628 {
7629 rtx lab1 = gen_label_rtx ();
7630 enum machine_mode sa_mode = Pmode, value_mode;
7631 rtx stack_save;
7632
7633 value_mode = TYPE_MODE (integer_type_node);
7634
7635 #ifdef POINTERS_EXTEND_UNSIGNED
7636 buf_addr = convert_memory_address (Pmode, buf_addr);
7637 #endif
7638
7639 buf_addr = force_reg (Pmode, buf_addr);
7640
7641 if (target == 0 || GET_CODE (target) != REG
7642 || REGNO (target) < FIRST_PSEUDO_REGISTER)
7643 target = gen_reg_rtx (value_mode);
7644
7645 emit_queue ();
7646
7647 /* We store the frame pointer and the address of lab1 in the buffer
7648 and use the rest of it for the stack save area, which is
7649 machine-dependent. */
7650 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
7651 virtual_stack_vars_rtx);
7652 emit_move_insn (validize_mem
7653 (gen_rtx_MEM (Pmode,
7654 plus_constant (buf_addr,
7655 GET_MODE_SIZE (Pmode)))),
7656 gen_rtx_LABEL_REF (Pmode, lab1));
7657
7658 #ifdef HAVE_save_stack_nonlocal
7659 if (HAVE_save_stack_nonlocal)
7660 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
7661 #endif
7662
7663 stack_save = gen_rtx_MEM (sa_mode,
7664 plus_constant (buf_addr,
7665 2 * GET_MODE_SIZE (Pmode)));
7666 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
7667
7668 /* If there is further processing to do, do it. */
7669 #ifdef HAVE_builtin_setjmp_setup
7670 if (HAVE_builtin_setjmp_setup)
7671 emit_insn (gen_builtin_setjmp_setup (buf_addr));
7672 #endif
7673
7674 /* Set TARGET to zero and branch to the first-time-through label. */
7675 emit_move_insn (target, const0_rtx);
7676 emit_jump_insn (gen_jump (first_label));
7677 emit_barrier ();
7678 emit_label (lab1);
7679
7680 /* Tell flow about the strange goings on. */
7681 current_function_has_nonlocal_label = 1;
7682
7683 /* The FP is clobbered when we get here, so we have to make sure it's
7684 marked as used by this function. */
7685 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7686
7687 /* Mark the static chain as clobbered here so life information
7688 doesn't get messed up for it. */
7689 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
7690
7691 /* Now put in the code to restore the frame pointer, and argument
7692 pointer, if needed. The code below is from expand_end_bindings
7693 in stmt.c; see detailed documentation there. */
7694 #ifdef HAVE_nonlocal_goto
7695 if (! HAVE_nonlocal_goto)
7696 #endif
7697 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
7698
7699 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7700 if (fixed_regs[ARG_POINTER_REGNUM])
7701 {
7702 #ifdef ELIMINABLE_REGS
7703 int i;
7704 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
7705
7706 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
7707 if (elim_regs[i].from == ARG_POINTER_REGNUM
7708 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
7709 break;
7710
7711 if (i == sizeof elim_regs / sizeof elim_regs [0])
7712 #endif
7713 {
7714 /* Now restore our arg pointer from the address at which it
7715 was saved in our stack frame.
7716 If space hasn't been allocated for it yet, make
7717 some now. */
7718 if (arg_pointer_save_area == 0)
7719 arg_pointer_save_area
7720 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
7721 emit_move_insn (virtual_incoming_args_rtx,
7722 copy_to_reg (arg_pointer_save_area));
7723 }
7724 }
7725 #endif
7726
7727 #ifdef HAVE_builtin_setjmp_receiver
7728 if (HAVE_builtin_setjmp_receiver)
7729 emit_insn (gen_builtin_setjmp_receiver (lab1));
7730 else
7731 #endif
7732 #ifdef HAVE_nonlocal_goto_receiver
7733 if (HAVE_nonlocal_goto_receiver)
7734 emit_insn (gen_nonlocal_goto_receiver ());
7735 else
7736 #endif
7737 {
7738 ; /* Nothing */
7739 }
7740
7741 /* Set TARGET, and branch to the next-time-through label. */
7742 emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
7743 emit_jump_insn (gen_jump (next_label));
7744 emit_barrier ();
7745
7746 return target;
7747 }
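/* A sketch of the jmp_buf layout assumed above (the tail is
machine-dependent):

     word 0       frame pointer (virtual_stack_vars_rtx)
     word 1       resume label LAB1
     words 2..4   stack save area, in SA_MODE

and of a hypothetical caller, in the style of the BUILT_IN_SETJMP case
below, where the first-time and longjmp paths resume at one label: */
#if 0
static rtx
setjmp_sketch (buf_addr, target)
     rtx buf_addr, target;
{
  rtx lab = gen_label_rtx ();
  rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);

  emit_label (lab);
  return ret;
}
#endif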
7748
7749 void
7750 expand_builtin_longjmp (buf_addr, value)
7751 rtx buf_addr, value;
7752 {
7753 rtx fp, lab, stack;
7754 enum machine_mode sa_mode;
7755
7756 #ifdef POINTERS_EXTEND_UNSIGNED
7757 buf_addr = convert_memory_address (Pmode, buf_addr);
7758 #endif
7759 buf_addr = force_reg (Pmode, buf_addr);
7760
7761 /* The value sent by longjmp is not allowed to be zero. Force it
7762 to one if so. */
7763 if (GET_CODE (value) == CONST_INT)
7764 {
7765 if (INTVAL (value) == 0)
7766 value = const1_rtx;
7767 }
7768 else
7769 {
7770 lab = gen_label_rtx ();
7771
7772 emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
7773 emit_jump_insn (gen_bne (lab));
7774 emit_move_insn (value, const1_rtx);
7775 emit_label (lab);
7776 }
7777
7778 /* Make sure the value is in the right mode to be copied to the chain. */
7779 if (GET_MODE (value) != VOIDmode)
7780 value = gen_lowpart (GET_MODE (static_chain_rtx), value);
7781
7782 #ifdef HAVE_builtin_longjmp
7783 if (HAVE_builtin_longjmp)
7784 {
7785 /* Copy the "return value" to the static chain reg. */
7786 emit_move_insn (static_chain_rtx, value);
7787 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7788 emit_insn (gen_builtin_longjmp (buf_addr));
7789 }
7790 else
7791 #endif
7792 {
7793 fp = gen_rtx_MEM (Pmode, buf_addr);
7794 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
7795 GET_MODE_SIZE (Pmode)));
7796
7797 #ifdef HAVE_save_stack_nonlocal
7798 sa_mode = (HAVE_save_stack_nonlocal
7799 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
7800 : Pmode);
7801 #else
7802 sa_mode = Pmode;
7803 #endif
7804
7805 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
7806 2 * GET_MODE_SIZE (Pmode)));
7807
7808 /* Pick up FP, label, and SP from the block and jump. This code is
7809 from expand_goto in stmt.c; see there for detailed comments. */
7810 #ifdef HAVE_nonlocal_goto
7811 if (HAVE_nonlocal_goto)
7812 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
7813 else
7814 #endif
7815 {
7816 lab = copy_to_reg (lab);
7817
7818 /* Copy the "return value" to the static chain reg. */
7819 emit_move_insn (static_chain_rtx, value);
7820
7821 emit_move_insn (hard_frame_pointer_rtx, fp);
7822 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
7823
7824 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7825 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
7826 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7827 emit_indirect_jump (lab);
7828 }
7829 }
7830 }
7831
7832 \f
7833 /* Expand an expression EXP that calls a built-in function,
7834 with result going to TARGET if that's convenient
7835 (and in mode MODE if that's convenient).
7836 SUBTARGET may be used as the target for computing one of EXP's operands.
7837 IGNORE is nonzero if the value is to be ignored. */
7838
7839 #define CALLED_AS_BUILT_IN(NODE) \
7840 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
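/* E.g. an explicit call to `__builtin_strlen' satisfies this test and
is expanded inline even when not optimizing, while a plain `strlen'
gets the inline expansion only under -O; see the uses below. */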
7841
7842 static rtx
7843 expand_builtin (exp, target, subtarget, mode, ignore)
7844 tree exp;
7845 rtx target;
7846 rtx subtarget;
7847 enum machine_mode mode;
7848 int ignore;
7849 {
7850 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7851 tree arglist = TREE_OPERAND (exp, 1);
7852 rtx op0;
7853 rtx lab1, insns;
7854 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7855 optab builtin_optab;
7856
7857 switch (DECL_FUNCTION_CODE (fndecl))
7858 {
7859 case BUILT_IN_ABS:
7860 case BUILT_IN_LABS:
7861 case BUILT_IN_FABS:
7862 /* build_function_call changes these into ABS_EXPR. */
7863 abort ();
7864
7865 case BUILT_IN_SIN:
7866 case BUILT_IN_COS:
7867 /* Treat these like sqrt, but only if the user asks for them. */
7868 if (! flag_fast_math)
7869 break;
7870 case BUILT_IN_FSQRT:
7871 /* If not optimizing, call the library function. */
7872 if (! optimize)
7873 break;
7874
7875 if (arglist == 0
7876 /* Arg could be wrong type if user redeclared this fcn wrong. */
7877 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7878 break;
7879
7880 /* Stabilize and compute the argument. */
7881 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7882 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7883 {
7884 exp = copy_node (exp);
7885 arglist = copy_node (arglist);
7886 TREE_OPERAND (exp, 1) = arglist;
7887 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7888 }
7889 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7890
7891 /* Make a suitable register to place result in. */
7892 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7893
7894 emit_queue ();
7895 start_sequence ();
7896
7897 switch (DECL_FUNCTION_CODE (fndecl))
7898 {
7899 case BUILT_IN_SIN:
7900 builtin_optab = sin_optab; break;
7901 case BUILT_IN_COS:
7902 builtin_optab = cos_optab; break;
7903 case BUILT_IN_FSQRT:
7904 builtin_optab = sqrt_optab; break;
7905 default:
7906 abort ();
7907 }
7908
7909 /* Compute into TARGET.
7910 Set TARGET to wherever the result comes back. */
7911 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7912 builtin_optab, op0, target, 0);
7913
7914 /* If we were unable to expand via the builtin, stop the
7915 sequence (without outputting the insns) and break, causing
7916 a call to the library function. */
7917 if (target == 0)
7918 {
7919 end_sequence ();
7920 break;
7921 }
7922
7923 /* Check the results by default. But if flag_fast_math is turned on,
7924 then assume sqrt will always be called with valid arguments. */
7925
7926 if (! flag_fast_math)
7927 {
7928 /* Don't define the builtin FP instructions
7929 if your machine is not IEEE. */
7930 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7931 abort ();
7932
7933 lab1 = gen_label_rtx ();
7934
7935 /* Test the result; if it is NaN, set errno=EDOM because
7936 the argument was not in the domain. */
7937 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7938 emit_jump_insn (gen_beq (lab1));
7939
7940 #ifdef TARGET_EDOM
7941 {
7942 #ifdef GEN_ERRNO_RTX
7943 rtx errno_rtx = GEN_ERRNO_RTX;
7944 #else
7945 rtx errno_rtx
7946 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
7947 #endif
7948
7949 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7950 }
7951 #else
7952 /* We can't set errno=EDOM directly; let the library call do it.
7953 Pop the arguments right away in case the call gets deleted. */
7954 NO_DEFER_POP;
7955 expand_call (exp, target, 0);
7956 OK_DEFER_POP;
7957 #endif
7958
7959 emit_label (lab1);
7960 }
7961
7962 /* Output the entire sequence. */
7963 insns = get_insns ();
7964 end_sequence ();
7965 emit_insns (insns);
7966
7967 return target;
7968
7969 case BUILT_IN_FMOD:
7970 break;
7971
7972 /* __builtin_apply_args returns a block of memory allocated on
7973 the stack into which are stored the arg pointer, structure
7974 value address, static chain, and all the registers that might
7975 possibly be used in performing a function call. The code is
7976 moved to the start of the function so the incoming values are
7977 saved. */
7978 case BUILT_IN_APPLY_ARGS:
7979 /* Don't do __builtin_apply_args more than once in a function.
7980 Save the result of the first call and reuse it. */
7981 if (apply_args_value != 0)
7982 return apply_args_value;
7983 {
7984 /* When this function is called, it means that registers must be
7985 saved on entry to this function. So we migrate the
7986 call to the first insn of this function. */
7987 rtx temp;
7988 rtx seq;
7989
7990 start_sequence ();
7991 temp = expand_builtin_apply_args ();
7992 seq = get_insns ();
7993 end_sequence ();
7994
7995 apply_args_value = temp;
7996
7997 /* Put the sequence after the NOTE that starts the function.
7998 If this is inside a SEQUENCE, make the outer-level insn
7999 chain current, so the code is placed at the start of the
8000 function. */
8001 push_topmost_sequence ();
8002 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8003 pop_topmost_sequence ();
8004 return temp;
8005 }
8006
8007 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8008 FUNCTION with a copy of the parameters described by
8009 ARGUMENTS, and ARGSIZE. It returns a block of memory
8010 allocated on the stack into which are stored all the registers
8011 that might possibly be used for returning the result of a
8012 function. ARGUMENTS is the value returned by
8013 __builtin_apply_args. ARGSIZE is the number of bytes of
8014 arguments that must be copied. ??? How should this value be
8015 computed? We'll also need a safe worst case value for varargs
8016 functions. */
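/* A sketch of the intended use, with a hypothetical function F being
forwarded to and 64 standing for a caller-chosen worst-case
argument size:

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*)()) f, args, 64);
     __builtin_return (result);  */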
8017 case BUILT_IN_APPLY:
8018 if (arglist == 0
8019 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8020 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8021 || TREE_CHAIN (arglist) == 0
8022 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8023 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8024 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8025 return const0_rtx;
8026 else
8027 {
8028 int i;
8029 tree t;
8030 rtx ops[3];
8031
8032 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8033 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8034
8035 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8036 }
8037
8038 /* __builtin_return (RESULT) causes the function to return the
8039 value described by RESULT. RESULT is the address of the block of
8040 memory returned by __builtin_apply. */
8041 case BUILT_IN_RETURN:
8042 if (arglist
8043 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8044 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8045 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8046 NULL_RTX, VOIDmode, 0));
8047 return const0_rtx;
8048
8049 case BUILT_IN_SAVEREGS:
8050 /* Don't do __builtin_saveregs more than once in a function.
8051 Save the result of the first call and reuse it. */
8052 if (saveregs_value != 0)
8053 return saveregs_value;
8054 {
8055 /* When this function is called, it means that registers must be
8056 saved on entry to this function. So we migrate the
8057 call to the first insn of this function. */
8058 rtx temp;
8059 rtx seq;
8060
8061 /* Now really call the function. `expand_call' does not call
8062 expand_builtin, so there is no danger of infinite recursion here. */
8063 start_sequence ();
8064
8065 #ifdef EXPAND_BUILTIN_SAVEREGS
8066 /* Do whatever the machine needs done in this case. */
8067 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8068 #else
8069 /* The register where the function returns its value
8070 is likely to have something else in it, such as an argument.
8071 So preserve that register around the call. */
8072
8073 if (value_mode != VOIDmode)
8074 {
8075 rtx valreg = hard_libcall_value (value_mode);
8076 rtx saved_valreg = gen_reg_rtx (value_mode);
8077
8078 emit_move_insn (saved_valreg, valreg);
8079 temp = expand_call (exp, target, ignore);
8080 emit_move_insn (valreg, saved_valreg);
8081 }
8082 else
8083 /* Generate the call, putting the value in a pseudo. */
8084 temp = expand_call (exp, target, ignore);
8085 #endif
8086
8087 seq = get_insns ();
8088 end_sequence ();
8089
8090 saveregs_value = temp;
8091
8092 /* Put the sequence after the NOTE that starts the function.
8093 If this is inside a SEQUENCE, make the outer-level insn
8094 chain current, so the code is placed at the start of the
8095 function. */
8096 push_topmost_sequence ();
8097 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8098 pop_topmost_sequence ();
8099 return temp;
8100 }
8101
8102 /* __builtin_args_info (N) returns word N of the arg space info
8103 for the current function. The number and meanings of the words
8104 are controlled by the definition of CUMULATIVE_ARGS. */
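/* E.g. (illustrative) `__builtin_args_info (0)' returns the first
word of current_function_args_info, whatever the port keeps there. */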
8105 case BUILT_IN_ARGS_INFO:
8106 {
8107 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8108 int *word_ptr = (int *) &current_function_args_info;
8109 #if 0
8110 /* These are used by the code below that is if 0'ed away */
8111 int i;
8112 tree type, elts, result;
8113 #endif
8114
8115 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8116 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8117 __FILE__, __LINE__);
8118
8119 if (arglist != 0)
8120 {
8121 tree arg = TREE_VALUE (arglist);
8122 if (TREE_CODE (arg) != INTEGER_CST)
8123 error ("argument of `__builtin_args_info' must be constant");
8124 else
8125 {
8126 int wordnum = TREE_INT_CST_LOW (arg);
8127
8128 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8129 error ("argument of `__builtin_args_info' out of range");
8130 else
8131 return GEN_INT (word_ptr[wordnum]);
8132 }
8133 }
8134 else
8135 error ("missing argument in `__builtin_args_info'");
8136
8137 return const0_rtx;
8138
8139 #if 0
8140 for (elts = NULL_TREE, i = 0; i < nwords; i++)
8141 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8142
8143 type = build_array_type (integer_type_node,
8144 build_index_type (build_int_2 (nwords, 0)));
8145 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8146 TREE_CONSTANT (result) = 1;
8147 TREE_STATIC (result) = 1;
8148 result = build (INDIRECT_REF, build_pointer_type (type), result);
8149 TREE_CONSTANT (result) = 1;
8150 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8151 #endif
8152 }
8153
8154 /* Return the address of the first anonymous stack arg. */
8155 case BUILT_IN_NEXT_ARG:
8156 {
8157 tree fntype = TREE_TYPE (current_function_decl);
8158
8159 if ((TYPE_ARG_TYPES (fntype) == 0
8160 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8161 == void_type_node))
8162 && ! current_function_varargs)
8163 {
8164 error ("`va_start' used in function with fixed args");
8165 return const0_rtx;
8166 }
8167
8168 if (arglist)
8169 {
8170 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8171 tree arg = TREE_VALUE (arglist);
8172
8173 /* Strip off all nops for the sake of the comparison. This
8174 is not quite the same as STRIP_NOPS. It does more.
8175 We must also strip off INDIRECT_REF for C++ reference
8176 parameters. */
8177 while (TREE_CODE (arg) == NOP_EXPR
8178 || TREE_CODE (arg) == CONVERT_EXPR
8179 || TREE_CODE (arg) == NON_LVALUE_EXPR
8180 || TREE_CODE (arg) == INDIRECT_REF)
8181 arg = TREE_OPERAND (arg, 0);
8182 if (arg != last_parm)
8183 warning ("second parameter of `va_start' not last named argument");
8184 }
8185 else if (! current_function_varargs)
8186 /* Evidently an out of date version of <stdarg.h>; can't validate
8187 va_start's second argument, but can still work as intended. */
8188 warning ("`__builtin_next_arg' called without an argument");
8189 }
8190
8191 return expand_binop (Pmode, add_optab,
8192 current_function_internal_arg_pointer,
8193 current_function_arg_offset_rtx,
8194 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8195
8196 case BUILT_IN_CLASSIFY_TYPE:
8197 if (arglist != 0)
8198 {
8199 tree type = TREE_TYPE (TREE_VALUE (arglist));
8200 enum tree_code code = TREE_CODE (type);
8201 if (code == VOID_TYPE)
8202 return GEN_INT (void_type_class);
8203 if (code == INTEGER_TYPE)
8204 return GEN_INT (integer_type_class);
8205 if (code == CHAR_TYPE)
8206 return GEN_INT (char_type_class);
8207 if (code == ENUMERAL_TYPE)
8208 return GEN_INT (enumeral_type_class);
8209 if (code == BOOLEAN_TYPE)
8210 return GEN_INT (boolean_type_class);
8211 if (code == POINTER_TYPE)
8212 return GEN_INT (pointer_type_class);
8213 if (code == REFERENCE_TYPE)
8214 return GEN_INT (reference_type_class);
8215 if (code == OFFSET_TYPE)
8216 return GEN_INT (offset_type_class);
8217 if (code == REAL_TYPE)
8218 return GEN_INT (real_type_class);
8219 if (code == COMPLEX_TYPE)
8220 return GEN_INT (complex_type_class);
8221 if (code == FUNCTION_TYPE)
8222 return GEN_INT (function_type_class);
8223 if (code == METHOD_TYPE)
8224 return GEN_INT (method_type_class);
8225 if (code == RECORD_TYPE)
8226 return GEN_INT (record_type_class);
8227 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8228 return GEN_INT (union_type_class);
8229 if (code == ARRAY_TYPE)
8230 {
8231 if (TYPE_STRING_FLAG (type))
8232 return GEN_INT (string_type_class);
8233 else
8234 return GEN_INT (array_type_class);
8235 }
8236 if (code == SET_TYPE)
8237 return GEN_INT (set_type_class);
8238 if (code == FILE_TYPE)
8239 return GEN_INT (file_type_class);
8240 if (code == LANG_TYPE)
8241 return GEN_INT (lang_type_class);
8242 }
8243 return GEN_INT (no_type_class);
8244
8245 case BUILT_IN_CONSTANT_P:
8246 if (arglist == 0)
8247 return const0_rtx;
8248 else
8249 {
8250 tree arg = TREE_VALUE (arglist);
8251
8252 STRIP_NOPS (arg);
8253 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8254 || (TREE_CODE (arg) == ADDR_EXPR
8255 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8256 ? const1_rtx : const0_rtx);
8257 }
8258
8259 case BUILT_IN_FRAME_ADDRESS:
8260 /* The argument must be a nonnegative integer constant.
8261 It counts the number of frames to scan up the stack.
8262 The value is the address of that frame. */
8263 case BUILT_IN_RETURN_ADDRESS:
8264 /* The argument must be a nonnegative integer constant.
8265 It counts the number of frames to scan up the stack.
8266 The value is the return address saved in that frame. */
8267 if (arglist == 0)
8268 /* Warning about missing arg was already issued. */
8269 return const0_rtx;
8270 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8271 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8272 {
8273 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8274 error ("invalid arg to `__builtin_frame_address'");
8275 else
8276 error ("invalid arg to `__builtin_return_address'");
8277 return const0_rtx;
8278 }
8279 else
8280 {
8281 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8282 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8283 hard_frame_pointer_rtx);
8284
8285 /* Some ports cannot access arbitrary stack frames. */
8286 if (tem == NULL)
8287 {
8288 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8289 warning ("unsupported arg to `__builtin_frame_address'");
8290 else
8291 warning ("unsupported arg to `__builtin_return_address'");
8292 return const0_rtx;
8293 }
8294
8295 /* For __builtin_frame_address, return what we've got. */
8296 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8297 return tem;
8298
8299 if (GET_CODE (tem) != REG)
8300 tem = copy_to_reg (tem);
8301 return tem;
8302 }
8303
8304 /* Returns the address of the area where the structure is returned.
8305 0 otherwise. */
8306 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8307 if (arglist != 0
8308 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8309 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8310 return const0_rtx;
8311 else
8312 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8313
8314 case BUILT_IN_ALLOCA:
8315 if (arglist == 0
8316 /* Arg could be non-integer if user redeclared this fcn wrong. */
8317 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8318 break;
8319
8320 /* Compute the argument. */
8321 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8322
8323 /* Allocate the desired space. */
8324 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8325
8326 case BUILT_IN_FFS:
8327 /* If not optimizing, call the library function. */
8328 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8329 break;
8330
8331 if (arglist == 0
8332 /* Arg could be non-integer if user redeclared this fcn wrong. */
8333 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8334 break;
8335
8336 /* Compute the argument. */
8337 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8338 /* Compute ffs, into TARGET if possible.
8339 Set TARGET to wherever the result comes back. */
8340 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8341 ffs_optab, op0, target, 1);
8342 if (target == 0)
8343 abort ();
8344 return target;
8345
8346 case BUILT_IN_STRLEN:
8347 /* If not optimizing, call the library function. */
8348 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8349 break;
8350
8351 if (arglist == 0
8352 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8353 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8354 break;
8355 else
8356 {
8357 tree src = TREE_VALUE (arglist);
8358 tree len = c_strlen (src);
8359
8360 int align
8361 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8362
8363 rtx result, src_rtx, char_rtx;
8364 enum machine_mode insn_mode = value_mode, char_mode;
8365 enum insn_code icode;
8366
8367 /* If the length is known, just return it. */
8368 if (len != 0)
8369 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8370
8371 /* If SRC is not a pointer type, don't do this operation inline. */
8372 if (align == 0)
8373 break;
8374
8375 /* Call a function if we can't compute strlen in the right mode. */
8376
8377 while (insn_mode != VOIDmode)
8378 {
8379 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8380 if (icode != CODE_FOR_nothing)
8381 break;
8382
8383 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8384 }
8385 if (insn_mode == VOIDmode)
8386 break;
8387
8388 /* Make a place to write the result of the instruction. */
8389 result = target;
8390 if (! (result != 0
8391 && GET_CODE (result) == REG
8392 && GET_MODE (result) == insn_mode
8393 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8394 result = gen_reg_rtx (insn_mode);
8395
8396 /* Make sure the operands are acceptable to the predicates. */
8397
8398 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8399 result = gen_reg_rtx (insn_mode);
8400 src_rtx = memory_address (BLKmode,
8401 expand_expr (src, NULL_RTX, ptr_mode,
8402 EXPAND_NORMAL));
8403
8404 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8405 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8406
8407 /* Check the string is readable and has an end. */
8408 if (flag_check_memory_usage)
8409 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8410 src_rtx, ptr_mode,
8411 GEN_INT (MEMORY_USE_RO),
8412 TYPE_MODE (integer_type_node));
8413
8414 char_rtx = const0_rtx;
8415 char_mode = insn_operand_mode[(int)icode][2];
8416 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8417 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8418
8419 emit_insn (GEN_FCN (icode) (result,
8420 gen_rtx_MEM (BLKmode, src_rtx),
8421 char_rtx, GEN_INT (align)));
8422
8423 /* Return the value in the proper mode for this function. */
8424 if (GET_MODE (result) == value_mode)
8425 return result;
8426 else if (target != 0)
8427 {
8428 convert_move (target, result, 0);
8429 return target;
8430 }
8431 else
8432 return convert_to_mode (value_mode, result, 0);
8433 }
8434
8435 case BUILT_IN_STRCPY:
8436 /* If not optimizing, call the library function. */
8437 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8438 break;
8439
8440 if (arglist == 0
8441 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8442 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8443 || TREE_CHAIN (arglist) == 0
8444 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8445 break;
8446 else
8447 {
8448 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8449
8450 if (len == 0)
8451 break;
8452
8453 len = size_binop (PLUS_EXPR, len, integer_one_node);
8454
8455 chainon (arglist, build_tree_list (NULL_TREE, len));
8456 }
8457
8458 /* Falls through to the memcpy expansion below. */
8459 case BUILT_IN_MEMCPY:
8460 /* If not optimizing, call the library function. */
8461 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8462 break;
8463
8464 if (arglist == 0
8465 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8466 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8467 || TREE_CHAIN (arglist) == 0
8468 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8469 != POINTER_TYPE)
8470 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8471 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8472 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8473 != INTEGER_TYPE))
8474 break;
8475 else
8476 {
8477 tree dest = TREE_VALUE (arglist);
8478 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8479 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8480 tree type;
8481
8482 int src_align
8483 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8484 int dest_align
8485 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8486 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
8487
8488 /* If either SRC or DEST is not a pointer type, don't do
8489 this operation in-line. */
8490 if (src_align == 0 || dest_align == 0)
8491 {
8492 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8493 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8494 break;
8495 }
8496
8497 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8498 dest_mem = gen_rtx_MEM (BLKmode,
8499 memory_address (BLKmode, dest_rtx));
8500 /* There could be a void* cast on top of the object. */
8501 while (TREE_CODE (dest) == NOP_EXPR)
8502 dest = TREE_OPERAND (dest, 0);
8503 type = TREE_TYPE (TREE_TYPE (dest));
8504 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8505 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8506 src_mem = gen_rtx_MEM (BLKmode,
8507 memory_address (BLKmode, src_rtx));
8508 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8509
8510 /* Just copy the rights of SRC to the rights of DEST. */
8511 if (flag_check_memory_usage)
8512 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8513 dest_rtx, ptr_mode,
8514 src_rtx, ptr_mode,
8515 len_rtx, TYPE_MODE (sizetype));
8516
8517 /* There could be a void* cast on top of the object. */
8518 while (TREE_CODE (src) == NOP_EXPR)
8519 src = TREE_OPERAND (src, 0);
8520 type = TREE_TYPE (TREE_TYPE (src));
8521 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8522
8523 /* Copy word part most expediently. */
8524 dest_addr
8525 = emit_block_move (dest_mem, src_mem, len_rtx,
8526 MIN (src_align, dest_align));
8527
8528 if (dest_addr == 0)
8529 dest_addr = force_operand (dest_rtx, NULL_RTX);
8530
8531 return dest_addr;
8532 }
8533
8534 case BUILT_IN_MEMSET:
8535 /* If not optimizing, call the library function. */
8536 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8537 break;
8538
8539 if (arglist == 0
8540 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8541 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8542 || TREE_CHAIN (arglist) == 0
8543 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8544 != INTEGER_TYPE)
8545 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8546 || (TREE_CODE (TREE_TYPE
8547 (TREE_VALUE
8548 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8549 != INTEGER_TYPE))
8550 break;
8551 else
8552 {
8553 tree dest = TREE_VALUE (arglist);
8554 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8555 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8556 tree type;
8557
8558 int dest_align
8559 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8560 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
8561
8562 /* If DEST is not a pointer type, don't do this
8563 operation in-line. */
8564 if (dest_align == 0)
8565 break;
8566
8567 /* If VAL is not 0, don't do this operation in-line. */
8568 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8569 break;
8570
8571 /* If LEN does not expand to a constant, don't do this
8572 operation in-line. */
8573 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8574 if (GET_CODE (len_rtx) != CONST_INT)
8575 break;
8576
8577 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8578 dest_mem = gen_rtx_MEM (BLKmode,
8579 memory_address (BLKmode, dest_rtx));
8580
8581 /* Just check DST is writable and mark it as readable. */
8582 if (flag_check_memory_usage)
8583 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8584 dest_rtx, ptr_mode,
8585 len_rtx, TYPE_MODE (sizetype),
8586 GEN_INT (MEMORY_USE_WO),
8587 TYPE_MODE (integer_type_node));
8588
8589 /* There could be a void* cast on top of the object. */
8590 while (TREE_CODE (dest) == NOP_EXPR)
8591 dest = TREE_OPERAND (dest, 0);
8592 type = TREE_TYPE (TREE_TYPE (dest));
8593 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8594
8595 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
8596
8597 if (dest_addr == 0)
8598 dest_addr = force_operand (dest_rtx, NULL_RTX);
8599
8600 return dest_addr;
8601 }
8602
8603 /* These comparison functions need an instruction that returns an actual
8604 index. An ordinary compare that just sets the condition codes
8605 is not enough. */
8606 #ifdef HAVE_cmpstrsi
8607 case BUILT_IN_STRCMP:
8608 /* If not optimizing, call the library function. */
8609 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8610 break;
8611
8612 /* If we need to check memory accesses, call the library function. */
8613 if (flag_check_memory_usage)
8614 break;
8615
8616 if (arglist == 0
8617 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8618 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8619 || TREE_CHAIN (arglist) == 0
8620 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8621 break;
8622 else if (!HAVE_cmpstrsi)
8623 break;
8624 {
8625 tree arg1 = TREE_VALUE (arglist);
8626 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8627 tree len, len2;
8628
8629 len = c_strlen (arg1);
8630 if (len)
8631 len = size_binop (PLUS_EXPR, integer_one_node, len);
8632 len2 = c_strlen (arg2);
8633 if (len2)
8634 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8635
8636 /* If we don't have a constant length for the first, use the length
8637 of the second, if we know it. We don't require a constant for
8638 this case; some cost analysis could be done if both are available
8639 but neither is constant. For now, assume they're equally cheap.
8640
8641 If both strings have constant lengths, use the smaller. This
8642 could arise if optimization results in strcmp being called with
8643 two fixed strings, or if the code was machine-generated. We should
8644 add some code to the `memcmp' handler below to deal with such
8645 situations, someday. */
8646 if (!len || TREE_CODE (len) != INTEGER_CST)
8647 {
8648 if (len2)
8649 len = len2;
8650 else if (len == 0)
8651 break;
8652 }
8653 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8654 {
8655 if (tree_int_cst_lt (len2, len))
8656 len = len2;
8657 }
8658
8659 chainon (arglist, build_tree_list (NULL_TREE, len));
8660 }
8661
8662 /* Falls through to the memcmp expansion below. */
8663 case BUILT_IN_MEMCMP:
8664 /* If not optimizing, call the library function. */
8665 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8666 break;
8667
8668 /* If we need to check memory accesses, call the library function. */
8669 if (flag_check_memory_usage)
8670 break;
8671
8672 if (arglist == 0
8673 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8674 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8675 || TREE_CHAIN (arglist) == 0
8676 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8677 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8678 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8679 break;
8680 else if (!HAVE_cmpstrsi)
8681 break;
8682 {
8683 tree arg1 = TREE_VALUE (arglist);
8684 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8685 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8686 rtx result;
8687
8688 int arg1_align
8689 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8690 int arg2_align
8691 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8692 enum machine_mode insn_mode
8693 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8694
8695 /* If we don't have POINTER_TYPE, call the function. */
8696 if (arg1_align == 0 || arg2_align == 0)
8697 {
8698 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8699 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8700 break;
8701 }
8702
8703 /* Make a place to write the result of the instruction. */
8704 result = target;
8705 if (! (result != 0
8706 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8707 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8708 result = gen_reg_rtx (insn_mode);
8709
8710 emit_insn (gen_cmpstrsi (result,
8711 gen_rtx_MEM (BLKmode,
8712 expand_expr (arg1, NULL_RTX,
8713 ptr_mode,
8714 EXPAND_NORMAL)),
8715 gen_rtx_MEM (BLKmode,
8716 expand_expr (arg2, NULL_RTX,
8717 ptr_mode,
8718 EXPAND_NORMAL)),
8719 expand_expr (len, NULL_RTX, VOIDmode, 0),
8720 GEN_INT (MIN (arg1_align, arg2_align))));
8721
8722 /* Return the value in the proper mode for this function. */
8723 mode = TYPE_MODE (TREE_TYPE (exp));
8724 if (GET_MODE (result) == mode)
8725 return result;
8726 else if (target != 0)
8727 {
8728 convert_move (target, result, 0);
8729 return target;
8730 }
8731 else
8732 return convert_to_mode (mode, result, 0);
8733 }
8734 #else
8735 case BUILT_IN_STRCMP:
8736 case BUILT_IN_MEMCMP:
8737 break;
8738 #endif
8739
8740 case BUILT_IN_SETJMP:
8741 if (arglist == 0
8742 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8743 break;
8744 else
8745 {
8746 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8747 VOIDmode, 0);
8748 rtx lab = gen_label_rtx ();
8749 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
8750 emit_label (lab);
8751 return ret;
8752 }
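    /* Illustration (hypothetical user code, not part of this file): the
       two builtins are meant to be used as a pair, e.g.

	   void *buf[5];
	   if (__builtin_setjmp (buf) == 0)
	     ... normal path, which may call __builtin_longjmp (buf, 1) ...
	   else
	     ... execution resumes here when the longjmp is taken ...  */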
8753
8754 /* __builtin_longjmp is passed a pointer to an array of five words.
8755 It's similar to the C library longjmp function but works with
8756 __builtin_setjmp above. */
8757 case BUILT_IN_LONGJMP:
8758 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8759 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8760 break;
8761 else
8762 {
8763 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8764 VOIDmode, 0);
8765 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8766 const0_rtx, VOIDmode, 0);
8767 expand_builtin_longjmp (buf_addr, value);
8768 return const0_rtx;
8769 }
8770
8771 /* Various hooks for the DWARF 2 __throw routine. */
8772 case BUILT_IN_UNWIND_INIT:
8773 expand_builtin_unwind_init ();
8774 return const0_rtx;
8775 case BUILT_IN_FP:
8776 return frame_pointer_rtx;
8777 case BUILT_IN_SP:
8778 return stack_pointer_rtx;
8779 #ifdef DWARF2_UNWIND_INFO
8780 case BUILT_IN_DWARF_FP_REGNUM:
8781 return expand_builtin_dwarf_fp_regnum ();
8782 case BUILT_IN_DWARF_REG_SIZE:
8783 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
8784 #endif
8785 case BUILT_IN_FROB_RETURN_ADDR:
8786 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
8787 case BUILT_IN_EXTRACT_RETURN_ADDR:
8788 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
8789 case BUILT_IN_SET_RETURN_ADDR_REG:
8790 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
8791 return const0_rtx;
8792 case BUILT_IN_EH_STUB:
8793 return expand_builtin_eh_stub ();
8794 case BUILT_IN_SET_EH_REGS:
8795 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
8796 TREE_VALUE (TREE_CHAIN (arglist)));
8797 return const0_rtx;
8798
8799 default: /* just do library call, if unknown builtin */
8800 error ("built-in function `%s' not currently supported",
8801 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8802 }
8803
8804 /* The switch statement above can drop through to cause the function
8805 to be called normally. */
8806
8807 return expand_call (exp, target, ignore);
8808 }
8809 \f
8810 /* Built-in functions to perform an untyped call and return. */
8811
8812 /* For each register that may be used for calling a function, this
8813 gives a mode used to copy the register's value. VOIDmode indicates
8814 the register is not used for calling a function. If the machine
8815 has register windows, this gives only the outbound registers.
8816 INCOMING_REGNO gives the corresponding inbound register. */
8817 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8818
8819 /* For each register that may be used for returning values, this gives
8820 a mode used to copy the register's value. VOIDmode indicates the
8821 register is not used for returning values. If the machine has
8822 register windows, this gives only the outbound registers.
8823 INCOMING_REGNO gives the corresponding inbound register. */
8824 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8825
8826 /* For each register that may be used for calling a function, this
8827 gives the offset of that register into the block returned by
8828 __builtin_apply_args. 0 indicates that the register is not
8829 used for calling a function. */
8830 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8831
8832 /* Return the offset of register REGNO into the block returned by
8833 __builtin_apply_args. This is not declared static, since it is
8834 needed in objc-act.c. */
8835
8836 int
8837 apply_args_register_offset (regno)
8838 int regno;
8839 {
8840 apply_args_size ();
8841
8842 /* Arguments are always put in the outgoing registers (in the argument
8843 block) on machines where the in/out register distinction makes sense. */
8844 #ifdef OUTGOING_REGNO
8845 regno = OUTGOING_REGNO(regno);
8846 #endif
8847 return apply_args_reg_offset[regno];
8848 }
8849
8850 /* Return the size required for the block returned by __builtin_apply_args,
8851 and initialize apply_args_mode. */
8852
8853 static int
8854 apply_args_size ()
8855 {
8856 static int size = -1;
8857 int align, regno;
8858 enum machine_mode mode;
8859
8860 /* The values computed by this function never change. */
8861 if (size < 0)
8862 {
8863 /* The first value is the incoming arg-pointer. */
8864 size = GET_MODE_SIZE (Pmode);
8865
8866 /* The second value is the structure value address unless this is
8867 passed as an "invisible" first argument. */
8868 if (struct_value_rtx)
8869 size += GET_MODE_SIZE (Pmode);
8870
8871 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8872 if (FUNCTION_ARG_REGNO_P (regno))
8873 {
8874 /* Search for the proper mode for copying this register's
8875 value. I'm not sure this is right, but it works so far. */
8876 enum machine_mode best_mode = VOIDmode;
8877
8878 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8879 mode != VOIDmode;
8880 mode = GET_MODE_WIDER_MODE (mode))
8881 if (HARD_REGNO_MODE_OK (regno, mode)
8882 && HARD_REGNO_NREGS (regno, mode) == 1)
8883 best_mode = mode;
8884
8885 if (best_mode == VOIDmode)
8886 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8887 mode != VOIDmode;
8888 mode = GET_MODE_WIDER_MODE (mode))
8889 if (HARD_REGNO_MODE_OK (regno, mode)
8890 && (mov_optab->handlers[(int) mode].insn_code
8891 != CODE_FOR_nothing))
8892 best_mode = mode;
8893
8894 mode = best_mode;
8895 if (mode == VOIDmode)
8896 abort ();
8897
8898 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8899 if (size % align != 0)
8900 size = CEIL (size, align) * align;
8901 apply_args_reg_offset[regno] = size;
8902 size += GET_MODE_SIZE (mode);
8903 apply_args_mode[regno] = mode;
8904 }
8905 else
8906 {
8907 apply_args_mode[regno] = VOIDmode;
8908 apply_args_reg_offset[regno] = 0;
8909 }
8910 }
8911 return size;
8912 }
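/* Illustration (hypothetical target): with a 4-byte Pmode, no
   struct_value_rtx, and three 4-byte argument registers r0..r2, the
   loop above lays the block out as

	offset  0: incoming arg pointer
	offset  4: r0	offset  8: r1	offset 12: r2

   for a total size of 16, with apply_args_reg_offset[] recording the
   per-register offsets.  */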
8913
8914 /* Return the size required for the block returned by __builtin_apply,
8915 and initialize apply_result_mode. */
8916
8917 static int
8918 apply_result_size ()
8919 {
8920 static int size = -1;
8921 int align, regno;
8922 enum machine_mode mode;
8923
8924 /* The values computed by this function never change. */
8925 if (size < 0)
8926 {
8927 size = 0;
8928
8929 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8930 if (FUNCTION_VALUE_REGNO_P (regno))
8931 {
8932 /* Search for the proper mode for copying this register's
8933 value. I'm not sure this is right, but it works so far. */
8934 enum machine_mode best_mode = VOIDmode;
8935
8936 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8937 mode != TImode;
8938 mode = GET_MODE_WIDER_MODE (mode))
8939 if (HARD_REGNO_MODE_OK (regno, mode))
8940 best_mode = mode;
8941
8942 if (best_mode == VOIDmode)
8943 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8944 mode != VOIDmode;
8945 mode = GET_MODE_WIDER_MODE (mode))
8946 if (HARD_REGNO_MODE_OK (regno, mode)
8947 && (mov_optab->handlers[(int) mode].insn_code
8948 != CODE_FOR_nothing))
8949 best_mode = mode;
8950
8951 mode = best_mode;
8952 if (mode == VOIDmode)
8953 abort ();
8954
8955 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8956 if (size % align != 0)
8957 size = CEIL (size, align) * align;
8958 size += GET_MODE_SIZE (mode);
8959 apply_result_mode[regno] = mode;
8960 }
8961 else
8962 apply_result_mode[regno] = VOIDmode;
8963
8964 /* Allow targets that use untyped_call and untyped_return to override
8965 the size so that machine-specific information can be stored here. */
8966 #ifdef APPLY_RESULT_SIZE
8967 size = APPLY_RESULT_SIZE;
8968 #endif
8969 }
8970 return size;
8971 }
8972
8973 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8974 /* Create a vector describing the result block RESULT. If SAVEP is true,
8975 the result block is used to save the values; otherwise it is used to
8976 restore the values. */
8977
8978 static rtx
8979 result_vector (savep, result)
8980 int savep;
8981 rtx result;
8982 {
8983 int regno, size, align, nelts;
8984 enum machine_mode mode;
8985 rtx reg, mem;
8986 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8987
8988 size = nelts = 0;
8989 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8990 if ((mode = apply_result_mode[regno]) != VOIDmode)
8991 {
8992 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8993 if (size % align != 0)
8994 size = CEIL (size, align) * align;
8995 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
8996 mem = change_address (result, mode,
8997 plus_constant (XEXP (result, 0), size));
8998 savevec[nelts++] = (savep
8999 ? gen_rtx_SET (VOIDmode, mem, reg)
9000 : gen_rtx_SET (VOIDmode, reg, mem));
9001 size += GET_MODE_SIZE (mode);
9002 }
9003 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9004 }
9005 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9006
9007 /* Save the state required to perform an untyped call with the same
9008 arguments as were passed to the current function. */
9009
9010 static rtx
9011 expand_builtin_apply_args ()
9012 {
9013 rtx registers;
9014 int size, align, regno;
9015 enum machine_mode mode;
9016
9017 /* Create a block where the arg-pointer, structure value address,
9018 and argument registers can be saved. */
9019 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9020
9021 /* Walk past the arg-pointer and structure value address. */
9022 size = GET_MODE_SIZE (Pmode);
9023 if (struct_value_rtx)
9024 size += GET_MODE_SIZE (Pmode);
9025
9026 /* Save each register used in calling a function to the block. */
9027 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9028 if ((mode = apply_args_mode[regno]) != VOIDmode)
9029 {
9030 rtx tem;
9031
9032 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9033 if (size % align != 0)
9034 size = CEIL (size, align) * align;
9035
9036 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9037
9038 #ifdef STACK_REGS
9039 /* For reg-stack.c's stack register housekeeping.
9040 Compare with a similar piece of code in function.c. */
9041
9042 emit_insn (gen_rtx_USE (mode, tem));
9043 #endif
9044
9045 emit_move_insn (change_address (registers, mode,
9046 plus_constant (XEXP (registers, 0),
9047 size)),
9048 tem);
9049 size += GET_MODE_SIZE (mode);
9050 }
9051
9052 /* Save the arg pointer to the block. */
9053 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9054 copy_to_reg (virtual_incoming_args_rtx));
9055 size = GET_MODE_SIZE (Pmode);
9056
9057 /* Save the structure value address unless this is passed as an
9058 "invisible" first argument. */
9059 if (struct_value_incoming_rtx)
9060 {
9061 emit_move_insn (change_address (registers, Pmode,
9062 plus_constant (XEXP (registers, 0),
9063 size)),
9064 copy_to_reg (struct_value_incoming_rtx));
9065 size += GET_MODE_SIZE (Pmode);
9066 }
9067
9068 /* Return the address of the block. */
9069 return copy_addr_to_reg (XEXP (registers, 0));
9070 }
9071
9072 /* Perform an untyped call and save the state required to perform an
9073 untyped return of whatever value was returned by the given function. */
9074
9075 static rtx
9076 expand_builtin_apply (function, arguments, argsize)
9077 rtx function, arguments, argsize;
9078 {
9079 int size, align, regno;
9080 enum machine_mode mode;
9081 rtx incoming_args, result, reg, dest, call_insn;
9082 rtx old_stack_level = 0;
9083 rtx call_fusage = 0;
9084
9085 /* Create a block where the return registers can be saved. */
9086 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9087
9088 /* ??? The argsize value should be adjusted here. */
9089
9090 /* Fetch the arg pointer from the ARGUMENTS block. */
9091 incoming_args = gen_reg_rtx (Pmode);
9092 emit_move_insn (incoming_args,
9093 gen_rtx_MEM (Pmode, arguments));
9094 #ifndef STACK_GROWS_DOWNWARD
9095 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9096 incoming_args, 0, OPTAB_LIB_WIDEN);
9097 #endif
9098
9099 /* Perform postincrements before actually calling the function. */
9100 emit_queue ();
9101
9102 /* Push a new argument block and copy the arguments. */
9103 do_pending_stack_adjust ();
9104
9105 /* Save the stack pointer, using the nonlocal variant if available. */
9106 #ifdef HAVE_save_stack_nonlocal
9107 if (HAVE_save_stack_nonlocal)
9108 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9109 else
9110 #endif
9111 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9112
9113 /* Push a block of memory onto the stack to store the memory arguments.
9114 Save the address in a register, and copy the memory arguments. ??? I
9115 haven't figured out how the calling convention macros affect this,
9116 but it's likely that the source and/or destination addresses in
9117 the block copy will need updating in machine-specific ways. */
9118 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9119 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9120 gen_rtx_MEM (BLKmode, incoming_args),
9121 argsize,
9122 PARM_BOUNDARY / BITS_PER_UNIT);
9123
9124 /* Refer to the argument block. */
9125 apply_args_size ();
9126 arguments = gen_rtx_MEM (BLKmode, arguments);
9127
9128 /* Walk past the arg-pointer and structure value address. */
9129 size = GET_MODE_SIZE (Pmode);
9130 if (struct_value_rtx)
9131 size += GET_MODE_SIZE (Pmode);
9132
9133 /* Restore each of the registers previously saved. Make USE insns
9134 for each of these registers for use in making the call. */
9135 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9136 if ((mode = apply_args_mode[regno]) != VOIDmode)
9137 {
9138 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9139 if (size % align != 0)
9140 size = CEIL (size, align) * align;
9141 reg = gen_rtx_REG (mode, regno);
9142 emit_move_insn (reg,
9143 change_address (arguments, mode,
9144 plus_constant (XEXP (arguments, 0),
9145 size)));
9146
9147 use_reg (&call_fusage, reg);
9148 size += GET_MODE_SIZE (mode);
9149 }
9150
9151 /* Restore the structure value address unless this is passed as an
9152 "invisible" first argument. */
9153 size = GET_MODE_SIZE (Pmode);
9154 if (struct_value_rtx)
9155 {
9156 rtx value = gen_reg_rtx (Pmode);
9157 emit_move_insn (value,
9158 change_address (arguments, Pmode,
9159 plus_constant (XEXP (arguments, 0),
9160 size)));
9161 emit_move_insn (struct_value_rtx, value);
9162 if (GET_CODE (struct_value_rtx) == REG)
9163 use_reg (&call_fusage, struct_value_rtx);
9164 size += GET_MODE_SIZE (Pmode);
9165 }
9166
9167 /* All arguments and registers used for the call are set up by now! */
9168 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9169
9170 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no work
9171 is needed there, and we don't want to load it into a register as an
9172 optimization, because prepare_call_address already did that when appropriate. */
9173 if (GET_CODE (function) != SYMBOL_REF)
9174 function = memory_address (FUNCTION_MODE, function);
9175
9176 /* Generate the actual call instruction and save the return value. */
9177 #ifdef HAVE_untyped_call
9178 if (HAVE_untyped_call)
9179 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9180 result, result_vector (1, result)));
9181 else
9182 #endif
9183 #ifdef HAVE_call_value
9184 if (HAVE_call_value)
9185 {
9186 rtx valreg = 0;
9187
9188 /* Locate the unique return register. It is not possible to
9189 express a call that sets more than one return register using
9190 call_value; use untyped_call for that. In fact, untyped_call
9191 only needs to save the return registers in the given block. */
9192 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9193 if ((mode = apply_result_mode[regno]) != VOIDmode)
9194 {
9195 if (valreg)
9196 abort (); /* HAVE_untyped_call required. */
9197 valreg = gen_rtx_REG (mode, regno);
9198 }
9199
9200 emit_call_insn (gen_call_value (valreg,
9201 gen_rtx_MEM (FUNCTION_MODE, function),
9202 const0_rtx, NULL_RTX, const0_rtx));
9203
9204 emit_move_insn (change_address (result, GET_MODE (valreg),
9205 XEXP (result, 0)),
9206 valreg);
9207 }
9208 else
9209 #endif
9210 abort ();
9211
9212 /* Find the CALL insn we just emitted. */
9213 for (call_insn = get_last_insn ();
9214 call_insn && GET_CODE (call_insn) != CALL_INSN;
9215 call_insn = PREV_INSN (call_insn))
9216 ;
9217
9218 if (! call_insn)
9219 abort ();
9220
9221 /* Put the register usage information on the CALL. If there is already
9222 some usage information, put ours at the end. */
9223 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9224 {
9225 rtx link;
9226
9227 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9228 link = XEXP (link, 1))
9229 ;
9230
9231 XEXP (link, 1) = call_fusage;
9232 }
9233 else
9234 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9235
9236 /* Restore the stack. */
9237 #ifdef HAVE_save_stack_nonlocal
9238 if (HAVE_save_stack_nonlocal)
9239 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9240 else
9241 #endif
9242 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9243
9244 /* Return the address of the result block. */
9245 return copy_addr_to_reg (XEXP (result, 0));
9246 }
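/* Illustration (hypothetical user code): the three untyped-call
   builtins are typically used together to forward a call unchanged:

       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) fn, args, 64);
       __builtin_return (res);

   where `fn' and the argument-block bound 64 are caller-supplied.  */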
9247
9248 /* Perform an untyped return. */
9249
9250 static void
9251 expand_builtin_return (result)
9252 rtx result;
9253 {
9254 int size, align, regno;
9255 enum machine_mode mode;
9256 rtx reg;
9257 rtx call_fusage = 0;
9258
9259 apply_result_size ();
9260 result = gen_rtx_MEM (BLKmode, result);
9261
9262 #ifdef HAVE_untyped_return
9263 if (HAVE_untyped_return)
9264 {
9265 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9266 emit_barrier ();
9267 return;
9268 }
9269 #endif
9270
9271 /* Restore the return value and note that each value is used. */
9272 size = 0;
9273 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9274 if ((mode = apply_result_mode[regno]) != VOIDmode)
9275 {
9276 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9277 if (size % align != 0)
9278 size = CEIL (size, align) * align;
9279 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9280 emit_move_insn (reg,
9281 change_address (result, mode,
9282 plus_constant (XEXP (result, 0),
9283 size)));
9284
9285 push_to_sequence (call_fusage);
9286 emit_insn (gen_rtx_USE (VOIDmode, reg));
9287 call_fusage = get_insns ();
9288 end_sequence ();
9289 size += GET_MODE_SIZE (mode);
9290 }
9291
9292 /* Put the USE insns before the return. */
9293 emit_insns (call_fusage);
9294
9295 /* Return whatever value was restored by jumping directly to the end
9296 of the function. */
9297 expand_null_return ();
9298 }
9299 \f
9300 /* Expand code for a post- or pre- increment or decrement
9301 and return the RTX for the result.
9302 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
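/* Illustration: for `y = x++' the value of the expression is the old
   `x', so the code below returns a copy of it and queues the insn
   that adds 1; for `y = ++x' the addition is performed first and the
   incremented value itself is returned.  */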
9303
9304 static rtx
9305 expand_increment (exp, post, ignore)
9306 register tree exp;
9307 int post, ignore;
9308 {
9309 register rtx op0, op1;
9310 register rtx temp, value;
9311 register tree incremented = TREE_OPERAND (exp, 0);
9312 optab this_optab = add_optab;
9313 int icode;
9314 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9315 int op0_is_copy = 0;
9316 int single_insn = 0;
9317 /* 1 means we can't store into OP0 directly,
9318 because it is a subreg narrower than a word,
9319 and we don't dare clobber the rest of the word. */
9320 int bad_subreg = 0;
9321
9322 /* Stabilize any component ref that might need to be
9323 evaluated more than once below. */
9324 if (!post
9325 || TREE_CODE (incremented) == BIT_FIELD_REF
9326 || (TREE_CODE (incremented) == COMPONENT_REF
9327 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9328 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9329 incremented = stabilize_reference (incremented);
9330 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9331 ones into save exprs so that they don't accidentally get evaluated
9332 more than once by the code below. */
9333 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9334 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9335 incremented = save_expr (incremented);
9336
9337 /* Compute the operands as RTX.
9338 Note whether OP0 is the actual lvalue or a copy of it:
9339 I believe it is a copy iff it is a register or subreg
9340 and insns were generated in computing it. */
9341
9342 temp = get_last_insn ();
9343 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9344
9345 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9346 in place but instead must do sign- or zero-extension during assignment,
9347 so we copy it into a new register and let the code below use it as
9348 a copy.
9349
9350 Note that we can safely modify this SUBREG since it is known not to be
9351 shared (it was made by the expand_expr call above). */
9352
9353 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9354 {
9355 if (post)
9356 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9357 else
9358 bad_subreg = 1;
9359 }
9360 else if (GET_CODE (op0) == SUBREG
9361 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9362 {
9363 /* We cannot increment this SUBREG in place. If we are
9364 post-incrementing, get a copy of the old value. Otherwise,
9365 just mark that we cannot increment in place. */
9366 if (post)
9367 op0 = copy_to_reg (op0);
9368 else
9369 bad_subreg = 1;
9370 }
9371
9372 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9373 && temp != get_last_insn ());
9374 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9375 EXPAND_MEMORY_USE_BAD);
9376
9377 /* Decide whether incrementing or decrementing. */
9378 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9379 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9380 this_optab = sub_optab;
9381
9382 /* Convert decrement by a constant into a negative increment. */
9383 if (this_optab == sub_optab
9384 && GET_CODE (op1) == CONST_INT)
9385 {
9386 op1 = GEN_INT (- INTVAL (op1));
9387 this_optab = add_optab;
9388 }
9389
9390 /* For a preincrement, see if we can do this with a single instruction. */
9391 if (!post)
9392 {
9393 icode = (int) this_optab->handlers[(int) mode].insn_code;
9394 if (icode != (int) CODE_FOR_nothing
9395 /* Make sure that OP0 is valid for operands 0 and 1
9396 of the insn we want to queue. */
9397 && (*insn_operand_predicate[icode][0]) (op0, mode)
9398 && (*insn_operand_predicate[icode][1]) (op0, mode)
9399 && (*insn_operand_predicate[icode][2]) (op1, mode))
9400 single_insn = 1;
9401 }
9402
9403 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9404 then we cannot just increment OP0. We must therefore contrive to
9405 increment the original value. Then, for postincrement, we can return
9406 OP0 since it is a copy of the old value. For preincrement, expand here
9407 unless we can do it with a single insn.
9408
9409 Likewise if storing directly into OP0 would clobber high bits
9410 we need to preserve (bad_subreg). */
9411 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9412 {
9413 /* This is the easiest way to increment the value wherever it is.
9414 Problems with multiple evaluation of INCREMENTED are prevented
9415 because either (1) it is a component_ref or preincrement,
9416 in which case it was stabilized above, or (2) it is an array_ref
9417 with constant index in an array in a register, which is
9418 safe to reevaluate. */
9419 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9420 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9421 ? MINUS_EXPR : PLUS_EXPR),
9422 TREE_TYPE (exp),
9423 incremented,
9424 TREE_OPERAND (exp, 1));
9425
9426 while (TREE_CODE (incremented) == NOP_EXPR
9427 || TREE_CODE (incremented) == CONVERT_EXPR)
9428 {
9429 newexp = convert (TREE_TYPE (incremented), newexp);
9430 incremented = TREE_OPERAND (incremented, 0);
9431 }
9432
9433 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9434 return post ? op0 : temp;
9435 }
9436
9437 if (post)
9438 {
9439 /* We have a true reference to the value in OP0.
9440 If there is an insn to add or subtract in this mode, queue it.
9441 Queueing the increment insn avoids the register shuffling
9442 that often results if we must increment now and first save
9443 the old value for subsequent use. */
9444
9445 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9446 op0 = stabilize (op0);
9447 #endif
9448
9449 icode = (int) this_optab->handlers[(int) mode].insn_code;
9450 if (icode != (int) CODE_FOR_nothing
9451 /* Make sure that OP0 is valid for operands 0 and 1
9452 of the insn we want to queue. */
9453 && (*insn_operand_predicate[icode][0]) (op0, mode)
9454 && (*insn_operand_predicate[icode][1]) (op0, mode))
9455 {
9456 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9457 op1 = force_reg (mode, op1);
9458
9459 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9460 }
9461 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9462 {
9463 rtx addr = (general_operand (XEXP (op0, 0), mode)
9464 ? force_reg (Pmode, XEXP (op0, 0))
9465 : copy_to_reg (XEXP (op0, 0)));
9466 rtx temp, result;
9467
9468 op0 = change_address (op0, VOIDmode, addr);
9469 temp = force_reg (GET_MODE (op0), op0);
9470 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9471 op1 = force_reg (mode, op1);
9472
9473 /* The increment queue is LIFO, thus we have to `queue'
9474 the instructions in reverse order. */
9475 enqueue_insn (op0, gen_move_insn (op0, temp));
9476 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9477 return result;
9478 }
9479 }
9480
9481 /* Preincrement, or we can't increment with one simple insn. */
9482 if (post)
9483 /* Save a copy of the value before inc or dec, to return it later. */
9484 temp = value = copy_to_reg (op0);
9485 else
9486 /* Arrange to return the incremented value. */
9487 /* Copy the rtx because expand_binop will protect from the queue,
9488 and the results of that would be invalid for us to return
9489 if our caller does emit_queue before using our result. */
9490 temp = copy_rtx (value = op0);
9491
9492 /* Increment however we can. */
9493 op1 = expand_binop (mode, this_optab, value, op1,
9494 flag_check_memory_usage ? NULL_RTX : op0,
9495 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9496 /* Make sure the value is stored into OP0. */
9497 if (op1 != op0)
9498 emit_move_insn (op0, op1);
9499
9500 return temp;
9501 }
9502 \f
9503 /* Expand all function calls contained within EXP, innermost ones first.
9504 But don't look within expressions that have sequence points.
9505 For each CALL_EXPR, record the rtx for its value
9506 in the CALL_EXPR_RTL field. */
9507
9508 static void
9509 preexpand_calls (exp)
9510 tree exp;
9511 {
9512 register int nops, i;
9513 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9514
9515 if (! do_preexpand_calls)
9516 return;
9517
9518 /* Only expressions and references can contain calls. */
9519
9520 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9521 return;
9522
9523 switch (TREE_CODE (exp))
9524 {
9525 case CALL_EXPR:
9526 /* Do nothing if already expanded. */
9527 if (CALL_EXPR_RTL (exp) != 0
9528 /* Do nothing if the call returns a variable-sized object. */
9529 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9530 /* Do nothing to built-in functions. */
9531 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9532 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9533 == FUNCTION_DECL)
9534 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9535 return;
9536
9537 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9538 return;
9539
9540 case COMPOUND_EXPR:
9541 case COND_EXPR:
9542 case TRUTH_ANDIF_EXPR:
9543 case TRUTH_ORIF_EXPR:
9544 /* If we find one of these, then we can be sure
9545 the adjust will be done for it (since it makes jumps).
9546 Do it now, so that if this is inside an argument
9547 of a function, we don't get the stack adjustment
9548 after some other args have already been pushed. */
9549 do_pending_stack_adjust ();
9550 return;
9551
9552 case BLOCK:
9553 case RTL_EXPR:
9554 case WITH_CLEANUP_EXPR:
9555 case CLEANUP_POINT_EXPR:
9556 case TRY_CATCH_EXPR:
9557 return;
9558
9559 case SAVE_EXPR:
9560 if (SAVE_EXPR_RTL (exp) != 0)
9561 return;
9562
9563 default:
9564 break;
9565 }
9566
9567 nops = tree_code_length[(int) TREE_CODE (exp)];
9568 for (i = 0; i < nops; i++)
9569 if (TREE_OPERAND (exp, i) != 0)
9570 {
9571 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9572 if (type == 'e' || type == '<' || type == '1' || type == '2'
9573 || type == 'r')
9574 preexpand_calls (TREE_OPERAND (exp, i));
9575 }
9576 }
9577 \f
9578 /* At the start of a function, record that we have no previously-pushed
9579 arguments waiting to be popped. */
9580
9581 void
9582 init_pending_stack_adjust ()
9583 {
9584 pending_stack_adjust = 0;
9585 }
9586
9587 /* When exiting from function, if safe, clear out any pending stack adjust
9588 so the adjustment won't get done.
9589
9590 Note, if the current function calls alloca, then it must have a
9591 frame pointer regardless of the value of flag_omit_frame_pointer. */
9592
9593 void
9594 clear_pending_stack_adjust ()
9595 {
9596 #ifdef EXIT_IGNORE_STACK
9597 if (optimize > 0
9598 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9599 && EXIT_IGNORE_STACK
9600 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9601 && ! flag_inline_functions)
9602 pending_stack_adjust = 0;
9603 #endif
9604 }
9605
9606 /* Pop any previously-pushed arguments that have not been popped yet. */
9607
9608 void
9609 do_pending_stack_adjust ()
9610 {
9611 if (inhibit_defer_pop == 0)
9612 {
9613 if (pending_stack_adjust != 0)
9614 adjust_stack (GEN_INT (pending_stack_adjust));
9615 pending_stack_adjust = 0;
9616 }
9617 }
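/* Illustration: if two calls each left 8 bytes of pushed arguments
   unpopped, pending_stack_adjust has accumulated 16 and the single
   adjust_stack above pops both at once.  */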
9618 \f
9619 /* Expand conditional expressions. */
9620
9621 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9622 LABEL is an rtx of code CODE_LABEL, in this function and all the
9623 functions here. */
9624
9625 void
9626 jumpifnot (exp, label)
9627 tree exp;
9628 rtx label;
9629 {
9630 do_jump (exp, label, NULL_RTX);
9631 }
9632
9633 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9634
9635 void
9636 jumpif (exp, label)
9637 tree exp;
9638 rtx label;
9639 {
9640 do_jump (exp, NULL_RTX, label);
9641 }
9642
9643 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9644 the result is zero, or IF_TRUE_LABEL if the result is one.
9645 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9646 meaning fall through in that case.
9647
9648 do_jump always does any pending stack adjust except when it does not
9649 actually perform a jump. An example where there is no jump
9650 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9651
9652 This function is responsible for optimizing cases such as
9653 &&, || and comparison operators in EXP. */
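/* Illustration: for `if (a && b) stmt;' the TRUTH_ANDIF_EXPR case
   below arranges

       jump to Lfalse if `a' is zero
       jump to Lfalse if `b' is zero
       ... code for stmt ...
     Lfalse:

   so that `b' is never evaluated once `a' is known to be zero.  */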
9654
9655 void
9656 do_jump (exp, if_false_label, if_true_label)
9657 tree exp;
9658 rtx if_false_label, if_true_label;
9659 {
9660 register enum tree_code code = TREE_CODE (exp);
9661 /* Some cases need to create a label to jump to
9662 in order to properly fall through.
9663 These cases set DROP_THROUGH_LABEL nonzero. */
9664 rtx drop_through_label = 0;
9665 rtx temp;
9666 rtx comparison = 0;
9667 int i;
9668 tree type;
9669 enum machine_mode mode;
9670
9671 emit_queue ();
9672
9673 switch (code)
9674 {
9675 case ERROR_MARK:
9676 break;
9677
9678 case INTEGER_CST:
9679 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9680 if (temp)
9681 emit_jump (temp);
9682 break;
9683
9684 #if 0
9685 /* This is not true with #pragma weak */
9686 case ADDR_EXPR:
9687 /* The address of something can never be zero. */
9688 if (if_true_label)
9689 emit_jump (if_true_label);
9690 break;
9691 #endif
9692
9693 case NOP_EXPR:
9694 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9695 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9696 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9697 goto normal;
9698 case CONVERT_EXPR:
9699 /* If we are narrowing the operand, we have to do the compare in the
9700 narrower mode. */
9701 if ((TYPE_PRECISION (TREE_TYPE (exp))
9702 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9703 goto normal;
9704 case NON_LVALUE_EXPR:
9705 case REFERENCE_EXPR:
9706 case ABS_EXPR:
9707 case NEGATE_EXPR:
9708 case LROTATE_EXPR:
9709 case RROTATE_EXPR:
9710 /* These cannot change zero->non-zero or vice versa. */
9711 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9712 break;
9713
9714 #if 0
9715 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9716 a test and can be longer if the test is eliminated. */
9717 case PLUS_EXPR:
9718 /* Reduce to minus. */
9719 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9720 TREE_OPERAND (exp, 0),
9721 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9722 TREE_OPERAND (exp, 1))));
9723 /* Process as MINUS. */
9724 #endif
9725
9726 case MINUS_EXPR:
9727 /* Non-zero iff operands of minus differ. */
9728 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9729 TREE_OPERAND (exp, 0),
9730 TREE_OPERAND (exp, 1)),
9731 NE, NE);
9732 break;
9733
9734 case BIT_AND_EXPR:
9735 /* If we are AND'ing with a small constant, do this comparison in the
9736 smallest type that fits. If the machine doesn't have comparisons
9737 that small, it will be converted back to the wider comparison.
9738 This helps if we are testing the sign bit of a narrower object.
9739 combine can't do this for us because it can't know whether a
9740 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9741
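      /* Illustration: for `x & 0x80' with a 32-bit `x', floor_log2 yields
	 7, so an 8-bit integer mode suffices and the test can be done as a
	 QImode comparison when the target supports one.  */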
9742 if (! SLOW_BYTE_ACCESS
9743 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9744 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9745 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9746 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9747 && (type = type_for_mode (mode, 1)) != 0
9748 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9749 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9750 != CODE_FOR_nothing))
9751 {
9752 do_jump (convert (type, exp), if_false_label, if_true_label);
9753 break;
9754 }
9755 goto normal;
9756
9757 case TRUTH_NOT_EXPR:
9758 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9759 break;
9760
9761 case TRUTH_ANDIF_EXPR:
9762 if (if_false_label == 0)
9763 if_false_label = drop_through_label = gen_label_rtx ();
9764 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9765 start_cleanup_deferral ();
9766 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9767 end_cleanup_deferral ();
9768 break;
9769
9770 case TRUTH_ORIF_EXPR:
9771 if (if_true_label == 0)
9772 if_true_label = drop_through_label = gen_label_rtx ();
9773 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9774 start_cleanup_deferral ();
9775 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9776 end_cleanup_deferral ();
9777 break;
9778
9779 case COMPOUND_EXPR:
9780 push_temp_slots ();
9781 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9782 preserve_temp_slots (NULL_RTX);
9783 free_temp_slots ();
9784 pop_temp_slots ();
9785 emit_queue ();
9786 do_pending_stack_adjust ();
9787 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9788 break;
9789
9790 case COMPONENT_REF:
9791 case BIT_FIELD_REF:
9792 case ARRAY_REF:
9793 {
9794 int bitsize, bitpos, unsignedp;
9795 enum machine_mode mode;
9796 tree type;
9797 tree offset;
9798 int volatilep = 0;
9799 int alignment;
9800
9801 /* Get description of this reference. We don't actually care
9802 about the underlying object here. */
9803 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9804 &mode, &unsignedp, &volatilep,
9805 &alignment);
9806
9807 type = type_for_size (bitsize, unsignedp);
9808 if (! SLOW_BYTE_ACCESS
9809 && type != 0 && bitsize >= 0
9810 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9811 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9812 != CODE_FOR_nothing))
9813 {
9814 do_jump (convert (type, exp), if_false_label, if_true_label);
9815 break;
9816 }
9817 goto normal;
9818 }
9819
9820 case COND_EXPR:
9821 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9822 if (integer_onep (TREE_OPERAND (exp, 1))
9823 && integer_zerop (TREE_OPERAND (exp, 2)))
9824 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9825
9826 else if (integer_zerop (TREE_OPERAND (exp, 1))
9827 && integer_onep (TREE_OPERAND (exp, 2)))
9828 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9829
9830 else
9831 {
9832 register rtx label1 = gen_label_rtx ();
9833 drop_through_label = gen_label_rtx ();
9834
9835 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9836
9837 start_cleanup_deferral ();
9838 /* Now the THEN-expression. */
9839 do_jump (TREE_OPERAND (exp, 1),
9840 if_false_label ? if_false_label : drop_through_label,
9841 if_true_label ? if_true_label : drop_through_label);
9842 /* In case the do_jump just above never jumps. */
9843 do_pending_stack_adjust ();
9844 emit_label (label1);
9845
9846 /* Now the ELSE-expression. */
9847 do_jump (TREE_OPERAND (exp, 2),
9848 if_false_label ? if_false_label : drop_through_label,
9849 if_true_label ? if_true_label : drop_through_label);
9850 end_cleanup_deferral ();
9851 }
9852 break;
9853
9854 case EQ_EXPR:
9855 {
9856 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9857
9858 if (integer_zerop (TREE_OPERAND (exp, 1)))
9859 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9860 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9861 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9862 do_jump
9863 (fold
9864 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9865 fold (build (EQ_EXPR, TREE_TYPE (exp),
9866 fold (build1 (REALPART_EXPR,
9867 TREE_TYPE (inner_type),
9868 TREE_OPERAND (exp, 0))),
9869 fold (build1 (REALPART_EXPR,
9870 TREE_TYPE (inner_type),
9871 TREE_OPERAND (exp, 1))))),
9872 fold (build (EQ_EXPR, TREE_TYPE (exp),
9873 fold (build1 (IMAGPART_EXPR,
9874 TREE_TYPE (inner_type),
9875 TREE_OPERAND (exp, 0))),
9876 fold (build1 (IMAGPART_EXPR,
9877 TREE_TYPE (inner_type),
9878 TREE_OPERAND (exp, 1))))))),
9879 if_false_label, if_true_label);
9880 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9881 && !can_compare_p (TYPE_MODE (inner_type)))
9882 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9883 else
9884 comparison = compare (exp, EQ, EQ);
9885 break;
9886 }
9887
9888 case NE_EXPR:
9889 {
9890 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9891
9892 if (integer_zerop (TREE_OPERAND (exp, 1)))
9893 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9894 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9895 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9896 do_jump
9897 (fold
9898 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9899 fold (build (NE_EXPR, TREE_TYPE (exp),
9900 fold (build1 (REALPART_EXPR,
9901 TREE_TYPE (inner_type),
9902 TREE_OPERAND (exp, 0))),
9903 fold (build1 (REALPART_EXPR,
9904 TREE_TYPE (inner_type),
9905 TREE_OPERAND (exp, 1))))),
9906 fold (build (NE_EXPR, TREE_TYPE (exp),
9907 fold (build1 (IMAGPART_EXPR,
9908 TREE_TYPE (inner_type),
9909 TREE_OPERAND (exp, 0))),
9910 fold (build1 (IMAGPART_EXPR,
9911 TREE_TYPE (inner_type),
9912 TREE_OPERAND (exp, 1))))))),
9913 if_false_label, if_true_label);
9914 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9915 && !can_compare_p (TYPE_MODE (inner_type)))
9916 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9917 else
9918 comparison = compare (exp, NE, NE);
9919 break;
9920 }
9921
9922 case LT_EXPR:
9923 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9924 == MODE_INT)
9925 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9926 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9927 else
9928 comparison = compare (exp, LT, LTU);
9929 break;
9930
9931 case LE_EXPR:
9932 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9933 == MODE_INT)
9934 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9935 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9936 else
9937 comparison = compare (exp, LE, LEU);
9938 break;
9939
9940 case GT_EXPR:
9941 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9942 == MODE_INT)
9943 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9944 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9945 else
9946 comparison = compare (exp, GT, GTU);
9947 break;
9948
9949 case GE_EXPR:
9950 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9951 == MODE_INT)
9952 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9953 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9954 else
9955 comparison = compare (exp, GE, GEU);
9956 break;
9957
9958 default:
9959 normal:
9960 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9961 #if 0
9962 /* This is not needed any more and causes poor code since it causes
9963 comparisons and tests from non-SI objects to have different code
9964 sequences. */
9965 /* Copy to register to avoid generating bad insns by cse
9966 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9967 if (!cse_not_expected && GET_CODE (temp) == MEM)
9968 temp = copy_to_reg (temp);
9969 #endif
9970 do_pending_stack_adjust ();
9971 if (GET_CODE (temp) == CONST_INT)
9972 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9973 else if (GET_CODE (temp) == LABEL_REF)
9974 comparison = const_true_rtx;
9975 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9976 && !can_compare_p (GET_MODE (temp)))
9977 /* Note swapping the labels gives us not-equal. */
9978 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9979 else if (GET_MODE (temp) != VOIDmode)
9980 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9981 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9982 GET_MODE (temp), NULL_RTX, 0);
9983 else
9984 abort ();
9985 }
9986
9987 /* Do any postincrements in the expression that was tested. */
9988 emit_queue ();
9989
9990 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9991 straight into a conditional jump instruction as the jump condition.
9992 Otherwise, all the work has been done already. */
9993
9994 if (comparison == const_true_rtx)
9995 {
9996 if (if_true_label)
9997 emit_jump (if_true_label);
9998 }
9999 else if (comparison == const0_rtx)
10000 {
10001 if (if_false_label)
10002 emit_jump (if_false_label);
10003 }
10004 else if (comparison)
10005 do_jump_for_compare (comparison, if_false_label, if_true_label);
10006
10007 if (drop_through_label)
10008 {
10009 /* If do_jump produces code that might be jumped around,
10010 do any stack adjusts from that code, before the place
10011 where control merges in. */
10012 do_pending_stack_adjust ();
10013 emit_label (drop_through_label);
10014 }
10015 }
10016 \f
10017 /* Given a comparison expression EXP for values too wide to be compared
10018 with one insn, test the comparison and jump to the appropriate label.
10019 The code of EXP is ignored; we always test GT if SWAP is 0,
10020 and LT if SWAP is 1. */
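/* Illustration: comparing two DImode values on a 32-bit target takes
   two passes through the loop below: if the high words differ, the
   (signed or unsigned) GT test on them settles the result; only when
   they are equal are the low words compared, always unsigned.  */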
10021
10022 static void
10023 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10024 tree exp;
10025 int swap;
10026 rtx if_false_label, if_true_label;
10027 {
10028 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10029 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10030 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10031 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10032 rtx drop_through_label = 0;
10033 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10034 int i;
10035
10036 if (! if_true_label || ! if_false_label)
10037 drop_through_label = gen_label_rtx ();
10038 if (! if_true_label)
10039 if_true_label = drop_through_label;
10040 if (! if_false_label)
10041 if_false_label = drop_through_label;
10042
10043 /* Compare a word at a time, high order first. */
10044 for (i = 0; i < nwords; i++)
10045 {
10046 rtx comp;
10047 rtx op0_word, op1_word;
10048
10049 if (WORDS_BIG_ENDIAN)
10050 {
10051 op0_word = operand_subword_force (op0, i, mode);
10052 op1_word = operand_subword_force (op1, i, mode);
10053 }
10054 else
10055 {
10056 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10057 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10058 }
10059
10060 /* All but high-order word must be compared as unsigned. */
10061 comp = compare_from_rtx (op0_word, op1_word,
10062 (unsignedp || i > 0) ? GTU : GT,
10063 unsignedp, word_mode, NULL_RTX, 0);
10064 if (comp == const_true_rtx)
10065 emit_jump (if_true_label);
10066 else if (comp != const0_rtx)
10067 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10068
10069 /* Consider lower words only if these are equal. */
10070 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10071 NULL_RTX, 0);
10072 if (comp == const_true_rtx)
10073 emit_jump (if_false_label);
10074 else if (comp != const0_rtx)
10075 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10076 }
10077
10078 if (if_false_label)
10079 emit_jump (if_false_label);
10080 if (drop_through_label)
10081 emit_label (drop_through_label);
10082 }
10083
10084 /* Compare OP0 with OP1, word at a time, in mode MODE.
10085 UNSIGNEDP says to do unsigned comparison.
10086 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10087
10088 void
10089 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10090 enum machine_mode mode;
10091 int unsignedp;
10092 rtx op0, op1;
10093 rtx if_false_label, if_true_label;
10094 {
10095 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10096 rtx drop_through_label = 0;
10097 int i;
10098
10099 if (! if_true_label || ! if_false_label)
10100 drop_through_label = gen_label_rtx ();
10101 if (! if_true_label)
10102 if_true_label = drop_through_label;
10103 if (! if_false_label)
10104 if_false_label = drop_through_label;
10105
10106 /* Compare a word at a time, high order first. */
10107 for (i = 0; i < nwords; i++)
10108 {
10109 rtx comp;
10110 rtx op0_word, op1_word;
10111
10112 if (WORDS_BIG_ENDIAN)
10113 {
10114 op0_word = operand_subword_force (op0, i, mode);
10115 op1_word = operand_subword_force (op1, i, mode);
10116 }
10117 else
10118 {
10119 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10120 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10121 }
10122
10123 /* All but high-order word must be compared as unsigned. */
10124 comp = compare_from_rtx (op0_word, op1_word,
10125 (unsignedp || i > 0) ? GTU : GT,
10126 unsignedp, word_mode, NULL_RTX, 0);
10127 if (comp == const_true_rtx)
10128 emit_jump (if_true_label);
10129 else if (comp != const0_rtx)
10130 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10131
10132 /* Consider lower words only if these are equal. */
10133 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10134 NULL_RTX, 0);
10135 if (comp == const_true_rtx)
10136 emit_jump (if_false_label);
10137 else if (comp != const0_rtx)
10138 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10139 }
10140
10141 if (if_false_label)
10142 emit_jump (if_false_label);
10143 if (drop_through_label)
10144 emit_label (drop_through_label);
10145 }
10146
10147 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10148 with one insn, test the comparison and jump to the appropriate label. */
10149
10150 static void
10151 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10152 tree exp;
10153 rtx if_false_label, if_true_label;
10154 {
10155 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10156 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10157 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10158 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10159 int i;
10160 rtx drop_through_label = 0;
10161
10162 if (! if_false_label)
10163 drop_through_label = if_false_label = gen_label_rtx ();
10164
10165 for (i = 0; i < nwords; i++)
10166 {
10167 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10168 operand_subword_force (op1, i, mode),
10169 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10170 word_mode, NULL_RTX, 0);
10171 if (comp == const_true_rtx)
10172 emit_jump (if_false_label);
10173 else if (comp != const0_rtx)
10174 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10175 }
10176
10177 if (if_true_label)
10178 emit_jump (if_true_label);
10179 if (drop_through_label)
10180 emit_label (drop_through_label);
10181 }
10182 \f
10183 /* Jump according to whether OP0 is 0.
10184 We assume that OP0 has an integer mode that is too wide
10185 for the available compare insns. */
10186
10187 static void
10188 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10189 rtx op0;
10190 rtx if_false_label, if_true_label;
10191 {
10192 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10193 rtx part;
10194 int i;
10195 rtx drop_through_label = 0;
10196
10197 /* The fastest way of doing this comparison on almost any machine is to
10198 "or" all the words and compare the result. If all have to be loaded
10199 from memory and this is a very wide item, it's possible this may
10200 be slower, but that's highly unlikely. */
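  /* Illustration: for a DImode OP0 on a 32-bit target this computes
     `low | high' into PART and then compares that single word with
     zero: one IOR and one compare instead of two separate compares
     and branches.  */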
10201
10202 part = gen_reg_rtx (word_mode);
10203 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10204 for (i = 1; i < nwords && part != 0; i++)
10205 part = expand_binop (word_mode, ior_optab, part,
10206 operand_subword_force (op0, i, GET_MODE (op0)),
10207 part, 1, OPTAB_WIDEN);
10208
10209 if (part != 0)
10210 {
10211 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10212 NULL_RTX, 0);
10213
10214 if (comp == const_true_rtx)
10215 emit_jump (if_false_label);
10216 else if (comp == const0_rtx)
10217 emit_jump (if_true_label);
10218 else
10219 do_jump_for_compare (comp, if_false_label, if_true_label);
10220
10221 return;
10222 }
10223
10224 /* If we couldn't do the "or" simply, do this with a series of compares. */
10225 if (! if_false_label)
10226 drop_through_label = if_false_label = gen_label_rtx ();
10227
10228 for (i = 0; i < nwords; i++)
10229 {
10230 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10231 GET_MODE (op0)),
10232 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10233 if (comp == const_true_rtx)
10234 emit_jump (if_false_label);
10235 else if (comp != const0_rtx)
10236 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10237 }
10238
10239 if (if_true_label)
10240 emit_jump (if_true_label);
10241
10242 if (drop_through_label)
10243 emit_label (drop_through_label);
10244 }
10245
10246 /* Given a comparison expression in rtl form, output conditional branches to
10247 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10248
10249 static void
10250 do_jump_for_compare (comparison, if_false_label, if_true_label)
10251 rtx comparison, if_false_label, if_true_label;
10252 {
10253 if (if_true_label)
10254 {
10255 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10256 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10257 else
10258 abort ();
10259
10260 if (if_false_label)
10261 emit_jump (if_false_label);
10262 }
10263 else if (if_false_label)
10264 {
10265 rtx insn;
10266 rtx prev = get_last_insn ();
10267 rtx branch = 0;
10268
10269 /* Output the branch with the opposite condition. Then try to invert
10270 what is generated. If more than one insn is a branch, or if the
10271 branch is not the last insn written, abort. If we can't invert
10272 the branch, make a true label, redirect this jump to it,
10273 emit a jump to the false label and define the true label. */
10274
10275 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10276 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10277 else
10278 abort ();
10279
10280 /* Here we get the first insn that was just emitted. It used to be the
10281 case that, on some machines, emitting the branch would discard
10282 the previous compare insn and emit a replacement. This isn't
10283 done anymore, but abort if we see that PREV is deleted. */
10284
10285 if (prev == 0)
10286 insn = get_insns ();
10287 else if (INSN_DELETED_P (prev))
10288 abort ();
10289 else
10290 insn = NEXT_INSN (prev);
10291
10292 for (; insn; insn = NEXT_INSN (insn))
10293 if (GET_CODE (insn) == JUMP_INSN)
10294 {
10295 if (branch)
10296 abort ();
10297 branch = insn;
10298 }
10299
10300 if (branch != get_last_insn ())
10301 abort ();
10302
10303 JUMP_LABEL (branch) = if_false_label;
10304 if (! invert_jump (branch, if_false_label))
10305 {
10306 if_true_label = gen_label_rtx ();
10307 redirect_jump (branch, if_true_label);
10308 emit_jump (if_false_label);
10309 emit_label (if_true_label);
10310 }
10311 }
10312 }
10313 \f
10314 /* Generate code for a comparison expression EXP
10315 (including code to compute the values to be compared)
10316 and set (CC0) according to the result.
10317 SIGNED_CODE should be the rtx operation for this comparison for
10318 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10319
10320 We force a stack adjustment unless there are currently
10321 things pushed on the stack that aren't yet used. */
10322
10323 static rtx
10324 compare (exp, signed_code, unsigned_code)
10325 register tree exp;
10326 enum rtx_code signed_code, unsigned_code;
10327 {
10328 register rtx op0
10329 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10330 register rtx op1
10331 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10332 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10333 register enum machine_mode mode = TYPE_MODE (type);
10334 int unsignedp = TREE_UNSIGNED (type);
10335 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10336
10337 #ifdef HAVE_canonicalize_funcptr_for_compare
10338 /* If function pointers need to be "canonicalized" before they can
10339 be reliably compared, then canonicalize them. */
10340 if (HAVE_canonicalize_funcptr_for_compare
10341 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10342 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10343 == FUNCTION_TYPE))
10344 {
10345 rtx new_op0 = gen_reg_rtx (mode);
10346
10347 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10348 op0 = new_op0;
10349 }
10350
10351 if (HAVE_canonicalize_funcptr_for_compare
10352 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10353 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10354 == FUNCTION_TYPE))
10355 {
10356 rtx new_op1 = gen_reg_rtx (mode);
10357
10358 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10359 op1 = new_op1;
10360 }
10361 #endif
10362
10363 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10364 ((mode == BLKmode)
10365 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10366 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10367 }
10368
10369 /* Like compare but expects the values to compare as two rtx's.
10370 The decision as to signed or unsigned comparison must be made by the caller.
10371
10372 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10373 compared.
10374
10375 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10376 size of MODE should be used. */
10377
10378 rtx
10379 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10380 register rtx op0, op1;
10381 enum rtx_code code;
10382 int unsignedp;
10383 enum machine_mode mode;
10384 rtx size;
10385 int align;
10386 {
10387 rtx tem;
10388
10389 /* If one operand is constant, make it the second one. Only do this
10390 if the other operand is not constant as well. */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
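  /* E.g., a QImode equality test against (const_int -1) would become an
     unsigned test against (const_int 255), since only the low 8 bits of
     the mode participate in the comparison.  */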
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.
   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it fails, we have to simulate the store-flag with a
   set/jump/set sequence.  */
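
/* A store-flag (scc) insn computes a comparison result directly into a
   register as 0 or 1 without branching; on machines that provide one,
   a test such as "x > y" can often be a single instruction.  */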

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
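  /* For instance, a signed "x < 1" becomes "x <= 0", and a signed
     "x > -1" becomes "x >= 0", so that the comparison-with-zero
     shortcuts below can recognize them.  */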

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
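  /* For example, "(x & 8) != 0" becomes "(x >> 3) & 1", and
     "(x & 8) == 0" becomes "((x >> 3) & 1) ^ 1"; when the tested bit is
     the most significant bit of the mode, the final AND can be
     dropped.  */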

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift by a constant and the shift count plus
         BITNUM stays within the precision of the type, fold the shift
         into BITNUM and test the corresponding bit of the shifted
         operand instead.  */
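      /* E.g., testing "((x >> 2) & 4) != 0" is the same as testing bit 4
         of x, i.e. "(x & 16) != 0".  */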

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
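      /* emit_store_flag can open-code sign-bit tests such as "x < 0",
         and "x == 0" / "x != 0" via abs or ffs tricks, which is what
         the conditions below check for.  */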
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything at all, it will succeed, and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));
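
  /* Load the value for a true result first, branch past the correction
     below when the comparison holds, and otherwise overwrite TARGET
     with the value for a false result.  */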

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
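  /* E.g., for case values 3..10, INDEX arrives here as value-3 and RANGE
     is 7.  Viewed as unsigned, INDEX > 7 exactly when the original value
     is below 3 (the subtraction wrapped around) or above 10.  */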

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
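  /* Compute the address of the table entry:
     TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE).  */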
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
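  /* Fetch the table entry into a register; the table is never written,
     so the MEM can be marked unchanging.  */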
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */