/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
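
/* For example, CEIL (10, 4) is (10 + 3) / 4 == 3, the number of 4-byte
   units needed to hold 10 bytes.  The multiword code below uses it this
   way, e.g. CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD).  */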

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
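
/* For instance, with the common STACK_BOUNDARY of 64 this is 64 / 8 == 8,
   i.e. the stack stays 8-byte aligned.  */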

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since the code being emitted is itself a
   memory-usage check.  Used when flag_check_memory_usage is true, to
   avoid infinite recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};
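
/* In the structure above, AUTINC_TO and AUTINC_FROM are nonzero when the
   corresponding address is already an auto-increment expression.
   EXPLICIT_INC_TO and EXPLICIT_INC_FROM are -1 or 1 when move_by_pieces
   arranges to emit an explicit add insn around each move.  REVERSE means
   the copy runs from high addresses down, so OFFSET starts at LEN and
   counts down to zero (see move_by_pieces and move_by_pieces_1 below).  */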

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
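
/* As an illustrative example: on a hypothetical 32-bit target with no
   movstrM patterns, MOVE_RATIO is 15, so a 16-byte copy at word alignment
   costs only four SImode moves, and emit_block_move below will expand it
   with move_by_pieces rather than call a library routine.  */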

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
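
/* As a sketch (assuming a typical 32-bit target), the probe pattern built
   above is effectively

     (set (reg:SI 3) (mem:SI (reg:SI sp)))

   for some hard register 3; if recog accepts it, direct_load[SImode]
   becomes 1 and SImode fields may later be loaded straight from memory.  */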

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}
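
/* A sketch of the queue's life cycle, with hypothetical names: expanding
   V++ calls enqueue_insn (v, gen_add2_insn (v, const1_rtx)), which returns
   a QUEUED rtx standing for the pre-increment value of V.  Consumers pass
   that rtx through protect_from_queue (below) before using it in an insn,
   and emit_queue eventually emits the deferred add itself.  */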

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
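
/* A minimal usage sketch (with a hypothetical pseudo-register number):
   to widen an unsigned QImode value into a fresh SImode one,

     rtx byte = gen_rtx_REG (QImode, 100);
     rtx word = convert_to_mode (SImode, byte, 1);

   The result may be a lowpart of BYTE referred to in place, or a new
   pseudo set up through convert_move, which zero-extends here because
   UNSIGNEDP is 1.  */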

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
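
/* A worked constant case: convert_modes (SImode, QImode, constm1_rtx, 1)
   reaches the CONST_INT path above with VAL == -1 and WIDTH == 8, masks
   VAL down to 0xff, and returns GEN_INT (0xff) instead of letting
   gen_lowpart hand back a sign-extended -1.  */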
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
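
/* A worked example, assuming a 32-bit target where MOVE_MAX == 4 and the
   operands are word-aligned: for LEN == 7 the loop above emits one SImode
   move for bytes 0-3, one HImode move for bytes 4-5, and one QImode move
   for byte 6.  move_by_pieces_ninsns below counts the same walk:
   7/4 + 3/2 + 1/1 == 3 insns.  */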

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than
		 HOST_BITS_PER_WIDE_INT here because if SIZE is less than
		 the mode mask, as it is returned by the macro, it will
		 definitely be less than the actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
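
/* A minimal call sketch with hypothetical operands: given BLKmode MEMs
   DEST and SRC known to be 4-byte aligned,

     emit_block_move (dest, src, GEN_INT (32), 4);

   expands to eight SImode moves via move_by_pieces when that count is
   below MOVE_RATIO, and otherwise tries a movstrM pattern and finally
   the memcpy (or bcopy) library call.  */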
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  MODE is the mode of X.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
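
/* A worked instance of the BYTES_BIG_ENDIAN case above, assuming 4-byte
   words: for SIZE == 3 the payload occupies the low 3 bytes of the
   register, so it is shifted left by (4 - 3) * 8 == 8 bits; the full-word
   store then places those bytes at the first three addresses of X, as
   big-endian layout requires.  */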

/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow TARGET_REG to be smaller than the input register, to
	     handle the AIX case of 4 DF arguments after a single SI arg:
	     the last DF argument loads only one word into the integer
	     registers, but loads a full DF value into the float
	     registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
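
/* For concreteness, in a hypothetical ABI the PARALLEL X might be

     (parallel [(expr_list (reg:SI 3) (const_int 0))
		(expr_list (reg:SI 4) (const_int 4))])

   meaning bytes 0-3 of Y are loaded into register 3 and bytes 4-7 into
   register 4; the loop above reads each (register, byte offset) pair out
   of these EXPR_LIST entries.  */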

/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
\f
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

1989 /* If copying requires more than two move insns,
1990 copy addresses to registers (to make displacements shorter)
1991 and use post-increment if available. */
1992 if (!data.autinc_to
1993 && move_by_pieces_ninsns (len, align) > 2)
1994 {
1995 #ifdef HAVE_PRE_DECREMENT
1996 if (data.reverse && ! data.autinc_to)
1997 {
1998 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1999 data.autinc_to = 1;
2000 data.explicit_inc_to = -1;
2001 }
2002 #endif
2003 #ifdef HAVE_POST_INCREMENT
2004 if (! data.reverse && ! data.autinc_to)
2005 {
2006 data.to_addr = copy_addr_to_reg (to_addr);
2007 data.autinc_to = 1;
2008 data.explicit_inc_to = 1;
2009 }
2010 #endif
2011 if (!data.autinc_to && CONSTANT_P (to_addr))
2012 data.to_addr = copy_addr_to_reg (to_addr);
2013 }
2014
2015 if (! SLOW_UNALIGNED_ACCESS
2016 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2017 align = MOVE_MAX;
2018
2019 /* First move what we can in the largest integer mode, then go to
2020 successively smaller modes. */
2021
2022 while (max_size > 1)
2023 {
2024 enum machine_mode mode = VOIDmode, tmode;
2025 enum insn_code icode;
2026
2027 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2028 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2029 if (GET_MODE_SIZE (tmode) < max_size)
2030 mode = tmode;
2031
2032 if (mode == VOIDmode)
2033 break;
2034
2035 icode = mov_optab->handlers[(int) mode].insn_code;
2036 if (icode != CODE_FOR_nothing
2037 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2038 GET_MODE_SIZE (mode)))
2039 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2040
2041 max_size = GET_MODE_SIZE (mode);
2042 }
2043
2044 /* The code above should have handled everything. */
2045 if (data.len != 0)
2046 abort ();
2047 }
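
/* A worked example of the loop above: clearing LEN == 7 suitably
   aligned bytes on a 32-bit target with MOVE_MAX == 4 emits one SImode
   store of const0_rtx (4 bytes), then one HImode store (2 bytes), then
   one QImode store (1 byte), after which data.len reaches 0 and the
   final sanity check passes.  */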
2048
2049 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2050 with move instructions for mode MODE. GENFUN is the gen_... function
2051 to make a move insn for that mode. DATA has all the other info. */
2052
2053 static void
2054 clear_by_pieces_1 (genfun, mode, data)
2055 rtx (*genfun) PROTO ((rtx, ...));
2056 enum machine_mode mode;
2057 struct clear_by_pieces *data;
2058 {
2059 register int size = GET_MODE_SIZE (mode);
2060 register rtx to1;
2061
2062 while (data->len >= size)
2063 {
2064 if (data->reverse) data->offset -= size;
2065
2066 to1 = (data->autinc_to
2067 ? gen_rtx_MEM (mode, data->to_addr)
2068 : copy_rtx (change_address (data->to, mode,
2069 plus_constant (data->to_addr,
2070 data->offset))));
2071 MEM_IN_STRUCT_P (to1) = data->to_struct;
2072
2073 #ifdef HAVE_PRE_DECREMENT
2074 if (data->explicit_inc_to < 0)
2075 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2076 #endif
2077
2078 emit_insn ((*genfun) (to1, const0_rtx));
2079 #ifdef HAVE_POST_INCREMENT
2080 if (data->explicit_inc_to > 0)
2081 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2082 #endif
2083
2084 if (! data->reverse) data->offset += size;
2085
2086 data->len -= size;
2087 }
2088 }
2089 \f
2090 /* Write zeros through the storage of OBJECT.
2091 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2092    the maximum alignment we can assume it has, measured in bytes.
2093
2094    If we end up calling a library function to clear the block, return its return value; otherwise return 0.  */
2095
2096 rtx
2097 clear_storage (object, size, align)
2098 rtx object;
2099 rtx size;
2100 int align;
2101 {
2102 rtx retval = 0;
2103
2104 if (GET_MODE (object) == BLKmode)
2105 {
2106 object = protect_from_queue (object, 1);
2107 size = protect_from_queue (size, 0);
2108
2109 if (GET_CODE (size) == CONST_INT
2110 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2111 clear_by_pieces (object, INTVAL (size), align);
2112
2113 else
2114 {
2115 /* Try the most limited insn first, because there's no point
2116 including more than one in the machine description unless
2117 the more limited one has some advantage. */
2118
2119 rtx opalign = GEN_INT (align);
2120 enum machine_mode mode;
2121
2122 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2123 mode = GET_MODE_WIDER_MODE (mode))
2124 {
2125 enum insn_code code = clrstr_optab[(int) mode];
2126
2127 if (code != CODE_FOR_nothing
2128 		  /* We don't need MODE to be narrower than
2129 		     BITS_PER_HOST_WIDE_INT here, because if SIZE is less
2130 		     than the mode mask as returned by the macro, it is
2131 		     certainly less than the actual mode mask as well.  */
2132 && ((GET_CODE (size) == CONST_INT
2133 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2134 <= (GET_MODE_MASK (mode) >> 1)))
2135 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2136 && (insn_operand_predicate[(int) code][0] == 0
2137 || (*insn_operand_predicate[(int) code][0]) (object,
2138 BLKmode))
2139 && (insn_operand_predicate[(int) code][2] == 0
2140 || (*insn_operand_predicate[(int) code][2]) (opalign,
2141 VOIDmode)))
2142 {
2143 rtx op1;
2144 rtx last = get_last_insn ();
2145 rtx pat;
2146
2147 op1 = convert_to_mode (mode, size, 1);
2148 if (insn_operand_predicate[(int) code][1] != 0
2149 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2150 mode))
2151 op1 = copy_to_mode_reg (mode, op1);
2152
2153 pat = GEN_FCN ((int) code) (object, op1, opalign);
2154 if (pat)
2155 {
2156 emit_insn (pat);
2157 return 0;
2158 }
2159 else
2160 delete_insns_since (last);
2161 }
2162 }
2163
2164
2165 #ifdef TARGET_MEM_FUNCTIONS
2166 retval
2167 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2168 ptr_mode, 3,
2169 XEXP (object, 0), Pmode,
2170 const0_rtx,
2171 TYPE_MODE (integer_type_node),
2172 convert_to_mode
2173 (TYPE_MODE (sizetype), size,
2174 TREE_UNSIGNED (sizetype)),
2175 TYPE_MODE (sizetype));
2176 #else
2177 emit_library_call (bzero_libfunc, 0,
2178 VOIDmode, 2,
2179 XEXP (object, 0), Pmode,
2180 convert_to_mode
2181 (TYPE_MODE (integer_type_node), size,
2182 TREE_UNSIGNED (integer_type_node)),
2183 TYPE_MODE (integer_type_node));
2184 #endif
2185 }
2186 }
2187 else
2188 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2189
2190 return retval;
2191 }
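
/* A minimal usage sketch, assuming OBJECT is a BLKmode MEM for a
   32-byte aggregate and assuming word (4-byte) alignment; both numbers
   are illustrative:

	clear_storage (object, GEN_INT (32), 4);

   With a small constant size like this, the move_by_pieces_ninsns test
   above normally selects the clear_by_pieces path rather than a clrstr
   pattern or a library call.  */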
2192
2193 /* Generate code to copy Y into X.
2194 Both Y and X must have the same mode, except that
2195 Y can be a constant with VOIDmode.
2196 This mode cannot be BLKmode; use emit_block_move for that.
2197
2198 Return the last instruction emitted. */
2199
2200 rtx
2201 emit_move_insn (x, y)
2202 rtx x, y;
2203 {
2204 enum machine_mode mode = GET_MODE (x);
2205
2206 x = protect_from_queue (x, 1);
2207 y = protect_from_queue (y, 0);
2208
2209 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2210 abort ();
2211
2212 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2213 y = force_const_mem (mode, y);
2214
2215 /* If X or Y are memory references, verify that their addresses are valid
2216 for the machine. */
2217 if (GET_CODE (x) == MEM
2218 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2219 && ! push_operand (x, GET_MODE (x)))
2220 || (flag_force_addr
2221 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2222 x = change_address (x, VOIDmode, XEXP (x, 0));
2223
2224 if (GET_CODE (y) == MEM
2225 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2226 || (flag_force_addr
2227 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2228 y = change_address (y, VOIDmode, XEXP (y, 0));
2229
2230 if (mode == BLKmode)
2231 abort ();
2232
2233 return emit_move_insn_1 (x, y);
2234 }
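
/* For instance, to load the constant 42 into hard register 0 (an
   arbitrary register number, for illustration) in SImode:

	emit_move_insn (gen_rtx_REG (SImode, 0), GEN_INT (42));

   Here Y has VOIDmode, which the mode check above explicitly permits
   for constants.  */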
2235
2236 /* Low level part of emit_move_insn.
2237 Called just like emit_move_insn, but assumes X and Y
2238 are basically valid. */
2239
2240 rtx
2241 emit_move_insn_1 (x, y)
2242 rtx x, y;
2243 {
2244 enum machine_mode mode = GET_MODE (x);
2245 enum machine_mode submode;
2246 enum mode_class class = GET_MODE_CLASS (mode);
2247 int i;
2248
2249 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2250 return
2251 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2252
2253 /* Expand complex moves by moving real part and imag part, if possible. */
2254 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2255 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2256 * BITS_PER_UNIT),
2257 (class == MODE_COMPLEX_INT
2258 ? MODE_INT : MODE_FLOAT),
2259 0))
2260 && (mov_optab->handlers[(int) submode].insn_code
2261 != CODE_FOR_nothing))
2262 {
2263 /* Don't split destination if it is a stack push. */
2264 int stack = push_operand (x, GET_MODE (x));
2265
2266 	  /* If this is a stack push, push the highpart first, so the parts
2267 	     end up in argument order.
2268
2269 In that case, change_address is used only to convert
2270 the mode, not to change the address. */
2271 if (stack)
2272 {
2273 /* Note that the real part always precedes the imag part in memory
2274 regardless of machine's endianness. */
2275 #ifdef STACK_GROWS_DOWNWARD
2276 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2277 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2278 gen_imagpart (submode, y)));
2279 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2280 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2281 gen_realpart (submode, y)));
2282 #else
2283 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2284 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2285 gen_realpart (submode, y)));
2286 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2287 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2288 gen_imagpart (submode, y)));
2289 #endif
2290 }
2291 else
2292 {
2293 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2294 (gen_realpart (submode, x), gen_realpart (submode, y)));
2295 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2296 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2297 }
2298
2299 return get_last_insn ();
2300 }
2301
2302 /* This will handle any multi-word mode that lacks a move_insn pattern.
2303 However, you will get better code if you define such patterns,
2304 even if they must turn into multiple assembler instructions. */
2305 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2306 {
2307 rtx last_insn = 0;
2308
2309 #ifdef PUSH_ROUNDING
2310
2311 /* If X is a push on the stack, do the push now and replace
2312 X with a reference to the stack pointer. */
2313 if (push_operand (x, GET_MODE (x)))
2314 {
2315 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2316 x = change_address (x, VOIDmode, stack_pointer_rtx);
2317 }
2318 #endif
2319
2320 /* Show the output dies here. */
2321 if (x != y)
2322 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2323
2324 for (i = 0;
2325 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2326 i++)
2327 {
2328 rtx xpart = operand_subword (x, i, 1, mode);
2329 rtx ypart = operand_subword (y, i, 1, mode);
2330
2331 /* If we can't get a part of Y, put Y into memory if it is a
2332 constant. Otherwise, force it into a register. If we still
2333 can't get a part of Y, abort. */
2334 if (ypart == 0 && CONSTANT_P (y))
2335 {
2336 y = force_const_mem (mode, y);
2337 ypart = operand_subword (y, i, 1, mode);
2338 }
2339 else if (ypart == 0)
2340 ypart = operand_subword_force (y, i, mode);
2341
2342 if (xpart == 0 || ypart == 0)
2343 abort ();
2344
2345 last_insn = emit_move_insn (xpart, ypart);
2346 }
2347
2348 return last_insn;
2349 }
2350 else
2351 abort ();
2352 }
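
/* Two illustrations of the fallback cases above: a DCmode move on a
   target with a movdf pattern but no movdc pattern is split into a
   DFmode move of the real part and a DFmode move of the imaginary
   part; a DImode move on a 32-bit target without a movdi pattern is
   split by the word loop into two word_mode moves obtained through
   operand_subword.  */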
2353 \f
2354 /* Pushing data onto the stack. */
2355
2356 /* Push a block of length SIZE (perhaps variable)
2357 and return an rtx to address the beginning of the block.
2358 Note that it is not possible for the value returned to be a QUEUED.
2359 The value may be virtual_outgoing_args_rtx.
2360
2361 EXTRA is the number of bytes of padding to push in addition to SIZE.
2362 BELOW nonzero means this padding comes at low addresses;
2363 otherwise, the padding comes at high addresses. */
2364
2365 rtx
2366 push_block (size, extra, below)
2367 rtx size;
2368 int extra, below;
2369 {
2370 register rtx temp;
2371
2372 size = convert_modes (Pmode, ptr_mode, size, 1);
2373 if (CONSTANT_P (size))
2374 anti_adjust_stack (plus_constant (size, extra));
2375 else if (GET_CODE (size) == REG && extra == 0)
2376 anti_adjust_stack (size);
2377 else
2378 {
2379       temp = copy_to_mode_reg (Pmode, size);
2380 if (extra != 0)
2381 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2382 temp, 0, OPTAB_LIB_WIDEN);
2383 anti_adjust_stack (temp);
2384 }
2385
2386 #ifdef STACK_GROWS_DOWNWARD
2387 temp = virtual_outgoing_args_rtx;
2388 if (extra != 0 && below)
2389 temp = plus_constant (temp, extra);
2390 #else
2391 if (GET_CODE (size) == CONST_INT)
2392 temp = plus_constant (virtual_outgoing_args_rtx,
2393 - INTVAL (size) - (below ? 0 : extra));
2394 else if (extra != 0 && !below)
2395 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2396 negate_rtx (Pmode, plus_constant (size, extra)));
2397 else
2398 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2399 negate_rtx (Pmode, size));
2400 #endif
2401
2402 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2403 }
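
/* For example, to reserve 16 bytes of argument space with no extra
   padding (the size is illustrative):

	rtx block_addr = push_block (GEN_INT (16), 0, 0);

   On a STACK_GROWS_DOWNWARD target this anti-adjusts the stack by 16
   bytes and returns an address based on virtual_outgoing_args_rtx for
   the new block.  */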
2404
2405 rtx
2406 gen_push_operand ()
2407 {
2408 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2409 }
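
/* On a STACK_GROWS_DOWNWARD target, STACK_PUSH_CODE is PRE_DEC, so a
   MEM built around this operand has the shape

	(mem:M (pre_dec:P (reg stack_pointer)))

   which is exactly what push_operand recognizes as a push.  */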
2410
2411 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2412    block of SIZE bytes.  */
2413
2414 static rtx
2415 get_push_address (size)
2416 int size;
2417 {
2418 register rtx temp;
2419
2420 if (STACK_PUSH_CODE == POST_DEC)
2421 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2422 else if (STACK_PUSH_CODE == POST_INC)
2423 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2424 else
2425 temp = stack_pointer_rtx;
2426
2427 return copy_to_reg (temp);
2428 }
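
/* The adjustment above compensates for post-update push codes: with
   POST_DEC the stack pointer has already moved past the block, so the
   block starts at sp + SIZE; with POST_INC it starts at sp - SIZE; for
   the pre-update codes the updated stack pointer itself addresses the
   block.  */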
2429
2430 /* Generate code to push X onto the stack, assuming it has mode MODE and
2431 type TYPE.
2432 MODE is redundant except when X is a CONST_INT (since they don't
2433 carry mode info).
2434 SIZE is an rtx for the size of data to be copied (in bytes),
2435 needed only if X is BLKmode.
2436
2437 ALIGN (in bytes) is maximum alignment we can assume.
2438
2439 If PARTIAL and REG are both nonzero, then copy that many of the first
2440 words of X into registers starting with REG, and push the rest of X.
2441 The amount of space pushed is decreased by PARTIAL words,
2442 rounded *down* to a multiple of PARM_BOUNDARY.
2443 REG must be a hard register in this case.
2444    If REG is zero but PARTIAL is not, take all other actions for an
2445 argument partially in registers, but do not actually load any
2446 registers.
2447
2448 EXTRA is the amount in bytes of extra space to leave next to this arg.
2449 This is ignored if an argument block has already been allocated.
2450
2451 On a machine that lacks real push insns, ARGS_ADDR is the address of
2452 the bottom of the argument block for this call. We use indexing off there
2453    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2454 argument block has not been preallocated.
2455
2456 ARGS_SO_FAR is the size of args previously pushed for this call.
2457
2458 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2459 for arguments passed in registers. If nonzero, it will be the number
2460 of bytes required. */
2461
2462 void
2463 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2464 args_addr, args_so_far, reg_parm_stack_space)
2465 register rtx x;
2466 enum machine_mode mode;
2467 tree type;
2468 rtx size;
2469 int align;
2470 int partial;
2471 rtx reg;
2472 int extra;
2473 rtx args_addr;
2474 rtx args_so_far;
2475 int reg_parm_stack_space;
2476 {
2477 rtx xinner;
2478 enum direction stack_direction
2479 #ifdef STACK_GROWS_DOWNWARD
2480 = downward;
2481 #else
2482 = upward;
2483 #endif
2484
2485 /* Decide where to pad the argument: `downward' for below,
2486 `upward' for above, or `none' for don't pad it.
2487 Default is below for small data on big-endian machines; else above. */
2488 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2489
2490 /* Invert direction if stack is post-update. */
2491 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2492 if (where_pad != none)
2493 where_pad = (where_pad == downward ? upward : downward);
2494
2495 xinner = x = protect_from_queue (x, 0);
2496
2497 if (mode == BLKmode)
2498 {
2499 /* Copy a block into the stack, entirely or partially. */
2500
2501 register rtx temp;
2502 int used = partial * UNITS_PER_WORD;
2503 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2504 int skip;
2505
2506 if (size == 0)
2507 abort ();
2508
2509 used -= offset;
2510
2511 /* USED is now the # of bytes we need not copy to the stack
2512 because registers will take care of them. */
2513
2514 if (partial != 0)
2515 xinner = change_address (xinner, BLKmode,
2516 plus_constant (XEXP (xinner, 0), used));
2517
2518 /* If the partial register-part of the arg counts in its stack size,
2519 skip the part of stack space corresponding to the registers.
2520 Otherwise, start copying to the beginning of the stack space,
2521 by setting SKIP to 0. */
2522 skip = (reg_parm_stack_space == 0) ? 0 : used;
2523
2524 #ifdef PUSH_ROUNDING
2525 /* Do it with several push insns if that doesn't take lots of insns
2526 and if there is no difficulty with push insns that skip bytes
2527 on the stack for alignment purposes. */
2528 if (args_addr == 0
2529 && GET_CODE (size) == CONST_INT
2530 && skip == 0
2531 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2532 < MOVE_RATIO)
2533 /* Here we avoid the case of a structure whose weak alignment
2534 forces many pushes of a small amount of data,
2535 and such small pushes do rounding that causes trouble. */
2536 && ((! SLOW_UNALIGNED_ACCESS)
2537 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2538 || PUSH_ROUNDING (align) == align)
2539 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2540 {
2541 /* Push padding now if padding above and stack grows down,
2542 or if padding below and stack grows up.
2543 But if space already allocated, this has already been done. */
2544 if (extra && args_addr == 0
2545 && where_pad != none && where_pad != stack_direction)
2546 anti_adjust_stack (GEN_INT (extra));
2547
2548 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2549 INTVAL (size) - used, align);
2550
2551 if (flag_check_memory_usage && ! in_check_memory_usage)
2552 {
2553 rtx temp;
2554
2555 in_check_memory_usage = 1;
2556 temp = get_push_address (INTVAL(size) - used);
2557 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2558 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2559 temp, ptr_mode,
2560 XEXP (xinner, 0), ptr_mode,
2561 GEN_INT (INTVAL(size) - used),
2562 TYPE_MODE (sizetype));
2563 else
2564 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2565 temp, ptr_mode,
2566 GEN_INT (INTVAL(size) - used),
2567 TYPE_MODE (sizetype),
2568 GEN_INT (MEMORY_USE_RW),
2569 TYPE_MODE (integer_type_node));
2570 in_check_memory_usage = 0;
2571 }
2572 }
2573 else
2574 #endif /* PUSH_ROUNDING */
2575 {
2576 /* Otherwise make space on the stack and copy the data
2577 to the address of that space. */
2578
2579 /* Deduct words put into registers from the size we must copy. */
2580 if (partial != 0)
2581 {
2582 if (GET_CODE (size) == CONST_INT)
2583 size = GEN_INT (INTVAL (size) - used);
2584 else
2585 size = expand_binop (GET_MODE (size), sub_optab, size,
2586 GEN_INT (used), NULL_RTX, 0,
2587 OPTAB_LIB_WIDEN);
2588 }
2589
2590 /* Get the address of the stack space.
2591 In this case, we do not deal with EXTRA separately.
2592 A single stack adjust will do. */
2593 if (! args_addr)
2594 {
2595 temp = push_block (size, extra, where_pad == downward);
2596 extra = 0;
2597 }
2598 else if (GET_CODE (args_so_far) == CONST_INT)
2599 temp = memory_address (BLKmode,
2600 plus_constant (args_addr,
2601 skip + INTVAL (args_so_far)));
2602 else
2603 temp = memory_address (BLKmode,
2604 plus_constant (gen_rtx_PLUS (Pmode,
2605 args_addr,
2606 args_so_far),
2607 skip));
2608 if (flag_check_memory_usage && ! in_check_memory_usage)
2609 {
2610 rtx target;
2611
2612 in_check_memory_usage = 1;
2613 target = copy_to_reg (temp);
2614 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2615 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2616 target, ptr_mode,
2617 XEXP (xinner, 0), ptr_mode,
2618 size, TYPE_MODE (sizetype));
2619 else
2620 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2621 target, ptr_mode,
2622 size, TYPE_MODE (sizetype),
2623 GEN_INT (MEMORY_USE_RW),
2624 TYPE_MODE (integer_type_node));
2625 in_check_memory_usage = 0;
2626 }
2627
2628 /* TEMP is the address of the block. Copy the data there. */
2629 if (GET_CODE (size) == CONST_INT
2630 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2631 < MOVE_RATIO))
2632 {
2633 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2634 INTVAL (size), align);
2635 goto ret;
2636 }
2637 else
2638 {
2639 rtx opalign = GEN_INT (align);
2640 enum machine_mode mode;
2641 	  rtx target = gen_rtx_MEM (BLKmode, temp);
2642
2643 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2644 mode != VOIDmode;
2645 mode = GET_MODE_WIDER_MODE (mode))
2646 {
2647 enum insn_code code = movstr_optab[(int) mode];
2648
2649 if (code != CODE_FOR_nothing
2650 && ((GET_CODE (size) == CONST_INT
2651 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2652 <= (GET_MODE_MASK (mode) >> 1)))
2653 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2654 && (insn_operand_predicate[(int) code][0] == 0
2655 || ((*insn_operand_predicate[(int) code][0])
2656 (target, BLKmode)))
2657 && (insn_operand_predicate[(int) code][1] == 0
2658 || ((*insn_operand_predicate[(int) code][1])
2659 (xinner, BLKmode)))
2660 && (insn_operand_predicate[(int) code][3] == 0
2661 || ((*insn_operand_predicate[(int) code][3])
2662 (opalign, VOIDmode))))
2663 {
2664 rtx op2 = convert_to_mode (mode, size, 1);
2665 rtx last = get_last_insn ();
2666 rtx pat;
2667
2668 if (insn_operand_predicate[(int) code][2] != 0
2669 && ! ((*insn_operand_predicate[(int) code][2])
2670 (op2, mode)))
2671 op2 = copy_to_mode_reg (mode, op2);
2672
2673 pat = GEN_FCN ((int) code) (target, xinner,
2674 op2, opalign);
2675 if (pat)
2676 {
2677 emit_insn (pat);
2678 goto ret;
2679 }
2680 else
2681 delete_insns_since (last);
2682 }
2683 }
2684 }
2685
2686 #ifndef ACCUMULATE_OUTGOING_ARGS
2687 /* If the source is referenced relative to the stack pointer,
2688 copy it to another register to stabilize it. We do not need
2689 to do this if we know that we won't be changing sp. */
2690
2691 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2692 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2693 temp = copy_to_reg (temp);
2694 #endif
2695
2696 /* Make inhibit_defer_pop nonzero around the library call
2697 to force it to pop the bcopy-arguments right away. */
2698 NO_DEFER_POP;
2699 #ifdef TARGET_MEM_FUNCTIONS
2700 emit_library_call (memcpy_libfunc, 0,
2701 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2702 convert_to_mode (TYPE_MODE (sizetype),
2703 size, TREE_UNSIGNED (sizetype)),
2704 TYPE_MODE (sizetype));
2705 #else
2706 emit_library_call (bcopy_libfunc, 0,
2707 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2708 convert_to_mode (TYPE_MODE (integer_type_node),
2709 size,
2710 TREE_UNSIGNED (integer_type_node)),
2711 TYPE_MODE (integer_type_node));
2712 #endif
2713 OK_DEFER_POP;
2714 }
2715 }
2716 else if (partial > 0)
2717 {
2718 /* Scalar partly in registers. */
2719
2720 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2721 int i;
2722 int not_stack;
2723 /* # words of start of argument
2724 that we must make space for but need not store. */
2725 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2726 int args_offset = INTVAL (args_so_far);
2727 int skip;
2728
2729 /* Push padding now if padding above and stack grows down,
2730 or if padding below and stack grows up.
2731 But if space already allocated, this has already been done. */
2732 if (extra && args_addr == 0
2733 && where_pad != none && where_pad != stack_direction)
2734 anti_adjust_stack (GEN_INT (extra));
2735
2736 /* If we make space by pushing it, we might as well push
2737 the real data. Otherwise, we can leave OFFSET nonzero
2738 and leave the space uninitialized. */
2739 if (args_addr == 0)
2740 offset = 0;
2741
2742 /* Now NOT_STACK gets the number of words that we don't need to
2743 allocate on the stack. */
2744 not_stack = partial - offset;
2745
2746 /* If the partial register-part of the arg counts in its stack size,
2747 skip the part of stack space corresponding to the registers.
2748 Otherwise, start copying to the beginning of the stack space,
2749 by setting SKIP to 0. */
2750 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
2751
2752 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2753 x = validize_mem (force_const_mem (mode, x));
2754
2755 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2756 SUBREGs of such registers are not allowed. */
2757 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2758 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2759 x = copy_to_reg (x);
2760
2761 /* Loop over all the words allocated on the stack for this arg. */
2762 /* We can do it by words, because any scalar bigger than a word
2763      has a size that is a multiple of a word.  */
2764 #ifndef PUSH_ARGS_REVERSED
2765 for (i = not_stack; i < size; i++)
2766 #else
2767 for (i = size - 1; i >= not_stack; i--)
2768 #endif
2769 if (i >= not_stack + offset)
2770 emit_push_insn (operand_subword_force (x, i, mode),
2771 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2772 0, args_addr,
2773 GEN_INT (args_offset + ((i - not_stack + skip)
2774 * UNITS_PER_WORD)),
2775 reg_parm_stack_space);
2776 }
2777 else
2778 {
2779 rtx addr;
2780 rtx target = NULL_RTX;
2781
2782 /* Push padding now if padding above and stack grows down,
2783 or if padding below and stack grows up.
2784 But if space already allocated, this has already been done. */
2785 if (extra && args_addr == 0
2786 && where_pad != none && where_pad != stack_direction)
2787 anti_adjust_stack (GEN_INT (extra));
2788
2789 #ifdef PUSH_ROUNDING
2790 if (args_addr == 0)
2791 addr = gen_push_operand ();
2792 else
2793 #endif
2794 {
2795 if (GET_CODE (args_so_far) == CONST_INT)
2796 addr
2797 = memory_address (mode,
2798 plus_constant (args_addr,
2799 INTVAL (args_so_far)));
2800 else
2801 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
2802 args_so_far));
2803 target = addr;
2804 }
2805
2806 emit_move_insn (gen_rtx_MEM (mode, addr), x);
2807
2808 if (flag_check_memory_usage && ! in_check_memory_usage)
2809 {
2810 in_check_memory_usage = 1;
2811 if (target == 0)
2812 target = get_push_address (GET_MODE_SIZE (mode));
2813
2814 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2815 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2816 target, ptr_mode,
2817 XEXP (x, 0), ptr_mode,
2818 GEN_INT (GET_MODE_SIZE (mode)),
2819 TYPE_MODE (sizetype));
2820 else
2821 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2822 target, ptr_mode,
2823 GEN_INT (GET_MODE_SIZE (mode)),
2824 TYPE_MODE (sizetype),
2825 GEN_INT (MEMORY_USE_RW),
2826 TYPE_MODE (integer_type_node));
2827 in_check_memory_usage = 0;
2828 }
2829 }
2830
2831 ret:
2832 /* If part should go in registers, copy that part
2833 into the appropriate registers. Do this now, at the end,
2834 since mem-to-mem copies above may do function calls. */
2835 if (partial > 0 && reg != 0)
2836 {
2837 /* Handle calls that pass values in multiple non-contiguous locations.
2838 The Irix 6 ABI has examples of this. */
2839 if (GET_CODE (reg) == PARALLEL)
2840 emit_group_load (reg, x);
2841 else
2842 move_block_to_reg (REGNO (reg), x, partial, mode);
2843 }
2844
2845 if (extra && args_addr == 0 && where_pad == stack_direction)
2846 anti_adjust_stack (GEN_INT (extra));
2847 }
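
/* A minimal caller sketch for a one-word scalar argument VAL, assuming
   a 32-bit target with push insns (so ARGS_ADDR is 0), no partial
   register passing, and an assumed alignment of 4 bytes:

	emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
			4, 0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0);

   This takes the scalar branch at the end of the function and emits a
   single move into the push operand.  */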
2848 \f
2849 /* Expand an assignment that stores the value of FROM into TO.
2850 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2851 (This may contain a QUEUED rtx;
2852 if the value is constant, this rtx is a constant.)
2853 Otherwise, the returned value is NULL_RTX.
2854
2855 SUGGEST_REG is no longer actually used.
2856 It used to mean, copy the value through a register
2857 and return that register, if that is possible.
2858 We now use WANT_VALUE to decide whether to do this. */
2859
2860 rtx
2861 expand_assignment (to, from, want_value, suggest_reg)
2862 tree to, from;
2863 int want_value;
2864 int suggest_reg;
2865 {
2866 register rtx to_rtx = 0;
2867 rtx result;
2868
2869 /* Don't crash if the lhs of the assignment was erroneous. */
2870
2871 if (TREE_CODE (to) == ERROR_MARK)
2872 {
2873 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2874 return want_value ? result : NULL_RTX;
2875 }
2876
2877 /* Assignment of a structure component needs special treatment
2878 if the structure component's rtx is not simply a MEM.
2879 Assignment of an array element at a constant index, and assignment of
2880 an array element in an unaligned packed structure field, has the same
2881 problem. */
2882
2883 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2884 || TREE_CODE (to) == ARRAY_REF)
2885 {
2886 enum machine_mode mode1;
2887 int bitsize;
2888 int bitpos;
2889 tree offset;
2890 int unsignedp;
2891 int volatilep = 0;
2892 tree tem;
2893 int alignment;
2894
2895 push_temp_slots ();
2896 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2897 &unsignedp, &volatilep, &alignment);
2898
2899 /* If we are going to use store_bit_field and extract_bit_field,
2900 make sure to_rtx will be safe for multiple use. */
2901
2902 if (mode1 == VOIDmode && want_value)
2903 tem = stabilize_reference (tem);
2904
2905 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2906 if (offset != 0)
2907 {
2908 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2909
2910 if (GET_CODE (to_rtx) != MEM)
2911 abort ();
2912 to_rtx = change_address (to_rtx, VOIDmode,
2913 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
2914 force_reg (ptr_mode, offset_rtx)));
2915 }
2916 if (volatilep)
2917 {
2918 if (GET_CODE (to_rtx) == MEM)
2919 {
2920 /* When the offset is zero, to_rtx is the address of the
2921 structure we are storing into, and hence may be shared.
2922 We must make a new MEM before setting the volatile bit. */
2923 if (offset == 0)
2924 to_rtx = copy_rtx (to_rtx);
2925
2926 MEM_VOLATILE_P (to_rtx) = 1;
2927 }
2928 #if 0 /* This was turned off because, when a field is volatile
2929 in an object which is not volatile, the object may be in a register,
2930 and then we would abort over here. */
2931 else
2932 abort ();
2933 #endif
2934 }
2935
2936 if (TREE_CODE (to) == COMPONENT_REF
2937 && TREE_READONLY (TREE_OPERAND (to, 1)))
2938 {
2939 if (offset == 0)
2940 to_rtx = copy_rtx (to_rtx);
2941
2942 RTX_UNCHANGING_P (to_rtx) = 1;
2943 }
2944
2945 /* Check the access. */
2946 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2947 {
2948 rtx to_addr;
2949 int size;
2950 int best_mode_size;
2951 enum machine_mode best_mode;
2952
2953 best_mode = get_best_mode (bitsize, bitpos,
2954 TYPE_ALIGN (TREE_TYPE (tem)),
2955 mode1, volatilep);
2956 if (best_mode == VOIDmode)
2957 best_mode = QImode;
2958
2959 best_mode_size = GET_MODE_BITSIZE (best_mode);
2960 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2961 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2962 size *= GET_MODE_SIZE (best_mode);
2963
2964 /* Check the access right of the pointer. */
2965 if (size)
2966 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
2967 to_addr, ptr_mode,
2968 GEN_INT (size), TYPE_MODE (sizetype),
2969 GEN_INT (MEMORY_USE_WO),
2970 TYPE_MODE (integer_type_node));
2971 }
2972
2973 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2974 (want_value
2975 /* Spurious cast makes HPUX compiler happy. */
2976 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2977 : VOIDmode),
2978 unsignedp,
2979 /* Required alignment of containing datum. */
2980 alignment,
2981 int_size_in_bytes (TREE_TYPE (tem)));
2982 preserve_temp_slots (result);
2983 free_temp_slots ();
2984 pop_temp_slots ();
2985
2986 /* If the value is meaningful, convert RESULT to the proper mode.
2987 Otherwise, return nothing. */
2988 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2989 TYPE_MODE (TREE_TYPE (from)),
2990 result,
2991 TREE_UNSIGNED (TREE_TYPE (to)))
2992 : NULL_RTX);
2993 }
2994
2995 /* If the rhs is a function call and its value is not an aggregate,
2996 call the function before we start to compute the lhs.
2997 This is needed for correct code for cases such as
2998 val = setjmp (buf) on machines where reference to val
2999 requires loading up part of an address in a separate insn.
3000
3001    Don't do this if TO is a VAR_DECL whose DECL_RTL is a REG, since it might be
3002 a promoted variable where the zero- or sign- extension needs to be done.
3003 Handling this in the normal way is safe because no computation is done
3004 before the call. */
3005 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3006 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3007 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3008 {
3009 rtx value;
3010
3011 push_temp_slots ();
3012 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3013 if (to_rtx == 0)
3014 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3015
3016 /* Handle calls that return values in multiple non-contiguous locations.
3017 The Irix 6 ABI has examples of this. */
3018 if (GET_CODE (to_rtx) == PARALLEL)
3019 emit_group_load (to_rtx, value);
3020 else if (GET_MODE (to_rtx) == BLKmode)
3021 emit_block_move (to_rtx, value, expr_size (from),
3022 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3023 else
3024 emit_move_insn (to_rtx, value);
3025 preserve_temp_slots (to_rtx);
3026 free_temp_slots ();
3027 pop_temp_slots ();
3028 return want_value ? to_rtx : NULL_RTX;
3029 }
3030
3031 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3032 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3033
3034 if (to_rtx == 0)
3035 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3036
3037 /* Don't move directly into a return register. */
3038 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3039 {
3040 rtx temp;
3041
3042 push_temp_slots ();
3043 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3044 emit_move_insn (to_rtx, temp);
3045 preserve_temp_slots (to_rtx);
3046 free_temp_slots ();
3047 pop_temp_slots ();
3048 return want_value ? to_rtx : NULL_RTX;
3049 }
3050
3051 /* In case we are returning the contents of an object which overlaps
3052 the place the value is being stored, use a safe function when copying
3053 a value through a pointer into a structure value return block. */
3054 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3055 && current_function_returns_struct
3056 && !current_function_returns_pcc_struct)
3057 {
3058 rtx from_rtx, size;
3059
3060 push_temp_slots ();
3061 size = expr_size (from);
3062 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3063 EXPAND_MEMORY_USE_DONT);
3064
3065 /* Copy the rights of the bitmap. */
3066 if (flag_check_memory_usage)
3067 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3068 XEXP (to_rtx, 0), ptr_mode,
3069 XEXP (from_rtx, 0), ptr_mode,
3070 convert_to_mode (TYPE_MODE (sizetype),
3071 size, TREE_UNSIGNED (sizetype)),
3072 TYPE_MODE (sizetype));
3073
3074 #ifdef TARGET_MEM_FUNCTIONS
3075 emit_library_call (memcpy_libfunc, 0,
3076 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3077 XEXP (from_rtx, 0), Pmode,
3078 convert_to_mode (TYPE_MODE (sizetype),
3079 size, TREE_UNSIGNED (sizetype)),
3080 TYPE_MODE (sizetype));
3081 #else
3082 emit_library_call (bcopy_libfunc, 0,
3083 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3084 XEXP (to_rtx, 0), Pmode,
3085 convert_to_mode (TYPE_MODE (integer_type_node),
3086 size, TREE_UNSIGNED (integer_type_node)),
3087 TYPE_MODE (integer_type_node));
3088 #endif
3089
3090 preserve_temp_slots (to_rtx);
3091 free_temp_slots ();
3092 pop_temp_slots ();
3093 return want_value ? to_rtx : NULL_RTX;
3094 }
3095
3096 /* Compute FROM and store the value in the rtx we got. */
3097
3098 push_temp_slots ();
3099 result = store_expr (from, to_rtx, want_value);
3100 preserve_temp_slots (result);
3101 free_temp_slots ();
3102 pop_temp_slots ();
3103 return want_value ? result : NULL_RTX;
3104 }
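
/* As a usage sketch: for a C statement such as `x = y + 1' whose value
   is not used, the front end calls (with LHS and RHS standing for the
   VAR_DECL and PLUS_EXPR trees):

	expand_assignment (lhs, rhs, 0, 0);

   which falls through to the store_expr call just above.  */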
3105
3106 /* Generate code for computing expression EXP,
3107 and storing the value into TARGET.
3108 TARGET may contain a QUEUED rtx.
3109
3110 If WANT_VALUE is nonzero, return a copy of the value
3111 not in TARGET, so that we can be sure to use the proper
3112 value in a containing expression even if TARGET has something
3113 else stored in it. If possible, we copy the value through a pseudo
3114 and return that pseudo. Or, if the value is constant, we try to
3115 return the constant. In some cases, we return a pseudo
3116 copied *from* TARGET.
3117
3118 If the mode is BLKmode then we may return TARGET itself.
3119    It turns out that in BLKmode this doesn't cause a problem,
3120    because C has no operators that could combine two different
3121 assignments into the same BLKmode object with different values
3122 with no sequence point. Will other languages need this to
3123 be more thorough?
3124
3125 If WANT_VALUE is 0, we return NULL, to make sure
3126 to catch quickly any cases where the caller uses the value
3127 and fails to set WANT_VALUE. */
3128
3129 rtx
3130 store_expr (exp, target, want_value)
3131 register tree exp;
3132 register rtx target;
3133 int want_value;
3134 {
3135 register rtx temp;
3136 int dont_return_target = 0;
3137
3138 if (TREE_CODE (exp) == COMPOUND_EXPR)
3139 {
3140 /* Perform first part of compound expression, then assign from second
3141 part. */
3142 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3143 emit_queue ();
3144 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3145 }
3146 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3147 {
3148 /* For conditional expression, get safe form of the target. Then
3149 test the condition, doing the appropriate assignment on either
3150 side. This avoids the creation of unnecessary temporaries.
3151 For non-BLKmode, it is more efficient not to do this. */
3152
3153 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3154
3155 emit_queue ();
3156 target = protect_from_queue (target, 1);
3157
3158 do_pending_stack_adjust ();
3159 NO_DEFER_POP;
3160 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3161 start_cleanup_deferral ();
3162 store_expr (TREE_OPERAND (exp, 1), target, 0);
3163 end_cleanup_deferral ();
3164 emit_queue ();
3165 emit_jump_insn (gen_jump (lab2));
3166 emit_barrier ();
3167 emit_label (lab1);
3168 start_cleanup_deferral ();
3169 store_expr (TREE_OPERAND (exp, 2), target, 0);
3170 end_cleanup_deferral ();
3171 emit_queue ();
3172 emit_label (lab2);
3173 OK_DEFER_POP;
3174
3175 return want_value ? target : NULL_RTX;
3176 }
3177 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3178 && GET_MODE (target) != BLKmode)
3179 /* If target is in memory and caller wants value in a register instead,
3180 arrange that. Pass TARGET as target for expand_expr so that,
3181 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3182 We know expand_expr will not use the target in that case.
3183 Don't do this if TARGET is volatile because we are supposed
3184 to write it and then read it. */
3185 {
3186 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3187 GET_MODE (target), 0);
3188 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3189 temp = copy_to_reg (temp);
3190 dont_return_target = 1;
3191 }
3192 else if (queued_subexp_p (target))
3193 /* If target contains a postincrement, let's not risk
3194 using it as the place to generate the rhs. */
3195 {
3196 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3197 {
3198 /* Expand EXP into a new pseudo. */
3199 temp = gen_reg_rtx (GET_MODE (target));
3200 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3201 }
3202 else
3203 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3204
3205 /* If target is volatile, ANSI requires accessing the value
3206 *from* the target, if it is accessed. So make that happen.
3207 In no case return the target itself. */
3208 if (! MEM_VOLATILE_P (target) && want_value)
3209 dont_return_target = 1;
3210 }
3211 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3212     /* If this is a scalar in a register that is stored in a wider mode
3213 than the declared mode, compute the result into its declared mode
3214 and then convert to the wider mode. Our value is the computed
3215 expression. */
3216 {
3217 /* If we don't want a value, we can do the conversion inside EXP,
3218 which will often result in some optimizations. Do the conversion
3219 in two steps: first change the signedness, if needed, then
3220 the extend. But don't do this if the type of EXP is a subtype
3221 of something else since then the conversion might involve
3222 more than just converting modes. */
3223 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3224 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3225 {
3226 if (TREE_UNSIGNED (TREE_TYPE (exp))
3227 != SUBREG_PROMOTED_UNSIGNED_P (target))
3228 exp
3229 = convert
3230 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3231 TREE_TYPE (exp)),
3232 exp);
3233
3234 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3235 SUBREG_PROMOTED_UNSIGNED_P (target)),
3236 exp);
3237 }
3238
3239 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3240
3241 /* If TEMP is a volatile MEM and we want a result value, make
3242 the access now so it gets done only once. Likewise if
3243 it contains TARGET. */
3244 if (GET_CODE (temp) == MEM && want_value
3245 && (MEM_VOLATILE_P (temp)
3246 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3247 temp = copy_to_reg (temp);
3248
3249 /* If TEMP is a VOIDmode constant, use convert_modes to make
3250 sure that we properly convert it. */
3251 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3252 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3253 TYPE_MODE (TREE_TYPE (exp)), temp,
3254 SUBREG_PROMOTED_UNSIGNED_P (target));
3255
3256 convert_move (SUBREG_REG (target), temp,
3257 SUBREG_PROMOTED_UNSIGNED_P (target));
3258 return want_value ? temp : NULL_RTX;
3259 }
3260 else
3261 {
3262 temp = expand_expr (exp, target, GET_MODE (target), 0);
3263 /* Return TARGET if it's a specified hardware register.
3264 If TARGET is a volatile mem ref, either return TARGET
3265 or return a reg copied *from* TARGET; ANSI requires this.
3266
3267 Otherwise, if TEMP is not TARGET, return TEMP
3268 if it is constant (for efficiency),
3269 or if we really want the correct value. */
3270 if (!(target && GET_CODE (target) == REG
3271 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3272 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3273 && ! rtx_equal_p (temp, target)
3274 && (CONSTANT_P (temp) || want_value))
3275 dont_return_target = 1;
3276 }
3277
3278 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3279 the same as that of TARGET, adjust the constant. This is needed, for
3280 example, in case it is a CONST_DOUBLE and we want only a word-sized
3281 value. */
3282 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3283 && TREE_CODE (exp) != ERROR_MARK
3284 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3285 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3286 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3287
3288 if (flag_check_memory_usage
3289 && GET_CODE (target) == MEM
3290 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3291 {
3292 if (GET_CODE (temp) == MEM)
3293 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3294 XEXP (target, 0), ptr_mode,
3295 XEXP (temp, 0), ptr_mode,
3296 expr_size (exp), TYPE_MODE (sizetype));
3297 else
3298 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3299 XEXP (target, 0), ptr_mode,
3300 expr_size (exp), TYPE_MODE (sizetype),
3301 GEN_INT (MEMORY_USE_WO),
3302 TYPE_MODE (integer_type_node));
3303 }
3304
3305 /* If value was not generated in the target, store it there.
3306    Convert the value to TARGET's type first if necessary.  */
3307
3308 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3309 {
3310 target = protect_from_queue (target, 1);
3311 if (GET_MODE (temp) != GET_MODE (target)
3312 && GET_MODE (temp) != VOIDmode)
3313 {
3314 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3315 if (dont_return_target)
3316 {
3317 /* In this case, we will return TEMP,
3318 so make sure it has the proper mode.
3319 But don't forget to store the value into TARGET. */
3320 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3321 emit_move_insn (target, temp);
3322 }
3323 else
3324 convert_move (target, temp, unsignedp);
3325 }
3326
3327 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3328 {
3329 /* Handle copying a string constant into an array.
3330 The string constant may be shorter than the array.
3331 So copy just the string's actual length, and clear the rest. */
3332 rtx size;
3333 rtx addr;
3334
3335 /* Get the size of the data type of the string,
3336 which is actually the size of the target. */
3337 size = expr_size (exp);
3338 if (GET_CODE (size) == CONST_INT
3339 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3340 emit_block_move (target, temp, size,
3341 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3342 else
3343 {
3344 /* Compute the size of the data to copy from the string. */
3345 tree copy_size
3346 = size_binop (MIN_EXPR,
3347 make_tree (sizetype, size),
3348 convert (sizetype,
3349 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3350 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3351 VOIDmode, 0);
3352 rtx label = 0;
3353
3354 /* Copy that much. */
3355 emit_block_move (target, temp, copy_size_rtx,
3356 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3357
3358 /* Figure out how much is left in TARGET that we have to clear.
3359 Do all calculations in ptr_mode. */
3360
3361 addr = XEXP (target, 0);
3362 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3363
3364 if (GET_CODE (copy_size_rtx) == CONST_INT)
3365 {
3366 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3367 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3368 }
3369 else
3370 {
3371 addr = force_reg (ptr_mode, addr);
3372 addr = expand_binop (ptr_mode, add_optab, addr,
3373 copy_size_rtx, NULL_RTX, 0,
3374 OPTAB_LIB_WIDEN);
3375
3376 size = expand_binop (ptr_mode, sub_optab, size,
3377 copy_size_rtx, NULL_RTX, 0,
3378 OPTAB_LIB_WIDEN);
3379
3380 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3381 GET_MODE (size), 0, 0);
3382 label = gen_label_rtx ();
3383 emit_jump_insn (gen_blt (label));
3384 }
3385
3386 if (size != const0_rtx)
3387 {
3388 /* Be sure we can write on ADDR. */
3389 if (flag_check_memory_usage)
3390 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3391 addr, ptr_mode,
3392 size, TYPE_MODE (sizetype),
3393 GEN_INT (MEMORY_USE_WO),
3394 TYPE_MODE (integer_type_node));
3395 #ifdef TARGET_MEM_FUNCTIONS
3396 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3397 addr, ptr_mode,
3398 const0_rtx, TYPE_MODE (integer_type_node),
3399 convert_to_mode (TYPE_MODE (sizetype),
3400 size,
3401 TREE_UNSIGNED (sizetype)),
3402 TYPE_MODE (sizetype));
3403 #else
3404 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3405 addr, ptr_mode,
3406 convert_to_mode (TYPE_MODE (integer_type_node),
3407 size,
3408 TREE_UNSIGNED (integer_type_node)),
3409 TYPE_MODE (integer_type_node));
3410 #endif
3411 }
3412
3413 if (label)
3414 emit_label (label);
3415 }
3416 }
3417 /* Handle calls that return values in multiple non-contiguous locations.
3418 The Irix 6 ABI has examples of this. */
3419 else if (GET_CODE (target) == PARALLEL)
3420 emit_group_load (target, temp);
3421 else if (GET_MODE (temp) == BLKmode)
3422 emit_block_move (target, temp, expr_size (exp),
3423 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3424 else
3425 emit_move_insn (target, temp);
3426 }
3427
3428 /* If we don't want a value, return NULL_RTX. */
3429 if (! want_value)
3430 return NULL_RTX;
3431
3432 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3433 ??? The latter test doesn't seem to make sense. */
3434 else if (dont_return_target && GET_CODE (temp) != MEM)
3435 return temp;
3436
3437   /* Copy TARGET to a pseudo, unless it is BLKmode or a hard register; in those cases TARGET itself is returned below.  */
3438 else if (want_value && GET_MODE (target) != BLKmode
3439 && ! (GET_CODE (target) == REG
3440 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3441 return copy_to_reg (target);
3442
3443 else
3444 return target;
3445 }
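
/* For example, expanding the nested assignment `a = b = c' first
   stores C into B with WANT_VALUE nonzero; the value returned here (a
   pseudo or constant, per the rules above) is then stored into A, so B
   is never re-read after being written.  */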
3446 \f
3447 /* Return 1 if EXP just contains zeros. */
3448
3449 static int
3450 is_zeros_p (exp)
3451 tree exp;
3452 {
3453 tree elt;
3454
3455 switch (TREE_CODE (exp))
3456 {
3457 case CONVERT_EXPR:
3458 case NOP_EXPR:
3459 case NON_LVALUE_EXPR:
3460 return is_zeros_p (TREE_OPERAND (exp, 0));
3461
3462 case INTEGER_CST:
3463 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3464
3465 case COMPLEX_CST:
3466 return
3467 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3468
3469 case REAL_CST:
3470 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3471
3472 case CONSTRUCTOR:
3473 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3474 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3475 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3476 if (! is_zeros_p (TREE_VALUE (elt)))
3477 return 0;
3478
3479 return 1;
3480
3481 default:
3482 return 0;
3483 }
3484 }
3485
3486 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
3487
3488 static int
3489 mostly_zeros_p (exp)
3490 tree exp;
3491 {
3492 if (TREE_CODE (exp) == CONSTRUCTOR)
3493 {
3494 int elts = 0, zeros = 0;
3495 tree elt = CONSTRUCTOR_ELTS (exp);
3496 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3497 {
3498 /* If there are no ranges of true bits, it is all zero. */
3499 return elt == NULL_TREE;
3500 }
3501 for (; elt; elt = TREE_CHAIN (elt))
3502 {
3503 /* We do not handle the case where the index is a RANGE_EXPR,
3504 so the statistic will be somewhat inaccurate.
3505 We do make a more accurate count in store_constructor itself,
3506 so since this function is only used for nested array elements,
3507 this should be close enough. */
3508 if (mostly_zeros_p (TREE_VALUE (elt)))
3509 zeros++;
3510 elts++;
3511 }
3512
3513 return 4 * zeros >= 3 * elts;
3514 }
3515
3516 return is_zeros_p (exp);
3517 }
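
/* The 3/4 test above reads 4 * zeros >= 3 * elts.  For instance, a
   constructor with 6 zero elements out of 8 gives 24 >= 24 and counts
   as mostly zero, while 5 out of 8 gives 20 >= 24 and does not.  */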
3518 \f
3519 /* Helper function for store_constructor.
3520 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3521 TYPE is the type of the CONSTRUCTOR, not the element type.
3522 CLEARED is as for store_constructor.
3523
3524 This provides a recursive shortcut back to store_constructor when it isn't
3525 necessary to go through store_field. This is so that we can pass through
3526 the cleared field to let store_constructor know that we may not have to
3527 clear a substructure if the outer structure has already been cleared. */
3528
3529 static void
3530 store_constructor_field (target, bitsize, bitpos,
3531 mode, exp, type, cleared)
3532 rtx target;
3533 int bitsize, bitpos;
3534 enum machine_mode mode;
3535 tree exp, type;
3536 int cleared;
3537 {
3538 if (TREE_CODE (exp) == CONSTRUCTOR
3539 && bitpos % BITS_PER_UNIT == 0
3540 /* If we have a non-zero bitpos for a register target, then we just
3541 let store_field do the bitfield handling. This is unlikely to
3542      generate unnecessary clear instructions anyway.  */
3543 && (bitpos == 0 || GET_CODE (target) == MEM))
3544 {
3545 if (bitpos != 0)
3546 target = change_address (target, VOIDmode,
3547 plus_constant (XEXP (target, 0),
3548 bitpos / BITS_PER_UNIT));
3549 store_constructor (exp, target, cleared);
3550 }
3551 else
3552 store_field (target, bitsize, bitpos, mode, exp,
3553 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3554 int_size_in_bytes (type));
3555 }
3556
3557 /* Store the value of constructor EXP into the rtx TARGET.
3558 TARGET is either a REG or a MEM.
3559 CLEARED is true if TARGET is known to have been zero'd. */
3560
3561 static void
3562 store_constructor (exp, target, cleared)
3563 tree exp;
3564 rtx target;
3565 int cleared;
3566 {
3567 tree type = TREE_TYPE (exp);
3568
3569 /* We know our target cannot conflict, since safe_from_p has been called. */
3570 #if 0
3571 /* Don't try copying piece by piece into a hard register
3572 since that is vulnerable to being clobbered by EXP.
3573 Instead, construct in a pseudo register and then copy it all. */
3574 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3575 {
3576 rtx temp = gen_reg_rtx (GET_MODE (target));
3577 store_constructor (exp, temp, 0);
3578 emit_move_insn (target, temp);
3579 return;
3580 }
3581 #endif
3582
3583 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3584 || TREE_CODE (type) == QUAL_UNION_TYPE)
3585 {
3586 register tree elt;
3587
3588 /* Inform later passes that the whole union value is dead. */
3589 if (TREE_CODE (type) == UNION_TYPE
3590 || TREE_CODE (type) == QUAL_UNION_TYPE)
3591 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3592
3593 /* If we are building a static constructor into a register,
3594 set the initial value as zero so we can fold the value into
3595 a constant. But if more than one register is involved,
3596 this probably loses. */
3597 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3598 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3599 {
3600 if (! cleared)
3601 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3602
3603 cleared = 1;
3604 }
3605
3606 /* If the constructor has fewer fields than the structure
3607 or if we are initializing the structure to mostly zeros,
3608 clear the whole structure first. */
3609 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3610 != list_length (TYPE_FIELDS (type)))
3611 || mostly_zeros_p (exp))
3612 {
3613 if (! cleared)
3614 clear_storage (target, expr_size (exp),
3615 TYPE_ALIGN (type) / BITS_PER_UNIT);
3616
3617 cleared = 1;
3618 }
3619 else
3620 /* Inform later passes that the old value is dead. */
3621 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3622
3623 /* Store each element of the constructor into
3624 the corresponding field of TARGET. */
3625
3626 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3627 {
3628 register tree field = TREE_PURPOSE (elt);
3629 register enum machine_mode mode;
3630 int bitsize;
3631 int bitpos = 0;
3632 int unsignedp;
3633 tree pos, constant = 0, offset = 0;
3634 rtx to_rtx = target;
3635
3636 /* Just ignore missing fields.
3637 We cleared the whole structure, above,
3638 if any fields are missing. */
3639 if (field == 0)
3640 continue;
3641
3642 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3643 continue;
3644
3645 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3646 unsignedp = TREE_UNSIGNED (field);
3647 mode = DECL_MODE (field);
3648 if (DECL_BIT_FIELD (field))
3649 mode = VOIDmode;
3650
3651 pos = DECL_FIELD_BITPOS (field);
3652 if (TREE_CODE (pos) == INTEGER_CST)
3653 constant = pos;
3654 else if (TREE_CODE (pos) == PLUS_EXPR
3655 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3656 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3657 else
3658 offset = pos;
3659
3660 if (constant)
3661 bitpos = TREE_INT_CST_LOW (constant);
3662
3663 if (offset)
3664 {
3665 rtx offset_rtx;
3666
3667 if (contains_placeholder_p (offset))
3668 offset = build (WITH_RECORD_EXPR, sizetype,
3669 offset, make_tree (TREE_TYPE (exp), target));
3670
3671 offset = size_binop (FLOOR_DIV_EXPR, offset,
3672 size_int (BITS_PER_UNIT));
3673
3674 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3675 if (GET_CODE (to_rtx) != MEM)
3676 abort ();
3677
3678 to_rtx
3679 = change_address (to_rtx, VOIDmode,
3680 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3681 force_reg (ptr_mode, offset_rtx)));
3682 }
3683 if (TREE_READONLY (field))
3684 {
3685 if (GET_CODE (to_rtx) == MEM)
3686 to_rtx = copy_rtx (to_rtx);
3687
3688 RTX_UNCHANGING_P (to_rtx) = 1;
3689 }
3690
3691 store_constructor_field (to_rtx, bitsize, bitpos,
3692 mode, TREE_VALUE (elt), type, cleared);
3693 }
3694 }
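  /* Example (illustrative, not from the original sources): for

	 struct s { int a, b, c; } x = { 1, 2 };

     the element list is shorter than TYPE_FIELDS, so the code above
     clears all of X first and then stores only the constants 1 and 2
     into A and B; C keeps the cleared (zero) value.  */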
3695 else if (TREE_CODE (type) == ARRAY_TYPE)
3696 {
3697 register tree elt;
3698 register int i;
3699 int need_to_clear;
3700 tree domain = TYPE_DOMAIN (type);
3701 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3702 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3703 tree elttype = TREE_TYPE (type);
3704
3705 /* If the constructor has fewer elements than the array,
3706 clear the whole array first. Similarly if this is a
3707 static constructor of a non-BLKmode object. */
3708 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3709 need_to_clear = 1;
3710 else
3711 {
3712 HOST_WIDE_INT count = 0, zero_count = 0;
3713 need_to_clear = 0;
3714 /* This loop is a more accurate version of the loop in
3715 mostly_zeros_p (it handles RANGE_EXPR in an index).
3716 It is also needed to check for missing elements. */
3717 for (elt = CONSTRUCTOR_ELTS (exp);
3718 elt != NULL_TREE;
3719 elt = TREE_CHAIN (elt))
3720 {
3721 tree index = TREE_PURPOSE (elt);
3722 HOST_WIDE_INT this_node_count;
3723 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3724 {
3725 tree lo_index = TREE_OPERAND (index, 0);
3726 tree hi_index = TREE_OPERAND (index, 1);
3727 if (TREE_CODE (lo_index) != INTEGER_CST
3728 || TREE_CODE (hi_index) != INTEGER_CST)
3729 {
3730 need_to_clear = 1;
3731 break;
3732 }
3733 this_node_count = TREE_INT_CST_LOW (hi_index)
3734 - TREE_INT_CST_LOW (lo_index) + 1;
3735 }
3736 else
3737 this_node_count = 1;
3738 count += this_node_count;
3739 if (mostly_zeros_p (TREE_VALUE (elt)))
3740 zero_count += this_node_count;
3741 }
3742 /* Clear the entire array first if there are any missing elements,
3743 or if the incidence of zero elements is >= 75%. */
3744 if (count < maxelt - minelt + 1
3745 || 4 * zero_count >= 3 * count)
3746 need_to_clear = 1;
3747 }
3748 if (need_to_clear)
3749 {
3750 if (! cleared)
3751 clear_storage (target, expr_size (exp),
3752 TYPE_ALIGN (type) / BITS_PER_UNIT);
3753 cleared = 1;
3754 }
3755 else
3756 /* Inform later passes that the old value is dead. */
3757 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3758
3759 /* Store each element of the constructor into
3760 the corresponding element of TARGET, determined
3761 by counting the elements. */
3762 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3763 elt;
3764 elt = TREE_CHAIN (elt), i++)
3765 {
3766 register enum machine_mode mode;
3767 int bitsize;
3768 int bitpos;
3769 int unsignedp;
3770 tree value = TREE_VALUE (elt);
3771 tree index = TREE_PURPOSE (elt);
3772 rtx xtarget = target;
3773
3774 if (cleared && is_zeros_p (value))
3775 continue;
3776
3777 mode = TYPE_MODE (elttype);
3778 bitsize = GET_MODE_BITSIZE (mode);
3779 unsignedp = TREE_UNSIGNED (elttype);
3780
3781 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3782 {
3783 tree lo_index = TREE_OPERAND (index, 0);
3784 tree hi_index = TREE_OPERAND (index, 1);
3785 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3786 struct nesting *loop;
3787 HOST_WIDE_INT lo, hi, count;
3788 tree position;
3789
3790 /* If the range is constant and "small", unroll the loop. */
3791 if (TREE_CODE (lo_index) == INTEGER_CST
3792 && TREE_CODE (hi_index) == INTEGER_CST
3793 && (lo = TREE_INT_CST_LOW (lo_index),
3794 hi = TREE_INT_CST_LOW (hi_index),
3795 count = hi - lo + 1,
3796 (GET_CODE (target) != MEM
3797 || count <= 2
3798 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3799 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3800 <= 40 * 8))))
3801 {
3802 lo -= minelt; hi -= minelt;
3803 for (; lo <= hi; lo++)
3804 {
3805 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3806 store_constructor_field (target, bitsize, bitpos,
3807 mode, value, type, cleared);
3808 }
3809 }
3810 else
3811 {
3812 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3813 loop_top = gen_label_rtx ();
3814 loop_end = gen_label_rtx ();
3815
3816 unsignedp = TREE_UNSIGNED (domain);
3817
3818 index = build_decl (VAR_DECL, NULL_TREE, domain);
3819
3820 DECL_RTL (index) = index_r
3821 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3822 &unsignedp, 0));
3823
3824 if (TREE_CODE (value) == SAVE_EXPR
3825 && SAVE_EXPR_RTL (value) == 0)
3826 {
3827 /* Make sure value gets expanded once before the
3828 loop. */
3829 expand_expr (value, const0_rtx, VOIDmode, 0);
3830 emit_queue ();
3831 }
3832 store_expr (lo_index, index_r, 0);
3833 loop = expand_start_loop (0);
3834
3835 /* Assign value to element index. */
3836 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3837 size_int (BITS_PER_UNIT));
3838 position = size_binop (MULT_EXPR,
3839 size_binop (MINUS_EXPR, index,
3840 TYPE_MIN_VALUE (domain)),
3841 position);
3842 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3843 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3844 xtarget = change_address (target, mode, addr);
3845 if (TREE_CODE (value) == CONSTRUCTOR)
3846 store_constructor (value, xtarget, cleared);
3847 else
3848 store_expr (value, xtarget, 0);
3849
3850 expand_exit_loop_if_false (loop,
3851 build (LT_EXPR, integer_type_node,
3852 index, hi_index));
3853
3854 expand_increment (build (PREINCREMENT_EXPR,
3855 TREE_TYPE (index),
3856 index, integer_one_node), 0, 0);
3857 expand_end_loop ();
3858 emit_label (loop_end);
3859
3860 /* Needed by stupid register allocation, to extend the
3861 lifetime of pseudo-regs used by target past the end
3862 of the loop. */
3863 emit_insn (gen_rtx_USE (GET_MODE (target), target));
3864 }
3865 }
3866 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3867 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3868 {
3869 rtx pos_rtx, addr;
3870 tree position;
3871
3872 if (index == 0)
3873 index = size_int (i);
3874
3875 if (minelt)
3876 index = size_binop (MINUS_EXPR, index,
3877 TYPE_MIN_VALUE (domain));
3878 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3879 size_int (BITS_PER_UNIT));
3880 position = size_binop (MULT_EXPR, index, position);
3881 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3882 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3883 xtarget = change_address (target, mode, addr);
3884 store_expr (value, xtarget, 0);
3885 }
3886 else
3887 {
3888 if (index != 0)
3889 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3890 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3891 else
3892 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3893 store_constructor_field (target, bitsize, bitpos,
3894 mode, value, type, cleared);
3895 }
3896 }
3897 }
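  /* Example (illustrative; assumes the GNU C range-designator
     extension): an initializer such as

	 int a[100] = { [3 ... 9] = 7 };

     reaches the RANGE_EXPR code above with a constant, "small" range,
     so the loop is unrolled into seven constant stores; a non-constant
     range would instead emit the run-time loop built with
     expand_start_loop.  */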
3898 /* Set constructor assignments. */
3899 else if (TREE_CODE (type) == SET_TYPE)
3900 {
3901 tree elt = CONSTRUCTOR_ELTS (exp);
3902 int nbytes = int_size_in_bytes (type), nbits;
3903 tree domain = TYPE_DOMAIN (type);
3904 tree domain_min, domain_max, bitlength;
3905
3906 /* The default implementation strategy is to extract the constant
3907 parts of the constructor, use that to initialize the target,
3908 and then "or" in whatever non-constant ranges we need in addition.
3909
3910 If a large set is all zero or all ones, it is
3911 probably better to set it using memset (if available) or bzero.
3912 Also, if a large set has just a single range, it may also be
3913 better to first clear the set (using bzero/memset), and then
3914 set the bits we want. */
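  /* Example (illustrative; assumes a 32-bit set word with
     little-endian bit numbering): for a powerset constructor denoting
     { 2, 5, 6, 7 } over a domain of 0..31, the constant part becomes
     the single word 0xe4 (bits 2, 5, 6 and 7) and is stored with one
     move insn; a non-constant range [i..j] is or'ed in at run time by
     calling __setbits.  */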
3915
3916 /* Check for all zeros. */
3917 if (elt == NULL_TREE)
3918 {
3919 if (!cleared)
3920 clear_storage (target, expr_size (exp),
3921 TYPE_ALIGN (type) / BITS_PER_UNIT);
3922 return;
3923 }
3924
3925 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3926 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3927 bitlength = size_binop (PLUS_EXPR,
3928 size_binop (MINUS_EXPR, domain_max, domain_min),
3929 size_one_node);
3930
3931 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3932 abort ();
3933 nbits = TREE_INT_CST_LOW (bitlength);
3934
3935 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3936 are "complicated" (more than one range), initialize (the
3937 constant parts) by copying from a constant. */
3938 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3939 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3940 {
3941 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3942 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3943 char *bit_buffer = (char *) alloca (nbits);
3944 HOST_WIDE_INT word = 0;
3945 int bit_pos = 0;
3946 int ibit = 0;
3947 int offset = 0; /* In bytes from beginning of set. */
3948 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3949 for (;;)
3950 {
3951 if (bit_buffer[ibit])
3952 {
3953 if (BYTES_BIG_ENDIAN)
3954 word |= (1 << (set_word_size - 1 - bit_pos));
3955 else
3956 word |= 1 << bit_pos;
3957 }
3958 bit_pos++; ibit++;
3959 if (bit_pos >= set_word_size || ibit == nbits)
3960 {
3961 if (word != 0 || ! cleared)
3962 {
3963 rtx datum = GEN_INT (word);
3964 rtx to_rtx;
3965 /* The assumption here is that it is safe to use
3966 XEXP if the set is multi-word, but not if
3967 it's single-word. */
3968 if (GET_CODE (target) == MEM)
3969 {
3970 to_rtx = plus_constant (XEXP (target, 0), offset);
3971 to_rtx = change_address (target, mode, to_rtx);
3972 }
3973 else if (offset == 0)
3974 to_rtx = target;
3975 else
3976 abort ();
3977 emit_move_insn (to_rtx, datum);
3978 }
3979 if (ibit == nbits)
3980 break;
3981 word = 0;
3982 bit_pos = 0;
3983 offset += set_word_size / BITS_PER_UNIT;
3984 }
3985 }
3986 }
3987 else if (!cleared)
3988 {
3989 /* Don't bother clearing storage if the set is all ones. */
3990 if (TREE_CHAIN (elt) != NULL_TREE
3991 || (TREE_PURPOSE (elt) == NULL_TREE
3992 ? nbits != 1
3993 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3994 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3995 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3996 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3997 != nbits))))
3998 clear_storage (target, expr_size (exp),
3999 TYPE_ALIGN (type) / BITS_PER_UNIT);
4000 }
4001
4002 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4003 {
4004 /* Start of range of element, or NULL. */
4005 tree startbit = TREE_PURPOSE (elt);
4006 /* End of range of element, or element value. */
4007 tree endbit = TREE_VALUE (elt);
4008 #ifdef TARGET_MEM_FUNCTIONS
4009 HOST_WIDE_INT startb, endb;
4010 #endif
4011 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4012
4013 bitlength_rtx = expand_expr (bitlength,
4014 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4015
4016 /* Handle a non-range tuple element like [ expr ]. */
4017 if (startbit == NULL_TREE)
4018 {
4019 startbit = save_expr (endbit);
4020 endbit = startbit;
4021 }
4022 startbit = convert (sizetype, startbit);
4023 endbit = convert (sizetype, endbit);
4024 if (! integer_zerop (domain_min))
4025 {
4026 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4027 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4028 }
4029 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4030 EXPAND_CONST_ADDRESS);
4031 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4032 EXPAND_CONST_ADDRESS);
4033
4034 if (REG_P (target))
4035 {
4036 targetx = assign_stack_temp (GET_MODE (target),
4037 GET_MODE_SIZE (GET_MODE (target)),
4038 0);
4039 emit_move_insn (targetx, target);
4040 }
4041 else if (GET_CODE (target) == MEM)
4042 targetx = target;
4043 else
4044 abort ();
4045
4046 #ifdef TARGET_MEM_FUNCTIONS
4047 /* Optimization: If startbit and endbit are
4048 constants divisible by BITS_PER_UNIT,
4049 call memset instead. */
4050 if (TREE_CODE (startbit) == INTEGER_CST
4051 && TREE_CODE (endbit) == INTEGER_CST
4052 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4053 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4054 {
4055 emit_library_call (memset_libfunc, 0,
4056 VOIDmode, 3,
4057 plus_constant (XEXP (targetx, 0),
4058 startb / BITS_PER_UNIT),
4059 Pmode,
4060 constm1_rtx, TYPE_MODE (integer_type_node),
4061 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4062 TYPE_MODE (sizetype));
4063 }
4064 else
4065 #endif
4066 {
4067 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4068 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4069 bitlength_rtx, TYPE_MODE (sizetype),
4070 startbit_rtx, TYPE_MODE (sizetype),
4071 endbit_rtx, TYPE_MODE (sizetype));
4072 }
4073 if (REG_P (target))
4074 emit_move_insn (target, targetx);
4075 }
4076 }
4077
4078 else
4079 abort ();
4080 }
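/* Example (illustrative): the "mostly zeros" test above also pays off
   for large, sparse aggregates.  Given

       int t[1000] = { 5 };

   999 of the 1000 elements are zero, so store_constructor clears the
   whole object with clear_storage and then emits one store for the 5,
   instead of a thousand element stores.  */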
4081
4082 /* Store the value of EXP (an expression tree)
4083 into a subfield of TARGET which has mode MODE and occupies
4084 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4085 If MODE is VOIDmode, it means that we are storing into a bit-field.
4086
4087 If VALUE_MODE is VOIDmode, return nothing in particular.
4088 UNSIGNEDP is not used in this case.
4089
4090 Otherwise, return an rtx for the value stored. This rtx
4091 has mode VALUE_MODE if that is convenient to do.
4092 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4093
4094 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4095 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4096
4097 static rtx
4098 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4099 unsignedp, align, total_size)
4100 rtx target;
4101 int bitsize, bitpos;
4102 enum machine_mode mode;
4103 tree exp;
4104 enum machine_mode value_mode;
4105 int unsignedp;
4106 int align;
4107 int total_size;
4108 {
4109 HOST_WIDE_INT width_mask = 0;
4110
4111 if (TREE_CODE (exp) == ERROR_MARK)
4112 return const0_rtx;
4113
4114 if (bitsize < HOST_BITS_PER_WIDE_INT)
4115 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4116
4117 /* If we are storing into an unaligned field of an aligned union that is
4118 in a register, we may have the mode of TARGET being an integer mode but
4119 MODE == BLKmode. In that case, get an aligned object whose size and
4120 alignment are the same as TARGET and store TARGET into it (we can avoid
4121 the store if the field being stored is the entire width of TARGET). Then
4122 call ourselves recursively to store the field into a BLKmode version of
4123 that object. Finally, load from the object into TARGET. This is not
4124 very efficient in general, but should only be slightly more expensive
4125 than the otherwise-required unaligned accesses. Perhaps this can be
4126 cleaned up later. */
4127
4128 if (mode == BLKmode
4129 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4130 {
4131 rtx object = assign_stack_temp (GET_MODE (target),
4132 GET_MODE_SIZE (GET_MODE (target)), 0);
4133 rtx blk_object = copy_rtx (object);
4134
4135 MEM_IN_STRUCT_P (object) = 1;
4136 MEM_IN_STRUCT_P (blk_object) = 1;
4137 PUT_MODE (blk_object, BLKmode);
4138
4139 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4140 emit_move_insn (object, target);
4141
4142 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4143 align, total_size);
4144
4145 /* Even though we aren't returning target, we need to
4146 give it the updated value. */
4147 emit_move_insn (target, object);
4148
4149 return blk_object;
4150 }
4151
4152 /* If the structure is in a register or if the component
4153 is a bit field, we cannot use addressing to access it.
4154 Use bit-field techniques or SUBREG to store in it. */
4155
4156 if (mode == VOIDmode
4157 || (mode != BLKmode && ! direct_store[(int) mode])
4158 || GET_CODE (target) == REG
4159 || GET_CODE (target) == SUBREG
4160 /* If the field isn't aligned enough to store as an ordinary memref,
4161 store it as a bit field. */
4162 || (SLOW_UNALIGNED_ACCESS
4163 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4164 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4165 {
4166 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4167
4168 /* If BITSIZE is narrower than the size of the type of EXP
4169 we will be narrowing TEMP. Normally, what's wanted are the
4170 low-order bits. However, if EXP's type is a record and this is a
4171 big-endian machine, we want the upper BITSIZE bits. */
4172 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4173 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4174 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4175 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4176 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4177 - bitsize),
4178 temp, 1);
4179
4180 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4181 MODE. */
4182 if (mode != VOIDmode && mode != BLKmode
4183 && mode != TYPE_MODE (TREE_TYPE (exp)))
4184 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4185
4186 /* If the modes of TARGET and TEMP are both BLKmode, both
4187 must be in memory and BITPOS must be aligned on a byte
4188 boundary. If so, we simply do a block copy. */
4189 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4190 {
4191 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4192 || bitpos % BITS_PER_UNIT != 0)
4193 abort ();
4194
4195 target = change_address (target, VOIDmode,
4196 plus_constant (XEXP (target, 0),
4197 bitpos / BITS_PER_UNIT));
4198
4199 emit_block_move (target, temp,
4200 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4201 / BITS_PER_UNIT),
4202 1);
4203
4204 return value_mode == VOIDmode ? const0_rtx : target;
4205 }
4206
4207 /* Store the value in the bitfield. */
4208 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4209 if (value_mode != VOIDmode)
4210 {
4211 /* The caller wants an rtx for the value. */
4212 /* If possible, avoid refetching from the bitfield itself. */
4213 if (width_mask != 0
4214 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4215 {
4216 tree count;
4217 enum machine_mode tmode;
4218
4219 if (unsignedp)
4220 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4221 tmode = GET_MODE (temp);
4222 if (tmode == VOIDmode)
4223 tmode = value_mode;
4224 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4225 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4226 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4227 }
4228 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4229 NULL_RTX, value_mode, 0, align,
4230 total_size);
4231 }
4232 return const0_rtx;
4233 }
4234 else
4235 {
4236 rtx addr = XEXP (target, 0);
4237 rtx to_rtx;
4238
4239 /* If a value is wanted, it must be the lhs;
4240 so make the address stable for multiple use. */
4241
4242 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4243 && ! CONSTANT_ADDRESS_P (addr)
4244 /* A frame-pointer reference is already stable. */
4245 && ! (GET_CODE (addr) == PLUS
4246 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4247 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4248 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4249 addr = copy_to_reg (addr);
4250
4251 /* Now build a reference to just the desired component. */
4252
4253 to_rtx = copy_rtx (change_address (target, mode,
4254 plus_constant (addr,
4255 (bitpos
4256 / BITS_PER_UNIT))));
4257 MEM_IN_STRUCT_P (to_rtx) = 1;
4258
4259 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4260 }
4261 }
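/* Example (illustrative): a bit-field assignment such as

       struct s { unsigned int f : 3; } v;
       v.f = 5;

   reaches store_field with MODE == VOIDmode, BITSIZE == 3 and BITPOS
   set to the bit offset of F, so the value is stored with
   store_bit_field rather than through an ordinary memory reference.  */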
4262 \f
4263 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4264 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4265 ARRAY_REFs and find the ultimate containing object, which we return.
4266
4267 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4268 bit position, and *PUNSIGNEDP to the signedness of the field.
4269 If the position of the field is variable, we store a tree
4270 giving the variable offset (in units) in *POFFSET.
4271 This offset is in addition to the bit position.
4272 If the position is not variable, we store 0 in *POFFSET.
4273 We set *PALIGNMENT to the alignment in bytes of the address that will be
4274 computed. This is the alignment of the thing we return if *POFFSET
4275 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4276
4277 If any of the extraction expressions is volatile,
4278 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4279
4280 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4281 is a mode that can be used to access the field. In that case, *PBITSIZE
4282 is redundant.
4283
4284 If the field describes a variable-sized object, *PMODE is set to
4285 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4286 this case, but the address of the object can be found. */
4287
4288 tree
4289 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4290 punsignedp, pvolatilep, palignment)
4291 tree exp;
4292 int *pbitsize;
4293 int *pbitpos;
4294 tree *poffset;
4295 enum machine_mode *pmode;
4296 int *punsignedp;
4297 int *pvolatilep;
4298 int *palignment;
4299 {
4300 tree orig_exp = exp;
4301 tree size_tree = 0;
4302 enum machine_mode mode = VOIDmode;
4303 tree offset = integer_zero_node;
4304 int alignment = BIGGEST_ALIGNMENT;
4305
4306 if (TREE_CODE (exp) == COMPONENT_REF)
4307 {
4308 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4309 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4310 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4311 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4312 }
4313 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4314 {
4315 size_tree = TREE_OPERAND (exp, 1);
4316 *punsignedp = TREE_UNSIGNED (exp);
4317 }
4318 else
4319 {
4320 mode = TYPE_MODE (TREE_TYPE (exp));
4321 *pbitsize = GET_MODE_BITSIZE (mode);
4322 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4323 }
4324
4325 if (size_tree)
4326 {
4327 if (TREE_CODE (size_tree) != INTEGER_CST)
4328 mode = BLKmode, *pbitsize = -1;
4329 else
4330 *pbitsize = TREE_INT_CST_LOW (size_tree);
4331 }
4332
4333 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4334 and find the ultimate containing object. */
4335
4336 *pbitpos = 0;
4337
4338 while (1)
4339 {
4340 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4341 {
4342 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4343 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4344 : TREE_OPERAND (exp, 2));
4345 tree constant = integer_zero_node, var = pos;
4346
4347 /* If this field hasn't been filled in yet, don't go
4348 past it. This should only happen when folding expressions
4349 made during type construction. */
4350 if (pos == 0)
4351 break;
4352
4353 /* Assume here that the offset is a multiple of a unit.
4354 If not, there should be an explicitly added constant. */
4355 if (TREE_CODE (pos) == PLUS_EXPR
4356 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4357 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4358 else if (TREE_CODE (pos) == INTEGER_CST)
4359 constant = pos, var = integer_zero_node;
4360
4361 *pbitpos += TREE_INT_CST_LOW (constant);
4362 offset = size_binop (PLUS_EXPR, offset,
4363 size_binop (EXACT_DIV_EXPR, var,
4364 size_int (BITS_PER_UNIT)));
4365 }
4366
4367 else if (TREE_CODE (exp) == ARRAY_REF)
4368 {
4369 /* This code is based on the code in case ARRAY_REF in expand_expr
4370 below. We assume here that the size of an array element is
4371 always an integral multiple of BITS_PER_UNIT. */
4372
4373 tree index = TREE_OPERAND (exp, 1);
4374 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4375 tree low_bound
4376 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4377 tree index_type = TREE_TYPE (index);
4378
4379 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4380 {
4381 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4382 index);
4383 index_type = TREE_TYPE (index);
4384 }
4385
4386 if (! integer_zerop (low_bound))
4387 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4388
4389 if (TREE_CODE (index) == INTEGER_CST)
4390 {
4391 index = convert (sbitsizetype, index);
4392 index_type = TREE_TYPE (index);
4393 }
4394
4395 index = fold (build (MULT_EXPR, sbitsizetype, index,
4396 convert (sbitsizetype,
4397 TYPE_SIZE (TREE_TYPE (exp)))));
4398
4399 if (TREE_CODE (index) == INTEGER_CST
4400 && TREE_INT_CST_HIGH (index) == 0)
4401 *pbitpos += TREE_INT_CST_LOW (index);
4402 else
4403 {
4404 if (contains_placeholder_p (index))
4405 index = build (WITH_RECORD_EXPR, sizetype, index, exp);
4406
4407 offset = size_binop (PLUS_EXPR, offset,
4408 size_binop (FLOOR_DIV_EXPR, index,
4409 size_int (BITS_PER_UNIT)));
4410 }
4411 }
4412 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4413 && ! ((TREE_CODE (exp) == NOP_EXPR
4414 || TREE_CODE (exp) == CONVERT_EXPR)
4415 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4416 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4417 != UNION_TYPE))
4418 && (TYPE_MODE (TREE_TYPE (exp))
4419 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4420 break;
4421
4422 /* If any reference in the chain is volatile, the effect is volatile. */
4423 if (TREE_THIS_VOLATILE (exp))
4424 *pvolatilep = 1;
4425
4426 /* If the offset is non-constant already, then we can't assume any
4427 alignment more than the alignment here. */
4428 if (! integer_zerop (offset))
4429 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4430
4431 exp = TREE_OPERAND (exp, 0);
4432 }
4433
4434 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4435 alignment = MIN (alignment, DECL_ALIGN (exp));
4436 else if (TREE_TYPE (exp) != 0)
4437 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4438
4439 if (integer_zerop (offset))
4440 offset = 0;
4441
4442 if (offset != 0 && contains_placeholder_p (offset))
4443 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4444
4445 *pmode = mode;
4446 *poffset = offset;
4447 *palignment = alignment / BITS_PER_UNIT;
4448 return exp;
4449 }
4450
4451 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4452 static enum memory_use_mode
4453 get_memory_usage_from_modifier (modifier)
4454 enum expand_modifier modifier;
4455 {
4456 switch (modifier)
4457 {
4458 case EXPAND_NORMAL:
4459 case EXPAND_SUM:
4460 return MEMORY_USE_RO;
4461 break;
4462 case EXPAND_MEMORY_USE_WO:
4463 return MEMORY_USE_WO;
4464 break;
4465 case EXPAND_MEMORY_USE_RW:
4466 return MEMORY_USE_RW;
4467 break;
4468 case EXPAND_MEMORY_USE_DONT:
4469 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4470 MEMORY_USE_DONT, because they are modifiers to a call of
4471 expand_expr in the ADDR_EXPR case of expand_expr. */
4472 case EXPAND_CONST_ADDRESS:
4473 case EXPAND_INITIALIZER:
4474 return MEMORY_USE_DONT;
4475 case EXPAND_MEMORY_USE_BAD:
4476 default:
4477 abort ();
4478 }
4479 }
4480 \f
4481 /* Given an rtx VALUE that may contain additions and multiplications,
4482 return an equivalent value that just refers to a register or memory.
4483 This is done by generating instructions to perform the arithmetic
4484 and returning a pseudo-register containing the value.
4485
4486 The returned value may be a REG, SUBREG, MEM or constant. */
4487
4488 rtx
4489 force_operand (value, target)
4490 rtx value, target;
4491 {
4492 register optab binoptab = 0;
4493 /* Use a temporary to force order of execution of calls to
4494 `force_operand'. */
4495 rtx tmp;
4496 register rtx op2;
4497 /* Use subtarget as the target for operand 0 of a binary operation. */
4498 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4499
4500 if (GET_CODE (value) == PLUS)
4501 binoptab = add_optab;
4502 else if (GET_CODE (value) == MINUS)
4503 binoptab = sub_optab;
4504 else if (GET_CODE (value) == MULT)
4505 {
4506 op2 = XEXP (value, 1);
4507 if (!CONSTANT_P (op2)
4508 && !(GET_CODE (op2) == REG && op2 != subtarget))
4509 subtarget = 0;
4510 tmp = force_operand (XEXP (value, 0), subtarget);
4511 return expand_mult (GET_MODE (value), tmp,
4512 force_operand (op2, NULL_RTX),
4513 target, 0);
4514 }
4515
4516 if (binoptab)
4517 {
4518 op2 = XEXP (value, 1);
4519 if (!CONSTANT_P (op2)
4520 && !(GET_CODE (op2) == REG && op2 != subtarget))
4521 subtarget = 0;
4522 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4523 {
4524 binoptab = add_optab;
4525 op2 = negate_rtx (GET_MODE (value), op2);
4526 }
4527
4528 /* Check for an addition with OP2 a constant integer and our first
4529 operand a PLUS of a virtual register and something else. In that
4530 case, we want to emit the sum of the virtual register and the
4531 constant first and then add the other value. This allows virtual
4532 register instantiation to simply modify the constant rather than
4533 creating another one around this addition. */
4534 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4535 && GET_CODE (XEXP (value, 0)) == PLUS
4536 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4537 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4538 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4539 {
4540 rtx temp = expand_binop (GET_MODE (value), binoptab,
4541 XEXP (XEXP (value, 0), 0), op2,
4542 subtarget, 0, OPTAB_LIB_WIDEN);
4543 return expand_binop (GET_MODE (value), binoptab, temp,
4544 force_operand (XEXP (XEXP (value, 0), 1), 0),
4545 target, 0, OPTAB_LIB_WIDEN);
4546 }
4547
4548 tmp = force_operand (XEXP (value, 0), subtarget);
4549 return expand_binop (GET_MODE (value), binoptab, tmp,
4550 force_operand (op2, NULL_RTX),
4551 target, 0, OPTAB_LIB_WIDEN);
4552 /* We give UNSIGNEDP = 0 to expand_binop
4553 because the only operations we are expanding here are signed ones. */
4554 }
4555 return value;
4556 }
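/* Example (illustrative): given VALUE == (plus:SI (reg:SI 100)
   (const_int 4)), force_operand emits an add insn and returns a pseudo
   holding the sum; a REG, MEM or constant is returned unchanged.  For
   (plus (plus (virtual-stack-vars) (const_int 8)) (reg)), the constant
   is added to the virtual register first, so that virtual register
   instantiation can later fold it into a single frame offset.  */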
4557 \f
4558 /* Subroutine of expand_expr:
4559 save the non-copied parts (LIST) of an expr (LHS), and return a list
4560 which can restore these values to their previous values,
4561 should something modify their storage. */
4562
4563 static tree
4564 save_noncopied_parts (lhs, list)
4565 tree lhs;
4566 tree list;
4567 {
4568 tree tail;
4569 tree parts = 0;
4570
4571 for (tail = list; tail; tail = TREE_CHAIN (tail))
4572 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4573 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4574 else
4575 {
4576 tree part = TREE_VALUE (tail);
4577 tree part_type = TREE_TYPE (part);
4578 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4579 rtx target = assign_temp (part_type, 0, 1, 1);
4580 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4581 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4582 parts = tree_cons (to_be_saved,
4583 build (RTL_EXPR, part_type, NULL_TREE,
4584 (tree) target),
4585 parts);
4586 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4587 }
4588 return parts;
4589 }
4590
4591 /* Subroutine of expand_expr:
4592 record the non-copied parts (LIST) of an expr (LHS), and return a list
4593 which specifies the initial values of these parts. */
4594
4595 static tree
4596 init_noncopied_parts (lhs, list)
4597 tree lhs;
4598 tree list;
4599 {
4600 tree tail;
4601 tree parts = 0;
4602
4603 for (tail = list; tail; tail = TREE_CHAIN (tail))
4604 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4605 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4606 else
4607 {
4608 tree part = TREE_VALUE (tail);
4609 tree part_type = TREE_TYPE (part);
4610 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4611 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4612 }
4613 return parts;
4614 }
4615
4616 /* Subroutine of expand_expr: return nonzero iff there is no way that
4617 EXP can reference X, which is being modified. TOP_P is nonzero if this
4618 call is going to be used to determine whether we need a temporary
4619 for EXP, as opposed to a recursive call to this function. */
4620
4621 static int
4622 safe_from_p (x, exp, top_p)
4623 rtx x;
4624 tree exp;
4625 int top_p;
4626 {
4627 rtx exp_rtl = 0;
4628 int i, nops;
4629
4630 if (x == 0
4631 /* If EXP has varying size, we MUST use a target since we currently
4632 have no way of allocating temporaries of variable size
4633 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4634 So we assume here that something at a higher level has prevented a
4635 clash. This is somewhat bogus, but the best we can do. Only
4636 do this when X is BLKmode and when we are at the top level. */
4637 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4638 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4639 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4640 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4641 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4642 != INTEGER_CST)
4643 && GET_MODE (x) == BLKmode))
4644 return 1;
4645
4646 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4647 find the underlying pseudo. */
4648 if (GET_CODE (x) == SUBREG)
4649 {
4650 x = SUBREG_REG (x);
4651 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4652 return 0;
4653 }
4654
4655 /* If X is a location in the outgoing argument area, it is always safe. */
4656 if (GET_CODE (x) == MEM
4657 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4658 || (GET_CODE (XEXP (x, 0)) == PLUS
4659 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4660 return 1;
4661
4662 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4663 {
4664 case 'd':
4665 exp_rtl = DECL_RTL (exp);
4666 break;
4667
4668 case 'c':
4669 return 1;
4670
4671 case 'x':
4672 if (TREE_CODE (exp) == TREE_LIST)
4673 return ((TREE_VALUE (exp) == 0
4674 || safe_from_p (x, TREE_VALUE (exp), 0))
4675 && (TREE_CHAIN (exp) == 0
4676 || safe_from_p (x, TREE_CHAIN (exp), 0)));
4677 else
4678 return 0;
4679
4680 case '1':
4681 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4682
4683 case '2':
4684 case '<':
4685 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4686 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
4687
4688 case 'e':
4689 case 'r':
4690 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4691 the expression. If it is set, we conflict iff we are that rtx or
4692 both are in memory. Otherwise, we check all operands of the
4693 expression recursively. */
4694
4695 switch (TREE_CODE (exp))
4696 {
4697 case ADDR_EXPR:
4698 return (staticp (TREE_OPERAND (exp, 0))
4699 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4700 || TREE_STATIC (exp));
4701
4702 case INDIRECT_REF:
4703 if (GET_CODE (x) == MEM)
4704 return 0;
4705 break;
4706
4707 case CALL_EXPR:
4708 exp_rtl = CALL_EXPR_RTL (exp);
4709 if (exp_rtl == 0)
4710 {
4711 /* Assume that the call will clobber all hard registers and
4712 all of memory. */
4713 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4714 || GET_CODE (x) == MEM)
4715 return 0;
4716 }
4717
4718 break;
4719
4720 case RTL_EXPR:
4721 /* If a sequence exists, we would have to scan every instruction
4722 in the sequence to see if it was safe. This is probably not
4723 worthwhile. */
4724 if (RTL_EXPR_SEQUENCE (exp))
4725 return 0;
4726
4727 exp_rtl = RTL_EXPR_RTL (exp);
4728 break;
4729
4730 case WITH_CLEANUP_EXPR:
4731 exp_rtl = RTL_EXPR_RTL (exp);
4732 break;
4733
4734 case CLEANUP_POINT_EXPR:
4735 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4736
4737 case SAVE_EXPR:
4738 exp_rtl = SAVE_EXPR_RTL (exp);
4739 break;
4740
4741 case BIND_EXPR:
4742 /* The only operand we look at is operand 1. The rest aren't
4743 part of the expression. */
4744 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
4745
4746 case METHOD_CALL_EXPR:
4747 /* This takes an rtx argument, but shouldn't appear here. */
4748 abort ();
4749
4750 default:
4751 break;
4752 }
4753
4754 /* If we have an rtx, we do not need to scan our operands. */
4755 if (exp_rtl)
4756 break;
4757
4758 nops = tree_code_length[(int) TREE_CODE (exp)];
4759 for (i = 0; i < nops; i++)
4760 if (TREE_OPERAND (exp, i) != 0
4761 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
4762 return 0;
4763 }
4764
4765 /* If we have an rtl, find any enclosed object. Then see if we conflict
4766 with it. */
4767 if (exp_rtl)
4768 {
4769 if (GET_CODE (exp_rtl) == SUBREG)
4770 {
4771 exp_rtl = SUBREG_REG (exp_rtl);
4772 if (GET_CODE (exp_rtl) == REG
4773 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4774 return 0;
4775 }
4776
4777 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4778 are memory and EXP is not readonly. */
4779 return ! (rtx_equal_p (x, exp_rtl)
4780 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4781 && ! TREE_READONLY (exp)));
4782 }
4783
4784 /* If we reach here, it is safe. */
4785 return 1;
4786 }
4787
4788 /* Subroutine of expand_expr: return nonzero iff EXP is an
4789 expression whose type is statically determinable. */
4790
4791 static int
4792 fixed_type_p (exp)
4793 tree exp;
4794 {
4795 if (TREE_CODE (exp) == PARM_DECL
4796 || TREE_CODE (exp) == VAR_DECL
4797 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4798 || TREE_CODE (exp) == COMPONENT_REF
4799 || TREE_CODE (exp) == ARRAY_REF)
4800 return 1;
4801 return 0;
4802 }
4803
4804 /* Subroutine of expand_expr: return rtx if EXP is a
4805 variable or parameter; else return 0. */
4806
4807 static rtx
4808 var_rtx (exp)
4809 tree exp;
4810 {
4811 STRIP_NOPS (exp);
4812 switch (TREE_CODE (exp))
4813 {
4814 case PARM_DECL:
4815 case VAR_DECL:
4816 return DECL_RTL (exp);
4817 default:
4818 return 0;
4819 }
4820 }
4821 \f
4822 /* expand_expr: generate code for computing expression EXP.
4823 An rtx for the computed value is returned. The value is never null.
4824 In the case of a void EXP, const0_rtx is returned.
4825
4826 The value may be stored in TARGET if TARGET is nonzero.
4827 TARGET is just a suggestion; callers must assume that
4828 the rtx returned may not be the same as TARGET.
4829
4830 If TARGET is CONST0_RTX, it means that the value will be ignored.
4831
4832 If TMODE is not VOIDmode, it suggests generating the
4833 result in mode TMODE. But this is done only when convenient.
4834 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4835 TMODE is just a suggestion; callers must assume that
4836 the rtx returned may not have mode TMODE.
4837
4838 Note that TARGET may have neither TMODE nor MODE. In that case, it
4839 probably will not be used.
4840
4841 If MODIFIER is EXPAND_SUM then when EXP is an addition
4842 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4843 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4844 products as above, or REG or MEM, or constant.
4845 Ordinarily in such cases we would output mul or add instructions
4846 and then return a pseudo reg containing the sum.
4847
4848 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4849 it also marks a label as absolutely required (it can't be dead).
4850 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4851 This is used for outputting expressions used in initializers.
4852
4853 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4854 with a constant address even if that address is not normally legitimate.
4855 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4856
4857 rtx
4858 expand_expr (exp, target, tmode, modifier)
4859 register tree exp;
4860 rtx target;
4861 enum machine_mode tmode;
4862 enum expand_modifier modifier;
4863 {
4864 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4865 This is static so it will be accessible to our recursive callees. */
4866 static tree placeholder_list = 0;
4867 register rtx op0, op1, temp;
4868 tree type = TREE_TYPE (exp);
4869 int unsignedp = TREE_UNSIGNED (type);
4870 register enum machine_mode mode = TYPE_MODE (type);
4871 register enum tree_code code = TREE_CODE (exp);
4872 optab this_optab;
4873 /* Use subtarget as the target for operand 0 of a binary operation. */
4874 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4875 rtx original_target = target;
4876 int ignore = (target == const0_rtx
4877 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4878 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4879 || code == COND_EXPR)
4880 && TREE_CODE (type) == VOID_TYPE));
4881 tree context;
4882 /* Used by check-memory-usage to make modifier read only. */
4883 enum expand_modifier ro_modifier;
4884
4885 /* Make a read-only version of the modifier. */
4886 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4887 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4888 ro_modifier = modifier;
4889 else
4890 ro_modifier = EXPAND_NORMAL;
4891
4892 /* Don't use hard regs as subtargets, because the combiner
4893 can only handle pseudo regs. */
4894 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4895 subtarget = 0;
4896 /* Avoid subtargets inside loops,
4897 since they hide some invariant expressions. */
4898 if (preserve_subexpressions_p ())
4899 subtarget = 0;
4900
4901 /* If we are going to ignore this result, we need only do something
4902 if there is a side-effect somewhere in the expression. If there
4903 is, short-circuit the most common cases here. Note that we must
4904 not call expand_expr with anything but const0_rtx in case this
4905 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4906
4907 if (ignore)
4908 {
4909 if (! TREE_SIDE_EFFECTS (exp))
4910 return const0_rtx;
4911
4912 /* Ensure we reference a volatile object even if value is ignored. */
4913 if (TREE_THIS_VOLATILE (exp)
4914 && TREE_CODE (exp) != FUNCTION_DECL
4915 && mode != VOIDmode && mode != BLKmode)
4916 {
4917 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
4918 if (GET_CODE (temp) == MEM)
4919 temp = copy_to_reg (temp);
4920 return const0_rtx;
4921 }
4922
4923 if (TREE_CODE_CLASS (code) == '1')
4924 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4925 VOIDmode, ro_modifier);
4926 else if (TREE_CODE_CLASS (code) == '2'
4927 || TREE_CODE_CLASS (code) == '<')
4928 {
4929 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4930 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
4931 return const0_rtx;
4932 }
4933 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4934 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4935 /* If the second operand has no side effects, just evaluate
4936 the first. */
4937 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4938 VOIDmode, ro_modifier);
4939
4940 target = 0;
4941 }
4942
4943 /* If we will do cse, generate all results into pseudo registers
4944 since 1) that allows cse to find more things
4945 and 2) otherwise cse could produce an insn the machine
4946 cannot support. */
4947
4948 if (! cse_not_expected && mode != BLKmode && target
4949 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4950 target = subtarget;
4951
4952 switch (code)
4953 {
4954 case LABEL_DECL:
4955 {
4956 tree function = decl_function_context (exp);
4957 /* Handle using a label in a containing function. */
4958 if (function != current_function_decl
4959 && function != inline_function_decl && function != 0)
4960 {
4961 struct function *p = find_function_data (function);
4962 /* Allocate in the memory associated with the function
4963 that the label is in. */
4964 push_obstacks (p->function_obstack,
4965 p->function_maybepermanent_obstack);
4966
4967 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4968 label_rtx (exp),
4969 p->forced_labels);
4970 pop_obstacks ();
4971 }
4972 else if (modifier == EXPAND_INITIALIZER)
4973 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4974 label_rtx (exp), forced_labels);
4975 temp = gen_rtx_MEM (FUNCTION_MODE,
4976 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
4977 if (function != current_function_decl
4978 && function != inline_function_decl && function != 0)
4979 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4980 return temp;
4981 }
4982
4983 case PARM_DECL:
4984 if (DECL_RTL (exp) == 0)
4985 {
4986 error_with_decl (exp, "prior parameter's size depends on `%s'");
4987 return CONST0_RTX (mode);
4988 }
4989
4990 /* ... fall through ... */
4991
4992 case VAR_DECL:
4993 /* If a static var's type was incomplete when the decl was written,
4994 but the type is complete now, lay out the decl now. */
4995 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4996 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4997 {
4998 push_obstacks_nochange ();
4999 end_temporary_allocation ();
5000 layout_decl (exp, 0);
5001 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5002 pop_obstacks ();
5003 }
5004
5005 /* Only check automatic variables. Currently, function arguments are
5006 not checked (this can be done at compile-time with prototypes).
5007 Aggregates are not checked. */
5008 if (flag_check_memory_usage && code == VAR_DECL
5009 && GET_CODE (DECL_RTL (exp)) == MEM
5010 && DECL_CONTEXT (exp) != NULL_TREE
5011 && ! TREE_STATIC (exp)
5012 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5013 {
5014 enum memory_use_mode memory_usage;
5015 memory_usage = get_memory_usage_from_modifier (modifier);
5016
5017 if (memory_usage != MEMORY_USE_DONT)
5018 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5019 XEXP (DECL_RTL (exp), 0), ptr_mode,
5020 GEN_INT (int_size_in_bytes (type)),
5021 TYPE_MODE (sizetype),
5022 GEN_INT (memory_usage),
5023 TYPE_MODE (integer_type_node));
5024 }
5025
5026 /* ... fall through ... */
5027
5028 case FUNCTION_DECL:
5029 case RESULT_DECL:
5030 if (DECL_RTL (exp) == 0)
5031 abort ();
5032
5033 /* Ensure the variable is marked as used even if it doesn't go through
5034 a parser. If it hasn't been used yet, write out an external
5035 definition. */
5036 if (! TREE_USED (exp))
5037 {
5038 assemble_external (exp);
5039 TREE_USED (exp) = 1;
5040 }
5041
5042 /* Show we haven't gotten RTL for this yet. */
5043 temp = 0;
5044
5045 /* Handle variables inherited from containing functions. */
5046 context = decl_function_context (exp);
5047
5048 /* We treat inline_function_decl as an alias for the current function
5049 because that is the inline function whose vars, types, etc.
5050 are being merged into the current function.
5051 See expand_inline_function. */
5052
5053 if (context != 0 && context != current_function_decl
5054 && context != inline_function_decl
5055 /* If var is static, we don't need a static chain to access it. */
5056 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5057 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5058 {
5059 rtx addr;
5060
5061 /* Mark as non-local and addressable. */
5062 DECL_NONLOCAL (exp) = 1;
5063 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5064 abort ();
5065 mark_addressable (exp);
5066 if (GET_CODE (DECL_RTL (exp)) != MEM)
5067 abort ();
5068 addr = XEXP (DECL_RTL (exp), 0);
5069 if (GET_CODE (addr) == MEM)
5070 addr = gen_rtx_MEM (Pmode,
5071 fix_lexical_addr (XEXP (addr, 0), exp));
5072 else
5073 addr = fix_lexical_addr (addr, exp);
5074 temp = change_address (DECL_RTL (exp), mode, addr);
5075 }
5076
5077 /* This is the case of an array whose size is to be determined
5078 from its initializer, while the initializer is still being parsed.
5079 See expand_decl. */
5080
5081 else if (GET_CODE (DECL_RTL (exp)) == MEM
5082 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5083 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5084 XEXP (DECL_RTL (exp), 0));
5085
5086 /* If DECL_RTL is memory, we are in the normal case and either
5087 the address is not valid or it is not a register and -fforce-addr
5088 is specified, get the address into a register. */
5089
5090 else if (GET_CODE (DECL_RTL (exp)) == MEM
5091 && modifier != EXPAND_CONST_ADDRESS
5092 && modifier != EXPAND_SUM
5093 && modifier != EXPAND_INITIALIZER
5094 && (! memory_address_p (DECL_MODE (exp),
5095 XEXP (DECL_RTL (exp), 0))
5096 || (flag_force_addr
5097 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5098 temp = change_address (DECL_RTL (exp), VOIDmode,
5099 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5100
5101 /* If we got something, return it. But first, set the alignment
5102 if the address is a register. */
5103 if (temp != 0)
5104 {
5105 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5106 mark_reg_pointer (XEXP (temp, 0),
5107 DECL_ALIGN (exp) / BITS_PER_UNIT);
5108
5109 return temp;
5110 }
5111
5112 /* If the mode of DECL_RTL does not match that of the decl, it
5113 must be a promoted value. We return a SUBREG of the wanted mode,
5114 but mark it so that we know that it was already extended. */
5115
5116 if (GET_CODE (DECL_RTL (exp)) == REG
5117 && GET_MODE (DECL_RTL (exp)) != mode)
5118 {
5119 /* Get the signedness used for this variable. Ensure we get the
5120 same mode we got when the variable was declared. */
5121 if (GET_MODE (DECL_RTL (exp))
5122 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5123 abort ();
5124
5125 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5126 SUBREG_PROMOTED_VAR_P (temp) = 1;
5127 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5128 return temp;
5129 }
5130
5131 return DECL_RTL (exp);
5132
5133 case INTEGER_CST:
5134 return immed_double_const (TREE_INT_CST_LOW (exp),
5135 TREE_INT_CST_HIGH (exp),
5136 mode);
5137
5138 case CONST_DECL:
5139 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5140 EXPAND_MEMORY_USE_BAD);
5141
5142 case REAL_CST:
5143 /* If optimized, generate immediate CONST_DOUBLE
5144 which will be turned into memory by reload if necessary.
5145
5146 We used to force a register so that loop.c could see it. But
5147 this does not allow gen_* patterns to perform optimizations with
5148 the constants. It also produces two insns in cases like "x = 1.0;".
5149 On most machines, floating-point constants are not permitted in
5150 many insns, so we'd end up copying it to a register in any case.
5151
5152 Now, we do the copying in expand_binop, if appropriate. */
5153 return immed_real_const (exp);
5154
5155 case COMPLEX_CST:
5156 case STRING_CST:
5157 if (! TREE_CST_RTL (exp))
5158 output_constant_def (exp);
5159
5160 /* TREE_CST_RTL probably contains a constant address.
5161 On RISC machines where a constant address isn't valid,
5162 make some insns to get that address into a register. */
5163 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5164 && modifier != EXPAND_CONST_ADDRESS
5165 && modifier != EXPAND_INITIALIZER
5166 && modifier != EXPAND_SUM
5167 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5168 || (flag_force_addr
5169 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5170 return change_address (TREE_CST_RTL (exp), VOIDmode,
5171 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5172 return TREE_CST_RTL (exp);
5173
5174 case SAVE_EXPR:
5175 context = decl_function_context (exp);
5176
5177 /* If this SAVE_EXPR was at global context, assume we are an
5178 initialization function and move it into our context. */
5179 if (context == 0)
5180 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5181
5182 /* We treat inline_function_decl as an alias for the current function
5183 because that is the inline function whose vars, types, etc.
5184 are being merged into the current function.
5185 See expand_inline_function. */
5186 if (context == current_function_decl || context == inline_function_decl)
5187 context = 0;
5188
5189 /* If this is non-local, handle it. */
5190 if (context)
5191 {
5192 /* The following call just exists to abort if the context is
5193 not of a containing function. */
5194 find_function_data (context);
5195
5196 temp = SAVE_EXPR_RTL (exp);
5197 if (temp && GET_CODE (temp) == REG)
5198 {
5199 put_var_into_stack (exp);
5200 temp = SAVE_EXPR_RTL (exp);
5201 }
5202 if (temp == 0 || GET_CODE (temp) != MEM)
5203 abort ();
5204 return change_address (temp, mode,
5205 fix_lexical_addr (XEXP (temp, 0), exp));
5206 }
5207 if (SAVE_EXPR_RTL (exp) == 0)
5208 {
5209 if (mode == VOIDmode)
5210 temp = const0_rtx;
5211 else
5212 temp = assign_temp (type, 3, 0, 0);
5213
5214 SAVE_EXPR_RTL (exp) = temp;
5215 if (!optimize && GET_CODE (temp) == REG)
5216 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5217 save_expr_regs);
5218
5219 /* If the mode of TEMP does not match that of the expression, it
5220 must be a promoted value. We pass store_expr a SUBREG of the
5221 wanted mode but mark it so that we know that it was already
5222 extended. Note that `unsignedp' was modified above in
5223 this case. */
5224
5225 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5226 {
5227 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5228 SUBREG_PROMOTED_VAR_P (temp) = 1;
5229 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5230 }
5231
5232 if (temp == const0_rtx)
5233 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5234 EXPAND_MEMORY_USE_BAD);
5235 else
5236 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5237
5238 TREE_USED (exp) = 1;
5239 }
5240
5241 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5242 must be a promoted value. We return a SUBREG of the wanted mode,
5243 but mark it so that we know that it was already extended. */
5244
5245 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5246 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5247 {
5248 /* Compute the signedness and make the proper SUBREG. */
5249 promote_mode (type, mode, &unsignedp, 0);
5250 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5251 SUBREG_PROMOTED_VAR_P (temp) = 1;
5252 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5253 return temp;
5254 }
5255
5256 return SAVE_EXPR_RTL (exp);
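    /* Example (illustrative): a front end may wrap the size expression
       of `int a[n + 1]' in a SAVE_EXPR so that N + 1 is computed only
       once; the first expansion above stores it into SAVE_EXPR_RTL,
       and every later expansion of the same node just returns that
       rtx.  */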
5257
5258 case UNSAVE_EXPR:
5259 {
5260 rtx temp;
5261 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5262 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5263 return temp;
5264 }
5265
5266 case PLACEHOLDER_EXPR:
5267 {
5268 tree placeholder_expr;
5269
5270 /* If there is an object on the head of the placeholder list,
5271 see if some object in it is of type TYPE or a pointer to it. For
5272 further information, see tree.def. */
5273 for (placeholder_expr = placeholder_list;
5274 placeholder_expr != 0;
5275 placeholder_expr = TREE_CHAIN (placeholder_expr))
5276 {
5277 tree need_type = TYPE_MAIN_VARIANT (type);
5278 tree object = 0;
5279 tree old_list = placeholder_list;
5280 tree elt;
5281
5282 /* Find the outermost reference that is of the type we want.
5283 If none, see if any object has a type that is a pointer to
5284 the type we want. */
5285 for (elt = TREE_PURPOSE (placeholder_expr);
5286 elt != 0 && object == 0;
5287 elt
5288 = ((TREE_CODE (elt) == COMPOUND_EXPR
5289 || TREE_CODE (elt) == COND_EXPR)
5290 ? TREE_OPERAND (elt, 1)
5291 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5292 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5293 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5294 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5295 ? TREE_OPERAND (elt, 0) : 0))
5296 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5297 object = elt;
5298
5299 for (elt = TREE_PURPOSE (placeholder_expr);
5300 elt != 0 && object == 0;
5301 elt
5302 = ((TREE_CODE (elt) == COMPOUND_EXPR
5303 || TREE_CODE (elt) == COND_EXPR)
5304 ? TREE_OPERAND (elt, 1)
5305 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5306 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5307 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5308 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5309 ? TREE_OPERAND (elt, 0) : 0))
5310 if (POINTER_TYPE_P (TREE_TYPE (elt))
5311 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5312 == need_type))
5313 object = build1 (INDIRECT_REF, need_type, elt);
5314
5315 if (object != 0)
5316 {
5317 /* Expand this object skipping the list entries before
5318 it was found in case it is also a PLACEHOLDER_EXPR.
5319 In that case, we want to translate it using subsequent
5320 entries. */
5321 placeholder_list = TREE_CHAIN (placeholder_expr);
5322 temp = expand_expr (object, original_target, tmode,
5323 ro_modifier);
5324 placeholder_list = old_list;
5325 return temp;
5326 }
5327 }
5328 }
5329
5330 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5331 abort ();
5332
5333 case WITH_RECORD_EXPR:
5334 /* Put the object on the placeholder list, expand our first operand,
5335 and pop the list. */
5336 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5337 placeholder_list);
5338 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5339 tmode, ro_modifier);
5340 placeholder_list = TREE_CHAIN (placeholder_list);
5341 return target;
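    /* Example (illustrative): PLACEHOLDER_EXPR and WITH_RECORD_EXPR
       cooperate for languages such as Ada, where a field's position or
       size can depend on the enclosing record object.  WITH_RECORD_EXPR
       pushes that object on the placeholder list, and a
       PLACEHOLDER_EXPR inside the size expression is then expanded as a
       reference to it.  */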
5342
5343 case EXIT_EXPR:
5344 expand_exit_loop_if_false (NULL_PTR,
5345 invert_truthvalue (TREE_OPERAND (exp, 0)));
5346 return const0_rtx;
5347
5348 case LOOP_EXPR:
5349 push_temp_slots ();
5350 expand_start_loop (1);
5351 expand_expr_stmt (TREE_OPERAND (exp, 0));
5352 expand_end_loop ();
5353 pop_temp_slots ();
5354
5355 return const0_rtx;
5356
5357 case BIND_EXPR:
5358 {
5359 tree vars = TREE_OPERAND (exp, 0);
5360 int vars_need_expansion = 0;
5361
5362 /* Need to open a binding contour here because
5363 if there are any cleanups they must be contained here. */
5364 expand_start_bindings (0);
5365
5366 /* Mark the corresponding BLOCK for output in its proper place. */
5367 if (TREE_OPERAND (exp, 2) != 0
5368 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5369 insert_block (TREE_OPERAND (exp, 2));
5370
5371 /* If VARS have not yet been expanded, expand them now. */
5372 while (vars)
5373 {
5374 if (DECL_RTL (vars) == 0)
5375 {
5376 vars_need_expansion = 1;
5377 expand_decl (vars);
5378 }
5379 expand_decl_init (vars);
5380 vars = TREE_CHAIN (vars);
5381 }
5382
5383 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5384
5385 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5386
5387 return temp;
5388 }
5389
5390 case RTL_EXPR:
5391 if (RTL_EXPR_SEQUENCE (exp))
5392 {
5393 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5394 abort ();
5395 emit_insns (RTL_EXPR_SEQUENCE (exp));
5396 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5397 }
5398 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5399 free_temps_for_rtl_expr (exp);
5400 return RTL_EXPR_RTL (exp);
5401
5402 case CONSTRUCTOR:
5403 /* If we don't need the result, just ensure we evaluate any
5404 subexpressions. */
5405 if (ignore)
5406 {
5407 tree elt;
5408 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5409 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5410 EXPAND_MEMORY_USE_BAD);
5411 return const0_rtx;
5412 }
5413
5414 /* All elts simple constants => refer to a constant in memory. But
5415 if this is a non-BLKmode mode, let it store a field at a time
5416 since that should make a CONST_INT or CONST_DOUBLE when we
5417 fold. Likewise, if we have a target we can use, it is best to
5418 store directly into the target unless the type is large enough
5419 that memcpy will be used. If we are making an initializer and
5420 all operands are constant, put it in memory as well. */
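/* For instance, a static initializer such as
     static int v[4] = {1, 2, 3, 4};
   is normally emitted as an object in memory, while a small automatic
   aggregate is typically stored one field at a time by
   store_constructor below.  */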
5421 else if ((TREE_STATIC (exp)
5422 && ((mode == BLKmode
5423 && ! (target != 0 && safe_from_p (target, exp, 1)))
5424 || TREE_ADDRESSABLE (exp)
5425 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5426 && (move_by_pieces_ninsns
5427 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5428 TYPE_ALIGN (type) / BITS_PER_UNIT)
5429 > MOVE_RATIO)
5430 && ! mostly_zeros_p (exp))))
5431 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5432 {
5433 rtx constructor = output_constant_def (exp);
5434 if (modifier != EXPAND_CONST_ADDRESS
5435 && modifier != EXPAND_INITIALIZER
5436 && modifier != EXPAND_SUM
5437 && (! memory_address_p (GET_MODE (constructor),
5438 XEXP (constructor, 0))
5439 || (flag_force_addr
5440 && GET_CODE (XEXP (constructor, 0)) != REG)))
5441 constructor = change_address (constructor, VOIDmode,
5442 XEXP (constructor, 0));
5443 return constructor;
5444 }
5445
5446 else
5447 {
5448 /* Handle calls that pass values in multiple non-contiguous
5449 locations. The Irix 6 ABI has examples of this. */
5450 if (target == 0 || ! safe_from_p (target, exp, 1)
5451 || GET_CODE (target) == PARALLEL)
5452 {
5453 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5454 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5455 else
5456 target = assign_temp (type, 0, 1, 1);
5457 }
5458
5459 if (TREE_READONLY (exp))
5460 {
5461 if (GET_CODE (target) == MEM)
5462 target = copy_rtx (target);
5463
5464 RTX_UNCHANGING_P (target) = 1;
5465 }
5466
5467 store_constructor (exp, target, 0);
5468 return target;
5469 }
5470
5471 case INDIRECT_REF:
5472 {
5473 tree exp1 = TREE_OPERAND (exp, 0);
5474 tree exp2;
5475 tree index;
5476 tree string = string_constant (exp1, &index);
5477 int i;
5478
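/* If the operand is a constant string plus a constant index, fold the
   load here: e.g., *("hello" + 1) becomes the character constant 'e'.  */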
5479 if (string
5480 && TREE_CODE (string) == STRING_CST
5481 && TREE_CODE (index) == INTEGER_CST
5482 && !TREE_INT_CST_HIGH (index)
5483 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5484 && GET_MODE_CLASS (mode) == MODE_INT
5485 && GET_MODE_SIZE (mode) == 1)
5486 return GEN_INT (TREE_STRING_POINTER (string)[i]);
5487
5488 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5489 op0 = memory_address (mode, op0);
5490
5491 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5492 {
5493 enum memory_use_mode memory_usage;
5494 memory_usage = get_memory_usage_from_modifier (modifier);
5495
5496 if (memory_usage != MEMORY_USE_DONT)
5497 {
5498 in_check_memory_usage = 1;
5499 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5500 op0, ptr_mode,
5501 GEN_INT (int_size_in_bytes (type)),
5502 TYPE_MODE (sizetype),
5503 GEN_INT (memory_usage),
5504 TYPE_MODE (integer_type_node));
5505 in_check_memory_usage = 0;
5506 }
5507 }
5508
5509 temp = gen_rtx_MEM (mode, op0);
5510 /* If the address was computed by addition,
5511 mark this as an element of an aggregate. */
5512 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5513 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5514 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5515 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5516 || (TREE_CODE (exp1) == ADDR_EXPR
5517 && (exp2 = TREE_OPERAND (exp1, 0))
5518 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5519 MEM_IN_STRUCT_P (temp) = 1;
5520 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5521
5522 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5523 here, because, in C and C++, the fact that a location is accessed
5524 through a pointer to const does not mean that the value there can
5525 never change. Languages where it can never change should
5526 also set TREE_STATIC. */
5527 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5528 return temp;
5529 }
5530
5531 case ARRAY_REF:
5532 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5533 abort ();
5534
5535 {
5536 tree array = TREE_OPERAND (exp, 0);
5537 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5538 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5539 tree index = TREE_OPERAND (exp, 1);
5540 tree index_type = TREE_TYPE (index);
5541 HOST_WIDE_INT i;
5542
5543 /* Optimize the special case of a zero lower bound.
5544
5545 We convert the low_bound to sizetype to avoid some problems
5546 with constant folding. (E.g. suppose the lower bound is 1,
5547 and its mode is QI. Without the conversion, (ARRAY
5548 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5549 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5550
5551 But sizetype isn't quite right either (especially if
5552 the low bound is negative). FIXME */
5553
5554 if (! integer_zerop (low_bound))
5555 index = fold (build (MINUS_EXPR, index_type, index,
5556 convert (sizetype, low_bound)));
5557
5558 /* Fold an expression like: "foo"[2].
5559 This is not done in fold so it won't happen inside &.
5560 Don't fold if this is for wide characters since it's too
5561 difficult to do correctly and this is a very rare case. */
5562
5563 if (TREE_CODE (array) == STRING_CST
5564 && TREE_CODE (index) == INTEGER_CST
5565 && !TREE_INT_CST_HIGH (index)
5566 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5567 && GET_MODE_CLASS (mode) == MODE_INT
5568 && GET_MODE_SIZE (mode) == 1)
5569 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5570
5571 /* If this is a constant index into a constant array,
5572 just get the value from the array. Handle both the cases when
5573 we have an explicit constructor and when our operand is a variable
5574 that was declared const. */
5575
5576 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5577 {
5578 if (TREE_CODE (index) == INTEGER_CST
5579 && TREE_INT_CST_HIGH (index) == 0)
5580 {
5581 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5582
5583 i = TREE_INT_CST_LOW (index);
5584 while (elem && i--)
5585 elem = TREE_CHAIN (elem);
5586 if (elem)
5587 return expand_expr (fold (TREE_VALUE (elem)), target,
5588 tmode, ro_modifier);
5589 }
5590 }
5591
5592 else if (optimize >= 1
5593 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5594 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5595 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5596 {
5597 if (TREE_CODE (index) == INTEGER_CST)
5598 {
5599 tree init = DECL_INITIAL (array);
5600
5601 i = TREE_INT_CST_LOW (index);
5602 if (TREE_CODE (init) == CONSTRUCTOR)
5603 {
5604 tree elem = CONSTRUCTOR_ELTS (init);
5605
5606 while (elem
5607 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5608 elem = TREE_CHAIN (elem);
5609 if (elem)
5610 return expand_expr (fold (TREE_VALUE (elem)), target,
5611 tmode, ro_modifier);
5612 }
5613 else if (TREE_CODE (init) == STRING_CST
5614 && TREE_INT_CST_HIGH (index) == 0
5615 && (TREE_INT_CST_LOW (index)
5616 < TREE_STRING_LENGTH (init)))
5617 return (GEN_INT
5618 (TREE_STRING_POINTER
5619 (init)[TREE_INT_CST_LOW (index)]));
5620 }
5621 }
5622 }
5623
5624 /* ... fall through ... */
5625
5626 case COMPONENT_REF:
5627 case BIT_FIELD_REF:
5628 /* If the operand is a CONSTRUCTOR, we can just extract the
5629 appropriate field if it is present. Don't do this if we have
5630 already written the data since we want to refer to that copy
5631 and varasm.c assumes that's what we'll do. */
5632 if (code != ARRAY_REF
5633 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5634 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5635 {
5636 tree elt;
5637
5638 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5639 elt = TREE_CHAIN (elt))
5640 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5641 /* We can normally use the value of the field in the
5642 CONSTRUCTOR. However, if this is a bitfield in
5643 an integral mode that we can fit in a HOST_WIDE_INT,
5644 we must mask only the number of bits in the bitfield,
5645 since this is done implicitly by the constructor. If
5646 the bitfield does not meet either of those conditions,
5647 we can't do this optimization. */
5648 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5649 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5650 == MODE_INT)
5651 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5652 <= HOST_BITS_PER_WIDE_INT))))
5653 {
5654 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5655 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5656 {
5657 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5658
5659 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5660 {
5661 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5662 op0 = expand_and (op0, op1, target);
5663 }
5664 else
5665 {
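/* Sign-extend the signed bit-field value: shift it to the top of
   IMODE, then shift it back down arithmetically, replicating the
   sign bit.  */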
5666 enum machine_mode imode
5667 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5668 tree count
5669 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
5670 0);
5671
5672 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5673 target, 0);
5674 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5675 target, 0);
5676 }
5677 }
5678
5679 return op0;
5680 }
5681 }
5682
5683 {
5684 enum machine_mode mode1;
5685 int bitsize;
5686 int bitpos;
5687 tree offset;
5688 int volatilep = 0;
5689 int alignment;
5690 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5691 &mode1, &unsignedp, &volatilep,
5692 &alignment);
5693
5694 /* If we got back the original object, something is wrong. Perhaps
5695 we are evaluating an expression too early. In any event, don't
5696 infinitely recurse. */
5697 if (tem == exp)
5698 abort ();
5699
5700 /* If TEM's type is a union of variable size, pass TARGET to the inner
5701 computation, since it will need a temporary and TARGET is known
5702 to be safe to use. This occurs in unchecked conversion in Ada. */
5703
5704 op0 = expand_expr (tem,
5705 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5706 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5707 != INTEGER_CST)
5708 ? target : NULL_RTX),
5709 VOIDmode,
5710 modifier == EXPAND_INITIALIZER
5711 ? modifier : EXPAND_NORMAL);
5712
5713 /* If this is a constant, put it into a register if it is a
5714 legitimate constant, and into memory if it isn't. */
5715 if (CONSTANT_P (op0))
5716 {
5717 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5718 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5719 op0 = force_reg (mode, op0);
5720 else
5721 op0 = validize_mem (force_const_mem (mode, op0));
5722 }
5723
5724 if (offset != 0)
5725 {
5726 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5727
5728 if (GET_CODE (op0) != MEM)
5729 abort ();
5730
5731 if (GET_MODE (offset_rtx) != ptr_mode)
5732 #ifdef POINTERS_EXTEND_UNSIGNED
5733 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
5734 #else
5735 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5736 #endif
5737
5738 op0 = change_address (op0, VOIDmode,
5739 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
5740 force_reg (ptr_mode, offset_rtx)));
5741 }
5742
5743 /* Don't forget about volatility even if this is a bitfield. */
5744 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5745 {
5746 op0 = copy_rtx (op0);
5747 MEM_VOLATILE_P (op0) = 1;
5748 }
5749
5750 /* Check the access. */
5751 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5752 {
5753 enum memory_use_mode memory_usage;
5754 memory_usage = get_memory_usage_from_modifier (modifier);
5755
5756 if (memory_usage != MEMORY_USE_DONT)
5757 {
5758 rtx to;
5759 int size;
5760
5761 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5762 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5763
5764 /* Check the access rights of the pointer. */
5765 if (size > BITS_PER_UNIT)
5766 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5767 to, ptr_mode,
5768 GEN_INT (size / BITS_PER_UNIT),
5769 TYPE_MODE (sizetype),
5770 GEN_INT (memory_usage),
5771 TYPE_MODE (integer_type_node));
5772 }
5773 }
5774
5775 /* In cases where an aligned union has an unaligned object
5776 as a field, we might be extracting a BLKmode value from
5777 an integer-mode (e.g., SImode) object. Handle this case
5778 by doing the extract into an object as wide as the field
5779 (which we know to be the width of a basic mode), then
5780 storing into memory, and changing the mode to BLKmode.
5781 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5782 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5783 if (mode1 == VOIDmode
5784 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5785 || (modifier != EXPAND_CONST_ADDRESS
5786 && modifier != EXPAND_INITIALIZER
5787 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5788 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5789 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5790 /* If the field isn't aligned enough to fetch as a memref,
5791 fetch it as a bit field. */
5792 || (SLOW_UNALIGNED_ACCESS
5793 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5794 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5795 {
5796 enum machine_mode ext_mode = mode;
5797
5798 if (ext_mode == BLKmode)
5799 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5800
5801 if (ext_mode == BLKmode)
5802 {
5803 /* In this case, BITPOS must start at a byte boundary and
5804 TARGET, if specified, must be a MEM. */
5805 if (GET_CODE (op0) != MEM
5806 || (target != 0 && GET_CODE (target) != MEM)
5807 || bitpos % BITS_PER_UNIT != 0)
5808 abort ();
5809
5810 op0 = change_address (op0, VOIDmode,
5811 plus_constant (XEXP (op0, 0),
5812 bitpos / BITS_PER_UNIT));
5813 if (target == 0)
5814 target = assign_temp (type, 0, 1, 1);
5815
5816 emit_block_move (target, op0,
5817 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5818 / BITS_PER_UNIT),
5819 1);
5820
5821 return target;
5822 }
5823
5824 op0 = validize_mem (op0);
5825
5826 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5827 mark_reg_pointer (XEXP (op0, 0), alignment);
5828
5829 op0 = extract_bit_field (op0, bitsize, bitpos,
5830 unsignedp, target, ext_mode, ext_mode,
5831 alignment,
5832 int_size_in_bytes (TREE_TYPE (tem)));
5833
5834 /* If the result is a record type and BITSIZE is narrower than
5835 the mode of OP0, an integral mode, and this is a big endian
5836 machine, we must put the field into the high-order bits. */
5837 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5838 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5839 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5840 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5841 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5842 - bitsize),
5843 op0, 1);
5844
5845 if (mode == BLKmode)
5846 {
5847 rtx new = assign_stack_temp (ext_mode,
5848 bitsize / BITS_PER_UNIT, 0);
5849
5850 emit_move_insn (new, op0);
5851 op0 = copy_rtx (new);
5852 PUT_MODE (op0, BLKmode);
5853 MEM_IN_STRUCT_P (op0) = 1;
5854 }
5855
5856 return op0;
5857 }
5858
5859 /* If the result is BLKmode, use that to access the object
5860 now as well. */
5861 if (mode == BLKmode)
5862 mode1 = BLKmode;
5863
5864 /* Get a reference to just this component. */
5865 if (modifier == EXPAND_CONST_ADDRESS
5866 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5867 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
5868 (bitpos / BITS_PER_UNIT)));
5869 else
5870 op0 = change_address (op0, mode1,
5871 plus_constant (XEXP (op0, 0),
5872 (bitpos / BITS_PER_UNIT)));
5873 if (GET_CODE (XEXP (op0, 0)) == REG)
5874 mark_reg_pointer (XEXP (op0, 0), alignment);
5875
5876 MEM_IN_STRUCT_P (op0) = 1;
5877 MEM_VOLATILE_P (op0) |= volatilep;
5878 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5879 || modifier == EXPAND_CONST_ADDRESS
5880 || modifier == EXPAND_INITIALIZER)
5881 return op0;
5882 else if (target == 0)
5883 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5884
5885 convert_move (target, op0, unsignedp);
5886 return target;
5887 }
5888
5889 /* Intended for a reference to a buffer of a file-object in Pascal.
5890 But it's not certain that a special tree code will really be
5891 necessary for these. INDIRECT_REF might work for them. */
5892 case BUFFER_REF:
5893 abort ();
5894
5895 case IN_EXPR:
5896 {
5897 /* Pascal set IN expression.
5898
5899 Algorithm:
5900 rlo = set_low - (set_low%bits_per_word);
5901 the_word = set [ (index - rlo)/bits_per_word ];
5902 bit_index = index % bits_per_word;
5903 bitmask = 1 << bit_index;
5904 return !!(the_word & bitmask); */
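/* Note that the code below actually works a byte at a time:
   BITS_PER_UNIT plays the role of bits_per_word in the sketch
   above.  */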
5905
5906 tree set = TREE_OPERAND (exp, 0);
5907 tree index = TREE_OPERAND (exp, 1);
5908 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5909 tree set_type = TREE_TYPE (set);
5910 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5911 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5912 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5913 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5914 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5915 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5916 rtx setaddr = XEXP (setval, 0);
5917 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5918 rtx rlow;
5919 rtx diff, quo, rem, addr, bit, result;
5920
5921 preexpand_calls (exp);
5922
5923 /* If domain is empty, answer is no. Likewise if index is constant
5924 and out of bounds. */
5925 if (((TREE_CODE (set_high_bound) == INTEGER_CST
5926 && TREE_CODE (set_low_bound) == INTEGER_CST
5927 && tree_int_cst_lt (set_high_bound, set_low_bound))
5928 || (TREE_CODE (index) == INTEGER_CST
5929 && TREE_CODE (set_low_bound) == INTEGER_CST
5930 && tree_int_cst_lt (index, set_low_bound))
5931 || (TREE_CODE (set_high_bound) == INTEGER_CST
5932 && TREE_CODE (index) == INTEGER_CST
5933 && tree_int_cst_lt (set_high_bound, index))))
5934 return const0_rtx;
5935
5936 if (target == 0)
5937 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5938
5939 /* If we get here, we have to generate the code for both cases
5940 (in range and out of range). */
5941
5942 op0 = gen_label_rtx ();
5943 op1 = gen_label_rtx ();
5944
5945 if (! (GET_CODE (index_val) == CONST_INT
5946 && GET_CODE (lo_r) == CONST_INT))
5947 {
5948 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5949 GET_MODE (index_val), iunsignedp, 0);
5950 emit_jump_insn (gen_blt (op1));
5951 }
5952
5953 if (! (GET_CODE (index_val) == CONST_INT
5954 && GET_CODE (hi_r) == CONST_INT))
5955 {
5956 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5957 GET_MODE (index_val), iunsignedp, 0);
5958 emit_jump_insn (gen_bgt (op1));
5959 }
5960
5961 /* Calculate the element number of bit zero in the first word
5962 of the set. */
5963 if (GET_CODE (lo_r) == CONST_INT)
5964 rlow = GEN_INT (INTVAL (lo_r)
5965 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
5966 else
5967 rlow = expand_binop (index_mode, and_optab, lo_r,
5968 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
5969 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5970
5971 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5972 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5973
5974 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5975 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5976 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5977 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5978
5979 addr = memory_address (byte_mode,
5980 expand_binop (index_mode, add_optab, diff,
5981 setaddr, NULL_RTX, iunsignedp,
5982 OPTAB_LIB_WIDEN));
5983
5984 /* Extract the bit we want to examine.  */
5985 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5986 gen_rtx_MEM (byte_mode, addr),
5987 make_tree (TREE_TYPE (index), rem),
5988 NULL_RTX, 1);
5989 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5990 GET_MODE (target) == byte_mode ? target : 0,
5991 1, OPTAB_LIB_WIDEN);
5992
5993 if (result != target)
5994 convert_move (target, result, 1);
5995
5996 /* Output the code to handle the out-of-range case. */
5997 emit_jump (op0);
5998 emit_label (op1);
5999 emit_move_insn (target, const0_rtx);
6000 emit_label (op0);
6001 return target;
6002 }
6003
6004 case WITH_CLEANUP_EXPR:
6005 if (RTL_EXPR_RTL (exp) == 0)
6006 {
6007 RTL_EXPR_RTL (exp)
6008 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6009 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6010
6011 /* That's it for this cleanup. */
6012 TREE_OPERAND (exp, 2) = 0;
6013 }
6014 return RTL_EXPR_RTL (exp);
6015
6016 case CLEANUP_POINT_EXPR:
6017 {
6018 extern int temp_slot_level;
6019 /* Start a new binding layer that will keep track of all cleanup
6020 actions to be performed. */
6021 expand_start_bindings (0);
6022
6023 target_temp_slot_level = temp_slot_level;
6024
6025 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6026 /* If we're going to use this value, load it up now. */
6027 if (! ignore)
6028 op0 = force_not_mem (op0);
6029 preserve_temp_slots (op0);
6030 expand_end_bindings (NULL_TREE, 0, 0);
6031 }
6032 return op0;
6033
6034 case CALL_EXPR:
6035 /* Check for a built-in function. */
6036 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6037 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6038 == FUNCTION_DECL)
6039 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6040 return expand_builtin (exp, target, subtarget, tmode, ignore);
6041
6042 /* If this call was expanded already by preexpand_calls,
6043 just return the result we got. */
6044 if (CALL_EXPR_RTL (exp) != 0)
6045 return CALL_EXPR_RTL (exp);
6046
6047 return expand_call (exp, target, ignore);
6048
6049 case NON_LVALUE_EXPR:
6050 case NOP_EXPR:
6051 case CONVERT_EXPR:
6052 case REFERENCE_EXPR:
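/* A conversion to a union type arises, e.g., from the GNU C
   cast-to-union extension; store the operand into the matching
   variant and return the whole union.  */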
6053 if (TREE_CODE (type) == UNION_TYPE)
6054 {
6055 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6056 if (target == 0)
6057 {
6058 if (mode != BLKmode)
6059 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6060 else
6061 target = assign_temp (type, 0, 1, 1);
6062 }
6063
6064 if (GET_CODE (target) == MEM)
6065 /* Store data into beginning of memory target. */
6066 store_expr (TREE_OPERAND (exp, 0),
6067 change_address (target, TYPE_MODE (valtype), 0), 0);
6068
6069 else if (GET_CODE (target) == REG)
6070 /* Store this field into a union of the proper type. */
6071 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6072 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6073 VOIDmode, 0, 1,
6074 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6075 else
6076 abort ();
6077
6078 /* Return the entire union. */
6079 return target;
6080 }
6081
6082 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6083 {
6084 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6085 ro_modifier);
6086
6087 /* If the signedness of the conversion differs and OP0 is
6088 a promoted SUBREG, clear that indication since we now
6089 have to do the proper extension. */
6090 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6091 && GET_CODE (op0) == SUBREG)
6092 SUBREG_PROMOTED_VAR_P (op0) = 0;
6093
6094 return op0;
6095 }
6096
6097 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6098 if (GET_MODE (op0) == mode)
6099 return op0;
6100
6101 /* If OP0 is a constant, just convert it into the proper mode. */
6102 if (CONSTANT_P (op0))
6103 return
6104 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6105 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6106
6107 if (modifier == EXPAND_INITIALIZER)
6108 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6109
6110 if (target == 0)
6111 return
6112 convert_to_mode (mode, op0,
6113 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6114 else
6115 convert_move (target, op0,
6116 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6117 return target;
6118
6119 case PLUS_EXPR:
6120 /* We come here from MINUS_EXPR when the second operand is a
6121 constant. */
6122 plus_expr:
6123 this_optab = add_optab;
6124
6125 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6126 something else, make sure we add the register to the constant and
6127 then to the other thing. This case can occur during strength
6128 reduction and doing it this way will produce better code if the
6129 frame pointer or argument pointer is eliminated.
6130
6131 fold-const.c will ensure that the constant is always in the inner
6132 PLUS_EXPR, so the only case we need to do anything about is if
6133 sp, ap, or fp is our second argument, in which case we must swap
6134 the innermost first argument and our second argument. */
6135
6136 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6137 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6138 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6139 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6140 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6141 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6142 {
6143 tree t = TREE_OPERAND (exp, 1);
6144
6145 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6146 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6147 }
6148
6149 /* If the result is to be ptr_mode and we are adding an integer to
6150 something, we might be forming a constant. So try to use
6151 plus_constant. If it produces a sum and we can't accept it,
6152 use force_operand. This allows P = &ARR[const] to generate
6153 efficient code on machines where a SYMBOL_REF is not a valid
6154 address.
6155
6156 If this is an EXPAND_SUM call, always return the sum. */
6157 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6158 || mode == ptr_mode)
6159 {
6160 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6161 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6162 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6163 {
6164 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6165 EXPAND_SUM);
6166 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6167 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6168 op1 = force_operand (op1, target);
6169 return op1;
6170 }
6171
6172 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6173 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6174 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6175 {
6176 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6177 EXPAND_SUM);
6178 if (! CONSTANT_P (op0))
6179 {
6180 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6181 VOIDmode, modifier);
6182 /* Don't go to both_summands if modifier
6183 says it's not right to return a PLUS. */
6184 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6185 goto binop2;
6186 goto both_summands;
6187 }
6188 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6189 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6190 op0 = force_operand (op0, target);
6191 return op0;
6192 }
6193 }
6194
6195 /* No sense saving up arithmetic to be done
6196 if it's all in the wrong mode to form part of an address.
6197 And force_operand won't know whether to sign-extend or
6198 zero-extend. */
6199 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6200 || mode != ptr_mode)
6201 goto binop;
6202
6203 preexpand_calls (exp);
6204 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6205 subtarget = 0;
6206
6207 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6208 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6209
6210 both_summands:
6211 /* Make sure any term that's a sum with a constant comes last. */
6212 if (GET_CODE (op0) == PLUS
6213 && CONSTANT_P (XEXP (op0, 1)))
6214 {
6215 temp = op0;
6216 op0 = op1;
6217 op1 = temp;
6218 }
6219 /* If adding to a sum including a constant,
6220 associate it to put the constant outside. */
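/* For example, (x*4 + 8) + (y + 12) is reassociated below into
   (x*4 + y) + 20, keeping the constant part outermost.  */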
6221 if (GET_CODE (op1) == PLUS
6222 && CONSTANT_P (XEXP (op1, 1)))
6223 {
6224 rtx constant_term = const0_rtx;
6225
6226 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6227 if (temp != 0)
6228 op0 = temp;
6229 /* Ensure that MULT comes first if there is one. */
6230 else if (GET_CODE (op0) == MULT)
6231 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6232 else
6233 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6234
6235 /* Let's also eliminate constants from op0 if possible. */
6236 op0 = eliminate_constant_term (op0, &constant_term);
6237
6238 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6239 their sum should be a constant. Form it into OP1, since the
6240 result we want will then be OP0 + OP1. */
6241
6242 temp = simplify_binary_operation (PLUS, mode, constant_term,
6243 XEXP (op1, 1));
6244 if (temp != 0)
6245 op1 = temp;
6246 else
6247 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6248 }
6249
6250 /* Put a constant term last and put a multiplication first. */
6251 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6252 temp = op1, op1 = op0, op0 = temp;
6253
6254 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6255 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6256
6257 case MINUS_EXPR:
6258 /* For initializers, we are allowed to return a MINUS of two
6259 symbolic constants; here we handle the case where both
6260 operands are constant. */
6263 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6264 && really_constant_p (TREE_OPERAND (exp, 0))
6265 && really_constant_p (TREE_OPERAND (exp, 1)))
6266 {
6267 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6268 VOIDmode, ro_modifier);
6269 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6270 VOIDmode, ro_modifier);
6271
6272 /* If the last operand is a CONST_INT, use plus_constant of
6273 the negated constant. Else make the MINUS. */
6274 if (GET_CODE (op1) == CONST_INT)
6275 return plus_constant (op0, - INTVAL (op1));
6276 else
6277 return gen_rtx_MINUS (mode, op0, op1);
6278 }
6279 /* Convert A - const to A + (-const). */
6280 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6281 {
6282 tree negated = fold (build1 (NEGATE_EXPR, type,
6283 TREE_OPERAND (exp, 1)));
6284
6285 /* Deal with the case where we can't negate the constant
6286 in TYPE. */
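/* For example, if TYPE is unsigned, the negation is redone in the
   corresponding signed type, so A - 1 is expanded as
   (TYPE) ((signed) A + -1).  */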
6287 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6288 {
6289 tree newtype = signed_type (type);
6290 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6291 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6292 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6293
6294 if (! TREE_OVERFLOW (newneg))
6295 return expand_expr (convert (type,
6296 build (PLUS_EXPR, newtype,
6297 newop0, newneg)),
6298 target, tmode, ro_modifier);
6299 }
6300 else
6301 {
6302 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6303 goto plus_expr;
6304 }
6305 }
6306 this_optab = sub_optab;
6307 goto binop;
6308
6309 case MULT_EXPR:
6310 preexpand_calls (exp);
6311 /* If first operand is constant, swap them.
6312 Thus the following special case checks need only
6313 check the second operand. */
6314 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6315 {
6316 register tree t1 = TREE_OPERAND (exp, 0);
6317 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6318 TREE_OPERAND (exp, 1) = t1;
6319 }
6320
6321 /* Attempt to return something suitable for generating an
6322 indexed address, for machines that support that. */
6323
6324 if (modifier == EXPAND_SUM && mode == ptr_mode
6325 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6326 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6327 {
6328 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6329 EXPAND_SUM);
6330
6331 /* Apply distributive law if OP0 is x+c. */
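/* E.g., (x + 4) * 3 becomes x*3 + 12, keeping the constant where
   address arithmetic can absorb it.  */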
6332 if (GET_CODE (op0) == PLUS
6333 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6334 return gen_rtx_PLUS (mode,
6335 gen_rtx_MULT (mode, XEXP (op0, 0),
6336 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6337 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6338 * INTVAL (XEXP (op0, 1))));
6339
6340 if (GET_CODE (op0) != REG)
6341 op0 = force_operand (op0, NULL_RTX);
6342 if (GET_CODE (op0) != REG)
6343 op0 = copy_to_mode_reg (mode, op0);
6344
6345 return gen_rtx_MULT (mode, op0,
6346 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6347 }
6348
6349 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6350 subtarget = 0;
6351
6352 /* Check for multiplying things that have been extended
6353 from a narrower type. If this machine supports multiplying
6354 in that narrower type with a result in the desired type,
6355 do it that way, and avoid the explicit type-conversion. */
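/* E.g., (int) (short) a * (int) (short) b can use a widening
   16x16->32 multiply (such as mulhisi3) on machines that provide
   one.  */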
6356 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6357 && TREE_CODE (type) == INTEGER_TYPE
6358 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6359 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6360 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6361 && int_fits_type_p (TREE_OPERAND (exp, 1),
6362 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6363 /* Don't use a widening multiply if a shift will do. */
6364 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6365 > HOST_BITS_PER_WIDE_INT)
6366 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6367 ||
6368 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6369 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6370 ==
6371 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6372 /* If both operands are extended, they must either both
6373 be zero-extended or both be sign-extended. */
6374 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6375 ==
6376 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6377 {
6378 enum machine_mode innermode
6379 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6380 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6381 ? smul_widen_optab : umul_widen_optab);
6382 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6383 ? umul_widen_optab : smul_widen_optab);
6384 if (mode == GET_MODE_WIDER_MODE (innermode))
6385 {
6386 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6387 {
6388 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6389 NULL_RTX, VOIDmode, 0);
6390 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6391 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6392 VOIDmode, 0);
6393 else
6394 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6395 NULL_RTX, VOIDmode, 0);
6396 goto binop2;
6397 }
6398 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6399 && innermode == word_mode)
6400 {
6401 rtx htem;
6402 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6403 NULL_RTX, VOIDmode, 0);
6404 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6405 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6406 VOIDmode, 0);
6407 else
6408 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6409 NULL_RTX, VOIDmode, 0);
6410 temp = expand_binop (mode, other_optab, op0, op1, target,
6411 unsignedp, OPTAB_LIB_WIDEN);
6412 htem = expand_mult_highpart_adjust (innermode,
6413 gen_highpart (innermode, temp),
6414 op0, op1,
6415 gen_highpart (innermode, temp),
6416 unsignedp);
6417 emit_move_insn (gen_highpart (innermode, temp), htem);
6418 return temp;
6419 }
6420 }
6421 }
6422 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6423 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6424 return expand_mult (mode, op0, op1, target, unsignedp);
6425
6426 case TRUNC_DIV_EXPR:
6427 case FLOOR_DIV_EXPR:
6428 case CEIL_DIV_EXPR:
6429 case ROUND_DIV_EXPR:
6430 case EXACT_DIV_EXPR:
6431 preexpand_calls (exp);
6432 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6433 subtarget = 0;
6434 /* Possible optimization: compute the dividend with EXPAND_SUM;
6435 then, if the divisor is constant, we can optimize the case
6436 where some terms of the dividend have coefficients divisible by it. */
6437 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6438 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6439 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6440
6441 case RDIV_EXPR:
6442 this_optab = flodiv_optab;
6443 goto binop;
6444
6445 case TRUNC_MOD_EXPR:
6446 case FLOOR_MOD_EXPR:
6447 case CEIL_MOD_EXPR:
6448 case ROUND_MOD_EXPR:
6449 preexpand_calls (exp);
6450 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6451 subtarget = 0;
6452 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6453 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6454 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6455
6456 case FIX_ROUND_EXPR:
6457 case FIX_FLOOR_EXPR:
6458 case FIX_CEIL_EXPR:
6459 abort (); /* Not used for C. */
6460
6461 case FIX_TRUNC_EXPR:
6462 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6463 if (target == 0)
6464 target = gen_reg_rtx (mode);
6465 expand_fix (target, op0, unsignedp);
6466 return target;
6467
6468 case FLOAT_EXPR:
6469 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6470 if (target == 0)
6471 target = gen_reg_rtx (mode);
6472 /* expand_float can't figure out what to do if FROM has VOIDmode.
6473 So give it the correct mode. With -O, cse will optimize this. */
6474 if (GET_MODE (op0) == VOIDmode)
6475 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6476 op0);
6477 expand_float (target, op0,
6478 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6479 return target;
6480
6481 case NEGATE_EXPR:
6482 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6483 temp = expand_unop (mode, neg_optab, op0, target, 0);
6484 if (temp == 0)
6485 abort ();
6486 return temp;
6487
6488 case ABS_EXPR:
6489 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6490
6491 /* Handle complex values specially. */
6492 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6493 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6494 return expand_complex_abs (mode, op0, target, unsignedp);
6495
6496 /* Unsigned abs is simply the operand. Testing here means we don't
6497 risk generating incorrect code below. */
6498 if (TREE_UNSIGNED (type))
6499 return op0;
6500
6501 return expand_abs (mode, op0, target, unsignedp,
6502 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
6503
6504 case MAX_EXPR:
6505 case MIN_EXPR:
6506 target = original_target;
6507 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
6508 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6509 || GET_MODE (target) != mode
6510 || (GET_CODE (target) == REG
6511 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6512 target = gen_reg_rtx (mode);
6513 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6514 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6515
6516 /* First try to do it with a special MIN or MAX instruction.
6517 If that does not win, use a conditional jump to select the proper
6518 value. */
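/* The fallback emits, roughly:
     target = op0;
     if (target >= op1) goto lab;
     target = op1;
   lab:
   (with the comparison reversed for MIN_EXPR).  */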
6519 this_optab = (TREE_UNSIGNED (type)
6520 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6521 : (code == MIN_EXPR ? smin_optab : smax_optab));
6522
6523 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6524 OPTAB_WIDEN);
6525 if (temp != 0)
6526 return temp;
6527
6528 /* At this point, a MEM target is no longer useful; we will get better
6529 code without it. */
6530
6531 if (GET_CODE (target) == MEM)
6532 target = gen_reg_rtx (mode);
6533
6534 if (target != op0)
6535 emit_move_insn (target, op0);
6536
6537 op0 = gen_label_rtx ();
6538
6539 /* If this mode is an integer too wide to compare properly,
6540 compare word by word. Rely on cse to optimize constant cases. */
6541 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6542 {
6543 if (code == MAX_EXPR)
6544 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6545 target, op1, NULL_RTX, op0);
6546 else
6547 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6548 op1, target, NULL_RTX, op0);
6549 emit_move_insn (target, op1);
6550 }
6551 else
6552 {
6553 if (code == MAX_EXPR)
6554 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6555 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6556 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6557 else
6558 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6559 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6560 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6561 if (temp == const0_rtx)
6562 emit_move_insn (target, op1);
6563 else if (temp != const_true_rtx)
6564 {
6565 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6566 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6567 else
6568 abort ();
6569 emit_move_insn (target, op1);
6570 }
6571 }
6572 emit_label (op0);
6573 return target;
6574
6575 case BIT_NOT_EXPR:
6576 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6577 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6578 if (temp == 0)
6579 abort ();
6580 return temp;
6581
6582 case FFS_EXPR:
6583 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6584 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6585 if (temp == 0)
6586 abort ();
6587 return temp;
6588
6589 /* ??? Can optimize bitwise operations with one arg constant.
6590 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6591 and (a bitwise1 b) bitwise2 b (etc.)
6592 but that is probably not worthwhile. */
6593
6594 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6595 boolean values when we want in all cases to compute both of them. In
6596 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6597 as actual zero-or-1 values and then bitwise anding. In cases where
6598 there cannot be any side effects, better code would be made by
6599 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6600 how to recognize those cases. */
6601
6602 case TRUTH_AND_EXPR:
6603 case BIT_AND_EXPR:
6604 this_optab = and_optab;
6605 goto binop;
6606
6607 case TRUTH_OR_EXPR:
6608 case BIT_IOR_EXPR:
6609 this_optab = ior_optab;
6610 goto binop;
6611
6612 case TRUTH_XOR_EXPR:
6613 case BIT_XOR_EXPR:
6614 this_optab = xor_optab;
6615 goto binop;
6616
6617 case LSHIFT_EXPR:
6618 case RSHIFT_EXPR:
6619 case LROTATE_EXPR:
6620 case RROTATE_EXPR:
6621 preexpand_calls (exp);
6622 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6623 subtarget = 0;
6624 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6625 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6626 unsignedp);
6627
6628 /* Could determine the answer when only additive constants differ. Also,
6629 the addition of one can be handled by changing the condition. */
6630 case LT_EXPR:
6631 case LE_EXPR:
6632 case GT_EXPR:
6633 case GE_EXPR:
6634 case EQ_EXPR:
6635 case NE_EXPR:
6636 preexpand_calls (exp);
6637 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6638 if (temp != 0)
6639 return temp;
6640
6641 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6642 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6643 && original_target
6644 && GET_CODE (original_target) == REG
6645 && (GET_MODE (original_target)
6646 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6647 {
6648 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6649 VOIDmode, 0);
6650
6651 if (temp != original_target)
6652 temp = copy_to_reg (temp);
6653
6654 op1 = gen_label_rtx ();
6655 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6656 GET_MODE (temp), unsignedp, 0);
6657 emit_jump_insn (gen_beq (op1));
6658 emit_move_insn (temp, const1_rtx);
6659 emit_label (op1);
6660 return temp;
6661 }
6662
6663 /* If no set-flag instruction, must generate a conditional
6664 store into a temporary variable. Drop through
6665 and handle this like && and ||. */
6666
6667 case TRUTH_ANDIF_EXPR:
6668 case TRUTH_ORIF_EXPR:
6669 if (! ignore
6670 && (target == 0 || ! safe_from_p (target, exp, 1)
6671 /* Make sure we don't have a hard reg (such as function's return
6672 value) live across basic blocks, if not optimizing. */
6673 || (!optimize && GET_CODE (target) == REG
6674 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6675 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6676
6677 if (target)
6678 emit_clr_insn (target);
6679
6680 op1 = gen_label_rtx ();
6681 jumpifnot (exp, op1);
6682
6683 if (target)
6684 emit_0_to_1_insn (target);
6685
6686 emit_label (op1);
6687 return ignore ? const0_rtx : target;
6688
6689 case TRUTH_NOT_EXPR:
6690 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6691 /* The parser is careful to generate TRUTH_NOT_EXPR
6692 only with operands that are always zero or one. */
6693 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6694 target, 1, OPTAB_LIB_WIDEN);
6695 if (temp == 0)
6696 abort ();
6697 return temp;
6698
6699 case COMPOUND_EXPR:
6700 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6701 emit_queue ();
6702 return expand_expr (TREE_OPERAND (exp, 1),
6703 (ignore ? const0_rtx : target),
6704 VOIDmode, 0);
6705
6706 case COND_EXPR:
6707 /* If we would have a "singleton" (see below) were it not for a
6708 conversion in each arm, bring that conversion back out. */
6709 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6710 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6711 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6712 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6713 {
6714 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6715 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6716
6717 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6718 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6719 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6720 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6721 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6722 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6723 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6724 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6725 return expand_expr (build1 (NOP_EXPR, type,
6726 build (COND_EXPR, TREE_TYPE (true),
6727 TREE_OPERAND (exp, 0),
6728 true, false)),
6729 target, tmode, modifier);
6730 }
6731
6732 {
6733 /* Note that COND_EXPRs whose type is a structure or union
6734 are required to be constructed to contain assignments of
6735 a temporary variable, so that we can evaluate them here
6736 for side effect only. If type is void, we must do likewise. */
6737
6738 /* If an arm of the branch requires a cleanup,
6739 only that cleanup is performed. */
6740
6741 tree singleton = 0;
6742 tree binary_op = 0, unary_op = 0;
6743
6744 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6745 convert it to our mode, if necessary. */
6746 if (integer_onep (TREE_OPERAND (exp, 1))
6747 && integer_zerop (TREE_OPERAND (exp, 2))
6748 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6749 {
6750 if (ignore)
6751 {
6752 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6753 ro_modifier);
6754 return const0_rtx;
6755 }
6756
6757 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6758 if (GET_MODE (op0) == mode)
6759 return op0;
6760
6761 if (target == 0)
6762 target = gen_reg_rtx (mode);
6763 convert_move (target, op0, unsignedp);
6764 return target;
6765 }
6766
6767 /* Check for X ? A + B : A. If we have this, we can copy A to the
6768 output and conditionally add B. Similarly for unary operations.
6769 Don't do this if X has side-effects because those side effects
6770 might affect A or B and the "?" operation is a sequence point in
6771 ANSI. (operand_equal_p tests for side effects.) */
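/* E.g., in x ? a + 8 : a, SINGLETON is a and BINARY_OP is a + 8:
   a can be loaded unconditionally and 8 added only when x is true.  */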
6772
6773 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6774 && operand_equal_p (TREE_OPERAND (exp, 2),
6775 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6776 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6777 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6778 && operand_equal_p (TREE_OPERAND (exp, 1),
6779 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6780 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6781 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6782 && operand_equal_p (TREE_OPERAND (exp, 2),
6783 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6784 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6785 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6786 && operand_equal_p (TREE_OPERAND (exp, 1),
6787 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6788 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6789
6790 /* If we are not to produce a result, we have no target. Otherwise,
6791 if a target was specified use it; it will not be used as an
6792 intermediate target unless it is safe. If no target, use a
6793 temporary. */
6794
6795 if (ignore)
6796 temp = 0;
6797 else if (original_target
6798 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
6799 || (singleton && GET_CODE (original_target) == REG
6800 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6801 && original_target == var_rtx (singleton)))
6802 && GET_MODE (original_target) == mode
6803 #ifdef HAVE_conditional_move
6804 && (! can_conditionally_move_p (mode)
6805 || GET_CODE (original_target) == REG
6806 || TREE_ADDRESSABLE (type))
6807 #endif
6808 && ! (GET_CODE (original_target) == MEM
6809 && MEM_VOLATILE_P (original_target)))
6810 temp = original_target;
6811 else if (TREE_ADDRESSABLE (type))
6812 abort ();
6813 else
6814 temp = assign_temp (type, 0, 0, 1);
6815
6816 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6817 do the test of X as a store-flag operation, do this as
6818 A + ((X != 0) << log C). Similarly for other simple binary
6819 operators. Only do this for C == 1 if BRANCH_COST is low. */
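/* E.g., when branches are expensive, x > y ? a + 4 : a becomes
   a + ((x > y) << 2).  */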
6820 if (temp && singleton && binary_op
6821 && (TREE_CODE (binary_op) == PLUS_EXPR
6822 || TREE_CODE (binary_op) == MINUS_EXPR
6823 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6824 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6825 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6826 : integer_onep (TREE_OPERAND (binary_op, 1)))
6827 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6828 {
6829 rtx result;
6830 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6831 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6832 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6833 : xor_optab);
6834
6835 /* If we had X ? A : A + 1, do this as A + (X == 0).
6836
6837 We have to invert the truth value here and then put it
6838 back later if do_store_flag fails. We cannot simply copy
6839 TREE_OPERAND (exp, 0) to another variable and modify that
6840 because invert_truthvalue can modify the tree pointed to
6841 by its argument. */
6842 if (singleton == TREE_OPERAND (exp, 1))
6843 TREE_OPERAND (exp, 0)
6844 = invert_truthvalue (TREE_OPERAND (exp, 0));
6845
6846 result = do_store_flag (TREE_OPERAND (exp, 0),
6847 (safe_from_p (temp, singleton, 1)
6848 ? temp : NULL_RTX),
6849 mode, BRANCH_COST <= 1);
6850
6851 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6852 result = expand_shift (LSHIFT_EXPR, mode, result,
6853 build_int_2 (tree_log2
6854 (TREE_OPERAND
6855 (binary_op, 1)),
6856 0),
6857 (safe_from_p (temp, singleton, 1)
6858 ? temp : NULL_RTX), 0);
6859
6860 if (result)
6861 {
6862 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6863 return expand_binop (mode, boptab, op1, result, temp,
6864 unsignedp, OPTAB_LIB_WIDEN);
6865 }
6866 else if (singleton == TREE_OPERAND (exp, 1))
6867 TREE_OPERAND (exp, 0)
6868 = invert_truthvalue (TREE_OPERAND (exp, 0));
6869 }
6870
6871 do_pending_stack_adjust ();
6872 NO_DEFER_POP;
6873 op0 = gen_label_rtx ();
6874
6875 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6876 {
6877 if (temp != 0)
6878 {
6879 /* If the target conflicts with the other operand of the
6880 binary op, we can't use it. Also, we can't use the target
6881 if it is a hard register, because evaluating the condition
6882 might clobber it. */
6883 if ((binary_op
6884 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
6885 || (GET_CODE (temp) == REG
6886 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6887 temp = gen_reg_rtx (mode);
6888 store_expr (singleton, temp, 0);
6889 }
6890 else
6891 expand_expr (singleton,
6892 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6893 if (singleton == TREE_OPERAND (exp, 1))
6894 jumpif (TREE_OPERAND (exp, 0), op0);
6895 else
6896 jumpifnot (TREE_OPERAND (exp, 0), op0);
6897
6898 start_cleanup_deferral ();
6899 if (binary_op && temp == 0)
6900 /* Just touch the other operand. */
6901 expand_expr (TREE_OPERAND (binary_op, 1),
6902 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6903 else if (binary_op)
6904 store_expr (build (TREE_CODE (binary_op), type,
6905 make_tree (type, temp),
6906 TREE_OPERAND (binary_op, 1)),
6907 temp, 0);
6908 else
6909 store_expr (build1 (TREE_CODE (unary_op), type,
6910 make_tree (type, temp)),
6911 temp, 0);
6912 op1 = op0;
6913 }
6914 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6915 comparison operator. If we have one of these cases, set the
6916 output to A, branch on A (cse will merge these two references),
6917 then set the output to FOO. */
6918 else if (temp
6919 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6920 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6921 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6922 TREE_OPERAND (exp, 1), 0)
6923 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6924 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
6925 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
6926 {
6927 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6928 temp = gen_reg_rtx (mode);
6929 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6930 jumpif (TREE_OPERAND (exp, 0), op0);
6931
6932 start_cleanup_deferral ();
6933 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6934 op1 = op0;
6935 }
6936 else if (temp
6937 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6938 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6939 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6940 TREE_OPERAND (exp, 2), 0)
6941 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6942 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
6943 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
6944 {
6945 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6946 temp = gen_reg_rtx (mode);
6947 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6948 jumpifnot (TREE_OPERAND (exp, 0), op0);
6949
6950 start_cleanup_deferral ();
6951 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6952 op1 = op0;
6953 }
6954 else
6955 {
6956 op1 = gen_label_rtx ();
6957 jumpifnot (TREE_OPERAND (exp, 0), op0);
6958
6959 start_cleanup_deferral ();
6960 if (temp != 0)
6961 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6962 else
6963 expand_expr (TREE_OPERAND (exp, 1),
6964 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6965 end_cleanup_deferral ();
6966 emit_queue ();
6967 emit_jump_insn (gen_jump (op1));
6968 emit_barrier ();
6969 emit_label (op0);
6970 start_cleanup_deferral ();
6971 if (temp != 0)
6972 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6973 else
6974 expand_expr (TREE_OPERAND (exp, 2),
6975 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6976 }
6977
6978 end_cleanup_deferral ();
6979
6980 emit_queue ();
6981 emit_label (op1);
6982 OK_DEFER_POP;
6983
6984 return temp;
6985 }
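/* Illustrative sketch, not part of the original source: user-level C
   for the two patterns optimized above (hypothetical names).  */
#if 0
int
cond_examples (int x, int y)
{
  int a = x ? y + 1 : y;	/* SINGLETON y: temp = y; if (x) temp++;  */
  int b = x == 0 ? x : y;	/* A op 0 ? A : FOO: temp = x;
				   if (x == 0) goto done; temp = y;  */
  return a + b;
}
#endif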
6986
6987 case TARGET_EXPR:
6988 {
6989 /* Something needs to be initialized, but we didn't know
6990 where that thing was when building the tree. For example,
6991 it could be the return value of a function, or a parameter
6992 to a function which lays down in the stack, or a temporary
6993 variable which must be passed by reference.
6994
6995 We guarantee that the expression will either be constructed
6996 or copied into our original target. */
6997
6998 tree slot = TREE_OPERAND (exp, 0);
6999 tree cleanups = NULL_TREE;
7000 tree exp1;
7001
7002 if (TREE_CODE (slot) != VAR_DECL)
7003 abort ();
7004
7005 if (! ignore)
7006 target = original_target;
7007
7008 if (target == 0)
7009 {
7010 if (DECL_RTL (slot) != 0)
7011 {
7012 target = DECL_RTL (slot);
7013 /* If we have already expanded the slot, don't do
7014 it again. (mrs) */
7015 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7016 return target;
7017 }
7018 else
7019 {
7020 target = assign_temp (type, 2, 0, 1);
7021 /* All temp slots at this level must not conflict. */
7022 preserve_temp_slots (target);
7023 DECL_RTL (slot) = target;
7024 if (TREE_ADDRESSABLE (slot))
7025 {
7026 TREE_ADDRESSABLE (slot) = 0;
7027 mark_addressable (slot);
7028 }
7029
7030 /* Since SLOT is not known to the called function
7031 to belong to its stack frame, we must build an explicit
7032 cleanup. This case occurs when we must build up a reference
7033 to pass the reference as an argument. In this case,
7034 it is very likely that such a reference need not be
7035 built here. */
7036
7037 if (TREE_OPERAND (exp, 2) == 0)
7038 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7039 cleanups = TREE_OPERAND (exp, 2);
7040 }
7041 }
7042 else
7043 {
7044 /* This case does occur when expanding a parameter which
7045 needs to be constructed on the stack. The target
7046 is the actual stack address that we want to initialize.
7047 The function we call will perform the cleanup in this case. */
7048
7049 /* If we have already assigned it space, use that space,
7050 not the target that we were passed in, as our target
7051 parameter is only a hint. */
7052 if (DECL_RTL (slot) != 0)
7053 {
7054 target = DECL_RTL (slot);
7055 /* If we have already expanded the slot, don't do
7056 it again. (mrs) */
7057 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7058 return target;
7059 }
7060 else
7061 {
7062 DECL_RTL (slot) = target;
7063 /* If we must have an addressable slot, then make sure that
7064 the RTL that we just stored in slot is OK. */
7065 if (TREE_ADDRESSABLE (slot))
7066 {
7067 TREE_ADDRESSABLE (slot) = 0;
7068 mark_addressable (slot);
7069 }
7070 }
7071 }
7072
7073 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7074 /* Mark it as expanded. */
7075 TREE_OPERAND (exp, 1) = NULL_TREE;
7076
7077 TREE_USED (slot) = 1;
7078 store_expr (exp1, target, 0);
7079
7080 expand_decl_cleanup (NULL_TREE, cleanups);
7081
7082 return target;
7083 }
7084
7085 case INIT_EXPR:
7086 {
7087 tree lhs = TREE_OPERAND (exp, 0);
7088 tree rhs = TREE_OPERAND (exp, 1);
7089 tree noncopied_parts = 0;
7090 tree lhs_type = TREE_TYPE (lhs);
7091
7092 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7093 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7094 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7095 TYPE_NONCOPIED_PARTS (lhs_type));
7096 while (noncopied_parts != 0)
7097 {
7098 expand_assignment (TREE_VALUE (noncopied_parts),
7099 TREE_PURPOSE (noncopied_parts), 0, 0);
7100 noncopied_parts = TREE_CHAIN (noncopied_parts);
7101 }
7102 return temp;
7103 }
7104
7105 case MODIFY_EXPR:
7106 {
7107 /* If lhs is complex, expand calls in rhs before computing it.
7108 That's so we don't compute a pointer and save it over a call.
7109 If lhs is simple, compute it first so we can give it as a
7110 target if the rhs is just a call. This avoids an extra temp and copy,
7111 and prevents a partial-subsumption which makes bad code.
7112 Actually we could treat component_ref's of vars like vars. */
7113
7114 tree lhs = TREE_OPERAND (exp, 0);
7115 tree rhs = TREE_OPERAND (exp, 1);
7116 tree noncopied_parts = 0;
7117 tree lhs_type = TREE_TYPE (lhs);
7118
7119 temp = 0;
7120
7121 if (TREE_CODE (lhs) != VAR_DECL
7122 && TREE_CODE (lhs) != RESULT_DECL
7123 && TREE_CODE (lhs) != PARM_DECL
7124 && ! (TREE_CODE (lhs) == INDIRECT_REF
7125 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7126 preexpand_calls (exp);
7127
7128 /* Check for |= or &= of a bitfield of size 1 into another bitfield
7129 of size 1. In this case, unless we need the result of the
7130 assignment, we can do this more efficiently with a
7131 test followed by an assignment, if necessary.
7132
7133 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7134 things change so we do, this code should be enhanced to
7135 support it. */
7136 if (ignore
7137 && TREE_CODE (lhs) == COMPONENT_REF
7138 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7139 || TREE_CODE (rhs) == BIT_AND_EXPR)
7140 && TREE_OPERAND (rhs, 0) == lhs
7141 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7142 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7143 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7144 {
7145 rtx label = gen_label_rtx ();
7146
7147 do_jump (TREE_OPERAND (rhs, 1),
7148 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7149 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7150 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7151 (TREE_CODE (rhs) == BIT_IOR_EXPR
7152 ? integer_one_node
7153 : integer_zero_node)),
7154 0, 0);
7155 do_pending_stack_adjust ();
7156 emit_label (label);
7157 return const0_rtx;
7158 }
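/* Illustrative sketch, not part of the original source: the
   one-bit-field assignments rewritten above (hypothetical names).  */
#if 0
struct bits { unsigned int a : 1, b : 1; };
void
bit_examples (struct bits *p)
{
  p->a |= p->b;		/* emitted as: if (p->b) p->a = 1;  */
  p->a &= p->b;		/* emitted as: if (! p->b) p->a = 0;  */
}
#endif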
7159
7160 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7161 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7162 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7163 TYPE_NONCOPIED_PARTS (lhs_type));
7164
7165 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7166 while (noncopied_parts != 0)
7167 {
7168 expand_assignment (TREE_PURPOSE (noncopied_parts),
7169 TREE_VALUE (noncopied_parts), 0, 0);
7170 noncopied_parts = TREE_CHAIN (noncopied_parts);
7171 }
7172 return temp;
7173 }
7174
7175 case PREINCREMENT_EXPR:
7176 case PREDECREMENT_EXPR:
7177 return expand_increment (exp, 0, ignore);
7178
7179 case POSTINCREMENT_EXPR:
7180 case POSTDECREMENT_EXPR:
7181 /* Faster to treat as pre-increment if result is not used. */
7182 return expand_increment (exp, ! ignore, ignore);
7183
7184 case ADDR_EXPR:
7185 /* If nonzero, TEMP will be set to the address of something that might
7186 be a MEM corresponding to a stack slot. */
7187 temp = 0;
7188
7189 /* Are we taking the address of a nested function? */
7190 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7191 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7192 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7193 && ! TREE_STATIC (exp))
7194 {
7195 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7196 op0 = force_operand (op0, target);
7197 }
7198 /* If we are taking the address of something erroneous, just
7199 return a zero. */
7200 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7201 return const0_rtx;
7202 else
7203 {
7204 /* We make sure to pass const0_rtx down if we came in with
7205 ignore set, to avoid doing the cleanups twice for something. */
7206 op0 = expand_expr (TREE_OPERAND (exp, 0),
7207 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7208 (modifier == EXPAND_INITIALIZER
7209 ? modifier : EXPAND_CONST_ADDRESS));
7210
7211 /* If we are going to ignore the result, OP0 will have been set
7212 to const0_rtx, so just return it. Don't get confused and
7213 think we are taking the address of the constant. */
7214 if (ignore)
7215 return op0;
7216
7217 op0 = protect_from_queue (op0, 0);
7218
7219 /* We would like the object in memory. If it is a constant,
7220 we can have it be statically allocated into memory. For
7221 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7222 memory and store the value into it. */
7223
7224 if (CONSTANT_P (op0))
7225 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7226 op0);
7227 else if (GET_CODE (op0) == MEM)
7228 {
7229 mark_temp_addr_taken (op0);
7230 temp = XEXP (op0, 0);
7231 }
7232
7233 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7234 || GET_CODE (op0) == CONCAT)
7235 {
7236 /* If this object is in a register, it must not
7237 be BLKmode. */
7238 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7239 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7240
7241 mark_temp_addr_taken (memloc);
7242 emit_move_insn (memloc, op0);
7243 op0 = memloc;
7244 }
7245
7246 if (GET_CODE (op0) != MEM)
7247 abort ();
7248
7249 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7250 {
7251 temp = XEXP (op0, 0);
7252 #ifdef POINTERS_EXTEND_UNSIGNED
7253 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7254 && mode == ptr_mode)
7255 temp = convert_memory_address (ptr_mode, temp);
7256 #endif
7257 return temp;
7258 }
7259
7260 op0 = force_operand (XEXP (op0, 0), target);
7261 }
7262
7263 if (flag_force_addr && GET_CODE (op0) != REG)
7264 op0 = force_reg (Pmode, op0);
7265
7266 if (GET_CODE (op0) == REG
7267 && ! REG_USERVAR_P (op0))
7268 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7269
7270 /* If we might have had a temp slot, add an equivalent address
7271 for it. */
7272 if (temp != 0)
7273 update_temp_slot_address (temp, op0);
7274
7275 #ifdef POINTERS_EXTEND_UNSIGNED
7276 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7277 && mode == ptr_mode)
7278 op0 = convert_memory_address (ptr_mode, op0);
7279 #endif
7280
7281 return op0;
7282
7283 case ENTRY_VALUE_EXPR:
7284 abort ();
7285
7286 /* COMPLEX type for Extended Pascal & Fortran */
7287 case COMPLEX_EXPR:
7288 {
7289 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7290 rtx insns;
7291
7292 /* Get the rtx for each operand. */
7293 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7294 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7295
7296 if (! target)
7297 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7298
7299 start_sequence ();
7300
7301 /* Move the real (op0) and imaginary (op1) parts to their location. */
7302 emit_move_insn (gen_realpart (mode, target), op0);
7303 emit_move_insn (gen_imagpart (mode, target), op1);
7304
7305 insns = get_insns ();
7306 end_sequence ();
7307
7308 /* Complex construction should appear as a single unit. */
7309 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7310 each with a separate pseudo as destination.
7311 It's not correct for flow to treat them as a unit. */
7312 if (GET_CODE (target) != CONCAT)
7313 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7314 else
7315 emit_insns (insns);
7316
7317 return target;
7318 }
7319
7320 case REALPART_EXPR:
7321 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7322 return gen_realpart (mode, op0);
7323
7324 case IMAGPART_EXPR:
7325 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7326 return gen_imagpart (mode, op0);
7327
7328 case CONJ_EXPR:
7329 {
7330 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7331 rtx imag_t;
7332 rtx insns;
7333
7334 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7335
7336 if (! target)
7337 target = gen_reg_rtx (mode);
7338
7339 start_sequence ();
7340
7341 /* Store the realpart and the negated imagpart to target. */
7342 emit_move_insn (gen_realpart (partmode, target),
7343 gen_realpart (partmode, op0));
7344
7345 imag_t = gen_imagpart (partmode, target);
7346 temp = expand_unop (partmode, neg_optab,
7347 gen_imagpart (partmode, op0), imag_t, 0);
7348 if (temp != imag_t)
7349 emit_move_insn (imag_t, temp);
7350
7351 insns = get_insns ();
7352 end_sequence ();
7353
7354 /* Conjugate should appear as a single unit.
7355 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7356 each with a separate pseudo as destination.
7357 It's not correct for flow to treat them as a unit. */
7358 if (GET_CODE (target) != CONCAT)
7359 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7360 else
7361 emit_insns (insns);
7362
7363 return target;
7364 }
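/* Illustrative sketch, not part of the original source: GNU C
   complex-extension code of the sort that reaches the cases above
   (hypothetical function; 1.0i uses the GNU imaginary suffix).  */
#if 0
__complex__ double
complex_examples (double re, double im)
{
  __complex__ double z = re + im * 1.0i;	/* COMPLEX_EXPR */
  double r = __real__ z;			/* REALPART_EXPR */
  double i = __imag__ z;			/* IMAGPART_EXPR */
  return ~z + (r - i);				/* ~z is CONJ_EXPR */
}
#endif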
7365
7366 case TRY_CATCH_EXPR:
7367 {
7368 tree handler = TREE_OPERAND (exp, 1);
7369
7370 expand_eh_region_start ();
7371
7372 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7373
7374 expand_eh_region_end (handler);
7375
7376 return op0;
7377 }
7378
7379 case POPDCC_EXPR:
7380 {
7381 rtx dcc = get_dynamic_cleanup_chain ();
7382 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
7383 return const0_rtx;
7384 }
7385
7386 case POPDHC_EXPR:
7387 {
7388 rtx dhc = get_dynamic_handler_chain ();
7389 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
7390 return const0_rtx;
7391 }
7392
7393 case ERROR_MARK:
7394 op0 = CONST0_RTX (tmode);
7395 if (op0 != 0)
7396 return op0;
7397 return const0_rtx;
7398
7399 default:
7400 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7401 }
7402
7403 /* Here to do an ordinary binary operator, generating an instruction
7404 from the optab already placed in `this_optab'. */
7405 binop:
7406 preexpand_calls (exp);
7407 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7408 subtarget = 0;
7409 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7410 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7411 binop2:
7412 temp = expand_binop (mode, this_optab, op0, op1, target,
7413 unsignedp, OPTAB_LIB_WIDEN);
7414 if (temp == 0)
7415 abort ();
7416 return temp;
7417 }
7418
7419
7420 \f
7421 /* Return the alignment in bits of EXP, a pointer valued expression.
7422 But don't return more than MAX_ALIGN no matter what.
7423 The alignment returned is, by default, the alignment of the thing that
7424 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7425
7426 Otherwise, look at the expression to see if we can do better, i.e., if the
7427 expression is actually pointing at an object whose alignment is tighter. */
7428
7429 static int
7430 get_pointer_alignment (exp, max_align)
7431 tree exp;
7432 unsigned max_align;
7433 {
7434 unsigned align, inner;
7435
7436 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7437 return 0;
7438
7439 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7440 align = MIN (align, max_align);
7441
7442 while (1)
7443 {
7444 switch (TREE_CODE (exp))
7445 {
7446 case NOP_EXPR:
7447 case CONVERT_EXPR:
7448 case NON_LVALUE_EXPR:
7449 exp = TREE_OPERAND (exp, 0);
7450 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7451 return align;
7452 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7453 align = MIN (inner, max_align);
7454 break;
7455
7456 case PLUS_EXPR:
7457 /* If sum of pointer + int, restrict our maximum alignment to that
7458 imposed by the integer. If not, we can't do any better than
7459 ALIGN. */
7460 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7461 return align;
7462
7463 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7464 & (max_align - 1))
7465 != 0)
7466 max_align >>= 1;
7467
7468 exp = TREE_OPERAND (exp, 0);
7469 break;
7470
7471 case ADDR_EXPR:
7472 /* See what we are pointing at and look at its alignment. */
7473 exp = TREE_OPERAND (exp, 0);
7474 if (TREE_CODE (exp) == FUNCTION_DECL)
7475 align = FUNCTION_BOUNDARY;
7476 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7477 align = DECL_ALIGN (exp);
7478 #ifdef CONSTANT_ALIGNMENT
7479 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7480 align = CONSTANT_ALIGNMENT (exp, align);
7481 #endif
7482 return MIN (align, max_align);
7483
7484 default:
7485 return align;
7486 }
7487 }
7488 }
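/* Illustrative examples, not part of the original source, assuming
   MAX_ALIGN == 64, BITS_PER_UNIT == 8, and a double D aligned to 64:

     &d			-> 64 (DECL_ALIGN of D)
     (char *) &d + 1	-> 8: a one-byte offset caps the alignment
     (char *) &d + 4	-> 32: the offset is a multiple of 32 bits only  */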
7489 \f
7490 /* Return the tree node and offset if a given argument corresponds to
7491 a string constant. */
7492
7493 static tree
7494 string_constant (arg, ptr_offset)
7495 tree arg;
7496 tree *ptr_offset;
7497 {
7498 STRIP_NOPS (arg);
7499
7500 if (TREE_CODE (arg) == ADDR_EXPR
7501 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7502 {
7503 *ptr_offset = integer_zero_node;
7504 return TREE_OPERAND (arg, 0);
7505 }
7506 else if (TREE_CODE (arg) == PLUS_EXPR)
7507 {
7508 tree arg0 = TREE_OPERAND (arg, 0);
7509 tree arg1 = TREE_OPERAND (arg, 1);
7510
7511 STRIP_NOPS (arg0);
7512 STRIP_NOPS (arg1);
7513
7514 if (TREE_CODE (arg0) == ADDR_EXPR
7515 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7516 {
7517 *ptr_offset = arg1;
7518 return TREE_OPERAND (arg0, 0);
7519 }
7520 else if (TREE_CODE (arg1) == ADDR_EXPR
7521 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7522 {
7523 *ptr_offset = arg0;
7524 return TREE_OPERAND (arg1, 0);
7525 }
7526 }
7527
7528 return 0;
7529 }
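/* Illustrative examples, not part of the original source (I and P
   are hypothetical variables):

     "abc"	-> the STRING_CST, *ptr_offset = 0
     "abc" + i	-> the STRING_CST, *ptr_offset = I
     i + "abc"	-> likewise; either PLUS_EXPR operand may be the address
     p		-> 0: not recognizably a string constant  */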
7530
7531 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7532 way, because the string could contain a zero byte in the middle.
7533 TREE_STRING_LENGTH is the size of the character array, not the string.
7534
7535 Unfortunately, string_constant can't access the values of const char
7536 arrays with initializers, so we can't do so here either. */
7537
7538 static tree
7539 c_strlen (src)
7540 tree src;
7541 {
7542 tree offset_node;
7543 int offset, max;
7544 char *ptr;
7545
7546 src = string_constant (src, &offset_node);
7547 if (src == 0)
7548 return 0;
7549 max = TREE_STRING_LENGTH (src);
7550 ptr = TREE_STRING_POINTER (src);
7551 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7552 {
7553 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7554 compute the offset to the following null if we don't know where to
7555 start searching for it. */
7556 int i;
7557 for (i = 0; i < max; i++)
7558 if (ptr[i] == 0)
7559 return 0;
7560 /* We don't know the starting offset, but we do know that the string
7561 has no internal zero bytes. We can assume that the offset falls
7562 within the bounds of the string; otherwise, the programmer deserves
7563 what he gets. Subtract the offset from the length of the string,
7564 and return that. */
7565 /* This would perhaps not be valid if we were dealing with named
7566 arrays in addition to literal string constants. */
7567 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7568 }
7569
7570 /* We have a known offset into the string. Start searching there for
7571 a null character. */
7572 if (offset_node == 0)
7573 offset = 0;
7574 else
7575 {
7576 /* Did we get a long long offset? If so, punt. */
7577 if (TREE_INT_CST_HIGH (offset_node) != 0)
7578 return 0;
7579 offset = TREE_INT_CST_LOW (offset_node);
7580 }
7581 /* If the offset is known to be out of bounds, warn, and call strlen at
7582 runtime. */
7583 if (offset < 0 || offset > max)
7584 {
7585 warning ("offset outside bounds of constant string");
7586 return 0;
7587 }
7588 /* Use strlen to search for the first zero byte. Since any strings
7589 constructed with build_string will have nulls appended, we win even
7590 if we get handed something like (char[4])"abcd".
7591
7592 Since OFFSET is our starting index into the string, no further
7593 calculation is needed. */
7594 return size_int (strlen (ptr + offset));
7595 }
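/* Illustrative sketch, not part of the original source, guarded out
   like the other dead code in this file.  I is a hypothetical
   non-constant.  */
#if 0
#include <string.h>
volatile int i;
int
strlen_examples (void)
{
  int a = strlen ("hello");	/* constant offset 0: folded to 5 */
  int b = strlen ("hello" + 2);	/* constant offset 2: folded to 3 */
  int c = strlen ("hello" + i);	/* non-constant offset: c_strlen
				   returns 0, the library is called */
  return a + b + c;
}
#endif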
7596
7597 rtx
7598 expand_builtin_return_addr (fndecl_code, count, tem)
7599 enum built_in_function fndecl_code;
7600 int count;
7601 rtx tem;
7602 {
7603 int i;
7604
7605 /* Some machines need special handling before we can access
7606 arbitrary frames. For example, on the sparc, we must first flush
7607 all register windows to the stack. */
7608 #ifdef SETUP_FRAME_ADDRESSES
7609 if (count > 0)
7610 SETUP_FRAME_ADDRESSES ();
7611 #endif
7612
7613 /* On the sparc, the return address is not in the frame, it is in a
7614 register. There is no way to access it off of the current frame
7615 pointer, but it can be accessed off the previous frame pointer by
7616 reading the value from the register window save area. */
7617 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7618 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7619 count--;
7620 #endif
7621
7622 /* Scan back COUNT frames to the specified frame. */
7623 for (i = 0; i < count; i++)
7624 {
7625 /* Assume the dynamic chain pointer is in the word that the
7626 frame address points to, unless otherwise specified. */
7627 #ifdef DYNAMIC_CHAIN_ADDRESS
7628 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7629 #endif
7630 tem = memory_address (Pmode, tem);
7631 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
7632 }
7633
7634 /* For __builtin_frame_address, return what we've got. */
7635 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7636 return tem;
7637
7638 /* For __builtin_return_address, get the return address from that
7639 frame. */
7640 #ifdef RETURN_ADDR_RTX
7641 tem = RETURN_ADDR_RTX (count, tem);
7642 #else
7643 tem = memory_address (Pmode,
7644 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7645 tem = gen_rtx_MEM (Pmode, tem);
7646 #endif
7647 return tem;
7648 }
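/* Illustrative sketch, not part of the original source: the user
   view of the builtins expanded through this function.  */
#if 0
void *
addr_examples (void)
{
  void *fp = __builtin_frame_address (0);	/* this frame */
  void *ra = __builtin_return_address (0);	/* our return address;
						   the count must be a
						   nonnegative constant */
  return fp ? ra : fp;
}
#endif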
7649
7650 /* __builtin_setjmp is passed a pointer to an array of five words (not
7651 all will be used on all machines). It operates similarly to the C
7652 library function of the same name, but is more efficient. Much of
7653 the code below (and for longjmp) is copied from the handling of
7654 non-local gotos.
7655
7656 NOTE: This is intended for use by GNAT and the exception handling
7657 scheme in the compiler and will only work in the method used by
7658 them. */
7659
7660 rtx
7661 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
7662 rtx buf_addr;
7663 rtx target;
7664 rtx first_label, next_label;
7665 {
7666 rtx lab1 = gen_label_rtx ();
7667 enum machine_mode sa_mode = Pmode, value_mode;
7668 rtx stack_save;
7669
7670 value_mode = TYPE_MODE (integer_type_node);
7671
7672 #ifdef POINTERS_EXTEND_UNSIGNED
7673 buf_addr = convert_memory_address (Pmode, buf_addr);
7674 #endif
7675
7676 buf_addr = force_reg (Pmode, buf_addr);
7677
7678 if (target == 0 || GET_CODE (target) != REG
7679 || REGNO (target) < FIRST_PSEUDO_REGISTER)
7680 target = gen_reg_rtx (value_mode);
7681
7682 emit_queue ();
7683
7684 /* We store the frame pointer and the address of lab1 in the buffer
7685 and use the rest of it for the stack save area, which is
7686 machine-dependent. */
7687 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
7688 virtual_stack_vars_rtx);
7689 emit_move_insn (validize_mem
7690 (gen_rtx_MEM (Pmode,
7691 plus_constant (buf_addr,
7692 GET_MODE_SIZE (Pmode)))),
7693 gen_rtx_LABEL_REF (Pmode, lab1));
7694
7695 #ifdef HAVE_save_stack_nonlocal
7696 if (HAVE_save_stack_nonlocal)
7697 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
7698 #endif
7699
7700 stack_save = gen_rtx_MEM (sa_mode,
7701 plus_constant (buf_addr,
7702 2 * GET_MODE_SIZE (Pmode)));
7703 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
7704
7705 /* If there is further processing to do, do it. */
7706 #ifdef HAVE_builtin_setjmp_setup
7707 if (HAVE_builtin_setjmp_setup)
7708 emit_insn (gen_builtin_setjmp_setup (buf_addr));
7709 #endif
7710
7711 /* Set TARGET to zero and branch to the first-time-through label. */
7712 emit_move_insn (target, const0_rtx);
7713 emit_jump_insn (gen_jump (first_label));
7714 emit_barrier ();
7715 emit_label (lab1);
7716
7717 /* Tell flow about the strange goings on. */
7718 current_function_has_nonlocal_label = 1;
7719
7720 /* We clobber the FP when we get here, so we have to make sure it's
7721 marked as used by this function. */
7722 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7723
7724 /* Mark the static chain as clobbered here so life information
7725 doesn't get messed up for it. */
7726 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
7727
7728 /* Now put in the code to restore the frame pointer, and argument
7729 pointer, if needed. The code below is from expand_end_bindings
7730 in stmt.c; see detailed documentation there. */
7731 #ifdef HAVE_nonlocal_goto
7732 if (! HAVE_nonlocal_goto)
7733 #endif
7734 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
7735
7736 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7737 if (fixed_regs[ARG_POINTER_REGNUM])
7738 {
7739 #ifdef ELIMINABLE_REGS
7740 int i;
7741 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
7742
7743 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
7744 if (elim_regs[i].from == ARG_POINTER_REGNUM
7745 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
7746 break;
7747
7748 if (i == sizeof elim_regs / sizeof elim_regs [0])
7749 #endif
7750 {
7751 /* Now restore our arg pointer from the address at which it
7752 was saved in our stack frame.
7753 If there hasn't been space allocated for it yet, make
7754 some now. */
7755 if (arg_pointer_save_area == 0)
7756 arg_pointer_save_area
7757 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
7758 emit_move_insn (virtual_incoming_args_rtx,
7759 copy_to_reg (arg_pointer_save_area));
7760 }
7761 }
7762 #endif
7763
7764 #ifdef HAVE_builtin_setjmp_receiver
7765 if (HAVE_builtin_setjmp_receiver)
7766 emit_insn (gen_builtin_setjmp_receiver (lab1));
7767 else
7768 #endif
7769 #ifdef HAVE_nonlocal_goto_receiver
7770 if (HAVE_nonlocal_goto_receiver)
7771 emit_insn (gen_nonlocal_goto_receiver ());
7772 else
7773 #endif
7774 {
7775 ; /* Nothing */
7776 }
7777
7778 /* Set TARGET, and branch to the next-time-through label. */
7779 emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
7780 emit_jump_insn (gen_jump (next_label));
7781 emit_barrier ();
7782
7783 return target;
7784 }
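/* For reference (summarizing the stores above; not in the original
   source), the five-word buffer is laid out as:

     word 0		frame pointer (virtual_stack_vars_rtx)
     word 1		resume address (LAB1)
     words 2 and up	machine-dependent stack save area (SA_MODE)  */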
7785
7786 void
7787 expand_builtin_longjmp (buf_addr, value)
7788 rtx buf_addr, value;
7789 {
7790 rtx fp, lab, stack;
7791 enum machine_mode sa_mode;
7792
7793 #ifdef POINTERS_EXTEND_UNSIGNED
7794 buf_addr = convert_memory_address (Pmode, buf_addr);
7795 #endif
7796 buf_addr = force_reg (Pmode, buf_addr);
7797
7798 /* The value sent by longjmp is not allowed to be zero. Force it
7799 to one if so. */
7800 if (GET_CODE (value) == CONST_INT)
7801 {
7802 if (INTVAL (value) == 0)
7803 value = const1_rtx;
7804 }
7805 else
7806 {
7807 lab = gen_label_rtx ();
7808
7809 emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
7810 emit_jump_insn (gen_bne (lab));
7811 emit_move_insn (value, const1_rtx);
7812 emit_label (lab);
7813 }
7814
7815 /* Make sure the value is in the right mode to be copied to the chain. */
7816 if (GET_MODE (value) != VOIDmode)
7817 value = gen_lowpart (GET_MODE (static_chain_rtx), value);
7818
7819 #ifdef HAVE_builtin_longjmp
7820 if (HAVE_builtin_longjmp)
7821 {
7822 /* Copy the "return value" to the static chain reg. */
7823 emit_move_insn (static_chain_rtx, value);
7824 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7825 emit_insn (gen_builtin_longjmp (buf_addr));
7826 }
7827 else
7828 #endif
7829 {
7830 fp = gen_rtx_MEM (Pmode, buf_addr);
7831 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
7832 GET_MODE_SIZE (Pmode)));
7833
7834 #ifdef HAVE_save_stack_nonlocal
7835 sa_mode = (HAVE_save_stack_nonlocal
7836 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
7837 : Pmode);
7838 #else
7839 sa_mode = Pmode;
7840 #endif
7841
7842 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
7843 2 * GET_MODE_SIZE (Pmode)));
7844
7845 /* Pick up FP, label, and SP from the block and jump. This code is
7846 from expand_goto in stmt.c; see there for detailed comments. */
7847 #ifdef HAVE_nonlocal_goto
7848 if (HAVE_nonlocal_goto)
7849 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
7850 else
7851 #endif
7852 {
7853 lab = copy_to_reg (lab);
7854
7855 /* Copy the "return value" to the static chain reg. */
7856 emit_move_insn (static_chain_rtx, value);
7857
7858 emit_move_insn (hard_frame_pointer_rtx, fp);
7859 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
7860
7861 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7862 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
7863 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7864 emit_indirect_jump (lab);
7865 }
7866 }
7867 }
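/* Illustrative sketch, not part of the original source: how the
   builtin pair is used (per the NOTE above expand_builtin_setjmp,
   only by GNAT and the exception machinery; hypothetical names).  */
#if 0
void *jmpbuf[5];		/* five words, as documented above */

void
do_throw (void)
{
  __builtin_longjmp (jmpbuf, 1);	/* a zero value would be forced to 1 */
}

int
do_catch (void)
{
  if (__builtin_setjmp (jmpbuf))
    return 1;				/* reached via __builtin_longjmp */
  do_throw ();
  return 0;
}
#endif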
7868
7869 \f
7870 /* Expand an expression EXP that calls a built-in function,
7871 with result going to TARGET if that's convenient
7872 (and in mode MODE if that's convenient).
7873 SUBTARGET may be used as the target for computing one of EXP's operands.
7874 IGNORE is nonzero if the value is to be ignored. */
7875
7876 #define CALLED_AS_BUILT_IN(NODE) \
7877 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7878
7879 static rtx
7880 expand_builtin (exp, target, subtarget, mode, ignore)
7881 tree exp;
7882 rtx target;
7883 rtx subtarget;
7884 enum machine_mode mode;
7885 int ignore;
7886 {
7887 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7888 tree arglist = TREE_OPERAND (exp, 1);
7889 rtx op0;
7890 rtx lab1, insns;
7891 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7892 optab builtin_optab;
7893
7894 switch (DECL_FUNCTION_CODE (fndecl))
7895 {
7896 case BUILT_IN_ABS:
7897 case BUILT_IN_LABS:
7898 case BUILT_IN_FABS:
7899 /* build_function_call changes these into ABS_EXPR. */
7900 abort ();
7901
7902 case BUILT_IN_SIN:
7903 case BUILT_IN_COS:
7904 /* Treat these like sqrt, but only if the user asks for them. */
7905 if (! flag_fast_math)
7906 break;
7907 case BUILT_IN_FSQRT:
7908 /* If not optimizing, call the library function. */
7909 if (! optimize)
7910 break;
7911
7912 if (arglist == 0
7913 /* Arg could be wrong type if user redeclared this fcn wrong. */
7914 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7915 break;
7916
7917 /* Stabilize and compute the argument. */
7918 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7919 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7920 {
7921 exp = copy_node (exp);
7922 arglist = copy_node (arglist);
7923 TREE_OPERAND (exp, 1) = arglist;
7924 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7925 }
7926 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7927
7928 /* Make a suitable register to place result in. */
7929 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7930
7931 emit_queue ();
7932 start_sequence ();
7933
7934 switch (DECL_FUNCTION_CODE (fndecl))
7935 {
7936 case BUILT_IN_SIN:
7937 builtin_optab = sin_optab; break;
7938 case BUILT_IN_COS:
7939 builtin_optab = cos_optab; break;
7940 case BUILT_IN_FSQRT:
7941 builtin_optab = sqrt_optab; break;
7942 default:
7943 abort ();
7944 }
7945
7946 /* Compute into TARGET.
7947 Set TARGET to wherever the result comes back. */
7948 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7949 builtin_optab, op0, target, 0);
7950
7951 /* If we were unable to expand via the builtin, stop the
7952 sequence (without outputting the insns) and break, causing
7953 a call to the library function. */
7954 if (target == 0)
7955 {
7956 end_sequence ();
7957 break;
7958 }
7959
7960 /* Check the results by default. But if flag_fast_math is turned on,
7961 then assume sqrt will always be called with valid arguments. */
7962
7963 if (! flag_fast_math)
7964 {
7965 /* Don't define the builtin FP instructions
7966 if your machine is not IEEE. */
7967 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7968 abort ();
7969
7970 lab1 = gen_label_rtx ();
7971
7972 /* Test the result; if it is NaN, set errno=EDOM because
7973 the argument was not in the domain. */
7974 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7975 emit_jump_insn (gen_beq (lab1));
7976
7977 #ifdef TARGET_EDOM
7978 {
7979 #ifdef GEN_ERRNO_RTX
7980 rtx errno_rtx = GEN_ERRNO_RTX;
7981 #else
7982 rtx errno_rtx
7983 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
7984 #endif
7985
7986 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7987 }
7988 #else
7989 /* We can't set errno=EDOM directly; let the library call do it.
7990 Pop the arguments right away in case the call gets deleted. */
7991 NO_DEFER_POP;
7992 expand_call (exp, target, 0);
7993 OK_DEFER_POP;
7994 #endif
7995
7996 emit_label (lab1);
7997 }
7998
7999 /* Output the entire sequence. */
8000 insns = get_insns ();
8001 end_sequence ();
8002 emit_insns (insns);
8003
8004 return target;
8005
8006 case BUILT_IN_FMOD:
8007 break;
8008
8009 /* __builtin_apply_args returns block of memory allocated on
8010 the stack into which is stored the arg pointer, structure
8011 value address, static chain, and all the registers that might
8012 possibly be used in performing a function call. The code is
8013 moved to the start of the function so the incoming values are
8014 saved. */
8015 case BUILT_IN_APPLY_ARGS:
8016 /* Don't do __builtin_apply_args more than once in a function.
8017 Save the result of the first call and reuse it. */
8018 if (apply_args_value != 0)
8019 return apply_args_value;
8020 {
8021 /* When this function is called, it means that registers must be
8022 saved on entry to this function. So we migrate the
8023 call to the first insn of this function. */
8024 rtx temp;
8025 rtx seq;
8026
8027 start_sequence ();
8028 temp = expand_builtin_apply_args ();
8029 seq = get_insns ();
8030 end_sequence ();
8031
8032 apply_args_value = temp;
8033
8034 /* Put the sequence after the NOTE that starts the function.
8035 If this is inside a SEQUENCE, make the outer-level insn
8036 chain current, so the code is placed at the start of the
8037 function. */
8038 push_topmost_sequence ();
8039 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8040 pop_topmost_sequence ();
8041 return temp;
8042 }
8043
8044 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8045 FUNCTION with a copy of the parameters described by
8046 ARGUMENTS, and ARGSIZE. It returns a block of memory
8047 allocated on the stack into which is stored all the registers
8048 that might possibly be used for returning the result of a
8049 function. ARGUMENTS is the value returned by
8050 __builtin_apply_args. ARGSIZE is the number of bytes of
8051 arguments that must be copied. ??? How should this value be
8052 computed? We'll also need a safe worst case value for varargs
8053 functions. */
8054 case BUILT_IN_APPLY:
8055 if (arglist == 0
8056 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8057 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8058 || TREE_CHAIN (arglist) == 0
8059 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8060 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8061 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8062 return const0_rtx;
8063 else
8064 {
8065 int i;
8066 tree t;
8067 rtx ops[3];
8068
8069 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8070 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8071
8072 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8073 }
8074
8075 /* __builtin_return (RESULT) causes the function to return the
8076 value described by RESULT. RESULT is address of the block of
8077 memory returned by __builtin_apply. */
8078 case BUILT_IN_RETURN:
8079 if (arglist
8080 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8081 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8082 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8083 NULL_RTX, VOIDmode, 0));
8084 return const0_rtx;
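/* Illustrative sketch, not part of the original source: forwarding a
   call with the three builtins handled above.  The ARGSIZE of 64 is a
   guessed worst case; as the ??? comment notes, there is no general
   way to compute it.  */
#if 0
double real_handler ();

double
forwarder ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) real_handler, args, 64);
  __builtin_return (result);
}
#endif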
8085
8086 case BUILT_IN_SAVEREGS:
8087 /* Don't do __builtin_saveregs more than once in a function.
8088 Save the result of the first call and reuse it. */
8089 if (saveregs_value != 0)
8090 return saveregs_value;
8091 {
8092 /* When this function is called, it means that registers must be
8093 saved on entry to this function. So we migrate the
8094 call to the first insn of this function. */
8095 rtx temp;
8096 rtx seq;
8097
8098 /* Now really call the function. `expand_call' does not call
8099 expand_builtin, so there is no danger of infinite recursion here. */
8100 start_sequence ();
8101
8102 #ifdef EXPAND_BUILTIN_SAVEREGS
8103 /* Do whatever the machine needs done in this case. */
8104 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8105 #else
8106 /* The register where the function returns its value
8107 is likely to have something else in it, such as an argument.
8108 So preserve that register around the call. */
8109
8110 if (value_mode != VOIDmode)
8111 {
8112 rtx valreg = hard_libcall_value (value_mode);
8113 rtx saved_valreg = gen_reg_rtx (value_mode);
8114
8115 emit_move_insn (saved_valreg, valreg);
8116 temp = expand_call (exp, target, ignore);
8117 emit_move_insn (valreg, saved_valreg);
8118 }
8119 else
8120 /* Generate the call, putting the value in a pseudo. */
8121 temp = expand_call (exp, target, ignore);
8122 #endif
8123
8124 seq = get_insns ();
8125 end_sequence ();
8126
8127 saveregs_value = temp;
8128
8129 /* Put the sequence after the NOTE that starts the function.
8130 If this is inside a SEQUENCE, make the outer-level insn
8131 chain current, so the code is placed at the start of the
8132 function. */
8133 push_topmost_sequence ();
8134 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8135 pop_topmost_sequence ();
8136 return temp;
8137 }
8138
8139 /* __builtin_args_info (N) returns word N of the arg space info
8140 for the current function. The number and meanings of words
8141 are controlled by the definition of CUMULATIVE_ARGS. */
8142 case BUILT_IN_ARGS_INFO:
8143 {
8144 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8145 int *word_ptr = (int *) &current_function_args_info;
8146 #if 0
8147 /* These are used by the code below that is if 0'ed away */
8148 int i;
8149 tree type, elts, result;
8150 #endif
8151
8152 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8153 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8154 __FILE__, __LINE__);
8155
8156 if (arglist != 0)
8157 {
8158 tree arg = TREE_VALUE (arglist);
8159 if (TREE_CODE (arg) != INTEGER_CST)
8160 error ("argument of `__builtin_args_info' must be constant");
8161 else
8162 {
8163 int wordnum = TREE_INT_CST_LOW (arg);
8164
8165 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8166 error ("argument of `__builtin_args_info' out of range");
8167 else
8168 return GEN_INT (word_ptr[wordnum]);
8169 }
8170 }
8171 else
8172 error ("missing argument in `__builtin_args_info'");
8173
8174 return const0_rtx;
8175
8176 #if 0
8177 for (i = 0; i < nwords; i++)
8178 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8179
8180 type = build_array_type (integer_type_node,
8181 build_index_type (build_int_2 (nwords, 0)));
8182 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8183 TREE_CONSTANT (result) = 1;
8184 TREE_STATIC (result) = 1;
8185 result = build (INDIRECT_REF, build_pointer_type (type), result);
8186 TREE_CONSTANT (result) = 1;
8187 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8188 #endif
8189 }
8190
8191 /* Return the address of the first anonymous stack arg. */
8192 case BUILT_IN_NEXT_ARG:
8193 {
8194 tree fntype = TREE_TYPE (current_function_decl);
8195
8196 if ((TYPE_ARG_TYPES (fntype) == 0
8197 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8198 == void_type_node))
8199 && ! current_function_varargs)
8200 {
8201 error ("`va_start' used in function with fixed args");
8202 return const0_rtx;
8203 }
8204
8205 if (arglist)
8206 {
8207 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8208 tree arg = TREE_VALUE (arglist);
8209
8210 /* Strip off all nops for the sake of the comparison. This
8211 is not quite the same as STRIP_NOPS. It does more.
8212 We must also strip off INDIRECT_REF for C++ reference
8213 parameters. */
8214 while (TREE_CODE (arg) == NOP_EXPR
8215 || TREE_CODE (arg) == CONVERT_EXPR
8216 || TREE_CODE (arg) == NON_LVALUE_EXPR
8217 || TREE_CODE (arg) == INDIRECT_REF)
8218 arg = TREE_OPERAND (arg, 0);
8219 if (arg != last_parm)
8220 warning ("second parameter of `va_start' not last named argument");
8221 }
8222 else if (! current_function_varargs)
8223 /* Evidently an out of date version of <stdarg.h>; can't validate
8224 va_start's second argument, but can still work as intended. */
8225 warning ("`__builtin_next_arg' called without an argument");
8226 }
8227
8228 return expand_binop (Pmode, add_optab,
8229 current_function_internal_arg_pointer,
8230 current_function_arg_offset_rtx,
8231 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8232
8233 case BUILT_IN_CLASSIFY_TYPE:
8234 if (arglist != 0)
8235 {
8236 tree type = TREE_TYPE (TREE_VALUE (arglist));
8237 enum tree_code code = TREE_CODE (type);
8238 if (code == VOID_TYPE)
8239 return GEN_INT (void_type_class);
8240 if (code == INTEGER_TYPE)
8241 return GEN_INT (integer_type_class);
8242 if (code == CHAR_TYPE)
8243 return GEN_INT (char_type_class);
8244 if (code == ENUMERAL_TYPE)
8245 return GEN_INT (enumeral_type_class);
8246 if (code == BOOLEAN_TYPE)
8247 return GEN_INT (boolean_type_class);
8248 if (code == POINTER_TYPE)
8249 return GEN_INT (pointer_type_class);
8250 if (code == REFERENCE_TYPE)
8251 return GEN_INT (reference_type_class);
8252 if (code == OFFSET_TYPE)
8253 return GEN_INT (offset_type_class);
8254 if (code == REAL_TYPE)
8255 return GEN_INT (real_type_class);
8256 if (code == COMPLEX_TYPE)
8257 return GEN_INT (complex_type_class);
8258 if (code == FUNCTION_TYPE)
8259 return GEN_INT (function_type_class);
8260 if (code == METHOD_TYPE)
8261 return GEN_INT (method_type_class);
8262 if (code == RECORD_TYPE)
8263 return GEN_INT (record_type_class);
8264 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8265 return GEN_INT (union_type_class);
8266 if (code == ARRAY_TYPE)
8267 {
8268 if (TYPE_STRING_FLAG (type))
8269 return GEN_INT (string_type_class);
8270 else
8271 return GEN_INT (array_type_class);
8272 }
8273 if (code == SET_TYPE)
8274 return GEN_INT (set_type_class);
8275 if (code == FILE_TYPE)
8276 return GEN_INT (file_type_class);
8277 if (code == LANG_TYPE)
8278 return GEN_INT (lang_type_class);
8279 }
8280 return GEN_INT (no_type_class);
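/* Illustrative examples, not part of the original source:  */
#if 0
int t1 = __builtin_classify_type (0);		/* integer_type_class */
int t2 = __builtin_classify_type (0.0);		/* real_type_class */
int t3 = __builtin_classify_type ("s");		/* pointer_type_class,
						   after array decay */
#endif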
8281
8282 case BUILT_IN_CONSTANT_P:
8283 if (arglist == 0)
8284 return const0_rtx;
8285 else
8286 {
8287 tree arg = TREE_VALUE (arglist);
8288
8289 STRIP_NOPS (arg);
8290 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8291 || (TREE_CODE (arg) == ADDR_EXPR
8292 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8293 ? const1_rtx : const0_rtx);
8294 }
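/* Illustrative examples, not part of the original source (X is a
   hypothetical variable):  */
#if 0
int c1 = __builtin_constant_p (42);	/* 1: a 'c' class node */
int c2 = __builtin_constant_p ("ab");	/* 1: address of a STRING_CST */
int c3 = __builtin_constant_p (x);	/* 0: neither form above */
#endif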
8295
8296 case BUILT_IN_FRAME_ADDRESS:
8297 /* The argument must be a nonnegative integer constant.
8298 It counts the number of frames to scan up the stack.
8299 The value is the address of that frame. */
8300 case BUILT_IN_RETURN_ADDRESS:
8301 /* The argument must be a nonnegative integer constant.
8302 It counts the number of frames to scan up the stack.
8303 The value is the return address saved in that frame. */
8304 if (arglist == 0)
8305 /* Warning about missing arg was already issued. */
8306 return const0_rtx;
8307 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8308 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8309 {
8310 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8311 error ("invalid arg to `__builtin_frame_address'");
8312 else
8313 error ("invalid arg to `__builtin_return_address'");
8314 return const0_rtx;
8315 }
8316 else
8317 {
8318 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8319 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8320 hard_frame_pointer_rtx);
8321
8322 /* Some ports cannot access arbitrary stack frames. */
8323 if (tem == NULL)
8324 {
8325 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8326 warning ("unsupported arg to `__builtin_frame_address'");
8327 else
8328 warning ("unsupported arg to `__builtin_return_address'");
8329 return const0_rtx;
8330 }
8331
8332 /* For __builtin_frame_address, return what we've got. */
8333 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8334 return tem;
8335
8336 if (GET_CODE (tem) != REG)
8337 tem = copy_to_reg (tem);
8338 return tem;
8339 }
8340
8341 /* Returns the address of the area where the structure is returned.
8342 0 otherwise. */
8343 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8344 if (arglist != 0
8345 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8346 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8347 return const0_rtx;
8348 else
8349 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8350
8351 case BUILT_IN_ALLOCA:
8352 if (arglist == 0
8353 /* Arg could be non-integer if user redeclared this fcn wrong. */
8354 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8355 break;
8356
8357 /* Compute the argument. */
8358 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8359
8360 /* Allocate the desired space. */
8361 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8362
8363 case BUILT_IN_FFS:
8364 /* If not optimizing, call the library function. */
8365 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8366 break;
8367
8368 if (arglist == 0
8369 /* Arg could be non-integer if user redeclared this fcn wrong. */
8370 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8371 break;
8372
8373 /* Compute the argument. */
8374 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8375 /* Compute ffs, into TARGET if possible.
8376 Set TARGET to wherever the result comes back. */
8377 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8378 ffs_optab, op0, target, 1);
8379 if (target == 0)
8380 abort ();
8381 return target;
8382
8383 case BUILT_IN_STRLEN:
8384 /* If not optimizing, call the library function. */
8385 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8386 break;
8387
8388 if (arglist == 0
8389 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8390 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8391 break;
8392 else
8393 {
8394 tree src = TREE_VALUE (arglist);
8395 tree len = c_strlen (src);
8396
8397 int align
8398 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8399
8400 rtx result, src_rtx, char_rtx;
8401 enum machine_mode insn_mode = value_mode, char_mode;
8402 enum insn_code icode;
8403
8404 /* If the length is known, just return it. */
8405 if (len != 0)
8406 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8407
8408 /* If SRC is not a pointer type, don't do this operation inline. */
8409 if (align == 0)
8410 break;
8411
8412 /* Call a function if we can't compute strlen in the right mode. */
8413
8414 while (insn_mode != VOIDmode)
8415 {
8416 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8417 if (icode != CODE_FOR_nothing)
8418 break;
8419
8420 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8421 }
8422 if (insn_mode == VOIDmode)
8423 break;
8424
8425 /* Make a place to write the result of the instruction. */
8426 result = target;
8427 if (! (result != 0
8428 && GET_CODE (result) == REG
8429 && GET_MODE (result) == insn_mode
8430 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8431 result = gen_reg_rtx (insn_mode);
8432
8433 /* Make sure the operands are acceptable to the predicates. */
8434
8435 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8436 result = gen_reg_rtx (insn_mode);
8437 src_rtx = memory_address (BLKmode,
8438 expand_expr (src, NULL_RTX, ptr_mode,
8439 EXPAND_NORMAL));
8440
8441 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8442 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8443
8444 /* Check the string is readable and has an end. */
8445 if (flag_check_memory_usage)
8446 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8447 src_rtx, ptr_mode,
8448 GEN_INT (MEMORY_USE_RO),
8449 TYPE_MODE (integer_type_node));
8450
8451 char_rtx = const0_rtx;
8452 char_mode = insn_operand_mode[(int)icode][2];
8453 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8454 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8455
8456 emit_insn (GEN_FCN (icode) (result,
8457 gen_rtx_MEM (BLKmode, src_rtx),
8458 char_rtx, GEN_INT (align)));
8459
8460 /* Return the value in the proper mode for this function. */
8461 if (GET_MODE (result) == value_mode)
8462 return result;
8463 else if (target != 0)
8464 {
8465 convert_move (target, result, 0);
8466 return target;
8467 }
8468 else
8469 return convert_to_mode (value_mode, result, 0);
8470 }
8471
8472 case BUILT_IN_STRCPY:
8473 /* If not optimizing, call the library function. */
8474 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8475 break;
8476
8477 if (arglist == 0
8478 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8479 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8480 || TREE_CHAIN (arglist) == 0
8481 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8482 break;
8483 else
8484 {
8485 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8486
8487 if (len == 0)
8488 break;
8489
8490 len = size_binop (PLUS_EXPR, len, integer_one_node);
8491
8492 chainon (arglist, build_tree_list (NULL_TREE, len));
8493 }
8494
8495 /* Drops in. */
8496 case BUILT_IN_MEMCPY:
8497 /* If not optimizing, call the library function. */
8498 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8499 break;
8500
8501 if (arglist == 0
8502 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8503 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8504 || TREE_CHAIN (arglist) == 0
8505 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8506 != POINTER_TYPE)
8507 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8508 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8509 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8510 != INTEGER_TYPE))
8511 break;
8512 else
8513 {
8514 tree dest = TREE_VALUE (arglist);
8515 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8516 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8517 tree type;
8518
8519 int src_align
8520 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8521 int dest_align
8522 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8523 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
8524
8525 /* If either SRC or DEST is not a pointer type, don't do
8526 this operation in-line. */
8527 if (src_align == 0 || dest_align == 0)
8528 {
8529 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8530 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8531 break;
8532 }
8533
8534 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8535 dest_mem = gen_rtx_MEM (BLKmode,
8536 memory_address (BLKmode, dest_rtx));
8537 /* There could be a void* cast on top of the object. */
8538 while (TREE_CODE (dest) == NOP_EXPR)
8539 dest = TREE_OPERAND (dest, 0);
8540 type = TREE_TYPE (TREE_TYPE (dest));
8541 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8542 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8543 src_mem = gen_rtx_MEM (BLKmode,
8544 memory_address (BLKmode, src_rtx));
8545 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8546
8547 /* Just copy the rights of SRC to the rights of DEST. */
8548 if (flag_check_memory_usage)
8549 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8550 dest_rtx, ptr_mode,
8551 src_rtx, ptr_mode,
8552 len_rtx, TYPE_MODE (sizetype));
8553
8554 /* There could be a void* cast on top of the object. */
8555 while (TREE_CODE (src) == NOP_EXPR)
8556 src = TREE_OPERAND (src, 0);
8557 type = TREE_TYPE (TREE_TYPE (src));
8558 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8559
8560 /* Copy word part most expediently. */
8561 dest_addr
8562 = emit_block_move (dest_mem, src_mem, len_rtx,
8563 MIN (src_align, dest_align));
8564
8565 if (dest_addr == 0)
8566 dest_addr = force_operand (dest_rtx, NULL_RTX);
8567
8568 return dest_addr;
8569 }
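/* Illustrative sketch, not part of the original source: the
   strcpy-to-memcpy rewrite performed above.  */
#if 0
#include <string.h>
char dst[6];

void
copy_example (void)
{
  /* c_strlen folds the source length to 5, so a length argument of
     6 (the string plus its terminating nul) is chained on and the
     call drops into the BUILT_IN_MEMCPY code: a block move is
     emitted instead of a library call.  */
  strcpy (dst, "hello");
}
#endif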
8570
8571 case BUILT_IN_MEMSET:
8572 /* If not optimizing, call the library function. */
8573 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8574 break;
8575
8576 if (arglist == 0
8577 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8578 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8579 || TREE_CHAIN (arglist) == 0
8580 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8581 != INTEGER_TYPE)
8582 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8583 || (INTEGER_TYPE
8584 != (TREE_CODE (TREE_TYPE
8585 (TREE_VALUE
8586 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8587 break;
8588 else
8589 {
8590 tree dest = TREE_VALUE (arglist);
8591 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8592 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8593 tree type;
8594
8595 int dest_align
8596 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8597 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
8598
8599 /* If DEST is not a pointer type, don't do this
8600 operation in-line. */
8601 if (dest_align == 0)
8602 break;
8603
8604 /* If VAL is not 0, don't do this operation in-line. */
8605 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8606 break;
8607
8608 /* If LEN does not expand to a constant, don't do this
8609 operation in-line. */
8610 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8611 if (GET_CODE (len_rtx) != CONST_INT)
8612 break;
8613
8614 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8615 dest_mem = gen_rtx_MEM (BLKmode,
8616 memory_address (BLKmode, dest_rtx));
8617
8618 /* Just check DST is writable and mark it as readable. */
8619 if (flag_check_memory_usage)
8620 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8621 dest_rtx, ptr_mode,
8622 len_rtx, TYPE_MODE (sizetype),
8623 GEN_INT (MEMORY_USE_WO),
8624 TYPE_MODE (integer_type_node));
8625
8626 /* There could be a void* cast on top of the object. */
8627 while (TREE_CODE (dest) == NOP_EXPR)
8628 dest = TREE_OPERAND (dest, 0);
8629 type = TREE_TYPE (TREE_TYPE (dest));
8630 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8631
8632 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
8633
8634 if (dest_addr == 0)
8635 dest_addr = force_operand (dest_rtx, NULL_RTX);
8636
8637 return dest_addr;
8638 }
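/* Illustrative sketch, not part of the original source: which memset
   calls the code above open-codes.  */
#if 0
#include <string.h>
char buf[16];

void
clear_examples (int n)
{
  memset (buf, 0, 16);	/* value 0, constant length: clear_storage */
  memset (buf, 1, 16);	/* nonzero value: library call */
  memset (buf, 0, n);	/* non-constant length: library call */
}
#endif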
8639
8640 /* These comparison functions need an instruction that returns an actual
8641 index. An ordinary compare that just sets the condition codes
8642 is not enough. */
8643 #ifdef HAVE_cmpstrsi
8644 case BUILT_IN_STRCMP:
8645 /* If not optimizing, call the library function. */
8646 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8647 break;
8648
8649 /* If we need to check memory accesses, call the library function. */
8650 if (flag_check_memory_usage)
8651 break;
8652
8653 if (arglist == 0
8654 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8655 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8656 || TREE_CHAIN (arglist) == 0
8657 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8658 break;
8659 else if (!HAVE_cmpstrsi)
8660 break;
8661 {
8662 tree arg1 = TREE_VALUE (arglist);
8663 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8664 tree len, len2;
8665
8666 len = c_strlen (arg1);
8667 if (len)
8668 len = size_binop (PLUS_EXPR, integer_one_node, len);
8669 len2 = c_strlen (arg2);
8670 if (len2)
8671 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8672
8673 /* If we don't have a constant length for the first, use the length
8674 of the second, if we know it. We don't require a constant for
8675 this case; some cost analysis could be done if both are available
8676 but neither is constant. For now, assume they're equally cheap.
8677
8678 If both strings have constant lengths, use the smaller. This
8679 could arise if optimization results in strcmp being called with
8680 two fixed strings, or if the code was machine-generated. We should
8681 add some code to the `memcmp' handler below to deal with such
8682 situations, someday. */
8683 if (!len || TREE_CODE (len) != INTEGER_CST)
8684 {
8685 if (len2)
8686 len = len2;
8687 else if (len == 0)
8688 break;
8689 }
8690 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8691 {
8692 if (tree_int_cst_lt (len2, len))
8693 len = len2;
8694 }
8695
8696 chainon (arglist, build_tree_list (NULL_TREE, len));
8697 }
8698
8699 /* Drops in. */
8700 case BUILT_IN_MEMCMP:
8701 /* If not optimizing, call the library function. */
8702 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8703 break;
8704
8705 /* If we need to check memory accesses, call the library function. */
8706 if (flag_check_memory_usage)
8707 break;
8708
8709 if (arglist == 0
8710 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8711 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8712 || TREE_CHAIN (arglist) == 0
8713 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8714 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8715 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8716 break;
8717 else if (!HAVE_cmpstrsi)
8718 break;
8719 {
8720 tree arg1 = TREE_VALUE (arglist);
8721 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8722 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8723 rtx result;
8724
8725 int arg1_align
8726 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8727 int arg2_align
8728 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8729 enum machine_mode insn_mode
8730 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8731
8732	/* If either argument's alignment is unknown, call the library function. */
8733 if (arg1_align == 0 || arg2_align == 0)
8734 {
8735 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8736 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8737 break;
8738 }
8739
8740 /* Make a place to write the result of the instruction. */
8741 result = target;
8742 if (! (result != 0
8743 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8744 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8745 result = gen_reg_rtx (insn_mode);
8746
8747 emit_insn (gen_cmpstrsi (result,
8748 gen_rtx_MEM (BLKmode,
8749 expand_expr (arg1, NULL_RTX,
8750 ptr_mode,
8751 EXPAND_NORMAL)),
8752 gen_rtx_MEM (BLKmode,
8753 expand_expr (arg2, NULL_RTX,
8754 ptr_mode,
8755 EXPAND_NORMAL)),
8756 expand_expr (len, NULL_RTX, VOIDmode, 0),
8757 GEN_INT (MIN (arg1_align, arg2_align))));
8758
8759 /* Return the value in the proper mode for this function. */
8760 mode = TYPE_MODE (TREE_TYPE (exp));
8761 if (GET_MODE (result) == mode)
8762 return result;
8763 else if (target != 0)
8764 {
8765 convert_move (target, result, 0);
8766 return target;
8767 }
8768 else
8769 return convert_to_mode (mode, result, 0);
8770 }
8771 #else
8772 case BUILT_IN_STRCMP:
8773 case BUILT_IN_MEMCMP:
8774 break;
8775 #endif
8776
8777 case BUILT_IN_SETJMP:
8778 if (arglist == 0
8779 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8780 break;
8781 else
8782 {
8783 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8784 VOIDmode, 0);
8785 rtx lab = gen_label_rtx ();
8786 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
8787 emit_label (lab);
8788 return ret;
8789 }
8790
8791 /* __builtin_longjmp is passed a pointer to an array of five words.
8792 It's similar to the C library longjmp function but works with
8793 __builtin_setjmp above. */
8794 case BUILT_IN_LONGJMP:
8795 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8796 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8797 break;
8798 else
8799 {
8800 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8801 VOIDmode, 0);
8802 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8803 const0_rtx, VOIDmode, 0);
8804 expand_builtin_longjmp (buf_addr, value);
8805 return const0_rtx;
8806 }
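      /* Illustrative sketch (an assumption about typical use, not part of
	 this file): the two builtins are used as a matched pair on a
	 five-word buffer:

	     void *buf[5];
	     if (__builtin_setjmp (buf) == 0)
	       do_work ();	      (may call __builtin_longjmp (buf, 1))
	     else
	       handle_resume ();      (control resumes here after the jump)

	 do_work and handle_resume are hypothetical.  Unlike the C library
	 setjmp/longjmp, these builtins only work with each other.  */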
8807
8808 /* Various hooks for the DWARF 2 __throw routine. */
8809 case BUILT_IN_UNWIND_INIT:
8810 expand_builtin_unwind_init ();
8811 return const0_rtx;
8812 case BUILT_IN_FP:
8813 return frame_pointer_rtx;
8814 case BUILT_IN_SP:
8815 return stack_pointer_rtx;
8816 #ifdef DWARF2_UNWIND_INFO
8817 case BUILT_IN_DWARF_FP_REGNUM:
8818 return expand_builtin_dwarf_fp_regnum ();
8819 case BUILT_IN_DWARF_REG_SIZE:
8820 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
8821 #endif
8822 case BUILT_IN_FROB_RETURN_ADDR:
8823 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
8824 case BUILT_IN_EXTRACT_RETURN_ADDR:
8825 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
8826 case BUILT_IN_SET_RETURN_ADDR_REG:
8827 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
8828 return const0_rtx;
8829 case BUILT_IN_EH_STUB:
8830 return expand_builtin_eh_stub ();
8831 case BUILT_IN_SET_EH_REGS:
8832 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
8833 TREE_VALUE (TREE_CHAIN (arglist)));
8834 return const0_rtx;
8835
8836    default:			/* Unknown builtin: report it, then fall
					   through to a normal library call.  */
8837 error ("built-in function `%s' not currently supported",
8838 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8839 }
8840
8841 /* The switch statement above can drop through to cause the function
8842 to be called normally. */
8843
8844 return expand_call (exp, target, ignore);
8845 }
8846 \f
8847 /* Built-in functions to perform an untyped call and return. */
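/* Illustrative sketch (an assumption about typical use, not taken from
   this file): the builtins below cooperate to forward a call unchanged:

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*)()) fn, args, 64);
       __builtin_return (result);

   where fn and the 64-byte argument-block size are hypothetical.  */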
8848
8849 /* For each register that may be used for calling a function, this
8850 gives a mode used to copy the register's value. VOIDmode indicates
8851 the register is not used for calling a function. If the machine
8852 has register windows, this gives only the outbound registers.
8853 INCOMING_REGNO gives the corresponding inbound register. */
8854 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8855
8856 /* For each register that may be used for returning values, this gives
8857 a mode used to copy the register's value. VOIDmode indicates the
8858 register is not used for returning values. If the machine has
8859 register windows, this gives only the outbound registers.
8860 INCOMING_REGNO gives the corresponding inbound register. */
8861 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8862
8863 /* For each register that may be used for calling a function, this
8864 gives the offset of that register into the block returned by
8865 __builtin_apply_args. 0 indicates that the register is not
8866 used for calling a function. */
8867 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8868
8869 /* Return the offset of register REGNO into the block returned by
8870 __builtin_apply_args. This is not declared static, since it is
8871 needed in objc-act.c. */
8872
8873 int
8874 apply_args_register_offset (regno)
8875 int regno;
8876 {
8877 apply_args_size ();
8878
8879 /* Arguments are always put in outgoing registers (in the argument
8880      block) when that makes sense. */
8881 #ifdef OUTGOING_REGNO
8882 regno = OUTGOING_REGNO(regno);
8883 #endif
8884 return apply_args_reg_offset[regno];
8885 }
8886
8887 /* Return the size required for the block returned by __builtin_apply_args,
8888 and initialize apply_args_mode. */
8889
8890 static int
8891 apply_args_size ()
8892 {
8893 static int size = -1;
8894 int align, regno;
8895 enum machine_mode mode;
8896
8897 /* The values computed by this function never change. */
8898 if (size < 0)
8899 {
8900 /* The first value is the incoming arg-pointer. */
8901 size = GET_MODE_SIZE (Pmode);
8902
8903 /* The second value is the structure value address unless this is
8904 passed as an "invisible" first argument. */
8905 if (struct_value_rtx)
8906 size += GET_MODE_SIZE (Pmode);
8907
8908 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8909 if (FUNCTION_ARG_REGNO_P (regno))
8910 {
8911 /* Search for the proper mode for copying this register's
8912 value. I'm not sure this is right, but it works so far. */
8913 enum machine_mode best_mode = VOIDmode;
8914
8915 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8916 mode != VOIDmode;
8917 mode = GET_MODE_WIDER_MODE (mode))
8918 if (HARD_REGNO_MODE_OK (regno, mode)
8919 && HARD_REGNO_NREGS (regno, mode) == 1)
8920 best_mode = mode;
8921
8922 if (best_mode == VOIDmode)
8923 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8924 mode != VOIDmode;
8925 mode = GET_MODE_WIDER_MODE (mode))
8926 if (HARD_REGNO_MODE_OK (regno, mode)
8927 && (mov_optab->handlers[(int) mode].insn_code
8928 != CODE_FOR_nothing))
8929 best_mode = mode;
8930
8931 mode = best_mode;
8932 if (mode == VOIDmode)
8933 abort ();
8934
8935 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
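	    /* Round SIZE up to a multiple of ALIGN before allocating this
	       register's slot; e.g. size 9 with align 4 becomes
	       CEIL (9, 4) * 4 == 12.  */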
8936 if (size % align != 0)
8937 size = CEIL (size, align) * align;
8938 apply_args_reg_offset[regno] = size;
8939 size += GET_MODE_SIZE (mode);
8940 apply_args_mode[regno] = mode;
8941 }
8942 else
8943 {
8944 apply_args_mode[regno] = VOIDmode;
8945 apply_args_reg_offset[regno] = 0;
8946 }
8947 }
8948 return size;
8949 }
8950
8951 /* Return the size required for the block returned by __builtin_apply,
8952 and initialize apply_result_mode. */
8953
8954 static int
8955 apply_result_size ()
8956 {
8957 static int size = -1;
8958 int align, regno;
8959 enum machine_mode mode;
8960
8961 /* The values computed by this function never change. */
8962 if (size < 0)
8963 {
8964 size = 0;
8965
8966 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8967 if (FUNCTION_VALUE_REGNO_P (regno))
8968 {
8969 /* Search for the proper mode for copying this register's
8970 value. I'm not sure this is right, but it works so far. */
8971 enum machine_mode best_mode = VOIDmode;
8972
8973 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8974 mode != TImode;
8975 mode = GET_MODE_WIDER_MODE (mode))
8976 if (HARD_REGNO_MODE_OK (regno, mode))
8977 best_mode = mode;
8978
8979 if (best_mode == VOIDmode)
8980 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8981 mode != VOIDmode;
8982 mode = GET_MODE_WIDER_MODE (mode))
8983 if (HARD_REGNO_MODE_OK (regno, mode)
8984 && (mov_optab->handlers[(int) mode].insn_code
8985 != CODE_FOR_nothing))
8986 best_mode = mode;
8987
8988 mode = best_mode;
8989 if (mode == VOIDmode)
8990 abort ();
8991
8992 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8993 if (size % align != 0)
8994 size = CEIL (size, align) * align;
8995 size += GET_MODE_SIZE (mode);
8996 apply_result_mode[regno] = mode;
8997 }
8998 else
8999 apply_result_mode[regno] = VOIDmode;
9000
9001 /* Allow targets that use untyped_call and untyped_return to override
9002 the size so that machine-specific information can be stored here. */
9003 #ifdef APPLY_RESULT_SIZE
9004 size = APPLY_RESULT_SIZE;
9005 #endif
9006 }
9007 return size;
9008 }
9009
9010 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9011 /* Create a vector describing the result block RESULT. If SAVEP is true,
9012 the result block is used to save the values; otherwise it is used to
9013 restore the values. */
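   For instance (a sketch): saving two result registers r0 (SImode) and
   f0 (DFmode) would produce roughly

       (parallel [(set (mem:SI ...) (reg:SI r0))
		  (set (mem:DF ...) (reg:DF f0))])

   with each MEM offset into RESULT; the register names here are
   hypothetical.  */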
9014
9015 static rtx
9016 result_vector (savep, result)
9017 int savep;
9018 rtx result;
9019 {
9020 int regno, size, align, nelts;
9021 enum machine_mode mode;
9022 rtx reg, mem;
9023 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9024
9025 size = nelts = 0;
9026 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9027 if ((mode = apply_result_mode[regno]) != VOIDmode)
9028 {
9029 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9030 if (size % align != 0)
9031 size = CEIL (size, align) * align;
9032 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9033 mem = change_address (result, mode,
9034 plus_constant (XEXP (result, 0), size));
9035 savevec[nelts++] = (savep
9036 ? gen_rtx_SET (VOIDmode, mem, reg)
9037 : gen_rtx_SET (VOIDmode, reg, mem));
9038 size += GET_MODE_SIZE (mode);
9039 }
9040 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9041 }
9042 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9043
9044 /* Save the state required to perform an untyped call with the same
9045 arguments as were passed to the current function. */
9046
9047 static rtx
9048 expand_builtin_apply_args ()
9049 {
9050 rtx registers;
9051 int size, align, regno;
9052 enum machine_mode mode;
9053
9054 /* Create a block where the arg-pointer, structure value address,
9055 and argument registers can be saved. */
9056 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9057
9058 /* Walk past the arg-pointer and structure value address. */
9059 size = GET_MODE_SIZE (Pmode);
9060 if (struct_value_rtx)
9061 size += GET_MODE_SIZE (Pmode);
9062
9063 /* Save each register used in calling a function to the block. */
9064 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9065 if ((mode = apply_args_mode[regno]) != VOIDmode)
9066 {
9067 rtx tem;
9068
9069 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9070 if (size % align != 0)
9071 size = CEIL (size, align) * align;
9072
9073 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9074
9075 #ifdef STACK_REGS
9076 #ifdef STACK_REGS
	/* For reg-stack.c's stack register housekeeping.
9077 Compare with a similar piece of code in function.c. */
9078
9079 emit_insn (gen_rtx_USE (mode, tem));
9080 #endif
9081
9082 emit_move_insn (change_address (registers, mode,
9083 plus_constant (XEXP (registers, 0),
9084 size)),
9085 tem);
9086 size += GET_MODE_SIZE (mode);
9087 }
9088
9089 /* Save the arg pointer to the block. */
9090 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9091 copy_to_reg (virtual_incoming_args_rtx));
9092 size = GET_MODE_SIZE (Pmode);
9093
9094 /* Save the structure value address unless this is passed as an
9095 "invisible" first argument. */
9096 if (struct_value_incoming_rtx)
9097 {
9098 emit_move_insn (change_address (registers, Pmode,
9099 plus_constant (XEXP (registers, 0),
9100 size)),
9101 copy_to_reg (struct_value_incoming_rtx));
9102 size += GET_MODE_SIZE (Pmode);
9103 }
9104
9105 /* Return the address of the block. */
9106 return copy_addr_to_reg (XEXP (registers, 0));
9107 }
9108
9109 /* Perform an untyped call and save the state required to perform an
9110 untyped return of whatever value was returned by the given function. */
9111
9112 static rtx
9113 expand_builtin_apply (function, arguments, argsize)
9114 rtx function, arguments, argsize;
9115 {
9116 int size, align, regno;
9117 enum machine_mode mode;
9118 rtx incoming_args, result, reg, dest, call_insn;
9119 rtx old_stack_level = 0;
9120 rtx call_fusage = 0;
9121
9122 /* Create a block where the return registers can be saved. */
9123 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9124
9125 /* ??? The argsize value should be adjusted here. */
9126
9127 /* Fetch the arg pointer from the ARGUMENTS block. */
9128 incoming_args = gen_reg_rtx (Pmode);
9129 emit_move_insn (incoming_args,
9130 gen_rtx_MEM (Pmode, arguments));
9131 #ifndef STACK_GROWS_DOWNWARD
9132 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9133 incoming_args, 0, OPTAB_LIB_WIDEN);
9134 #endif
9135
9136 /* Perform postincrements before actually calling the function. */
9137 emit_queue ();
9138
9139 /* Push a new argument block and copy the arguments. */
9140 do_pending_stack_adjust ();
9141
9142   /* Save the stack using the nonlocal mechanism, if available. */
9143 #ifdef HAVE_save_stack_nonlocal
9144 if (HAVE_save_stack_nonlocal)
9145 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9146 else
9147 #endif
9148 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9149
9150 /* Push a block of memory onto the stack to store the memory arguments.
9151 Save the address in a register, and copy the memory arguments. ??? I
9152      haven't figured out how the calling convention macros affect this,
9153      but it's likely that the source and/or destination addresses in
9154      the block copy will need updating in machine-specific ways. */
9155 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9156 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9157 gen_rtx_MEM (BLKmode, incoming_args),
9158 argsize,
9159 PARM_BOUNDARY / BITS_PER_UNIT);
9160
9161 /* Refer to the argument block. */
9162 apply_args_size ();
9163 arguments = gen_rtx_MEM (BLKmode, arguments);
9164
9165 /* Walk past the arg-pointer and structure value address. */
9166 size = GET_MODE_SIZE (Pmode);
9167 if (struct_value_rtx)
9168 size += GET_MODE_SIZE (Pmode);
9169
9170 /* Restore each of the registers previously saved. Make USE insns
9171 for each of these registers for use in making the call. */
9172 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9173 if ((mode = apply_args_mode[regno]) != VOIDmode)
9174 {
9175 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9176 if (size % align != 0)
9177 size = CEIL (size, align) * align;
9178 reg = gen_rtx_REG (mode, regno);
9179 emit_move_insn (reg,
9180 change_address (arguments, mode,
9181 plus_constant (XEXP (arguments, 0),
9182 size)));
9183
9184 use_reg (&call_fusage, reg);
9185 size += GET_MODE_SIZE (mode);
9186 }
9187
9188 /* Restore the structure value address unless this is passed as an
9189 "invisible" first argument. */
9190 size = GET_MODE_SIZE (Pmode);
9191 if (struct_value_rtx)
9192 {
9193 rtx value = gen_reg_rtx (Pmode);
9194 emit_move_insn (value,
9195 change_address (arguments, Pmode,
9196 plus_constant (XEXP (arguments, 0),
9197 size)));
9198 emit_move_insn (struct_value_rtx, value);
9199 if (GET_CODE (struct_value_rtx) == REG)
9200 use_reg (&call_fusage, struct_value_rtx);
9201 size += GET_MODE_SIZE (Pmode);
9202 }
9203
9204 /* All arguments and registers used for the call are set up by now! */
9205 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9206
9207   /* Ensure the address is valid.  A SYMBOL_REF is already valid, so nothing
9208      needs doing; nor do we want to load it into a register here, because
9209      prepare_call_address has already done that where appropriate. */
9210 if (GET_CODE (function) != SYMBOL_REF)
9211 function = memory_address (FUNCTION_MODE, function);
9212
9213 /* Generate the actual call instruction and save the return value. */
9214 #ifdef HAVE_untyped_call
9215 if (HAVE_untyped_call)
9216 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9217 result, result_vector (1, result)));
9218 else
9219 #endif
9220 #ifdef HAVE_call_value
9221 if (HAVE_call_value)
9222 {
9223 rtx valreg = 0;
9224
9225 /* Locate the unique return register. It is not possible to
9226 express a call that sets more than one return register using
9227 call_value; use untyped_call for that. In fact, untyped_call
9228 only needs to save the return registers in the given block. */
9229 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9230 if ((mode = apply_result_mode[regno]) != VOIDmode)
9231 {
9232 if (valreg)
9233 abort (); /* HAVE_untyped_call required. */
9234 valreg = gen_rtx_REG (mode, regno);
9235 }
9236
9237 emit_call_insn (gen_call_value (valreg,
9238 gen_rtx_MEM (FUNCTION_MODE, function),
9239 const0_rtx, NULL_RTX, const0_rtx));
9240
9241 emit_move_insn (change_address (result, GET_MODE (valreg),
9242 XEXP (result, 0)),
9243 valreg);
9244 }
9245 else
9246 #endif
9247 abort ();
9248
9249 /* Find the CALL insn we just emitted. */
9250 for (call_insn = get_last_insn ();
9251 call_insn && GET_CODE (call_insn) != CALL_INSN;
9252 call_insn = PREV_INSN (call_insn))
9253 ;
9254
9255 if (! call_insn)
9256 abort ();
9257
9258 /* Put the register usage information on the CALL. If there is already
9259 some usage information, put ours at the end. */
9260 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9261 {
9262 rtx link;
9263
9264 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9265 link = XEXP (link, 1))
9266 ;
9267
9268 XEXP (link, 1) = call_fusage;
9269 }
9270 else
9271 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9272
9273 /* Restore the stack. */
9274 #ifdef HAVE_save_stack_nonlocal
9275 if (HAVE_save_stack_nonlocal)
9276 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9277 else
9278 #endif
9279 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9280
9281 /* Return the address of the result block. */
9282 return copy_addr_to_reg (XEXP (result, 0));
9283 }
9284
9285 /* Perform an untyped return. */
9286
9287 static void
9288 expand_builtin_return (result)
9289 rtx result;
9290 {
9291 int size, align, regno;
9292 enum machine_mode mode;
9293 rtx reg;
9294 rtx call_fusage = 0;
9295
9296 apply_result_size ();
9297 result = gen_rtx_MEM (BLKmode, result);
9298
9299 #ifdef HAVE_untyped_return
9300 if (HAVE_untyped_return)
9301 {
9302 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9303 emit_barrier ();
9304 return;
9305 }
9306 #endif
9307
9308 /* Restore the return value and note that each value is used. */
9309 size = 0;
9310 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9311 if ((mode = apply_result_mode[regno]) != VOIDmode)
9312 {
9313 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9314 if (size % align != 0)
9315 size = CEIL (size, align) * align;
9316 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9317 emit_move_insn (reg,
9318 change_address (result, mode,
9319 plus_constant (XEXP (result, 0),
9320 size)));
9321
9322 push_to_sequence (call_fusage);
9323 emit_insn (gen_rtx_USE (VOIDmode, reg));
9324 call_fusage = get_insns ();
9325 end_sequence ();
9326 size += GET_MODE_SIZE (mode);
9327 }
9328
9329 /* Put the USE insns before the return. */
9330 emit_insns (call_fusage);
9331
9332   /* Return whatever value was restored by jumping directly to the end
9333 of the function. */
9334 expand_null_return ();
9335 }
9336 \f
9337 /* Expand code for a post- or pre- increment or decrement
9338 and return the RTX for the result.
9339 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
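/* For example: given `y = x++', POST is 1 and the returned rtx holds the
   old value of x; given `y = ++x', POST is 0 and the returned rtx holds
   the incremented value.  */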
9340
9341 static rtx
9342 expand_increment (exp, post, ignore)
9343 register tree exp;
9344 int post, ignore;
9345 {
9346 register rtx op0, op1;
9347 register rtx temp, value;
9348 register tree incremented = TREE_OPERAND (exp, 0);
9349 optab this_optab = add_optab;
9350 int icode;
9351 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9352 int op0_is_copy = 0;
9353 int single_insn = 0;
9354 /* 1 means we can't store into OP0 directly,
9355 because it is a subreg narrower than a word,
9356 and we don't dare clobber the rest of the word. */
9357 int bad_subreg = 0;
9358
9359 /* Stabilize any component ref that might need to be
9360 evaluated more than once below. */
9361 if (!post
9362 || TREE_CODE (incremented) == BIT_FIELD_REF
9363 || (TREE_CODE (incremented) == COMPONENT_REF
9364 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9365 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9366 incremented = stabilize_reference (incremented);
9367 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9368      ones into SAVE_EXPRs so that they don't accidentally get evaluated
9369 more than once by the code below. */
9370 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9371 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9372 incremented = save_expr (incremented);
9373
9374 /* Compute the operands as RTX.
9375 Note whether OP0 is the actual lvalue or a copy of it:
9376 I believe it is a copy iff it is a register or subreg
9377 and insns were generated in computing it. */
9378
9379 temp = get_last_insn ();
9380 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9381
9382 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9383 in place but instead must do sign- or zero-extension during assignment,
9384 so we copy it into a new register and let the code below use it as
9385 a copy.
9386
9387      Note that we can safely modify this SUBREG since it is known not to be
9388 shared (it was made by the expand_expr call above). */
9389
9390 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9391 {
9392 if (post)
9393 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9394 else
9395 bad_subreg = 1;
9396 }
9397 else if (GET_CODE (op0) == SUBREG
9398 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9399 {
9400 /* We cannot increment this SUBREG in place. If we are
9401 post-incrementing, get a copy of the old value. Otherwise,
9402 just mark that we cannot increment in place. */
9403 if (post)
9404 op0 = copy_to_reg (op0);
9405 else
9406 bad_subreg = 1;
9407 }
9408
9409 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9410 && temp != get_last_insn ());
9411 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9412 EXPAND_MEMORY_USE_BAD);
9413
9414 /* Decide whether incrementing or decrementing. */
9415 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9416 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9417 this_optab = sub_optab;
9418
9419 /* Convert decrement by a constant into a negative increment. */
9420 if (this_optab == sub_optab
9421 && GET_CODE (op1) == CONST_INT)
9422 {
9423 op1 = GEN_INT (- INTVAL (op1));
9424 this_optab = add_optab;
9425 }
9426
9427 /* For a preincrement, see if we can do this with a single instruction. */
9428 if (!post)
9429 {
9430 icode = (int) this_optab->handlers[(int) mode].insn_code;
9431 if (icode != (int) CODE_FOR_nothing
9432 /* Make sure that OP0 is valid for operands 0 and 1
9433 of the insn we want to queue. */
9434 && (*insn_operand_predicate[icode][0]) (op0, mode)
9435 && (*insn_operand_predicate[icode][1]) (op0, mode)
9436 && (*insn_operand_predicate[icode][2]) (op1, mode))
9437 single_insn = 1;
9438 }
9439
9440 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9441 then we cannot just increment OP0. We must therefore contrive to
9442 increment the original value. Then, for postincrement, we can return
9443 OP0 since it is a copy of the old value. For preincrement, expand here
9444 unless we can do it with a single insn.
9445
9446 Likewise if storing directly into OP0 would clobber high bits
9447 we need to preserve (bad_subreg). */
9448 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9449 {
9450 /* This is the easiest way to increment the value wherever it is.
9451 Problems with multiple evaluation of INCREMENTED are prevented
9452 because either (1) it is a component_ref or preincrement,
9453 in which case it was stabilized above, or (2) it is an array_ref
9454 with constant index in an array in a register, which is
9455 safe to reevaluate. */
9456 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9457 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9458 ? MINUS_EXPR : PLUS_EXPR),
9459 TREE_TYPE (exp),
9460 incremented,
9461 TREE_OPERAND (exp, 1));
9462
9463 while (TREE_CODE (incremented) == NOP_EXPR
9464 || TREE_CODE (incremented) == CONVERT_EXPR)
9465 {
9466 newexp = convert (TREE_TYPE (incremented), newexp);
9467 incremented = TREE_OPERAND (incremented, 0);
9468 }
9469
9470 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9471 return post ? op0 : temp;
9472 }
9473
9474 if (post)
9475 {
9476 /* We have a true reference to the value in OP0.
9477 If there is an insn to add or subtract in this mode, queue it.
9478 Queueing the increment insn avoids the register shuffling
9479 that often results if we must increment now and first save
9480 the old value for subsequent use. */
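      /* E.g. (sketch): when `x++' appears as a function argument, the
	 addition is queued here and only emitted by a later emit_queue,
	 after the old value of x has been used.  */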
9481
9482 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9483 op0 = stabilize (op0);
9484 #endif
9485
9486 icode = (int) this_optab->handlers[(int) mode].insn_code;
9487 if (icode != (int) CODE_FOR_nothing
9488 /* Make sure that OP0 is valid for operands 0 and 1
9489 of the insn we want to queue. */
9490 && (*insn_operand_predicate[icode][0]) (op0, mode)
9491 && (*insn_operand_predicate[icode][1]) (op0, mode))
9492 {
9493 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9494 op1 = force_reg (mode, op1);
9495
9496 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9497 }
9498 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9499 {
9500 rtx addr = (general_operand (XEXP (op0, 0), mode)
9501 ? force_reg (Pmode, XEXP (op0, 0))
9502 : copy_to_reg (XEXP (op0, 0)));
9503 rtx temp, result;
9504
9505 op0 = change_address (op0, VOIDmode, addr);
9506 temp = force_reg (GET_MODE (op0), op0);
9507 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9508 op1 = force_reg (mode, op1);
9509
9510 /* The increment queue is LIFO, thus we have to `queue'
9511 the instructions in reverse order. */
9512 enqueue_insn (op0, gen_move_insn (op0, temp));
9513 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9514 return result;
9515 }
9516 }
9517
9518 /* Preincrement, or we can't increment with one simple insn. */
9519 if (post)
9520 /* Save a copy of the value before inc or dec, to return it later. */
9521 temp = value = copy_to_reg (op0);
9522 else
9523 /* Arrange to return the incremented value. */
9524     /* Copy the rtx because expand_binop will protect it from the queue,
9525 and the results of that would be invalid for us to return
9526 if our caller does emit_queue before using our result. */
9527 temp = copy_rtx (value = op0);
9528
9529 /* Increment however we can. */
9530 op1 = expand_binop (mode, this_optab, value, op1,
9531 flag_check_memory_usage ? NULL_RTX : op0,
9532 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9533 /* Make sure the value is stored into OP0. */
9534 if (op1 != op0)
9535 emit_move_insn (op0, op1);
9536
9537 return temp;
9538 }
9539 \f
9540 /* Expand all function calls contained within EXP, innermost ones first.
9541 But don't look within expressions that have sequence points.
9542 For each CALL_EXPR, record the rtx for its value
9543 in the CALL_EXPR_RTL field. */
9544
9545 static void
9546 preexpand_calls (exp)
9547 tree exp;
9548 {
9549 register int nops, i;
9550 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9551
9552 if (! do_preexpand_calls)
9553 return;
9554
9555 /* Only expressions and references can contain calls. */
9556
9557 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9558 return;
9559
9560 switch (TREE_CODE (exp))
9561 {
9562 case CALL_EXPR:
9563 /* Do nothing if already expanded. */
9564 if (CALL_EXPR_RTL (exp) != 0
9565 /* Do nothing if the call returns a variable-sized object. */
9566 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9567 /* Do nothing to built-in functions. */
9568 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9569 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9570 == FUNCTION_DECL)
9571 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9572 return;
9573
9574 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9575 return;
9576
9577 case COMPOUND_EXPR:
9578 case COND_EXPR:
9579 case TRUTH_ANDIF_EXPR:
9580 case TRUTH_ORIF_EXPR:
9581 /* If we find one of these, then we can be sure
9582 the adjust will be done for it (since it makes jumps).
9583 Do it now, so that if this is inside an argument
9584 of a function, we don't get the stack adjustment
9585 after some other args have already been pushed. */
9586 do_pending_stack_adjust ();
9587 return;
9588
9589 case BLOCK:
9590 case RTL_EXPR:
9591 case WITH_CLEANUP_EXPR:
9592 case CLEANUP_POINT_EXPR:
9593 case TRY_CATCH_EXPR:
9594 return;
9595
9596 case SAVE_EXPR:
9597 if (SAVE_EXPR_RTL (exp) != 0)
9598 return;
9599
9600 default:
9601 break;
9602 }
9603
9604 nops = tree_code_length[(int) TREE_CODE (exp)];
9605 for (i = 0; i < nops; i++)
9606 if (TREE_OPERAND (exp, i) != 0)
9607 {
9608 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9609 if (type == 'e' || type == '<' || type == '1' || type == '2'
9610 || type == 'r')
9611 preexpand_calls (TREE_OPERAND (exp, i));
9612 }
9613 }
9614 \f
9615 /* At the start of a function, record that we have no previously-pushed
9616 arguments waiting to be popped. */
9617
9618 void
9619 init_pending_stack_adjust ()
9620 {
9621 pending_stack_adjust = 0;
9622 }
9623
9624 /* When exiting from function, if safe, clear out any pending stack adjust
9625 so the adjustment won't get done.
9626
9627 Note, if the current function calls alloca, then it must have a
9628 frame pointer regardless of the value of flag_omit_frame_pointer. */
9629
9630 void
9631 clear_pending_stack_adjust ()
9632 {
9633 #ifdef EXIT_IGNORE_STACK
9634 if (optimize > 0
9635 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9636 && EXIT_IGNORE_STACK
9637 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9638 && ! flag_inline_functions)
9639 pending_stack_adjust = 0;
9640 #endif
9641 }
9642
9643 /* Pop any previously-pushed arguments that have not been popped yet. */
9644
9645 void
9646 do_pending_stack_adjust ()
9647 {
9648 if (inhibit_defer_pop == 0)
9649 {
9650 if (pending_stack_adjust != 0)
9651 adjust_stack (GEN_INT (pending_stack_adjust));
9652 pending_stack_adjust = 0;
9653 }
9654 }
9655 \f
9656 /* Expand conditional expressions. */
9657
9658 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9659 LABEL is an rtx of code CODE_LABEL, in this function and all the
9660 functions here. */
9661
9662 void
9663 jumpifnot (exp, label)
9664 tree exp;
9665 rtx label;
9666 {
9667 do_jump (exp, label, NULL_RTX);
9668 }
9669
9670 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9671
9672 void
9673 jumpif (exp, label)
9674 tree exp;
9675 rtx label;
9676 {
9677 do_jump (exp, NULL_RTX, label);
9678 }
9679
9680 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9681 the result is zero, or IF_TRUE_LABEL if the result is one.
9682 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9683 meaning fall through in that case.
9684
9685 do_jump always does any pending stack adjust except when it does not
9686 actually perform a jump. An example where there is no jump
9687 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9688
9689 This function is responsible for optimizing cases such as
9690 &&, || and comparison operators in EXP. */
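/* For example (a sketch): for EXP `a && b', do_jump (exp, Lfalse, Ltrue)
   emits roughly

       if (a is zero) jump to Lfalse;
       if (b is zero) jump to Lfalse; else jump to Ltrue;

   so no 0/1 value is ever materialized for the && itself.  */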
9691
9692 void
9693 do_jump (exp, if_false_label, if_true_label)
9694 tree exp;
9695 rtx if_false_label, if_true_label;
9696 {
9697 register enum tree_code code = TREE_CODE (exp);
9698 /* Some cases need to create a label to jump to
9699 in order to properly fall through.
9700 These cases set DROP_THROUGH_LABEL nonzero. */
9701 rtx drop_through_label = 0;
9702 rtx temp;
9703 rtx comparison = 0;
9704 int i;
9705 tree type;
9706 enum machine_mode mode;
9707
9708 emit_queue ();
9709
9710 switch (code)
9711 {
9712 case ERROR_MARK:
9713 break;
9714
9715 case INTEGER_CST:
9716 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9717 if (temp)
9718 emit_jump (temp);
9719 break;
9720
9721 #if 0
9722 /* This is not true with #pragma weak */
9723 case ADDR_EXPR:
9724 /* The address of something can never be zero. */
9725 if (if_true_label)
9726 emit_jump (if_true_label);
9727 break;
9728 #endif
9729
9730 case NOP_EXPR:
9731 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9732 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9733 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9734 goto normal;
9735 case CONVERT_EXPR:
9736 /* If we are narrowing the operand, we have to do the compare in the
9737 narrower mode. */
9738 if ((TYPE_PRECISION (TREE_TYPE (exp))
9739 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9740 goto normal;
9741 case NON_LVALUE_EXPR:
9742 case REFERENCE_EXPR:
9743 case ABS_EXPR:
9744 case NEGATE_EXPR:
9745 case LROTATE_EXPR:
9746 case RROTATE_EXPR:
9747 /* These cannot change zero->non-zero or vice versa. */
9748 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9749 break;
9750
9751 #if 0
9752 /* This is never less insns than evaluating the PLUS_EXPR followed by
9753 a test and can be longer if the test is eliminated. */
9754 case PLUS_EXPR:
9755 /* Reduce to minus. */
9756 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9757 TREE_OPERAND (exp, 0),
9758 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9759 TREE_OPERAND (exp, 1))));
9760 /* Process as MINUS. */
9761 #endif
9762
9763 case MINUS_EXPR:
9764 /* Non-zero iff operands of minus differ. */
9765 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9766 TREE_OPERAND (exp, 0),
9767 TREE_OPERAND (exp, 1)),
9768 NE, NE);
9769 break;
9770
9771 case BIT_AND_EXPR:
9772 /* If we are AND'ing with a small constant, do this comparison in the
9773 smallest type that fits. If the machine doesn't have comparisons
9774 that small, it will be converted back to the wider comparison.
9775 This helps if we are testing the sign bit of a narrower object.
9776 combine can't do this for us because it can't know whether a
9777 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9778
9779 if (! SLOW_BYTE_ACCESS
9780 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9781 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9782 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9783 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9784 && (type = type_for_mode (mode, 1)) != 0
9785 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9786 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9787 != CODE_FOR_nothing))
9788 {
9789 do_jump (convert (type, exp), if_false_label, if_true_label);
9790 break;
9791 }
9792 goto normal;
9793
9794 case TRUTH_NOT_EXPR:
9795 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9796 break;
9797
9798 case TRUTH_ANDIF_EXPR:
9799 if (if_false_label == 0)
9800 if_false_label = drop_through_label = gen_label_rtx ();
9801 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9802 start_cleanup_deferral ();
9803 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9804 end_cleanup_deferral ();
9805 break;
9806
9807 case TRUTH_ORIF_EXPR:
9808 if (if_true_label == 0)
9809 if_true_label = drop_through_label = gen_label_rtx ();
9810 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9811 start_cleanup_deferral ();
9812 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9813 end_cleanup_deferral ();
9814 break;
9815
9816 case COMPOUND_EXPR:
9817 push_temp_slots ();
9818 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9819 preserve_temp_slots (NULL_RTX);
9820 free_temp_slots ();
9821 pop_temp_slots ();
9822 emit_queue ();
9823 do_pending_stack_adjust ();
9824 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9825 break;
9826
9827 case COMPONENT_REF:
9828 case BIT_FIELD_REF:
9829 case ARRAY_REF:
9830 {
9831 int bitsize, bitpos, unsignedp;
9832 enum machine_mode mode;
9833 tree type;
9834 tree offset;
9835 int volatilep = 0;
9836 int alignment;
9837
9838 /* Get description of this reference. We don't actually care
9839 about the underlying object here. */
9840 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9841 &mode, &unsignedp, &volatilep,
9842 &alignment);
9843
9844 type = type_for_size (bitsize, unsignedp);
9845 if (! SLOW_BYTE_ACCESS
9846 && type != 0 && bitsize >= 0
9847 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9848 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9849 != CODE_FOR_nothing))
9850 {
9851 do_jump (convert (type, exp), if_false_label, if_true_label);
9852 break;
9853 }
9854 goto normal;
9855 }
9856
9857 case COND_EXPR:
9858 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9859 if (integer_onep (TREE_OPERAND (exp, 1))
9860 && integer_zerop (TREE_OPERAND (exp, 2)))
9861 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9862
9863 else if (integer_zerop (TREE_OPERAND (exp, 1))
9864 && integer_onep (TREE_OPERAND (exp, 2)))
9865 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9866
9867 else
9868 {
9869 register rtx label1 = gen_label_rtx ();
9870 drop_through_label = gen_label_rtx ();
9871
9872 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9873
9874 start_cleanup_deferral ();
9875 /* Now the THEN-expression. */
9876 do_jump (TREE_OPERAND (exp, 1),
9877 if_false_label ? if_false_label : drop_through_label,
9878 if_true_label ? if_true_label : drop_through_label);
9879 /* In case the do_jump just above never jumps. */
9880 do_pending_stack_adjust ();
9881 emit_label (label1);
9882
9883 /* Now the ELSE-expression. */
9884 do_jump (TREE_OPERAND (exp, 2),
9885 if_false_label ? if_false_label : drop_through_label,
9886 if_true_label ? if_true_label : drop_through_label);
9887 end_cleanup_deferral ();
9888 }
9889 break;
9890
9891 case EQ_EXPR:
9892 {
9893 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9894
9895 if (integer_zerop (TREE_OPERAND (exp, 1)))
9896 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9897 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9898 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9899 do_jump
9900 (fold
9901 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9902 fold (build (EQ_EXPR, TREE_TYPE (exp),
9903 fold (build1 (REALPART_EXPR,
9904 TREE_TYPE (inner_type),
9905 TREE_OPERAND (exp, 0))),
9906 fold (build1 (REALPART_EXPR,
9907 TREE_TYPE (inner_type),
9908 TREE_OPERAND (exp, 1))))),
9909 fold (build (EQ_EXPR, TREE_TYPE (exp),
9910 fold (build1 (IMAGPART_EXPR,
9911 TREE_TYPE (inner_type),
9912 TREE_OPERAND (exp, 0))),
9913 fold (build1 (IMAGPART_EXPR,
9914 TREE_TYPE (inner_type),
9915 TREE_OPERAND (exp, 1))))))),
9916 if_false_label, if_true_label);
9917 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9918 && !can_compare_p (TYPE_MODE (inner_type)))
9919 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9920 else
9921 comparison = compare (exp, EQ, EQ);
9922 break;
9923 }
9924
9925 case NE_EXPR:
9926 {
9927 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9928
9929 if (integer_zerop (TREE_OPERAND (exp, 1)))
9930 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9931 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9932 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9933 do_jump
9934 (fold
9935 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9936 fold (build (NE_EXPR, TREE_TYPE (exp),
9937 fold (build1 (REALPART_EXPR,
9938 TREE_TYPE (inner_type),
9939 TREE_OPERAND (exp, 0))),
9940 fold (build1 (REALPART_EXPR,
9941 TREE_TYPE (inner_type),
9942 TREE_OPERAND (exp, 1))))),
9943 fold (build (NE_EXPR, TREE_TYPE (exp),
9944 fold (build1 (IMAGPART_EXPR,
9945 TREE_TYPE (inner_type),
9946 TREE_OPERAND (exp, 0))),
9947 fold (build1 (IMAGPART_EXPR,
9948 TREE_TYPE (inner_type),
9949 TREE_OPERAND (exp, 1))))))),
9950 if_false_label, if_true_label);
9951 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9952 && !can_compare_p (TYPE_MODE (inner_type)))
9953 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9954 else
9955 comparison = compare (exp, NE, NE);
9956 break;
9957 }
9958
9959 case LT_EXPR:
9960 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9961 == MODE_INT)
9962 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9963 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9964 else
9965 comparison = compare (exp, LT, LTU);
9966 break;
9967
9968 case LE_EXPR:
9969 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9970 == MODE_INT)
9971 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9972 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9973 else
9974 comparison = compare (exp, LE, LEU);
9975 break;
9976
9977 case GT_EXPR:
9978 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9979 == MODE_INT)
9980 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9981 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9982 else
9983 comparison = compare (exp, GT, GTU);
9984 break;
9985
9986 case GE_EXPR:
9987 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9988 == MODE_INT)
9989 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9990 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9991 else
9992 comparison = compare (exp, GE, GEU);
9993 break;
9994
9995 default:
9996 normal:
9997 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9998 #if 0
9999 /* This is not needed any more and causes poor code since it causes
10000 comparisons and tests from non-SI objects to have different code
10001 sequences. */
10002 /* Copy to register to avoid generating bad insns by cse
10003 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10004 if (!cse_not_expected && GET_CODE (temp) == MEM)
10005 temp = copy_to_reg (temp);
10006 #endif
10007 do_pending_stack_adjust ();
10008 if (GET_CODE (temp) == CONST_INT)
10009 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10010 else if (GET_CODE (temp) == LABEL_REF)
10011 comparison = const_true_rtx;
10012 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10013 && !can_compare_p (GET_MODE (temp)))
10014 /* Note swapping the labels gives us not-equal. */
10015 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10016 else if (GET_MODE (temp) != VOIDmode)
10017 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10018 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10019 GET_MODE (temp), NULL_RTX, 0);
10020 else
10021 abort ();
10022 }
10023
10024 /* Do any postincrements in the expression that was tested. */
10025 emit_queue ();
10026
10027 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10028 straight into a conditional jump instruction as the jump condition.
10029 Otherwise, all the work has been done already. */
10030
10031 if (comparison == const_true_rtx)
10032 {
10033 if (if_true_label)
10034 emit_jump (if_true_label);
10035 }
10036 else if (comparison == const0_rtx)
10037 {
10038 if (if_false_label)
10039 emit_jump (if_false_label);
10040 }
10041 else if (comparison)
10042 do_jump_for_compare (comparison, if_false_label, if_true_label);
10043
10044 if (drop_through_label)
10045 {
10046 /* If do_jump produces code that might be jumped around,
10047 do any stack adjusts from that code, before the place
10048 where control merges in. */
10049 do_pending_stack_adjust ();
10050 emit_label (drop_through_label);
10051 }
10052 }
10053 \f
10054 /* Given a comparison expression EXP for values too wide to be compared
10055 with one insn, test the comparison and jump to the appropriate label.
10056 The code of EXP is ignored; we always test GT if SWAP is 0,
10057 and LT if SWAP is 1. */
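/* Sketch for a two-word compare OP0 > OP1 (high-order word first):

       if (hi0 > hi1) jump to if_true_label;
       if (hi0 != hi1) jump to if_false_label;
       if (lo0 > lo1) jump to if_true_label;
       jump to if_false_label;

   Every word below the most significant one is compared unsigned.  */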
10058
10059 static void
10060 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10061 tree exp;
10062 int swap;
10063 rtx if_false_label, if_true_label;
10064 {
10065 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10066 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10067 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10068 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10069 rtx drop_through_label = 0;
10070 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10071 int i;
10072
10073 if (! if_true_label || ! if_false_label)
10074 drop_through_label = gen_label_rtx ();
10075 if (! if_true_label)
10076 if_true_label = drop_through_label;
10077 if (! if_false_label)
10078 if_false_label = drop_through_label;
10079
10080 /* Compare a word at a time, high order first. */
10081 for (i = 0; i < nwords; i++)
10082 {
10083 rtx comp;
10084 rtx op0_word, op1_word;
10085
10086 if (WORDS_BIG_ENDIAN)
10087 {
10088 op0_word = operand_subword_force (op0, i, mode);
10089 op1_word = operand_subword_force (op1, i, mode);
10090 }
10091 else
10092 {
10093 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10094 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10095 }
10096
10097 /* All but high-order word must be compared as unsigned. */
10098 comp = compare_from_rtx (op0_word, op1_word,
10099 (unsignedp || i > 0) ? GTU : GT,
10100 unsignedp, word_mode, NULL_RTX, 0);
10101 if (comp == const_true_rtx)
10102 emit_jump (if_true_label);
10103 else if (comp != const0_rtx)
10104 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10105
10106 /* Consider lower words only if these are equal. */
10107 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10108 NULL_RTX, 0);
10109 if (comp == const_true_rtx)
10110 emit_jump (if_false_label);
10111 else if (comp != const0_rtx)
10112 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10113 }
10114
10115 if (if_false_label)
10116 emit_jump (if_false_label);
10117 if (drop_through_label)
10118 emit_label (drop_through_label);
10119 }
10120
10121 /* Compare OP0 with OP1, word at a time, in mode MODE.
10122 UNSIGNEDP says to do unsigned comparison.
10123 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10124
10125 void
10126 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10127 enum machine_mode mode;
10128 int unsignedp;
10129 rtx op0, op1;
10130 rtx if_false_label, if_true_label;
10131 {
10132 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10133 rtx drop_through_label = 0;
10134 int i;
10135
10136 if (! if_true_label || ! if_false_label)
10137 drop_through_label = gen_label_rtx ();
10138 if (! if_true_label)
10139 if_true_label = drop_through_label;
10140 if (! if_false_label)
10141 if_false_label = drop_through_label;
10142
10143 /* Compare a word at a time, high order first. */
10144 for (i = 0; i < nwords; i++)
10145 {
10146 rtx comp;
10147 rtx op0_word, op1_word;
10148
10149 if (WORDS_BIG_ENDIAN)
10150 {
10151 op0_word = operand_subword_force (op0, i, mode);
10152 op1_word = operand_subword_force (op1, i, mode);
10153 }
10154 else
10155 {
10156 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10157 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10158 }
10159
10160 /* All but high-order word must be compared as unsigned. */
10161 comp = compare_from_rtx (op0_word, op1_word,
10162 (unsignedp || i > 0) ? GTU : GT,
10163 unsignedp, word_mode, NULL_RTX, 0);
10164 if (comp == const_true_rtx)
10165 emit_jump (if_true_label);
10166 else if (comp != const0_rtx)
10167 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10168
10169 /* Consider lower words only if these are equal. */
10170 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10171 NULL_RTX, 0);
10172 if (comp == const_true_rtx)
10173 emit_jump (if_false_label);
10174 else if (comp != const0_rtx)
10175 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10176 }
10177
10178 if (if_false_label)
10179 emit_jump (if_false_label);
10180 if (drop_through_label)
10181 emit_label (drop_through_label);
10182 }
10183
10184 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10185 with one insn, test the comparison and jump to the appropriate label. */
10186
10187 static void
10188 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10189 tree exp;
10190 rtx if_false_label, if_true_label;
10191 {
10192 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10193 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10194 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10195 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10196 int i;
10197 rtx drop_through_label = 0;
10198
10199 if (! if_false_label)
10200 drop_through_label = if_false_label = gen_label_rtx ();
10201
10202 for (i = 0; i < nwords; i++)
10203 {
10204 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10205 operand_subword_force (op1, i, mode),
10206 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10207 word_mode, NULL_RTX, 0);
10208 if (comp == const_true_rtx)
10209 emit_jump (if_false_label);
10210 else if (comp != const0_rtx)
10211 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10212 }
10213
10214 if (if_true_label)
10215 emit_jump (if_true_label);
10216 if (drop_through_label)
10217 emit_label (drop_through_label);
10218 }
10219 \f
10220 /* Jump according to whether OP0 is 0.
10221 We assume that OP0 has an integer mode that is too wide
10222 for the available compare insns. */
10223
10224 static void
10225 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10226 rtx op0;
10227 rtx if_false_label, if_true_label;
10228 {
10229 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10230 rtx part;
10231 int i;
10232 rtx drop_through_label = 0;
10233
10234 /* The fastest way of doing this comparison on almost any machine is to
10235 "or" all the words and compare the result. If all have to be loaded
10236 from memory and this is a very wide item, it's possible this may
10237 be slower, but that's highly unlikely. */
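  /* I.e., for a three-word OP0 the test computed below is roughly

	 (w0 | w1 | w2) == 0

     with the word_mode IORs accumulated into a single register.  */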
10238
10239 part = gen_reg_rtx (word_mode);
10240 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10241 for (i = 1; i < nwords && part != 0; i++)
10242 part = expand_binop (word_mode, ior_optab, part,
10243 operand_subword_force (op0, i, GET_MODE (op0)),
10244 part, 1, OPTAB_WIDEN);
10245
10246 if (part != 0)
10247 {
10248 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10249 NULL_RTX, 0);
10250
10251 if (comp == const_true_rtx)
10252 emit_jump (if_false_label);
10253 else if (comp == const0_rtx)
10254 emit_jump (if_true_label);
10255 else
10256 do_jump_for_compare (comp, if_false_label, if_true_label);
10257
10258 return;
10259 }
10260
10261 /* If we couldn't do the "or" simply, do this with a series of compares. */
10262 if (! if_false_label)
10263 drop_through_label = if_false_label = gen_label_rtx ();
10264
10265 for (i = 0; i < nwords; i++)
10266 {
10267 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10268 GET_MODE (op0)),
10269 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10270 if (comp == const_true_rtx)
10271 emit_jump (if_false_label);
10272 else if (comp != const0_rtx)
10273 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10274 }
10275
10276 if (if_true_label)
10277 emit_jump (if_true_label);
10278
10279 if (drop_through_label)
10280 emit_label (drop_through_label);
10281 }
10282
10283 /* Given a comparison expression in rtl form, output conditional branches to
10284 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10285
10286 static void
10287 do_jump_for_compare (comparison, if_false_label, if_true_label)
10288 rtx comparison, if_false_label, if_true_label;
10289 {
10290 if (if_true_label)
10291 {
10292 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10293 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10294 else
10295 abort ();
10296
10297 if (if_false_label)
10298 emit_jump (if_false_label);
10299 }
10300 else if (if_false_label)
10301 {
10302 rtx insn;
10303 rtx prev = get_last_insn ();
10304 rtx branch = 0;
10305
10306 /* Output the branch with the opposite condition. Then try to invert
10307 what is generated. If more than one insn is a branch, or if the
10308 branch is not the last insn written, abort. If we can't invert
10309	 the branch, make a true label, redirect this jump to that,
10310 emit a jump to the false label and define the true label. */
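      /* That fallback has roughly this shape (sketch):

	     branch on condition, redirected to Ltrue
	     unconditional jump to if_false_label
	   Ltrue:

	 where Ltrue is the freshly made true label.  */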
10311
10312 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10313 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10314 else
10315 abort ();
10316
10317 /* Here we get the first insn that was just emitted. It used to be the
10318 case that, on some machines, emitting the branch would discard
10319 the previous compare insn and emit a replacement. This isn't
10320 done anymore, but abort if we see that PREV is deleted. */
10321
10322 if (prev == 0)
10323 insn = get_insns ();
10324 else if (INSN_DELETED_P (prev))
10325 abort ();
10326 else
10327 insn = NEXT_INSN (prev);
10328
10329 for (; insn; insn = NEXT_INSN (insn))
10330 if (GET_CODE (insn) == JUMP_INSN)
10331 {
10332 if (branch)
10333 abort ();
10334 branch = insn;
10335 }
10336
10337 if (branch != get_last_insn ())
10338 abort ();
10339
10340 JUMP_LABEL (branch) = if_false_label;
10341 if (! invert_jump (branch, if_false_label))
10342 {
10343 if_true_label = gen_label_rtx ();
10344 redirect_jump (branch, if_true_label);
10345 emit_jump (if_false_label);
10346 emit_label (if_true_label);
10347 }
10348 }
10349 }
10350 \f
10351 /* Generate code for a comparison expression EXP
10352 (including code to compute the values to be compared)
10353 and set (CC0) according to the result.
10354 SIGNED_CODE should be the rtx operation for this comparison for
10355 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10356
10357 We force a stack adjustment unless there are currently
10358 things pushed on the stack that aren't yet used. */
10359
10360 static rtx
10361 compare (exp, signed_code, unsigned_code)
10362 register tree exp;
10363 enum rtx_code signed_code, unsigned_code;
10364 {
10365 register rtx op0
10366 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10367 register rtx op1
10368 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10369 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10370 register enum machine_mode mode = TYPE_MODE (type);
10371 int unsignedp = TREE_UNSIGNED (type);
10372 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10373
10374 #ifdef HAVE_canonicalize_funcptr_for_compare
10375 /* If function pointers need to be "canonicalized" before they can
10376 be reliably compared, then canonicalize them. */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to be compared as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */
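  /* For example, (2 < X) becomes (X > 2): the operands are exchanged
     and swap_condition turns LT into GT.  */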

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
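  /* For example, X < 1 becomes X <= 0, and X >= 1 becomes X > 0; for
     signed X, X <= -1 becomes X < 0, and X > -1 becomes X >= 0.  */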

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
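  /* For instance, (X & 8) != 0 becomes (X >> 3) & 1, and (X & 8) == 0
     becomes ((X >> 3) & 1) ^ 1.  */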

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift by a constant and the shift count plus
	 BITNUM does not overflow the type's precision, adjust BITNUM
	 and INNER.  */
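      /* E.g., testing bit 2 of (X >> 3) is really testing bit 5 of X,
	 so we can drop the shift and test X directly.  */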

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
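      /* The AND can be omitted only when the bit tested is the sign bit:
	 e.g., in a 32-bit mode, an unsigned right shift by 31 already
	 leaves just 0 or 1 in the result.  */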
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
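      /* emit_store_flag computed the condition before any TRUTH_NOT
	 inversion; since the result is 0 or 1, XOR with 1 inverts it.  */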
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
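  /* That is: store the value for a true comparison in TARGET, branch
     past the correction when the comparison holds, and otherwise fall
     through to store the value for a false comparison.  */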
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
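  /* For example, with a switch whose cases run from 10 to 20, the
     caller passes INDEX - 10 and RANGE is 20 - 10.  An original INDEX
     below 10 wraps around to a huge unsigned value, so the single
     unsigned comparison against RANGE rejects both directions at once.  */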

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
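  /* The entry for case N lives at TABLE_LABEL
     + N * GET_MODE_SIZE (CASE_VECTOR_MODE); form that sum as the
     address from which to load the target label.  */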
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */