/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
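
/* For example, CEIL (11, 4) == 3: adding Y-1 before the truncating
   division rounds any partial unit up, so 11 bytes occupy three 4-byte
   units, while CEIL (8, 4) == 2 exactly.  */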

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Under some ABIs, it is the caller's responsibility to pop arguments
   pushed for function calls.  A naive implementation would simply pop
   the arguments immediately after each call.  However, if several
   function calls are made in a row, it is typically cheaper to pop
   all the arguments after all of the calls are complete since a
   single pop instruction can be used.  Therefore, GCC attempts to
   defer popping the arguments until absolutely necessary.  (For
   example, at the end of a conditional, the arguments must be popped,
   since code outside the conditional won't know whether or not the
   arguments need to be popped.)

   When INHIBIT_DEFER_POP is non-zero, however, the compiler does not
   attempt to defer pops.  Instead, the stack is popped immediately
   after each call.  Rather than setting this variable directly, use
   NO_DEFER_POP and OK_DEFER_POP.  */
int inhibit_defer_pop;
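
/* A minimal usage sketch (illustrative only): code that must not have
   the stack adjusted underneath it brackets itself with the expr.h
   macros rather than writing to the variable directly:

       NO_DEFER_POP;    -- conventionally inhibit_defer_pop += 1
       ... emit insns that depend on the current stack depth ...
       OK_DEFER_POP;    -- conventionally inhibit_defer_pop -= 1

   Since this is a counter rather than a flag, such regions nest.  */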

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Postincrements that still need to be expanded.  */
static rtx pending_chain;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;			/* Destination block (a MEM).  */
  rtx to_addr;			/* Its address, possibly copied to a reg.  */
  int autinc_to;		/* Nonzero if TO_ADDR autoincrements.  */
  int explicit_inc_to;		/* +1/-1 if we must adjust TO_ADDR ourselves.  */
  int to_struct;		/* Saved MEM_IN_STRUCT_P of the destination.  */
  rtx from;			/* Source block (a MEM).  */
  rtx from_addr;		/* Its address, possibly copied to a reg.  */
  int autinc_from;		/* Nonzero if FROM_ADDR autoincrements.  */
  int explicit_inc_from;	/* +1/-1 if we must adjust FROM_ADDR ourselves.  */
  int from_struct;		/* Saved MEM_IN_STRUCT_P of the source.  */
  int len;			/* Bytes remaining to be moved.  */
  int offset;			/* Current byte offset into the blocks.  */
  int reverse;			/* Nonzero to move from high to low addresses.  */
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static void init_queue		PROTO((void));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int,
				       int, int));
static enum memory_use_mode
get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx get_memory_rtx	PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns	\
				       (SIZE, ALIGN) < MOVE_RATIO)
#endif
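
/* For instance (illustrative numbers): with MOVE_RATIO 15 and
   word-aligned operands on a 32-bit target, an 8-byte copy needs two
   SImode moves, so move_by_pieces_ninsns returns 2 and the copy is
   expanded inline; a much larger block fails the test and falls
   through to a movstr pattern or a library call instead.  */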

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  p->pending_chain = pending_chain;
  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_chain = NULL_RTX;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_chain = p->pending_chain;
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */
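
/* Layout sketch of a QUEUED rtx, with the slot order inferred from
   the gen_rtx_QUEUED call below (see the QUEUED_* accessors in rtl.h):

       (queued VAR INSN COPY BODY NEXT)

   VAR is the register being post-modified and BODY the deferred insn;
   NEXT chains pending_chain.  INSN and COPY start out as NULL_RTX and
   are filled in later by emit_queue and protect_from_queue.  */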

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
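
/* A minimal usage sketch (hypothetical caller): just before emitting
   an insn that reads OP0 and writes TARGET, one would do

       op0 = protect_from_queue (op0, 0);		-- read access
       target = protect_from_queue (target, 1);		-- write access
       emit_move_insn (target, op0);

   with no intervening emit_queue between these calls and the emit.  */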

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
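
/* A minimal usage sketch (hypothetical pseudos): to sign-extend an
   SImode value SRC into a DImode register,

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 0);

   where the 0 says SRC is signed; passing 1 would zero-extend.  */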

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
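
/* For example (hypothetical caller): to obtain X as an unsigned
   SImode value without caring where the result lives,

       rtx wide = convert_to_mode (SImode, x, 1);

   unlike convert_move, the result may be X itself, a piece of it, or
   a fresh pseudo, whichever is cheapest.  */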

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
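
  /* Worked example, assuming a 32-bit HOST_WIDE_INT: converting
     (const_int -1) to unsigned DImode must yield the double-word
     constant with an all-ones low word and a zero high word, which is
     what immed_double_const (val, 0, mode) produces; gen_lowpart would
     instead sign-extend to all ones.  If OLDMODE is narrower still,
     say HImode, the masking above first reduces VAL to 0xffff.  */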

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
\f

/* MOVE_MAX_PIECES is the largest unit size that move_by_pieces can
   use: the number of bytes at a time which we can move efficiently,
   as opposed to MOVE_MAX, which is the maximum number of bytes we can
   move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
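
/* Worked example (illustrative, no autoincrement, MOVE_MAX_PIECES 4):
   LEN == 11 with ALIGN == 4 is moved greedily from the widest usable
   integer mode down: two SImode moves, one HImode move, then one
   QImode move, the same four insns move_by_pieces_ninsns predicts.  */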

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
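
/* Strategy sketch: a constant SIZE small enough for MOVE_BY_PIECES_P
   is expanded inline via move_by_pieces; otherwise the movstrMM
   patterns are tried from the narrowest mode up; failing those, the
   copy becomes a call to memcpy (or bcopy on targets without
   TARGET_MEM_FUNCTIONS).  */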

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c; I don't know whether all of
	     it is necessary in this context.  */
	  fn = get_identifier ("memcpy");
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  make_decl_rtl (fn, NULL_PTR, 1);
	  assemble_external (fn);
	  pop_obstacks ();
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node),
				      XEXP (x, 0)));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node),
				      XEXP (y, 0)));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */


void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bits.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
1911
1912 void
1913 emit_group_load (dst, orig_src, ssize, align)
1914 rtx dst, orig_src;
1915 int align, ssize;
1916 {
1917 rtx *tmps, src;
1918 int start, i;
1919
1920 if (GET_CODE (dst) != PARALLEL)
1921 abort ();
1922
1923 /* Check for a NULL entry, used to indicate that the parameter goes
1924 both on the stack and in registers. */
1925 if (XEXP (XVECEXP (dst, 0, 0), 0))
1926 start = 0;
1927 else
1928 start = 1;
1929
1930   tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1931
1932 /* If we won't be loading directly from memory, protect the real source
1933 from strange tricks we might play. */
1934 src = orig_src;
1935 if (GET_CODE (src) != MEM)
1936 {
1937 src = gen_reg_rtx (GET_MODE (orig_src));
1938 emit_move_insn (src, orig_src);
1939 }
1940
1941 /* Process the pieces. */
1942 for (i = start; i < XVECLEN (dst, 0); i++)
1943 {
1944 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1945 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1946 int bytelen = GET_MODE_SIZE (mode);
1947 int shift = 0;
1948
1949 /* Handle trailing fragments that run over the size of the struct. */
1950 if (ssize >= 0 && bytepos + bytelen > ssize)
1951 {
1952 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1953 bytelen = ssize - bytepos;
1954 if (bytelen <= 0)
1955	    abort ();
1956 }
1957
1958 /* Optimize the access just a bit. */
1959 if (GET_CODE (src) == MEM
1960 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1961 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1962 && bytelen == GET_MODE_SIZE (mode))
1963 {
1964 tmps[i] = gen_reg_rtx (mode);
1965 emit_move_insn (tmps[i],
1966 change_address (src, mode,
1967 plus_constant (XEXP (src, 0),
1968 bytepos)));
1969 }
1970 else
1971 {
1972 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1973 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1974 mode, mode, align, ssize);
1975 }
1976
1977 if (BYTES_BIG_ENDIAN && shift)
1978 {
1979 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1980 tmps[i], 0, OPTAB_WIDEN);
1981 }
1982 }
1983   emit_queue ();
1984
1985 /* Copy the extracted pieces into the proper (probable) hard regs. */
1986 for (i = start; i < XVECLEN (dst, 0); i++)
1987 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1988 }
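/* Editor's note: an illustrative sketch of the PARALLEL shape this
   routine expects, with hypothetical register numbers.  A 16-byte
   block loaded into two 8-byte registers might be described as

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   where each CONST_INT is the byte offset of that piece within SRC,
   as read by the INTVAL (XEXP (..., 1)) above.  */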
1989
1990 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1991 registers represented by a PARALLEL. SSIZE represents the total size of
1992    block DST, or -1 if not known.  ALIGN is the known alignment of DST in bytes.  */
1993
1994 void
1995 emit_group_store (orig_dst, src, ssize, align)
1996 rtx orig_dst, src;
1997 int ssize, align;
1998 {
1999 rtx *tmps, dst;
2000 int start, i;
2001
2002 if (GET_CODE (src) != PARALLEL)
2003 abort ();
2004
2005 /* Check for a NULL entry, used to indicate that the parameter goes
2006 both on the stack and in registers. */
2007 if (XEXP (XVECEXP (src, 0, 0), 0))
2008 start = 0;
2009 else
2010 start = 1;
2011
2012   tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2013
2014 /* Copy the (probable) hard regs into pseudos. */
2015 for (i = start; i < XVECLEN (src, 0); i++)
2016 {
2017 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2018 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2019 emit_move_insn (tmps[i], reg);
2020 }
2021   emit_queue ();
2022
2023 /* If we won't be storing directly into memory, protect the real destination
2024 from strange tricks we might play. */
2025 dst = orig_dst;
2026 if (GET_CODE (dst) == PARALLEL)
2027 {
2028 rtx temp;
2029
2030 /* We can get a PARALLEL dst if there is a conditional expression in
2031 a return statement. In that case, the dst and src are the same,
2032 so no action is necessary. */
2033 if (rtx_equal_p (dst, src))
2034 return;
2035
2036 /* It is unclear if we can ever reach here, but we may as well handle
2037 it. Allocate a temporary, and split this into a store/load to/from
2038 the temporary. */
2039
2040 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2041 emit_group_store (temp, src, ssize, align);
2042 emit_group_load (dst, temp, ssize, align);
2043 return;
2044 }
2045 else if (GET_CODE (dst) != MEM)
2046 {
2047 dst = gen_reg_rtx (GET_MODE (orig_dst));
2048 /* Make life a bit easier for combine. */
2049 emit_move_insn (dst, const0_rtx);
2050 }
2051 else if (! MEM_IN_STRUCT_P (dst))
2052 {
2053       /* store_bit_field requires that memory operations have
2054 	 MEM_IN_STRUCT_P set; DST might not have it set.  */
2055
2056 dst = copy_rtx (orig_dst);
2057 MEM_SET_IN_STRUCT_P (dst, 1);
2058 }
2059
2060 /* Process the pieces. */
2061 for (i = start; i < XVECLEN (src, 0); i++)
2062 {
2063 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2064 enum machine_mode mode = GET_MODE (tmps[i]);
2065 int bytelen = GET_MODE_SIZE (mode);
2066
2067 /* Handle trailing fragments that run over the size of the struct. */
2068 if (ssize >= 0 && bytepos + bytelen > ssize)
2069 {
2070 if (BYTES_BIG_ENDIAN)
2071 {
2072 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2073 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2074 tmps[i], 0, OPTAB_WIDEN);
2075 }
2076 bytelen = ssize - bytepos;
2077 }
2078
2079 /* Optimize the access just a bit. */
2080 if (GET_CODE (dst) == MEM
2081 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2082 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2083 && bytelen == GET_MODE_SIZE (mode))
2084 {
2085 emit_move_insn (change_address (dst, mode,
2086 plus_constant (XEXP (dst, 0),
2087 bytepos)),
2088 tmps[i]);
2089 }
2090 else
2091 {
2092 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2093 mode, tmps[i], align, ssize);
2094 }
2095 }
2096   emit_queue ();
2097
2098 /* Copy from the pseudo into the (probable) hard reg. */
2099 if (GET_CODE (dst) == REG)
2100 emit_move_insn (orig_dst, dst);
2101 }
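/* Editor's note: a worked example of the trailing-fragment handling
   above.  Storing an SImode piece (4 bytes) at byte offset 4 of a
   structure with SSIZE == 6 gives bytelen = 6 - 4 = 2; on a
   BYTES_BIG_ENDIAN target the value is first shifted right by
   (4 - 2) * 8 = 16 bits so that only the two meaningful bytes reach
   memory.  */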
2102
2103 /* Generate code to copy a BLKmode object of TYPE out of a
2104 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2105 is null, a stack temporary is created. TGTBLK is returned.
2106
2107 The primary purpose of this routine is to handle functions
2108 that return BLKmode structures in registers. Some machines
2109 (the PA for example) want to return all small structures
2110 in registers regardless of the structure's alignment.
2111 */
2112
2113 rtx
2114 copy_blkmode_from_reg (tgtblk, srcreg, type)
2115 rtx tgtblk;
2116 rtx srcreg;
2117 tree type;
2118 {
2119 int bytes = int_size_in_bytes (type);
2120 rtx src = NULL, dst = NULL;
2121 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2122 int bitpos, xbitpos, big_endian_correction = 0;
2123
2124 if (tgtblk == 0)
2125 {
2126 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2127 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2128 preserve_temp_slots (tgtblk);
2129 }
2130
2131 /* This code assumes srcreg is at least a full word. If it isn't,
2132 copy it into a new pseudo which is a full word. */
2133 if (GET_MODE (srcreg) != BLKmode
2134 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2135 srcreg = convert_to_mode (word_mode, srcreg,
2136 TREE_UNSIGNED (type));
2137
2138 /* Structures whose size is not a multiple of a word are aligned
2139 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2140 machine, this means we must skip the empty high order bytes when
2141 calculating the bit offset. */
2142 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2143 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2144 * BITS_PER_UNIT));
2145
2146   /* Copy the structure BITSIZE bits at a time.
2147
2148 We could probably emit more efficient code for machines
2149 which do not use strict alignment, but it doesn't seem
2150 worth the effort at the current time. */
2151 for (bitpos = 0, xbitpos = big_endian_correction;
2152 bitpos < bytes * BITS_PER_UNIT;
2153 bitpos += bitsize, xbitpos += bitsize)
2154 {
2155
2156 /* We need a new source operand each time xbitpos is on a
2157 word boundary and when xbitpos == big_endian_correction
2158 (the first time through). */
2159 if (xbitpos % BITS_PER_WORD == 0
2160 || xbitpos == big_endian_correction)
2161 src = operand_subword_force (srcreg,
2162 xbitpos / BITS_PER_WORD,
2163 BLKmode);
2164
2165 /* We need a new destination operand each time bitpos is on
2166 a word boundary. */
2167 if (bitpos % BITS_PER_WORD == 0)
2168 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2169
2170 /* Use xbitpos for the source extraction (right justified) and
2171 	 bitpos for the destination store (left justified).  */
2172 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2173 extract_bit_field (src, bitsize,
2174 xbitpos % BITS_PER_WORD, 1,
2175 NULL_RTX, word_mode,
2176 word_mode,
2177 bitsize / BITS_PER_UNIT,
2178 BITS_PER_WORD),
2179 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2180 }
2181 return tgtblk;
2182 }
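/* Editor's note: a worked example of the big-endian correction above,
   assuming BITS_PER_WORD == 32.  For a 5-byte structure,
   bytes % UNITS_PER_WORD == 1, so big_endian_correction
   = 32 - 1 * 8 = 24, and the first source extraction starts 24 bits
   into the word, skipping the three empty high-order bytes.  */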
2183
2184
2185 /* Add a USE expression for REG to the (possibly empty) list pointed
2186 to by CALL_FUSAGE. REG must denote a hard register. */
2187
2188 void
2189 use_reg (call_fusage, reg)
2190 rtx *call_fusage, reg;
2191 {
2192 if (GET_CODE (reg) != REG
2193 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2194     abort ();
2195
2196 *call_fusage
2197 = gen_rtx_EXPR_LIST (VOIDmode,
2198 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2199 }
2200
2201 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2202 starting at REGNO. All of these registers must be hard registers. */
2203
2204 void
2205 use_regs (call_fusage, regno, nregs)
2206 rtx *call_fusage;
2207 int regno;
2208 int nregs;
2209 {
2210 int i;
2211
2212 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2213 abort ();
2214
2215 for (i = 0; i < nregs; i++)
2216 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2217 }
2218
2219 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2220 PARALLEL REGS. This is for calls that pass values in multiple
2221 non-contiguous locations. The Irix 6 ABI has examples of this. */
2222
2223 void
2224 use_group_regs (call_fusage, regs)
2225 rtx *call_fusage;
2226 rtx regs;
2227 {
2228 int i;
2229
2230 for (i = 0; i < XVECLEN (regs, 0); i++)
2231 {
2232 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2233
2234 /* A NULL entry means the parameter goes both on the stack and in
2235 registers. This can also be a MEM for targets that pass values
2236 partially on the stack and partially in registers. */
2237 if (reg != 0 && GET_CODE (reg) == REG)
2238 use_reg (call_fusage, reg);
2239 }
2240 }
2241 \f
2242 /* Generate several move instructions to clear LEN bytes of block TO.
2243 (A MEM rtx with BLKmode). The caller must pass TO through
2244    protect_from_queue before calling.  ALIGN (in bytes) is the maximum alignment
2245 we can assume. */
2246
2247 static void
2248 clear_by_pieces (to, len, align)
2249 rtx to;
2250 int len, align;
2251 {
2252 struct clear_by_pieces data;
2253 rtx to_addr = XEXP (to, 0);
2254 int max_size = MOVE_MAX_PIECES + 1;
2255 enum machine_mode mode = VOIDmode, tmode;
2256 enum insn_code icode;
2257
2258 data.offset = 0;
2259 data.to_addr = to_addr;
2260 data.to = to;
2261 data.autinc_to
2262 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2263 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2264
2265 data.explicit_inc_to = 0;
2266 data.reverse
2267 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2268 if (data.reverse) data.offset = len;
2269 data.len = len;
2270
2271 data.to_struct = MEM_IN_STRUCT_P (to);
2272
2273 /* If copying requires more than two move insns,
2274 copy addresses to registers (to make displacements shorter)
2275 and use post-increment if available. */
2276 if (!data.autinc_to
2277 && move_by_pieces_ninsns (len, align) > 2)
2278 {
2279       /* Determine the main mode we'll be using.  */
2280 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2281 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2282 if (GET_MODE_SIZE (tmode) < max_size)
2283 mode = tmode;
2284
2285 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2286 {
2287 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2288 data.autinc_to = 1;
2289 data.explicit_inc_to = -1;
2290 }
2291 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2292 {
2293 data.to_addr = copy_addr_to_reg (to_addr);
2294 data.autinc_to = 1;
2295 data.explicit_inc_to = 1;
2296 }
2297 if (!data.autinc_to && CONSTANT_P (to_addr))
2298 data.to_addr = copy_addr_to_reg (to_addr);
2299 }
2300
2301 if (! SLOW_UNALIGNED_ACCESS
2302 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2303 align = MOVE_MAX;
2304
2305 /* First move what we can in the largest integer mode, then go to
2306 successively smaller modes. */
2307
2308 while (max_size > 1)
2309 {
2310 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2311 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2312 if (GET_MODE_SIZE (tmode) < max_size)
2313 mode = tmode;
2314
2315 if (mode == VOIDmode)
2316 break;
2317
2318 icode = mov_optab->handlers[(int) mode].insn_code;
2319 if (icode != CODE_FOR_nothing
2320 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2321 GET_MODE_SIZE (mode)))
2322 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2323
2324 max_size = GET_MODE_SIZE (mode);
2325 }
2326
2327 /* The code above should have handled everything. */
2328 if (data.len != 0)
2329 abort ();
2330 }
2331
2332 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2333 with move instructions for mode MODE. GENFUN is the gen_... function
2334 to make a move insn for that mode. DATA has all the other info. */
2335
2336 static void
2337 clear_by_pieces_1 (genfun, mode, data)
2338 rtx (*genfun) PROTO ((rtx, ...));
2339 enum machine_mode mode;
2340 struct clear_by_pieces *data;
2341 {
2342 register int size = GET_MODE_SIZE (mode);
2343 register rtx to1;
2344
2345 while (data->len >= size)
2346 {
2347 if (data->reverse) data->offset -= size;
2348
2349 to1 = (data->autinc_to
2350 ? gen_rtx_MEM (mode, data->to_addr)
2351 : copy_rtx (change_address (data->to, mode,
2352 plus_constant (data->to_addr,
2353 data->offset))));
2354 MEM_IN_STRUCT_P (to1) = data->to_struct;
2355
2356 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2357 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2358
2359 emit_insn ((*genfun) (to1, const0_rtx));
2360 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2361 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2362
2363 if (! data->reverse) data->offset += size;
2364
2365 data->len -= size;
2366 }
2367 }
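/* Editor's note: a worked example of the descending-mode loop above,
   assuming a 32-bit target with 4-byte alignment.  Clearing
   LEN == 7 bytes emits one SImode store (4 bytes), then one HImode
   store (2 bytes), then one QImode store (1 byte), after which
   data.len == 0 as the abort check requires.  */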
2368 \f
2369 /* Write zeros through the storage of OBJECT.
2370 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2371    the maximum alignment we can assume it has, measured in bytes.
2372
2373 If we call a function that returns the length of the block, return it. */
2374
2375 rtx
2376 clear_storage (object, size, align)
2377 rtx object;
2378 rtx size;
2379 int align;
2380 {
2381 #ifdef TARGET_MEM_FUNCTIONS
2382 static tree fn;
2383 tree call_expr, arg_list;
2384 #endif
2385 rtx retval = 0;
2386
2387 if (GET_MODE (object) == BLKmode)
2388 {
2389 object = protect_from_queue (object, 1);
2390 size = protect_from_queue (size, 0);
2391
2392 if (GET_CODE (size) == CONST_INT
2393 && MOVE_BY_PIECES_P (INTVAL (size), align))
2394 clear_by_pieces (object, INTVAL (size), align);
2395
2396 else
2397 {
2398 /* Try the most limited insn first, because there's no point
2399 including more than one in the machine description unless
2400 the more limited one has some advantage. */
2401
2402 rtx opalign = GEN_INT (align);
2403 enum machine_mode mode;
2404
2405 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2406 mode = GET_MODE_WIDER_MODE (mode))
2407 {
2408 enum insn_code code = clrstr_optab[(int) mode];
2409
2410 if (code != CODE_FOR_nothing
2411 /* We don't need MODE to be narrower than
2412 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2413 the mode mask, as it is returned by the macro, it will
2414 definitely be less than the actual mode mask. */
2415 && ((GET_CODE (size) == CONST_INT
2416 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2417 <= (GET_MODE_MASK (mode) >> 1)))
2418 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2419 && (insn_operand_predicate[(int) code][0] == 0
2420 || (*insn_operand_predicate[(int) code][0]) (object,
2421 BLKmode))
2422 && (insn_operand_predicate[(int) code][2] == 0
2423 || (*insn_operand_predicate[(int) code][2]) (opalign,
2424 VOIDmode)))
2425 {
2426 rtx op1;
2427 rtx last = get_last_insn ();
2428 rtx pat;
2429
2430 op1 = convert_to_mode (mode, size, 1);
2431 if (insn_operand_predicate[(int) code][1] != 0
2432 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2433 mode))
2434 op1 = copy_to_mode_reg (mode, op1);
2435
2436 pat = GEN_FCN ((int) code) (object, op1, opalign);
2437 if (pat)
2438 {
2439 emit_insn (pat);
2440 return 0;
2441 }
2442 else
2443 delete_insns_since (last);
2444 }
2445 }
2446
2447
2448 #ifdef TARGET_MEM_FUNCTIONS
2449 /* It is incorrect to use the libcall calling conventions to call
2450 memset in this context.
2451
2452 This could be a user call to memset and the user may wish to
2453 examine the return value from memset.
2454
2455 For targets where libcalls and normal calls have different conventions
2456 for returning pointers, we could end up generating incorrect code.
2457
2458 So instead of using a libcall sequence we build up a suitable
2459 CALL_EXPR and expand the call in the normal fashion. */
2460 if (fn == NULL_TREE)
2461 {
2462 tree fntype;
2463
2464 	  /* This was copied from except.c; I don't know whether all of it is
2465 	     necessary in this context or not.  */
2466 fn = get_identifier ("memset");
2467 push_obstacks_nochange ();
2468 end_temporary_allocation ();
2469 fntype = build_pointer_type (void_type_node);
2470 fntype = build_function_type (fntype, NULL_TREE);
2471 fn = build_decl (FUNCTION_DECL, fn, fntype);
2472 DECL_EXTERNAL (fn) = 1;
2473 TREE_PUBLIC (fn) = 1;
2474 DECL_ARTIFICIAL (fn) = 1;
2475 make_decl_rtl (fn, NULL_PTR, 1);
2476 assemble_external (fn);
2477 pop_obstacks ();
2478 }
2479
2480 /* We need to make an argument list for the function call.
2481
2482 	 memset has three arguments, the first is a void * address, the
2483 	 second an integer with the initialization value, the last is a size_t
2484 byte count for the copy. */
2485 arg_list
2486 = build_tree_list (NULL_TREE,
2487 make_tree (build_pointer_type (void_type_node),
2488 XEXP (object, 0)));
2489 TREE_CHAIN (arg_list)
2490 = build_tree_list (NULL_TREE,
2491 make_tree (integer_type_node, const0_rtx));
2492 TREE_CHAIN (TREE_CHAIN (arg_list))
2493 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2494 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2495
2496 /* Now we have to build up the CALL_EXPR itself. */
2497 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2498 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2499 call_expr, arg_list, NULL_TREE);
2500 TREE_SIDE_EFFECTS (call_expr) = 1;
2501
2502 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2503 #else
2504 emit_library_call (bzero_libfunc, 0,
2505 VOIDmode, 2,
2506 XEXP (object, 0), Pmode,
2507 convert_to_mode
2508 (TYPE_MODE (integer_type_node), size,
2509 TREE_UNSIGNED (integer_type_node)),
2510 TYPE_MODE (integer_type_node));
2511 #endif
2512 }
2513 }
2514 else
2515 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2516
2517 return retval;
2518 }
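/* Editor's note: an illustrative call, with hypothetical operands.
   Zeroing a 16-byte BLKmode reference OBJ with known 4-byte alignment
   would be

     clear_storage (obj, GEN_INT (16), 4);

   A small constant size goes through clear_by_pieces; otherwise a
   clrstr pattern is tried, and failing that the memset (or bzero)
   call above is emitted.  */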
2519
2520 /* Generate code to copy Y into X.
2521 Both Y and X must have the same mode, except that
2522 Y can be a constant with VOIDmode.
2523 This mode cannot be BLKmode; use emit_block_move for that.
2524
2525 Return the last instruction emitted. */
2526
2527 rtx
2528 emit_move_insn (x, y)
2529 rtx x, y;
2530 {
2531 enum machine_mode mode = GET_MODE (x);
2532
2533 x = protect_from_queue (x, 1);
2534 y = protect_from_queue (y, 0);
2535
2536 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2537 abort ();
2538
2539 /* Never force constant_p_rtx to memory. */
2540 if (GET_CODE (y) == CONSTANT_P_RTX)
2541 ;
2542 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2543 y = force_const_mem (mode, y);
2544
2545 /* If X or Y are memory references, verify that their addresses are valid
2546 for the machine. */
2547 if (GET_CODE (x) == MEM
2548 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2549 && ! push_operand (x, GET_MODE (x)))
2550 || (flag_force_addr
2551 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2552 x = change_address (x, VOIDmode, XEXP (x, 0));
2553
2554 if (GET_CODE (y) == MEM
2555 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2556 || (flag_force_addr
2557 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2558 y = change_address (y, VOIDmode, XEXP (y, 0));
2559
2560 if (mode == BLKmode)
2561 abort ();
2562
2563 return emit_move_insn_1 (x, y);
2564 }
2565
2566 /* Low level part of emit_move_insn.
2567 Called just like emit_move_insn, but assumes X and Y
2568 are basically valid. */
2569
2570 rtx
2571 emit_move_insn_1 (x, y)
2572 rtx x, y;
2573 {
2574 enum machine_mode mode = GET_MODE (x);
2575 enum machine_mode submode;
2576 enum mode_class class = GET_MODE_CLASS (mode);
2577 int i;
2578
2579 if (mode >= MAX_MACHINE_MODE)
2580 abort ();
2581
2582 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2583 return
2584 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2585
2586 /* Expand complex moves by moving real part and imag part, if possible. */
2587 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2588 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2589 * BITS_PER_UNIT),
2590 (class == MODE_COMPLEX_INT
2591 ? MODE_INT : MODE_FLOAT),
2592 0))
2593 && (mov_optab->handlers[(int) submode].insn_code
2594 != CODE_FOR_nothing))
2595 {
2596 /* Don't split destination if it is a stack push. */
2597 int stack = push_operand (x, GET_MODE (x));
2598
2599       /* If this is a stack push, push the highpart first, so it
2600 will be in the argument order.
2601
2602 In that case, change_address is used only to convert
2603 the mode, not to change the address. */
2604 if (stack)
2605 {
2606 /* Note that the real part always precedes the imag part in memory
2607 regardless of machine's endianness. */
2608 #ifdef STACK_GROWS_DOWNWARD
2609 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2610 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2611 gen_imagpart (submode, y)));
2612 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2613 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2614 gen_realpart (submode, y)));
2615 #else
2616 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2617 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2618 gen_realpart (submode, y)));
2619 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2620 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2621 gen_imagpart (submode, y)));
2622 #endif
2623 }
2624 else
2625 {
2626 /* Show the output dies here. This is necessary for pseudos;
2627 hard regs shouldn't appear here except as return values.
2628 We never want to emit such a clobber after reload. */
2629 if (x != y
2630 && ! (reload_in_progress || reload_completed))
2631 {
2632 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2633 }
2634
2635 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2636 (gen_realpart (submode, x), gen_realpart (submode, y)));
2637 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2638 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2639 }
2640
2641 return get_last_insn ();
2642 }
2643
2644 /* This will handle any multi-word mode that lacks a move_insn pattern.
2645 However, you will get better code if you define such patterns,
2646 even if they must turn into multiple assembler instructions. */
2647 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2648 {
2649 rtx last_insn = 0;
2650
2651 #ifdef PUSH_ROUNDING
2652
2653 /* If X is a push on the stack, do the push now and replace
2654 X with a reference to the stack pointer. */
2655 if (push_operand (x, GET_MODE (x)))
2656 {
2657 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2658 x = change_address (x, VOIDmode, stack_pointer_rtx);
2659 }
2660 #endif
2661
2662 /* Show the output dies here. This is necessary for pseudos;
2663 hard regs shouldn't appear here except as return values.
2664 We never want to emit such a clobber after reload. */
2665 if (x != y
2666 && ! (reload_in_progress || reload_completed))
2667 {
2668 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2669 }
2670
2671 for (i = 0;
2672 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2673 i++)
2674 {
2675 rtx xpart = operand_subword (x, i, 1, mode);
2676 rtx ypart = operand_subword (y, i, 1, mode);
2677
2678 /* If we can't get a part of Y, put Y into memory if it is a
2679 constant. Otherwise, force it into a register. If we still
2680 can't get a part of Y, abort. */
2681 if (ypart == 0 && CONSTANT_P (y))
2682 {
2683 y = force_const_mem (mode, y);
2684 ypart = operand_subword (y, i, 1, mode);
2685 }
2686 else if (ypart == 0)
2687 ypart = operand_subword_force (y, i, mode);
2688
2689 if (xpart == 0 || ypart == 0)
2690 abort ();
2691
2692 last_insn = emit_move_insn (xpart, ypart);
2693 }
2694
2695 return last_insn;
2696 }
2697 else
2698 abort ();
2699 }
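/* Editor's note: a worked example of the multi-word fallback above.
   Moving a TImode value (16 bytes) on a target with UNITS_PER_WORD
   == 4 and no TImode move pattern takes
   (16 + 4 - 1) / 4 = 4 word-sized emit_move_insn calls, one per
   operand_subword pair.  */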
2700 \f
2701 /* Pushing data onto the stack. */
2702
2703 /* Push a block of length SIZE (perhaps variable)
2704 and return an rtx to address the beginning of the block.
2705 Note that it is not possible for the value returned to be a QUEUED.
2706 The value may be virtual_outgoing_args_rtx.
2707
2708 EXTRA is the number of bytes of padding to push in addition to SIZE.
2709 BELOW nonzero means this padding comes at low addresses;
2710 otherwise, the padding comes at high addresses. */
2711
2712 rtx
2713 push_block (size, extra, below)
2714 rtx size;
2715 int extra, below;
2716 {
2717 register rtx temp;
2718
2719 size = convert_modes (Pmode, ptr_mode, size, 1);
2720 if (CONSTANT_P (size))
2721 anti_adjust_stack (plus_constant (size, extra));
2722 else if (GET_CODE (size) == REG && extra == 0)
2723 anti_adjust_stack (size);
2724 else
2725 {
2726 rtx temp = copy_to_mode_reg (Pmode, size);
2727 if (extra != 0)
2728 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2729 temp, 0, OPTAB_LIB_WIDEN);
2730 anti_adjust_stack (temp);
2731 }
2732
2733 #if defined (STACK_GROWS_DOWNWARD) \
2734 || (defined (ARGS_GROW_DOWNWARD) \
2735 && !defined (ACCUMULATE_OUTGOING_ARGS))
2736
2737 /* Return the lowest stack address when STACK or ARGS grow downward and
2738      we are not accumulating outgoing arguments (the c4x port uses such
2739 conventions). */
2740 temp = virtual_outgoing_args_rtx;
2741 if (extra != 0 && below)
2742 temp = plus_constant (temp, extra);
2743 #else
2744 if (GET_CODE (size) == CONST_INT)
2745 temp = plus_constant (virtual_outgoing_args_rtx,
2746 - INTVAL (size) - (below ? 0 : extra));
2747 else if (extra != 0 && !below)
2748 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2749 negate_rtx (Pmode, plus_constant (size, extra)));
2750 else
2751 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2752 negate_rtx (Pmode, size));
2753 #endif
2754
2755 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2756 }
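/* Editor's note: a worked example for a STACK_GROWS_DOWNWARD target
   without ACCUMULATE_OUTGOING_ARGS.  push_block with SIZE == 12,
   EXTRA == 4 and BELOW nonzero adjusts the stack by 16 bytes and
   returns virtual_outgoing_args_rtx + 4: the padding occupies the
   lowest 4 bytes and the block proper begins just above it.  */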
2757
2758 rtx
2759 gen_push_operand ()
2760 {
2761 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2762 }
2763
2764 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2765 block of SIZE bytes. */
2766
2767 static rtx
2768 get_push_address (size)
2769 int size;
2770 {
2771 register rtx temp;
2772
2773 if (STACK_PUSH_CODE == POST_DEC)
2774 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2775 else if (STACK_PUSH_CODE == POST_INC)
2776 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2777 else
2778 temp = stack_pointer_rtx;
2779
2780 return copy_to_reg (temp);
2781 }
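/* Editor's note: worked examples of the cases above for an 8-byte
   block.  With STACK_PUSH_CODE == POST_DEC the stack pointer ends up
   below the data just pushed, so the block begins at sp + 8; with
   POST_INC it begins at sp - 8; with the pre-modify codes the stack
   pointer itself already addresses the block.  */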
2782
2783 /* Generate code to push X onto the stack, assuming it has mode MODE and
2784 type TYPE.
2785 MODE is redundant except when X is a CONST_INT (since they don't
2786 carry mode info).
2787 SIZE is an rtx for the size of data to be copied (in bytes),
2788 needed only if X is BLKmode.
2789
2790 ALIGN (in bytes) is maximum alignment we can assume.
2791
2792 If PARTIAL and REG are both nonzero, then copy that many of the first
2793 words of X into registers starting with REG, and push the rest of X.
2794 The amount of space pushed is decreased by PARTIAL words,
2795 rounded *down* to a multiple of PARM_BOUNDARY.
2796 REG must be a hard register in this case.
2797    If REG is zero but PARTIAL is not, take all other actions for an
2798 argument partially in registers, but do not actually load any
2799 registers.
2800
2801 EXTRA is the amount in bytes of extra space to leave next to this arg.
2802 This is ignored if an argument block has already been allocated.
2803
2804 On a machine that lacks real push insns, ARGS_ADDR is the address of
2805 the bottom of the argument block for this call. We use indexing off there
2806    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2807 argument block has not been preallocated.
2808
2809 ARGS_SO_FAR is the size of args previously pushed for this call.
2810
2811 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2812 for arguments passed in registers. If nonzero, it will be the number
2813 of bytes required. */
2814
2815 void
2816 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2817 args_addr, args_so_far, reg_parm_stack_space)
2818 register rtx x;
2819 enum machine_mode mode;
2820 tree type;
2821 rtx size;
2822 int align;
2823 int partial;
2824 rtx reg;
2825 int extra;
2826 rtx args_addr;
2827 rtx args_so_far;
2828 int reg_parm_stack_space;
2829 {
2830 rtx xinner;
2831 enum direction stack_direction
2832 #ifdef STACK_GROWS_DOWNWARD
2833 = downward;
2834 #else
2835 = upward;
2836 #endif
2837
2838 /* Decide where to pad the argument: `downward' for below,
2839 `upward' for above, or `none' for don't pad it.
2840 Default is below for small data on big-endian machines; else above. */
2841 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2842
2843 /* Invert direction if stack is post-update. */
2844 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2845 if (where_pad != none)
2846 where_pad = (where_pad == downward ? upward : downward);
2847
2848 xinner = x = protect_from_queue (x, 0);
2849
2850 if (mode == BLKmode)
2851 {
2852 /* Copy a block into the stack, entirely or partially. */
2853
2854 register rtx temp;
2855 int used = partial * UNITS_PER_WORD;
2856 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2857 int skip;
2858
2859 if (size == 0)
2860 abort ();
2861
2862 used -= offset;
2863
2864 /* USED is now the # of bytes we need not copy to the stack
2865 because registers will take care of them. */
2866
2867 if (partial != 0)
2868 xinner = change_address (xinner, BLKmode,
2869 plus_constant (XEXP (xinner, 0), used));
2870
2871 /* If the partial register-part of the arg counts in its stack size,
2872 skip the part of stack space corresponding to the registers.
2873 Otherwise, start copying to the beginning of the stack space,
2874 by setting SKIP to 0. */
2875 skip = (reg_parm_stack_space == 0) ? 0 : used;
2876
2877 #ifdef PUSH_ROUNDING
2878 /* Do it with several push insns if that doesn't take lots of insns
2879 and if there is no difficulty with push insns that skip bytes
2880 on the stack for alignment purposes. */
2881 if (args_addr == 0
2882 && GET_CODE (size) == CONST_INT
2883 && skip == 0
2884 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2885 /* Here we avoid the case of a structure whose weak alignment
2886 forces many pushes of a small amount of data,
2887 and such small pushes do rounding that causes trouble. */
2888 && ((! SLOW_UNALIGNED_ACCESS)
2889 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2890 || PUSH_ROUNDING (align) == align)
2891 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2892 {
2893 /* Push padding now if padding above and stack grows down,
2894 or if padding below and stack grows up.
2895 But if space already allocated, this has already been done. */
2896 if (extra && args_addr == 0
2897 && where_pad != none && where_pad != stack_direction)
2898 anti_adjust_stack (GEN_INT (extra));
2899
2900 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2901 INTVAL (size) - used, align);
2902
2903 if (current_function_check_memory_usage && ! in_check_memory_usage)
2904 {
2905 rtx temp;
2906
2907 in_check_memory_usage = 1;
2908 	      temp = get_push_address (INTVAL (size) - used);
2909 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2910 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2911 temp, Pmode,
2912 XEXP (xinner, 0), Pmode,
2913 				   GEN_INT (INTVAL (size) - used),
2914 TYPE_MODE (sizetype));
2915 else
2916 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2917 temp, Pmode,
2918 				   GEN_INT (INTVAL (size) - used),
2919 TYPE_MODE (sizetype),
2920 GEN_INT (MEMORY_USE_RW),
2921 TYPE_MODE (integer_type_node));
2922 in_check_memory_usage = 0;
2923 }
2924 }
2925 else
2926 #endif /* PUSH_ROUNDING */
2927 {
2928 /* Otherwise make space on the stack and copy the data
2929 to the address of that space. */
2930
2931 /* Deduct words put into registers from the size we must copy. */
2932 if (partial != 0)
2933 {
2934 if (GET_CODE (size) == CONST_INT)
2935 size = GEN_INT (INTVAL (size) - used);
2936 else
2937 size = expand_binop (GET_MODE (size), sub_optab, size,
2938 GEN_INT (used), NULL_RTX, 0,
2939 OPTAB_LIB_WIDEN);
2940 }
2941
2942 /* Get the address of the stack space.
2943 In this case, we do not deal with EXTRA separately.
2944 A single stack adjust will do. */
2945 if (! args_addr)
2946 {
2947 temp = push_block (size, extra, where_pad == downward);
2948 extra = 0;
2949 }
2950 else if (GET_CODE (args_so_far) == CONST_INT)
2951 temp = memory_address (BLKmode,
2952 plus_constant (args_addr,
2953 skip + INTVAL (args_so_far)));
2954 else
2955 temp = memory_address (BLKmode,
2956 plus_constant (gen_rtx_PLUS (Pmode,
2957 args_addr,
2958 args_so_far),
2959 skip));
2960 if (current_function_check_memory_usage && ! in_check_memory_usage)
2961 {
2962 rtx target;
2963
2964 in_check_memory_usage = 1;
2965 target = copy_to_reg (temp);
2966 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2967 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2968 target, Pmode,
2969 XEXP (xinner, 0), Pmode,
2970 size, TYPE_MODE (sizetype));
2971 else
2972 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2973 target, Pmode,
2974 size, TYPE_MODE (sizetype),
2975 GEN_INT (MEMORY_USE_RW),
2976 TYPE_MODE (integer_type_node));
2977 in_check_memory_usage = 0;
2978 }
2979
2980 /* TEMP is the address of the block. Copy the data there. */
2981 if (GET_CODE (size) == CONST_INT
2982 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2983 {
2984 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2985 INTVAL (size), align);
2986 goto ret;
2987 }
2988 else
2989 {
2990 rtx opalign = GEN_INT (align);
2991 enum machine_mode mode;
2992 rtx target = gen_rtx_MEM (BLKmode, temp);
2993
2994 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2995 mode != VOIDmode;
2996 mode = GET_MODE_WIDER_MODE (mode))
2997 {
2998 enum insn_code code = movstr_optab[(int) mode];
2999
3000 if (code != CODE_FOR_nothing
3001 && ((GET_CODE (size) == CONST_INT
3002 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3003 <= (GET_MODE_MASK (mode) >> 1)))
3004 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3005 && (insn_operand_predicate[(int) code][0] == 0
3006 || ((*insn_operand_predicate[(int) code][0])
3007 (target, BLKmode)))
3008 && (insn_operand_predicate[(int) code][1] == 0
3009 || ((*insn_operand_predicate[(int) code][1])
3010 (xinner, BLKmode)))
3011 && (insn_operand_predicate[(int) code][3] == 0
3012 || ((*insn_operand_predicate[(int) code][3])
3013 (opalign, VOIDmode))))
3014 {
3015 rtx op2 = convert_to_mode (mode, size, 1);
3016 rtx last = get_last_insn ();
3017 rtx pat;
3018
3019 if (insn_operand_predicate[(int) code][2] != 0
3020 && ! ((*insn_operand_predicate[(int) code][2])
3021 (op2, mode)))
3022 op2 = copy_to_mode_reg (mode, op2);
3023
3024 pat = GEN_FCN ((int) code) (target, xinner,
3025 op2, opalign);
3026 if (pat)
3027 {
3028 emit_insn (pat);
3029 goto ret;
3030 }
3031 else
3032 delete_insns_since (last);
3033 }
3034 }
3035 }
3036
3037 #ifndef ACCUMULATE_OUTGOING_ARGS
3038 /* If the source is referenced relative to the stack pointer,
3039 copy it to another register to stabilize it. We do not need
3040 to do this if we know that we won't be changing sp. */
3041
3042 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3043 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3044 temp = copy_to_reg (temp);
3045 #endif
3046
3047 /* Make inhibit_defer_pop nonzero around the library call
3048 to force it to pop the bcopy-arguments right away. */
3049 NO_DEFER_POP;
3050 #ifdef TARGET_MEM_FUNCTIONS
3051 emit_library_call (memcpy_libfunc, 0,
3052 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3053 convert_to_mode (TYPE_MODE (sizetype),
3054 size, TREE_UNSIGNED (sizetype)),
3055 TYPE_MODE (sizetype));
3056 #else
3057 emit_library_call (bcopy_libfunc, 0,
3058 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3059 convert_to_mode (TYPE_MODE (integer_type_node),
3060 size,
3061 TREE_UNSIGNED (integer_type_node)),
3062 TYPE_MODE (integer_type_node));
3063 #endif
3064 OK_DEFER_POP;
3065 }
3066 }
3067 else if (partial > 0)
3068 {
3069 /* Scalar partly in registers. */
3070
3071 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3072 int i;
3073 int not_stack;
3074 /* # words of start of argument
3075 that we must make space for but need not store. */
3076 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3077 int args_offset = INTVAL (args_so_far);
3078 int skip;
3079
3080 /* Push padding now if padding above and stack grows down,
3081 or if padding below and stack grows up.
3082 But if space already allocated, this has already been done. */
3083 if (extra && args_addr == 0
3084 && where_pad != none && where_pad != stack_direction)
3085 anti_adjust_stack (GEN_INT (extra));
3086
3087 /* If we make space by pushing it, we might as well push
3088 the real data. Otherwise, we can leave OFFSET nonzero
3089 and leave the space uninitialized. */
3090 if (args_addr == 0)
3091 offset = 0;
3092
3093 /* Now NOT_STACK gets the number of words that we don't need to
3094 allocate on the stack. */
3095 not_stack = partial - offset;
3096
3097 /* If the partial register-part of the arg counts in its stack size,
3098 skip the part of stack space corresponding to the registers.
3099 Otherwise, start copying to the beginning of the stack space,
3100 by setting SKIP to 0. */
3101 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3102
3103 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3104 x = validize_mem (force_const_mem (mode, x));
3105
3106 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3107 SUBREGs of such registers are not allowed. */
3108 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3109 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3110 x = copy_to_reg (x);
3111
3112 /* Loop over all the words allocated on the stack for this arg. */
3113 /* We can do it by words, because any scalar bigger than a word
3114 	 has a size that is a multiple of a word.  */
3115 #ifndef PUSH_ARGS_REVERSED
3116 for (i = not_stack; i < size; i++)
3117 #else
3118 for (i = size - 1; i >= not_stack; i--)
3119 #endif
3120 if (i >= not_stack + offset)
3121 emit_push_insn (operand_subword_force (x, i, mode),
3122 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3123 0, args_addr,
3124 GEN_INT (args_offset + ((i - not_stack + skip)
3125 * UNITS_PER_WORD)),
3126 reg_parm_stack_space);
3127 }
3128 else
3129 {
3130 rtx addr;
3131 rtx target = NULL_RTX;
3132
3133 /* Push padding now if padding above and stack grows down,
3134 or if padding below and stack grows up.
3135 But if space already allocated, this has already been done. */
3136 if (extra && args_addr == 0
3137 && where_pad != none && where_pad != stack_direction)
3138 anti_adjust_stack (GEN_INT (extra));
3139
3140 #ifdef PUSH_ROUNDING
3141 if (args_addr == 0)
3142 addr = gen_push_operand ();
3143 else
3144 #endif
3145 {
3146 if (GET_CODE (args_so_far) == CONST_INT)
3147 addr
3148 = memory_address (mode,
3149 plus_constant (args_addr,
3150 INTVAL (args_so_far)));
3151 else
3152 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3153 args_so_far));
3154 target = addr;
3155 }
3156
3157 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3158
3159 if (current_function_check_memory_usage && ! in_check_memory_usage)
3160 {
3161 in_check_memory_usage = 1;
3162 if (target == 0)
3163 target = get_push_address (GET_MODE_SIZE (mode));
3164
3165 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3166 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3167 target, Pmode,
3168 XEXP (x, 0), Pmode,
3169 GEN_INT (GET_MODE_SIZE (mode)),
3170 TYPE_MODE (sizetype));
3171 else
3172 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3173 target, Pmode,
3174 GEN_INT (GET_MODE_SIZE (mode)),
3175 TYPE_MODE (sizetype),
3176 GEN_INT (MEMORY_USE_RW),
3177 TYPE_MODE (integer_type_node));
3178 in_check_memory_usage = 0;
3179 }
3180 }
3181
3182 ret:
3183 /* If part should go in registers, copy that part
3184 into the appropriate registers. Do this now, at the end,
3185 since mem-to-mem copies above may do function calls. */
3186 if (partial > 0 && reg != 0)
3187 {
3188 /* Handle calls that pass values in multiple non-contiguous locations.
3189 The Irix 6 ABI has examples of this. */
3190 if (GET_CODE (reg) == PARALLEL)
3191 emit_group_load (reg, x, -1, align); /* ??? size? */
3192 else
3193 move_block_to_reg (REGNO (reg), x, partial, mode);
3194 }
3195
3196 if (extra && args_addr == 0 && where_pad == stack_direction)
3197 anti_adjust_stack (GEN_INT (extra));
3198 }
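/* Editor's note: an illustrative call, with hypothetical operands.
   Pushing a word-sized SImode argument X with no partial-register
   portion and no padding would be

     emit_push_insn (x, SImode, type, NULL_RTX, align, 0, NULL_RTX,
                     0, args_addr, args_so_far, 0);

   which takes the final branch above and emits a single move, either
   through a push operand or to the computed argument-block address.  */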
3199 \f
3200 /* Expand an assignment that stores the value of FROM into TO.
3201 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3202 (This may contain a QUEUED rtx;
3203 if the value is constant, this rtx is a constant.)
3204 Otherwise, the returned value is NULL_RTX.
3205
3206 SUGGEST_REG is no longer actually used.
3207 It used to mean, copy the value through a register
3208 and return that register, if that is possible.
3209 We now use WANT_VALUE to decide whether to do this. */
3210
3211 rtx
3212 expand_assignment (to, from, want_value, suggest_reg)
3213 tree to, from;
3214 int want_value;
3215 int suggest_reg;
3216 {
3217 register rtx to_rtx = 0;
3218 rtx result;
3219
3220 /* Don't crash if the lhs of the assignment was erroneous. */
3221
3222 if (TREE_CODE (to) == ERROR_MARK)
3223 {
3224 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3225 return want_value ? result : NULL_RTX;
3226 }
3227
3228 /* Assignment of a structure component needs special treatment
3229 if the structure component's rtx is not simply a MEM.
3230 Assignment of an array element at a constant index, and assignment of
3231 an array element in an unaligned packed structure field, has the same
3232 problem. */
3233
3234 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3235 || TREE_CODE (to) == ARRAY_REF)
3236 {
3237 enum machine_mode mode1;
3238 int bitsize;
3239 int bitpos;
3240 tree offset;
3241 int unsignedp;
3242 int volatilep = 0;
3243 tree tem;
3244 int alignment;
3245
3246 push_temp_slots ();
3247 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3248 &unsignedp, &volatilep, &alignment);
3249
3250 /* If we are going to use store_bit_field and extract_bit_field,
3251 make sure to_rtx will be safe for multiple use. */
3252
3253 if (mode1 == VOIDmode && want_value)
3254 tem = stabilize_reference (tem);
3255
3256 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3257 if (offset != 0)
3258 {
3259 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3260
3261 if (GET_CODE (to_rtx) != MEM)
3262 abort ();
3263
3264 if (GET_MODE (offset_rtx) != ptr_mode)
3265 {
3266 #ifdef POINTERS_EXTEND_UNSIGNED
3267 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3268 #else
3269 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3270 #endif
3271 }
3272
3273       /* A constant address in TO_RTX can have VOIDmode; we must not try
3274 	 to call force_reg in that case, so avoid it.  */
3275 if (GET_CODE (to_rtx) == MEM
3276 && GET_MODE (to_rtx) == BLKmode
3277 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3278 && bitsize
3279 && (bitpos % bitsize) == 0
3280 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3281 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3282 {
3283 rtx temp = change_address (to_rtx, mode1,
3284 plus_constant (XEXP (to_rtx, 0),
3285 (bitpos /
3286 BITS_PER_UNIT)));
3287 if (GET_CODE (XEXP (temp, 0)) == REG)
3288 to_rtx = temp;
3289 else
3290 to_rtx = change_address (to_rtx, mode1,
3291 force_reg (GET_MODE (XEXP (temp, 0)),
3292 XEXP (temp, 0)));
3293 bitpos = 0;
3294 }
3295
3296 to_rtx = change_address (to_rtx, VOIDmode,
3297 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3298 force_reg (ptr_mode, offset_rtx)));
3299 }
3300 if (volatilep)
3301 {
3302 if (GET_CODE (to_rtx) == MEM)
3303 {
3304 /* When the offset is zero, to_rtx is the address of the
3305 structure we are storing into, and hence may be shared.
3306 We must make a new MEM before setting the volatile bit. */
3307 if (offset == 0)
3308 to_rtx = copy_rtx (to_rtx);
3309
3310 MEM_VOLATILE_P (to_rtx) = 1;
3311 }
3312 #if 0 /* This was turned off because, when a field is volatile
3313 in an object which is not volatile, the object may be in a register,
3314 and then we would abort over here. */
3315 else
3316 abort ();
3317 #endif
3318 }
3319
3320 if (TREE_CODE (to) == COMPONENT_REF
3321 && TREE_READONLY (TREE_OPERAND (to, 1)))
3322 {
3323 if (offset == 0)
3324 to_rtx = copy_rtx (to_rtx);
3325
3326 RTX_UNCHANGING_P (to_rtx) = 1;
3327 }
3328
3329 /* Check the access. */
3330 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3331 {
3332 rtx to_addr;
3333 int size;
3334 int best_mode_size;
3335 enum machine_mode best_mode;
3336
3337 best_mode = get_best_mode (bitsize, bitpos,
3338 TYPE_ALIGN (TREE_TYPE (tem)),
3339 mode1, volatilep);
3340 if (best_mode == VOIDmode)
3341 best_mode = QImode;
3342
3343 best_mode_size = GET_MODE_BITSIZE (best_mode);
3344 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3345 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3346 size *= GET_MODE_SIZE (best_mode);
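      /* Editor's note: a worked example of the size computation above.
	 With bitsize == 17, bitpos == 3 and best_mode == QImode (8 bits),
	 CEIL ((3 % 8) + 17, 8) == 3, and 3 * GET_MODE_SIZE (QImode)
	 gives 3 bytes to check.  */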
3347
3348 /* Check the access right of the pointer. */
3349 if (size)
3350 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3351 to_addr, Pmode,
3352 GEN_INT (size), TYPE_MODE (sizetype),
3353 GEN_INT (MEMORY_USE_WO),
3354 TYPE_MODE (integer_type_node));
3355 }
3356
3357 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3358 (want_value
3359 /* Spurious cast makes HPUX compiler happy. */
3360 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3361 : VOIDmode),
3362 unsignedp,
3363 /* Required alignment of containing datum. */
3364 alignment,
3365 int_size_in_bytes (TREE_TYPE (tem)),
3366 get_alias_set (to));
3367 preserve_temp_slots (result);
3368 free_temp_slots ();
3369 pop_temp_slots ();
3370
3371 /* If the value is meaningful, convert RESULT to the proper mode.
3372 Otherwise, return nothing. */
3373 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3374 TYPE_MODE (TREE_TYPE (from)),
3375 result,
3376 TREE_UNSIGNED (TREE_TYPE (to)))
3377 : NULL_RTX);
3378 }
3379
3380 /* If the rhs is a function call and its value is not an aggregate,
3381 call the function before we start to compute the lhs.
3382 This is needed for correct code for cases such as
3383 val = setjmp (buf) on machines where reference to val
3384 requires loading up part of an address in a separate insn.
3385
3386 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3387 a promoted variable where the zero- or sign- extension needs to be done.
3388 Handling this in the normal way is safe because no computation is done
3389 before the call. */
3390 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3391 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3392 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3393 {
3394 rtx value;
3395
3396 push_temp_slots ();
3397 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3398 if (to_rtx == 0)
3399 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3400
3401 /* Handle calls that return values in multiple non-contiguous locations.
3402 The Irix 6 ABI has examples of this. */
3403 if (GET_CODE (to_rtx) == PARALLEL)
3404 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3405 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3406 else if (GET_MODE (to_rtx) == BLKmode)
3407 emit_block_move (to_rtx, value, expr_size (from),
3408 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3409 else
3410 {
3411 #ifdef POINTERS_EXTEND_UNSIGNED
3412 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3413 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3414 value = convert_memory_address (GET_MODE (to_rtx), value);
3415 #endif
3416 emit_move_insn (to_rtx, value);
3417 }
3418 preserve_temp_slots (to_rtx);
3419 free_temp_slots ();
3420 pop_temp_slots ();
3421 return want_value ? to_rtx : NULL_RTX;
3422 }
3423
3424 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3425 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3426
3427 if (to_rtx == 0)
3428 {
3429 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3430 if (GET_CODE (to_rtx) == MEM)
3431 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3432 }
3433
3434 /* Don't move directly into a return register. */
3435 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3436 {
3437 rtx temp;
3438
3439 push_temp_slots ();
3440 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3441 emit_move_insn (to_rtx, temp);
3442 preserve_temp_slots (to_rtx);
3443 free_temp_slots ();
3444 pop_temp_slots ();
3445 return want_value ? to_rtx : NULL_RTX;
3446 }
3447
3448 /* In case we are returning the contents of an object which overlaps
3449 the place the value is being stored, use a safe function when copying
3450 a value through a pointer into a structure value return block. */
3451 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3452 && current_function_returns_struct
3453 && !current_function_returns_pcc_struct)
3454 {
3455 rtx from_rtx, size;
3456
3457 push_temp_slots ();
3458 size = expr_size (from);
3459 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3460 EXPAND_MEMORY_USE_DONT);
3461
3462 /* Copy the rights of the bitmap. */
3463 if (current_function_check_memory_usage)
3464 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3465 XEXP (to_rtx, 0), Pmode,
3466 XEXP (from_rtx, 0), Pmode,
3467 convert_to_mode (TYPE_MODE (sizetype),
3468 size, TREE_UNSIGNED (sizetype)),
3469 TYPE_MODE (sizetype));
3470
3471 #ifdef TARGET_MEM_FUNCTIONS
3472 emit_library_call (memcpy_libfunc, 0,
3473 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3474 XEXP (from_rtx, 0), Pmode,
3475 convert_to_mode (TYPE_MODE (sizetype),
3476 size, TREE_UNSIGNED (sizetype)),
3477 TYPE_MODE (sizetype));
3478 #else
3479 emit_library_call (bcopy_libfunc, 0,
3480 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3481 XEXP (to_rtx, 0), Pmode,
3482 convert_to_mode (TYPE_MODE (integer_type_node),
3483 size, TREE_UNSIGNED (integer_type_node)),
3484 TYPE_MODE (integer_type_node));
3485 #endif
3486
3487 preserve_temp_slots (to_rtx);
3488 free_temp_slots ();
3489 pop_temp_slots ();
3490 return want_value ? to_rtx : NULL_RTX;
3491 }
3492
3493 /* Compute FROM and store the value in the rtx we got. */
3494
3495 push_temp_slots ();
3496 result = store_expr (from, to_rtx, want_value);
3497 preserve_temp_slots (result);
3498 free_temp_slots ();
3499 pop_temp_slots ();
3500 return want_value ? result : NULL_RTX;
3501 }
3502
3503 /* Generate code for computing expression EXP,
3504 and storing the value into TARGET.
3505 TARGET may contain a QUEUED rtx.
3506
3507 If WANT_VALUE is nonzero, return a copy of the value
3508 not in TARGET, so that we can be sure to use the proper
3509 value in a containing expression even if TARGET has something
3510 else stored in it. If possible, we copy the value through a pseudo
3511 and return that pseudo. Or, if the value is constant, we try to
3512 return the constant. In some cases, we return a pseudo
3513 copied *from* TARGET.
3514
3515 If the mode is BLKmode then we may return TARGET itself.
3516    It turns out that in BLKmode it doesn't cause a problem,
3517 because C has no operators that could combine two different
3518 assignments into the same BLKmode object with different values
3519 with no sequence point. Will other languages need this to
3520 be more thorough?
3521
3522 If WANT_VALUE is 0, we return NULL, to make sure
3523 to catch quickly any cases where the caller uses the value
3524 and fails to set WANT_VALUE. */
3525
3526 rtx
3527 store_expr (exp, target, want_value)
3528 register tree exp;
3529 register rtx target;
3530 int want_value;
3531 {
3532 register rtx temp;
3533 int dont_return_target = 0;
3534
3535 if (TREE_CODE (exp) == COMPOUND_EXPR)
3536 {
3537 /* Perform first part of compound expression, then assign from second
3538 part. */
3539 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3540 emit_queue ();
3541 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3542 }
3543 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3544 {
3545 /* For conditional expression, get safe form of the target. Then
3546 test the condition, doing the appropriate assignment on either
3547 side. This avoids the creation of unnecessary temporaries.
3548 For non-BLKmode, it is more efficient not to do this. */
3549
3550 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3551
3552 emit_queue ();
3553 target = protect_from_queue (target, 1);
3554
3555 do_pending_stack_adjust ();
3556 NO_DEFER_POP;
3557 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3558 start_cleanup_deferral ();
3559 store_expr (TREE_OPERAND (exp, 1), target, 0);
3560 end_cleanup_deferral ();
3561 emit_queue ();
3562 emit_jump_insn (gen_jump (lab2));
3563 emit_barrier ();
3564 emit_label (lab1);
3565 start_cleanup_deferral ();
3566 store_expr (TREE_OPERAND (exp, 2), target, 0);
3567 end_cleanup_deferral ();
3568 emit_queue ();
3569 emit_label (lab2);
3570 OK_DEFER_POP;
3571
3572 return want_value ? target : NULL_RTX;
3573 }
3574 else if (queued_subexp_p (target))
3575 /* If target contains a postincrement, let's not risk
3576 using it as the place to generate the rhs. */
3577 {
3578 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3579 {
3580 /* Expand EXP into a new pseudo. */
3581 temp = gen_reg_rtx (GET_MODE (target));
3582 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3583 }
3584 else
3585 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3586
3587 /* If target is volatile, ANSI requires accessing the value
3588 *from* the target, if it is accessed. So make that happen.
3589 In no case return the target itself. */
3590 if (! MEM_VOLATILE_P (target) && want_value)
3591 dont_return_target = 1;
3592 }
3593 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3594 && GET_MODE (target) != BLKmode)
3595 /* If target is in memory and caller wants value in a register instead,
3596 arrange that. Pass TARGET as target for expand_expr so that,
3597 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3598 We know expand_expr will not use the target in that case.
3599 Don't do this if TARGET is volatile because we are supposed
3600 to write it and then read it. */
3601 {
3602 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3603 GET_MODE (target), 0);
3604 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3605 temp = copy_to_reg (temp);
3606 dont_return_target = 1;
3607 }
3608 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3609     /* If this is a scalar in a register that is stored in a wider mode
3610 than the declared mode, compute the result into its declared mode
3611 and then convert to the wider mode. Our value is the computed
3612 expression. */
3613 {
3614 /* If we don't want a value, we can do the conversion inside EXP,
3615 which will often result in some optimizations. Do the conversion
in two steps: first change the signedness, if needed, then
do the extension. But don't do this if the type of EXP is a subtype
3618 of something else since then the conversion might involve
3619 more than just converting modes. */
3620 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3621 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3622 {
3623 if (TREE_UNSIGNED (TREE_TYPE (exp))
3624 != SUBREG_PROMOTED_UNSIGNED_P (target))
3625 exp
3626 = convert
3627 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3628 TREE_TYPE (exp)),
3629 exp);
3630
3631 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3632 SUBREG_PROMOTED_UNSIGNED_P (target)),
3633 exp);
3634 }
3635
3636 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3637
3638 /* If TEMP is a volatile MEM and we want a result value, make
3639 the access now so it gets done only once. Likewise if
3640 it contains TARGET. */
3641 if (GET_CODE (temp) == MEM && want_value
3642 && (MEM_VOLATILE_P (temp)
3643 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3644 temp = copy_to_reg (temp);
3645
3646 /* If TEMP is a VOIDmode constant, use convert_modes to make
3647 sure that we properly convert it. */
3648 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3649 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3650 TYPE_MODE (TREE_TYPE (exp)), temp,
3651 SUBREG_PROMOTED_UNSIGNED_P (target));
3652
3653 convert_move (SUBREG_REG (target), temp,
3654 SUBREG_PROMOTED_UNSIGNED_P (target));
3655
3656 /* If we promoted a constant, change the mode back down to match
3657 target. Otherwise, the caller might get confused by a result whose
3658 mode is larger than expected. */
3659
3660 if (want_value && GET_MODE (temp) != GET_MODE (target)
3661 && GET_MODE (temp) != VOIDmode)
3662 {
3663 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3664 SUBREG_PROMOTED_VAR_P (temp) = 1;
3665 SUBREG_PROMOTED_UNSIGNED_P (temp)
3666 = SUBREG_PROMOTED_UNSIGNED_P (target);
3667 }
3668
3669 return want_value ? temp : NULL_RTX;
3670 }
3671 else
3672 {
3673 temp = expand_expr (exp, target, GET_MODE (target), 0);
3674 /* Return TARGET if it's a specified hardware register.
3675 If TARGET is a volatile mem ref, either return TARGET
3676 or return a reg copied *from* TARGET; ANSI requires this.
3677
3678 Otherwise, if TEMP is not TARGET, return TEMP
3679 if it is constant (for efficiency),
3680 or if we really want the correct value. */
3681 if (!(target && GET_CODE (target) == REG
3682 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3683 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3684 && ! rtx_equal_p (temp, target)
3685 && (CONSTANT_P (temp) || want_value))
3686 dont_return_target = 1;
3687 }
3688
3689 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3690 the same as that of TARGET, adjust the constant. This is needed, for
3691 example, in case it is a CONST_DOUBLE and we want only a word-sized
3692 value. */
3693 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3694 && TREE_CODE (exp) != ERROR_MARK
3695 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3696 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3697 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3698
3699 if (current_function_check_memory_usage
3700 && GET_CODE (target) == MEM
3701 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3702 {
3703 if (GET_CODE (temp) == MEM)
3704 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3705 XEXP (target, 0), Pmode,
3706 XEXP (temp, 0), Pmode,
3707 expr_size (exp), TYPE_MODE (sizetype));
3708 else
3709 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3710 XEXP (target, 0), Pmode,
3711 expr_size (exp), TYPE_MODE (sizetype),
3712 GEN_INT (MEMORY_USE_WO),
3713 TYPE_MODE (integer_type_node));
3714 }
3715
/* If the value was not generated in the target, store it there.
Convert the value to TARGET's type first if necessary. */
3718 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3719 one or both of them are volatile memory refs, we have to distinguish
3720 two cases:
3721 - expand_expr has used TARGET. In this case, we must not generate
3722 another copy. This can be detected by TARGET being equal according
3723 to == .
3724 - expand_expr has not used TARGET - that means that the source just
3725 happens to have the same RTX form. Since temp will have been created
3726 by expand_expr, it will compare unequal according to == .
3727 We must generate a copy in this case, to reach the correct number
3728 of volatile memory references. */
3729
3730 if ((! rtx_equal_p (temp, target)
3731 || (temp != target && (side_effects_p (temp)
3732 || side_effects_p (target))))
3733 && TREE_CODE (exp) != ERROR_MARK)
3734 {
3735 target = protect_from_queue (target, 1);
3736 if (GET_MODE (temp) != GET_MODE (target)
3737 && GET_MODE (temp) != VOIDmode)
3738 {
3739 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3740 if (dont_return_target)
3741 {
3742 /* In this case, we will return TEMP,
3743 so make sure it has the proper mode.
3744 But don't forget to store the value into TARGET. */
3745 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3746 emit_move_insn (target, temp);
3747 }
3748 else
3749 convert_move (target, temp, unsignedp);
3750 }
3751
3752 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3753 {
3754 /* Handle copying a string constant into an array.
3755 The string constant may be shorter than the array.
3756 So copy just the string's actual length, and clear the rest. */
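/* E.g. for  char buf[8] = "hi";  the STRING_CST has length 3 in C
   (two characters plus the terminating null), so we copy 3 bytes
   into the array and clear the remaining 5. */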
3757 rtx size;
3758 rtx addr;
3759
3760 /* Get the size of the data type of the string,
3761 which is actually the size of the target. */
3762 size = expr_size (exp);
3763 if (GET_CODE (size) == CONST_INT
3764 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3765 emit_block_move (target, temp, size,
3766 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3767 else
3768 {
3769 /* Compute the size of the data to copy from the string. */
3770 tree copy_size
3771 = size_binop (MIN_EXPR,
3772 make_tree (sizetype, size),
3773 convert (sizetype,
3774 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3775 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3776 VOIDmode, 0);
3777 rtx label = 0;
3778
3779 /* Copy that much. */
3780 emit_block_move (target, temp, copy_size_rtx,
3781 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3782
3783 /* Figure out how much is left in TARGET that we have to clear.
3784 Do all calculations in ptr_mode. */
3785
3786 addr = XEXP (target, 0);
3787 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3788
3789 if (GET_CODE (copy_size_rtx) == CONST_INT)
3790 {
3791 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3792 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3793 }
3794 else
3795 {
3796 addr = force_reg (ptr_mode, addr);
3797 addr = expand_binop (ptr_mode, add_optab, addr,
3798 copy_size_rtx, NULL_RTX, 0,
3799 OPTAB_LIB_WIDEN);
3800
3801 size = expand_binop (ptr_mode, sub_optab, size,
3802 copy_size_rtx, NULL_RTX, 0,
3803 OPTAB_LIB_WIDEN);
3804
3805 label = gen_label_rtx ();
3806 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3807 GET_MODE (size), 0, 0, label);
3808 }
3809
3810 if (size != const0_rtx)
3811 {
3812 /* Be sure we can write on ADDR. */
3813 if (current_function_check_memory_usage)
3814 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3815 addr, Pmode,
3816 size, TYPE_MODE (sizetype),
3817 GEN_INT (MEMORY_USE_WO),
3818 TYPE_MODE (integer_type_node));
3819 #ifdef TARGET_MEM_FUNCTIONS
3820 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3821 addr, ptr_mode,
3822 const0_rtx, TYPE_MODE (integer_type_node),
3823 convert_to_mode (TYPE_MODE (sizetype),
3824 size,
3825 TREE_UNSIGNED (sizetype)),
3826 TYPE_MODE (sizetype));
3827 #else
3828 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3829 addr, ptr_mode,
3830 convert_to_mode (TYPE_MODE (integer_type_node),
3831 size,
3832 TREE_UNSIGNED (integer_type_node)),
3833 TYPE_MODE (integer_type_node));
3834 #endif
3835 }
3836
3837 if (label)
3838 emit_label (label);
3839 }
3840 }
3841 /* Handle calls that return values in multiple non-contiguous locations.
3842 The Irix 6 ABI has examples of this. */
3843 else if (GET_CODE (target) == PARALLEL)
3844 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3845 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3846 else if (GET_MODE (temp) == BLKmode)
3847 emit_block_move (target, temp, expr_size (exp),
3848 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3849 else
3850 emit_move_insn (target, temp);
3851 }
3852
3853 /* If we don't want a value, return NULL_RTX. */
3854 if (! want_value)
3855 return NULL_RTX;
3856
3857 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3858 ??? The latter test doesn't seem to make sense. */
3859 else if (dont_return_target && GET_CODE (temp) != MEM)
3860 return temp;
3861
3862 /* Return TARGET itself if it is a hard register. */
3863 else if (want_value && GET_MODE (target) != BLKmode
3864 && ! (GET_CODE (target) == REG
3865 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3866 return copy_to_reg (target);
3867
3868 else
3869 return target;
3870 }
3871 \f
3872 /* Return 1 if EXP just contains zeros. */
3873
3874 static int
3875 is_zeros_p (exp)
3876 tree exp;
3877 {
3878 tree elt;
3879
3880 switch (TREE_CODE (exp))
3881 {
3882 case CONVERT_EXPR:
3883 case NOP_EXPR:
3884 case NON_LVALUE_EXPR:
3885 return is_zeros_p (TREE_OPERAND (exp, 0));
3886
3887 case INTEGER_CST:
3888 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3889
3890 case COMPLEX_CST:
3891 return
3892 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3893
3894 case REAL_CST:
3895 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3896
3897 case CONSTRUCTOR:
3898 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3899 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3900 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3901 if (! is_zeros_p (TREE_VALUE (elt)))
3902 return 0;
3903
3904 return 1;
3905
3906 default:
3907 return 0;
3908 }
3909 }
3910
/* Return 1 if EXP contains mostly (at least 3/4) zeros. */
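/* For example, the constructor for  int v[4] = { 0, 0, 0, 5 };  has
   zeros == 3 and elts == 4; since 4 * 3 >= 3 * 4, it counts as
   mostly zero. With two nonzero elements it would not. */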
3912
3913 static int
3914 mostly_zeros_p (exp)
3915 tree exp;
3916 {
3917 if (TREE_CODE (exp) == CONSTRUCTOR)
3918 {
3919 int elts = 0, zeros = 0;
3920 tree elt = CONSTRUCTOR_ELTS (exp);
3921 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3922 {
3923 /* If there are no ranges of true bits, it is all zero. */
3924 return elt == NULL_TREE;
3925 }
3926 for (; elt; elt = TREE_CHAIN (elt))
3927 {
3928 /* We do not handle the case where the index is a RANGE_EXPR,
3929 so the statistic will be somewhat inaccurate.
3930 We do make a more accurate count in store_constructor itself,
3931 so since this function is only used for nested array elements,
3932 this should be close enough. */
3933 if (mostly_zeros_p (TREE_VALUE (elt)))
3934 zeros++;
3935 elts++;
3936 }
3937
3938 return 4 * zeros >= 3 * elts;
3939 }
3940
3941 return is_zeros_p (exp);
3942 }
3943 \f
3944 /* Helper function for store_constructor.
3945 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3946 TYPE is the type of the CONSTRUCTOR, not the element type.
3947 CLEARED is as for store_constructor.
3948
3949 This provides a recursive shortcut back to store_constructor when it isn't
3950 necessary to go through store_field. This is so that we can pass through
3951 the cleared field to let store_constructor know that we may not have to
3952 clear a substructure if the outer structure has already been cleared. */
3953
3954 static void
3955 store_constructor_field (target, bitsize, bitpos,
3956 mode, exp, type, cleared)
3957 rtx target;
3958 int bitsize, bitpos;
3959 enum machine_mode mode;
3960 tree exp, type;
3961 int cleared;
3962 {
3963 if (TREE_CODE (exp) == CONSTRUCTOR
3964 && bitpos % BITS_PER_UNIT == 0
3965 /* If we have a non-zero bitpos for a register target, then we just
3966 let store_field do the bitfield handling. This is unlikely to
generate unnecessary clear instructions anyway. */
3968 && (bitpos == 0 || GET_CODE (target) == MEM))
3969 {
3970 if (bitpos != 0)
3971 target = change_address (target, VOIDmode,
3972 plus_constant (XEXP (target, 0),
3973 bitpos / BITS_PER_UNIT));
3974 store_constructor (exp, target, cleared);
3975 }
3976 else
3977 store_field (target, bitsize, bitpos, mode, exp,
3978 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3979 int_size_in_bytes (type), 0);
3980 }
3981
3982 /* Store the value of constructor EXP into the rtx TARGET.
3983 TARGET is either a REG or a MEM.
CLEARED is true if TARGET is known to have been zeroed. */
3985
3986 static void
3987 store_constructor (exp, target, cleared)
3988 tree exp;
3989 rtx target;
3990 int cleared;
3991 {
3992 tree type = TREE_TYPE (exp);
3993 rtx exp_size = expr_size (exp);
3994
3995 /* We know our target cannot conflict, since safe_from_p has been called. */
3996 #if 0
3997 /* Don't try copying piece by piece into a hard register
3998 since that is vulnerable to being clobbered by EXP.
3999 Instead, construct in a pseudo register and then copy it all. */
4000 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4001 {
4002 rtx temp = gen_reg_rtx (GET_MODE (target));
4003 store_constructor (exp, temp, 0);
4004 emit_move_insn (target, temp);
4005 return;
4006 }
4007 #endif
4008
4009 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4010 || TREE_CODE (type) == QUAL_UNION_TYPE)
4011 {
4012 register tree elt;
4013
4014 /* Inform later passes that the whole union value is dead. */
4015 if (TREE_CODE (type) == UNION_TYPE
4016 || TREE_CODE (type) == QUAL_UNION_TYPE)
4017 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4018
4019 /* If we are building a static constructor into a register,
4020 set the initial value as zero so we can fold the value into
4021 a constant. But if more than one register is involved,
4022 this probably loses. */
4023 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4024 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4025 {
4026 if (! cleared)
4027 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4028
4029 cleared = 1;
4030 }
4031
4032 /* If the constructor has fewer fields than the structure
4033 or if we are initializing the structure to mostly zeros,
4034 clear the whole structure first. */
4035 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4036 != list_length (TYPE_FIELDS (type)))
4037 || mostly_zeros_p (exp))
4038 {
4039 if (! cleared)
4040 clear_storage (target, expr_size (exp),
4041 TYPE_ALIGN (type) / BITS_PER_UNIT);
4042
4043 cleared = 1;
4044 }
4045 else
4046 /* Inform later passes that the old value is dead. */
4047 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4048
4049 /* Store each element of the constructor into
4050 the corresponding field of TARGET. */
4051
4052 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4053 {
4054 register tree field = TREE_PURPOSE (elt);
4055 tree value = TREE_VALUE (elt);
4056 register enum machine_mode mode;
4057 int bitsize;
4058 int bitpos = 0;
4059 int unsignedp;
4060 tree pos, constant = 0, offset = 0;
4061 rtx to_rtx = target;
4062
4063 /* Just ignore missing fields.
4064 We cleared the whole structure, above,
4065 if any fields are missing. */
4066 if (field == 0)
4067 continue;
4068
4069 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4070 continue;
4071
4072 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4073 unsignedp = TREE_UNSIGNED (field);
4074 mode = DECL_MODE (field);
4075 if (DECL_BIT_FIELD (field))
4076 mode = VOIDmode;
4077
4078 pos = DECL_FIELD_BITPOS (field);
4079 if (TREE_CODE (pos) == INTEGER_CST)
4080 constant = pos;
4081 else if (TREE_CODE (pos) == PLUS_EXPR
4082 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4083 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4084 else
4085 offset = pos;
4086
4087 if (constant)
4088 bitpos = TREE_INT_CST_LOW (constant);
4089
4090 if (offset)
4091 {
4092 rtx offset_rtx;
4093
4094 if (contains_placeholder_p (offset))
4095 offset = build (WITH_RECORD_EXPR, sizetype,
4096 offset, make_tree (TREE_TYPE (exp), target));
4097
4098 offset = size_binop (FLOOR_DIV_EXPR, offset,
4099 size_int (BITS_PER_UNIT));
4100
4101 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4102 if (GET_CODE (to_rtx) != MEM)
4103 abort ();
4104
4105 if (GET_MODE (offset_rtx) != ptr_mode)
4106 {
4107 #ifdef POINTERS_EXTEND_UNSIGNED
4108 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4109 #else
4110 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4111 #endif
4112 }
4113
4114 to_rtx
4115 = change_address (to_rtx, VOIDmode,
4116 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4117 force_reg (ptr_mode, offset_rtx)));
4118 }
4119 if (TREE_READONLY (field))
4120 {
4121 if (GET_CODE (to_rtx) == MEM)
4122 to_rtx = copy_rtx (to_rtx);
4123
4124 RTX_UNCHANGING_P (to_rtx) = 1;
4125 }
4126
4127 #ifdef WORD_REGISTER_OPERATIONS
4128 /* If this initializes a field that is smaller than a word, at the
4129 start of a word, try to widen it to a full word.
4130 This special case allows us to output C++ member function
4131 initializations in a form that the optimizers can understand. */
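/* E.g. on a 32-bit big-endian target, an 8-bit field with value
   0x12 at the start of a word is widened to the word-sized constant
   0x12000000 (the value shifted left by BITS_PER_WORD - bitsize
   == 24) and stored with one full-word move. */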
4132 if (constant
4133 && GET_CODE (target) == REG
4134 && bitsize < BITS_PER_WORD
4135 && bitpos % BITS_PER_WORD == 0
4136 && GET_MODE_CLASS (mode) == MODE_INT
4137 && TREE_CODE (value) == INTEGER_CST
4138 && GET_CODE (exp_size) == CONST_INT
4139 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4140 {
4141 tree type = TREE_TYPE (value);
4142 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4143 {
4144 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4145 value = convert (type, value);
4146 }
4147 if (BYTES_BIG_ENDIAN)
4148 value
4149 = fold (build (LSHIFT_EXPR, type, value,
4150 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4151 bitsize = BITS_PER_WORD;
4152 mode = word_mode;
4153 }
4154 #endif
4155 store_constructor_field (to_rtx, bitsize, bitpos,
4156 mode, value, type, cleared);
4157 }
4158 }
4159 else if (TREE_CODE (type) == ARRAY_TYPE)
4160 {
4161 register tree elt;
4162 register int i;
4163 int need_to_clear;
4164 tree domain = TYPE_DOMAIN (type);
4165 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4166 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4167 tree elttype = TREE_TYPE (type);
4168
4169 /* If the constructor has fewer elements than the array,
clear the whole array first. Similarly if this is a
static constructor of a non-BLKmode object. */
4172 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4173 need_to_clear = 1;
4174 else
4175 {
4176 HOST_WIDE_INT count = 0, zero_count = 0;
4177 need_to_clear = 0;
4178 /* This loop is a more accurate version of the loop in
4179 mostly_zeros_p (it handles RANGE_EXPR in an index).
4180 It is also needed to check for missing elements. */
4181 for (elt = CONSTRUCTOR_ELTS (exp);
4182 elt != NULL_TREE;
4183 elt = TREE_CHAIN (elt))
4184 {
4185 tree index = TREE_PURPOSE (elt);
4186 HOST_WIDE_INT this_node_count;
4187 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4188 {
4189 tree lo_index = TREE_OPERAND (index, 0);
4190 tree hi_index = TREE_OPERAND (index, 1);
4191 if (TREE_CODE (lo_index) != INTEGER_CST
4192 || TREE_CODE (hi_index) != INTEGER_CST)
4193 {
4194 need_to_clear = 1;
4195 break;
4196 }
4197 this_node_count = TREE_INT_CST_LOW (hi_index)
4198 - TREE_INT_CST_LOW (lo_index) + 1;
4199 }
4200 else
4201 this_node_count = 1;
4202 count += this_node_count;
4203 if (mostly_zeros_p (TREE_VALUE (elt)))
4204 zero_count += this_node_count;
4205 }
4206 /* Clear the entire array first if there are any missing elements,
4207 or if the incidence of zero elements is >= 75%. */
4208 if (count < maxelt - minelt + 1
4209 || 4 * zero_count >= 3 * count)
4210 need_to_clear = 1;
4211 }
4212 if (need_to_clear)
4213 {
4214 if (! cleared)
4215 clear_storage (target, expr_size (exp),
4216 TYPE_ALIGN (type) / BITS_PER_UNIT);
4217 cleared = 1;
4218 }
4219 else
4220 /* Inform later passes that the old value is dead. */
4221 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4222
4223 /* Store each element of the constructor into
4224 the corresponding element of TARGET, determined
4225 by counting the elements. */
4226 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4227 elt;
4228 elt = TREE_CHAIN (elt), i++)
4229 {
4230 register enum machine_mode mode;
4231 int bitsize;
4232 int bitpos;
4233 int unsignedp;
4234 tree value = TREE_VALUE (elt);
4235 tree index = TREE_PURPOSE (elt);
4236 rtx xtarget = target;
4237
4238 if (cleared && is_zeros_p (value))
4239 continue;
4240
4241 mode = TYPE_MODE (elttype);
4242 bitsize = GET_MODE_BITSIZE (mode);
4243 unsignedp = TREE_UNSIGNED (elttype);
4244
4245 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4246 {
4247 tree lo_index = TREE_OPERAND (index, 0);
4248 tree hi_index = TREE_OPERAND (index, 1);
4249 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4250 struct nesting *loop;
4251 HOST_WIDE_INT lo, hi, count;
4252 tree position;
4253
4254 /* If the range is constant and "small", unroll the loop. */
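/* E.g. the range [0 .. 3] with 4-byte elements gives count == 4 and
   32 * 4 == 128 bits <= 40 * 8, so the four stores are emitted
   inline rather than as a runtime loop. */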
4255 if (TREE_CODE (lo_index) == INTEGER_CST
4256 && TREE_CODE (hi_index) == INTEGER_CST
4257 && (lo = TREE_INT_CST_LOW (lo_index),
4258 hi = TREE_INT_CST_LOW (hi_index),
4259 count = hi - lo + 1,
4260 (GET_CODE (target) != MEM
4261 || count <= 2
4262 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4263 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4264 <= 40 * 8))))
4265 {
4266 lo -= minelt; hi -= minelt;
4267 for (; lo <= hi; lo++)
4268 {
4269 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4270 store_constructor_field (target, bitsize, bitpos,
4271 mode, value, type, cleared);
4272 }
4273 }
4274 else
4275 {
4276 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4277 loop_top = gen_label_rtx ();
4278 loop_end = gen_label_rtx ();
4279
4280 unsignedp = TREE_UNSIGNED (domain);
4281
4282 index = build_decl (VAR_DECL, NULL_TREE, domain);
4283
4284 DECL_RTL (index) = index_r
4285 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4286 &unsignedp, 0));
4287
4288 if (TREE_CODE (value) == SAVE_EXPR
4289 && SAVE_EXPR_RTL (value) == 0)
4290 {
4291 /* Make sure value gets expanded once before the
4292 loop. */
4293 expand_expr (value, const0_rtx, VOIDmode, 0);
4294 emit_queue ();
4295 }
4296 store_expr (lo_index, index_r, 0);
4297 loop = expand_start_loop (0);
4298
4299 /* Assign value to element index. */
4300 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4301 size_int (BITS_PER_UNIT));
4302 position = size_binop (MULT_EXPR,
4303 size_binop (MINUS_EXPR, index,
4304 TYPE_MIN_VALUE (domain)),
4305 position);
4306 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4307 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4308 xtarget = change_address (target, mode, addr);
4309 if (TREE_CODE (value) == CONSTRUCTOR)
4310 store_constructor (value, xtarget, cleared);
4311 else
4312 store_expr (value, xtarget, 0);
4313
4314 expand_exit_loop_if_false (loop,
4315 build (LT_EXPR, integer_type_node,
4316 index, hi_index));
4317
4318 expand_increment (build (PREINCREMENT_EXPR,
4319 TREE_TYPE (index),
4320 index, integer_one_node), 0, 0);
4321 expand_end_loop ();
4322 emit_label (loop_end);
4323
/* Needed by stupid register allocation, to extend the
lifetime of pseudo-regs used by target past the end
of the loop. */
4327 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4328 }
4329 }
4330 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4331 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4332 {
4333 rtx pos_rtx, addr;
4334 tree position;
4335
4336 if (index == 0)
4337 index = size_int (i);
4338
4339 if (minelt)
4340 index = size_binop (MINUS_EXPR, index,
4341 TYPE_MIN_VALUE (domain));
4342 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4343 size_int (BITS_PER_UNIT));
4344 position = size_binop (MULT_EXPR, index, position);
4345 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4346 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4347 xtarget = change_address (target, mode, addr);
4348 store_expr (value, xtarget, 0);
4349 }
4350 else
4351 {
4352 if (index != 0)
4353 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4354 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4355 else
4356 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4357 store_constructor_field (target, bitsize, bitpos,
4358 mode, value, type, cleared);
4359 }
4360 }
4361 }
/* Set constructor assignments. */
4363 else if (TREE_CODE (type) == SET_TYPE)
4364 {
4365 tree elt = CONSTRUCTOR_ELTS (exp);
4366 int nbytes = int_size_in_bytes (type), nbits;
4367 tree domain = TYPE_DOMAIN (type);
4368 tree domain_min, domain_max, bitlength;
4369
4370 /* The default implementation strategy is to extract the constant
4371 parts of the constructor, use that to initialize the target,
4372 and then "or" in whatever non-constant ranges we need in addition.
4373
4374 If a large set is all zero or all ones, it is
4375 probably better to set it using memset (if available) or bzero.
Also, if a large set has just a single range, it may be
better to first clear the whole set (using bzero/memset)
and then set the bits we want. */
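/* For example, with 32-bit set words on a little-endian target, a
   constructor whose only element is the constant range [3 .. 5]
   over a domain of 0 .. 31 fits in one word; the
   constant-initialization loop below builds word == 0x38 (bits 3, 4
   and 5) and stores it with a single move insn. */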
4379
4380 /* Check for all zeros. */
4381 if (elt == NULL_TREE)
4382 {
4383 if (!cleared)
4384 clear_storage (target, expr_size (exp),
4385 TYPE_ALIGN (type) / BITS_PER_UNIT);
4386 return;
4387 }
4388
4389 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4390 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4391 bitlength = size_binop (PLUS_EXPR,
4392 size_binop (MINUS_EXPR, domain_max, domain_min),
4393 size_one_node);
4394
4395 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4396 abort ();
4397 nbits = TREE_INT_CST_LOW (bitlength);
4398
4399 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4400 are "complicated" (more than one range), initialize (the
4401 constant parts) by copying from a constant. */
4402 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4403 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4404 {
4405 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4406 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4407 char *bit_buffer = (char *) alloca (nbits);
4408 HOST_WIDE_INT word = 0;
4409 int bit_pos = 0;
4410 int ibit = 0;
4411 int offset = 0; /* In bytes from beginning of set. */
4412 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4413 for (;;)
4414 {
4415 if (bit_buffer[ibit])
4416 {
4417 if (BYTES_BIG_ENDIAN)
4418 word |= (1 << (set_word_size - 1 - bit_pos));
4419 else
4420 word |= 1 << bit_pos;
4421 }
4422 bit_pos++; ibit++;
4423 if (bit_pos >= set_word_size || ibit == nbits)
4424 {
4425 if (word != 0 || ! cleared)
4426 {
4427 rtx datum = GEN_INT (word);
4428 rtx to_rtx;
4429 /* The assumption here is that it is safe to use
4430 XEXP if the set is multi-word, but not if
4431 it's single-word. */
4432 if (GET_CODE (target) == MEM)
4433 {
4434 to_rtx = plus_constant (XEXP (target, 0), offset);
4435 to_rtx = change_address (target, mode, to_rtx);
4436 }
4437 else if (offset == 0)
4438 to_rtx = target;
4439 else
4440 abort ();
4441 emit_move_insn (to_rtx, datum);
4442 }
4443 if (ibit == nbits)
4444 break;
4445 word = 0;
4446 bit_pos = 0;
4447 offset += set_word_size / BITS_PER_UNIT;
4448 }
4449 }
4450 }
4451 else if (!cleared)
4452 {
4453 /* Don't bother clearing storage if the set is all ones. */
4454 if (TREE_CHAIN (elt) != NULL_TREE
4455 || (TREE_PURPOSE (elt) == NULL_TREE
4456 ? nbits != 1
4457 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4458 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4459 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4460 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4461 != nbits))))
4462 clear_storage (target, expr_size (exp),
4463 TYPE_ALIGN (type) / BITS_PER_UNIT);
4464 }
4465
4466 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4467 {
/* Start of range of element, or NULL. */
tree startbit = TREE_PURPOSE (elt);
/* End of range of element, or element value. */
tree endbit = TREE_VALUE (elt);
4472 #ifdef TARGET_MEM_FUNCTIONS
4473 HOST_WIDE_INT startb, endb;
4474 #endif
4475 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4476
4477 bitlength_rtx = expand_expr (bitlength,
4478 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4479
/* Handle a non-range tuple element like [ expr ]. */
4481 if (startbit == NULL_TREE)
4482 {
4483 startbit = save_expr (endbit);
4484 endbit = startbit;
4485 }
4486 startbit = convert (sizetype, startbit);
4487 endbit = convert (sizetype, endbit);
4488 if (! integer_zerop (domain_min))
4489 {
4490 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4491 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4492 }
4493 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4494 EXPAND_CONST_ADDRESS);
4495 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4496 EXPAND_CONST_ADDRESS);
4497
4498 if (REG_P (target))
4499 {
4500 targetx = assign_stack_temp (GET_MODE (target),
4501 GET_MODE_SIZE (GET_MODE (target)),
4502 0);
4503 emit_move_insn (targetx, target);
4504 }
4505 else if (GET_CODE (target) == MEM)
4506 targetx = target;
4507 else
4508 abort ();
4509
4510 #ifdef TARGET_MEM_FUNCTIONS
4511 /* Optimization: If startbit and endbit are
4512 constants divisible by BITS_PER_UNIT,
4513 call memset instead. */
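/* E.g. a constant range [8 .. 23] gives startb == 8 and
   endb == 24, so we memset 2 bytes to all-ones starting at
   byte offset 1 of the set. */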
4514 if (TREE_CODE (startbit) == INTEGER_CST
4515 && TREE_CODE (endbit) == INTEGER_CST
4516 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4517 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4518 {
4519 emit_library_call (memset_libfunc, 0,
4520 VOIDmode, 3,
4521 plus_constant (XEXP (targetx, 0),
4522 startb / BITS_PER_UNIT),
4523 Pmode,
4524 constm1_rtx, TYPE_MODE (integer_type_node),
4525 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4526 TYPE_MODE (sizetype));
4527 }
4528 else
4529 #endif
4530 {
4531 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4532 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4533 bitlength_rtx, TYPE_MODE (sizetype),
4534 startbit_rtx, TYPE_MODE (sizetype),
4535 endbit_rtx, TYPE_MODE (sizetype));
4536 }
4537 if (REG_P (target))
4538 emit_move_insn (target, targetx);
4539 }
4540 }
4541
4542 else
4543 abort ();
4544 }
4545
4546 /* Store the value of EXP (an expression tree)
4547 into a subfield of TARGET which has mode MODE and occupies
4548 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4549 If MODE is VOIDmode, it means that we are storing into a bit-field.
4550
4551 If VALUE_MODE is VOIDmode, return nothing in particular.
4552 UNSIGNEDP is not used in this case.
4553
4554 Otherwise, return an rtx for the value stored. This rtx
4555 has mode VALUE_MODE if that is convenient to do.
4556 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4557
4558 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4559 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4560
4561 ALIAS_SET is the alias set for the destination. This value will
4562 (in general) be different from that for TARGET, since TARGET is a
4563 reference to the containing structure. */
4564
4565 static rtx
4566 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4567 unsignedp, align, total_size, alias_set)
4568 rtx target;
4569 int bitsize, bitpos;
4570 enum machine_mode mode;
4571 tree exp;
4572 enum machine_mode value_mode;
4573 int unsignedp;
4574 int align;
4575 int total_size;
4576 int alias_set;
4577 {
4578 HOST_WIDE_INT width_mask = 0;
4579
4580 if (TREE_CODE (exp) == ERROR_MARK)
4581 return const0_rtx;
4582
4583 if (bitsize < HOST_BITS_PER_WIDE_INT)
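/* E.g. for a 5-bit field, width_mask == 0x1f. */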
4584 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4585
4586 /* If we are storing into an unaligned field of an aligned union that is
4587 in a register, we may have the mode of TARGET being an integer mode but
4588 MODE == BLKmode. In that case, get an aligned object whose size and
4589 alignment are the same as TARGET and store TARGET into it (we can avoid
4590 the store if the field being stored is the entire width of TARGET). Then
4591 call ourselves recursively to store the field into a BLKmode version of
4592 that object. Finally, load from the object into TARGET. This is not
4593 very efficient in general, but should only be slightly more expensive
4594 than the otherwise-required unaligned accesses. Perhaps this can be
4595 cleaned up later. */
4596
4597 if (mode == BLKmode
4598 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4599 {
4600 rtx object = assign_stack_temp (GET_MODE (target),
4601 GET_MODE_SIZE (GET_MODE (target)), 0);
4602 rtx blk_object = copy_rtx (object);
4603
4604 MEM_SET_IN_STRUCT_P (object, 1);
4605 MEM_SET_IN_STRUCT_P (blk_object, 1);
4606 PUT_MODE (blk_object, BLKmode);
4607
4608 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4609 emit_move_insn (object, target);
4610
4611 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4612 align, total_size, alias_set);
4613
4614 /* Even though we aren't returning target, we need to
4615 give it the updated value. */
4616 emit_move_insn (target, object);
4617
4618 return blk_object;
4619 }
4620
4621 /* If the structure is in a register or if the component
4622 is a bit field, we cannot use addressing to access it.
4623 Use bit-field techniques or SUBREG to store in it. */
4624
4625 if (mode == VOIDmode
4626 || (mode != BLKmode && ! direct_store[(int) mode]
4627 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4628 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4629 || GET_CODE (target) == REG
4630 || GET_CODE (target) == SUBREG
4631 /* If the field isn't aligned enough to store as an ordinary memref,
4632 store it as a bit field. */
4633 || (SLOW_UNALIGNED_ACCESS
4634 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4635 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4636 {
4637 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4638
4639 /* If BITSIZE is narrower than the size of the type of EXP
4640 we will be narrowing TEMP. Normally, what's wanted are the
low-order bits. However, if EXP's type is a record and this is a
big-endian machine, we want the upper BITSIZE bits. */
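/* E.g. narrowing a 32-bit TEMP to an 8-bit record field on such a
   machine shifts TEMP right by 32 - 8 == 24, so the stored bits are
   the original high-order byte. */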
4643 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4644 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4645 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4646 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4647 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4648 - bitsize),
4649 temp, 1);
4650
4651 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4652 MODE. */
4653 if (mode != VOIDmode && mode != BLKmode
4654 && mode != TYPE_MODE (TREE_TYPE (exp)))
4655 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4656
4657 /* If the modes of TARGET and TEMP are both BLKmode, both
4658 must be in memory and BITPOS must be aligned on a byte
4659 boundary. If so, we simply do a block copy. */
4660 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4661 {
4662 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4663 || bitpos % BITS_PER_UNIT != 0)
4664 abort ();
4665
4666 target = change_address (target, VOIDmode,
4667 plus_constant (XEXP (target, 0),
4668 bitpos / BITS_PER_UNIT));
4669
4670 emit_block_move (target, temp,
4671 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4672 / BITS_PER_UNIT),
4673 1);
4674
4675 return value_mode == VOIDmode ? const0_rtx : target;
4676 }
4677
4678 /* Store the value in the bitfield. */
4679 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4680 if (value_mode != VOIDmode)
4681 {
4682 /* The caller wants an rtx for the value. */
4683 /* If possible, avoid refetching from the bitfield itself. */
4684 if (width_mask != 0
4685 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4686 {
4687 tree count;
4688 enum machine_mode tmode;
4689
4690 if (unsignedp)
4691 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4692 tmode = GET_MODE (temp);
4693 if (tmode == VOIDmode)
4694 tmode = value_mode;
4695 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4696 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4697 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4698 }
4699 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4700 NULL_RTX, value_mode, 0, align,
4701 total_size);
4702 }
4703 return const0_rtx;
4704 }
4705 else
4706 {
4707 rtx addr = XEXP (target, 0);
4708 rtx to_rtx;
4709
4710 /* If a value is wanted, it must be the lhs;
4711 so make the address stable for multiple use. */
4712
4713 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4714 && ! CONSTANT_ADDRESS_P (addr)
4715 /* A frame-pointer reference is already stable. */
4716 && ! (GET_CODE (addr) == PLUS
4717 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4718 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4719 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4720 addr = copy_to_reg (addr);
4721
4722 /* Now build a reference to just the desired component. */
4723
4724 to_rtx = copy_rtx (change_address (target, mode,
4725 plus_constant (addr,
4726 (bitpos
4727 / BITS_PER_UNIT))));
4728 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4729 MEM_ALIAS_SET (to_rtx) = alias_set;
4730
4731 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4732 }
4733 }
4734 \f
4735 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4736 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4737 ARRAY_REFs and find the ultimate containing object, which we return.
4738
4739 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4740 bit position, and *PUNSIGNEDP to the signedness of the field.
4741 If the position of the field is variable, we store a tree
4742 giving the variable offset (in units) in *POFFSET.
4743 This offset is in addition to the bit position.
4744 If the position is not variable, we store 0 in *POFFSET.
4745 We set *PALIGNMENT to the alignment in bytes of the address that will be
4746 computed. This is the alignment of the thing we return if *POFFSET
is zero, but can be less strictly aligned if *POFFSET is nonzero.
4748
4749 If any of the extraction expressions is volatile,
4750 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4751
4752 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4753 is a mode that can be used to access the field. In that case, *PBITSIZE
4754 is redundant.
4755
4756 If the field describes a variable-sized object, *PMODE is set to
4757 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4758 this case, but the address of the object can be found. */
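/* For example, for a COMPONENT_REF  s.f  where F is a 3-bit
   bit-field at bit position 17 of S, this returns the tree for S
   with *PBITSIZE == 3, *PBITPOS == 17, *POFFSET == 0 and
   *PMODE == VOIDmode. */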
4759
4760 tree
4761 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4762 punsignedp, pvolatilep, palignment)
4763 tree exp;
4764 int *pbitsize;
4765 int *pbitpos;
4766 tree *poffset;
4767 enum machine_mode *pmode;
4768 int *punsignedp;
4769 int *pvolatilep;
4770 int *palignment;
4771 {
4772 tree orig_exp = exp;
4773 tree size_tree = 0;
4774 enum machine_mode mode = VOIDmode;
4775 tree offset = integer_zero_node;
4776 unsigned int alignment = BIGGEST_ALIGNMENT;
4777
4778 if (TREE_CODE (exp) == COMPONENT_REF)
4779 {
4780 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4781 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4782 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4783 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4784 }
4785 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4786 {
4787 size_tree = TREE_OPERAND (exp, 1);
4788 *punsignedp = TREE_UNSIGNED (exp);
4789 }
4790 else
4791 {
4792 mode = TYPE_MODE (TREE_TYPE (exp));
4793 if (mode == BLKmode)
4794 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4795
4796 *pbitsize = GET_MODE_BITSIZE (mode);
4797 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4798 }
4799
4800 if (size_tree)
4801 {
4802 if (TREE_CODE (size_tree) != INTEGER_CST)
4803 mode = BLKmode, *pbitsize = -1;
4804 else
4805 *pbitsize = TREE_INT_CST_LOW (size_tree);
4806 }
4807
4808 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4809 and find the ultimate containing object. */
4810
4811 *pbitpos = 0;
4812
4813 while (1)
4814 {
4815 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4816 {
4817 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4818 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4819 : TREE_OPERAND (exp, 2));
4820 tree constant = integer_zero_node, var = pos;
4821
4822 /* If this field hasn't been filled in yet, don't go
4823 past it. This should only happen when folding expressions
4824 made during type construction. */
4825 if (pos == 0)
4826 break;
4827
4828 /* Assume here that the offset is a multiple of a unit.
4829 If not, there should be an explicitly added constant. */
4830 if (TREE_CODE (pos) == PLUS_EXPR
4831 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4832 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4833 else if (TREE_CODE (pos) == INTEGER_CST)
4834 constant = pos, var = integer_zero_node;
4835
4836 *pbitpos += TREE_INT_CST_LOW (constant);
4837 offset = size_binop (PLUS_EXPR, offset,
4838 size_binop (EXACT_DIV_EXPR, var,
4839 size_int (BITS_PER_UNIT)));
4840 }
4841
4842 else if (TREE_CODE (exp) == ARRAY_REF)
4843 {
4844 /* This code is based on the code in case ARRAY_REF in expand_expr
4845 below. We assume here that the size of an array element is
4846 always an integral multiple of BITS_PER_UNIT. */
4847
4848 tree index = TREE_OPERAND (exp, 1);
4849 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4850 tree low_bound
4851 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4852 tree index_type = TREE_TYPE (index);
4853 tree xindex;
4854
4855 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4856 {
4857 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4858 index);
4859 index_type = TREE_TYPE (index);
4860 }
4861
4862 /* Optimize the special-case of a zero lower bound.
4863
4864 We convert the low_bound to sizetype to avoid some problems
4865 with constant folding. (E.g. suppose the lower bound is 1,
4866 and its mode is QI. Without the conversion, (ARRAY
4867 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4868 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4869
4870 But sizetype isn't quite right either (especially if
4871 the lowbound is negative). FIXME */
4872
4873 if (! integer_zerop (low_bound))
4874 index = fold (build (MINUS_EXPR, index_type, index,
4875 convert (sizetype, low_bound)));
4876
4877 if (TREE_CODE (index) == INTEGER_CST)
4878 {
4879 index = convert (sbitsizetype, index);
4880 index_type = TREE_TYPE (index);
4881 }
4882
4883 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4884 convert (sbitsizetype,
4885 TYPE_SIZE (TREE_TYPE (exp)))));
4886
4887 if (TREE_CODE (xindex) == INTEGER_CST
4888 && TREE_INT_CST_HIGH (xindex) == 0)
4889 *pbitpos += TREE_INT_CST_LOW (xindex);
4890 else
4891 {
4892 /* Either the bit offset calculated above is not constant, or
4893 it overflowed. In either case, redo the multiplication
4894 against the size in units. This is especially important
4895 in the non-constant case to avoid a division at runtime. */
4896 xindex = fold (build (MULT_EXPR, ssizetype, index,
4897 convert (ssizetype,
4898 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4899
4900 if (contains_placeholder_p (xindex))
4901 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4902
4903 offset = size_binop (PLUS_EXPR, offset, xindex);
4904 }
4905 }
4906 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4907 && ! ((TREE_CODE (exp) == NOP_EXPR
4908 || TREE_CODE (exp) == CONVERT_EXPR)
4909 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4910 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4911 != UNION_TYPE))
4912 && (TYPE_MODE (TREE_TYPE (exp))
4913 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4914 break;
4915
4916 /* If any reference in the chain is volatile, the effect is volatile. */
4917 if (TREE_THIS_VOLATILE (exp))
4918 *pvolatilep = 1;
4919
4920 /* If the offset is non-constant already, then we can't assume any
4921 alignment more than the alignment here. */
4922 if (! integer_zerop (offset))
4923 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4924
4925 exp = TREE_OPERAND (exp, 0);
4926 }
4927
4928 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4929 alignment = MIN (alignment, DECL_ALIGN (exp));
4930 else if (TREE_TYPE (exp) != 0)
4931 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4932
4933 if (integer_zerop (offset))
4934 offset = 0;
4935
4936 if (offset != 0 && contains_placeholder_p (offset))
4937 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4938
4939 *pmode = mode;
4940 *poffset = offset;
4941 *palignment = alignment / BITS_PER_UNIT;
4942 return exp;
4943 }
4944
/* Subroutine of expand_expr: compute memory_usage from modifier. */
4946 static enum memory_use_mode
4947 get_memory_usage_from_modifier (modifier)
4948 enum expand_modifier modifier;
4949 {
4950 switch (modifier)
4951 {
case EXPAND_NORMAL:
case EXPAND_SUM:
return MEMORY_USE_RO;
case EXPAND_MEMORY_USE_WO:
return MEMORY_USE_WO;
case EXPAND_MEMORY_USE_RW:
return MEMORY_USE_RW;
4962 case EXPAND_MEMORY_USE_DONT:
4963 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4964 MEMORY_USE_DONT, because they are modifiers to a call of
4965 expand_expr in the ADDR_EXPR case of expand_expr. */
4966 case EXPAND_CONST_ADDRESS:
4967 case EXPAND_INITIALIZER:
4968 return MEMORY_USE_DONT;
4969 case EXPAND_MEMORY_USE_BAD:
4970 default:
4971 abort ();
4972 }
4973 }
4974 \f
4975 /* Given an rtx VALUE that may contain additions and multiplications,
4976 return an equivalent value that just refers to a register or memory.
4977 This is done by generating instructions to perform the arithmetic
4978 and returning a pseudo-register containing the value.
4979
4980 The returned value may be a REG, SUBREG, MEM or constant. */
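/* For example, given  (plus:SI (reg:SI 100) (mult:SI (reg:SI 101)
   (const_int 4))), this emits insns computing the product and the
   sum and returns a pseudo register holding the final value. */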
4981
4982 rtx
4983 force_operand (value, target)
4984 rtx value, target;
4985 {
4986 register optab binoptab = 0;
4987 /* Use a temporary to force order of execution of calls to
4988 `force_operand'. */
4989 rtx tmp;
4990 register rtx op2;
4991 /* Use subtarget as the target for operand 0 of a binary operation. */
4992 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4993
4994 /* Check for a PIC address load. */
4995 if (flag_pic
4996 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4997 && XEXP (value, 0) == pic_offset_table_rtx
4998 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4999 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5000 || GET_CODE (XEXP (value, 1)) == CONST))
5001 {
5002 if (!subtarget)
5003 subtarget = gen_reg_rtx (GET_MODE (value));
5004 emit_move_insn (subtarget, value);
5005 return subtarget;
5006 }
5007
5008 if (GET_CODE (value) == PLUS)
5009 binoptab = add_optab;
5010 else if (GET_CODE (value) == MINUS)
5011 binoptab = sub_optab;
5012 else if (GET_CODE (value) == MULT)
5013 {
5014 op2 = XEXP (value, 1);
5015 if (!CONSTANT_P (op2)
5016 && !(GET_CODE (op2) == REG && op2 != subtarget))
5017 subtarget = 0;
5018 tmp = force_operand (XEXP (value, 0), subtarget);
5019 return expand_mult (GET_MODE (value), tmp,
5020 force_operand (op2, NULL_RTX),
5021 target, 0);
5022 }
5023
5024 if (binoptab)
5025 {
5026 op2 = XEXP (value, 1);
5027 if (!CONSTANT_P (op2)
5028 && !(GET_CODE (op2) == REG && op2 != subtarget))
5029 subtarget = 0;
5030 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5031 {
5032 binoptab = add_optab;
5033 op2 = negate_rtx (GET_MODE (value), op2);
5034 }
5035
5036 /* Check for an addition with OP2 a constant integer and our first
5037 operand a PLUS of a virtual register and something else. In that
5038 case, we want to emit the sum of the virtual register and the
5039 constant first and then add the other value. This allows virtual
5040 register instantiation to simply modify the constant rather than
5041 creating another one around this addition. */
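/* E.g. (plus (plus (reg virtual-stack-vars) (reg 100))
   (const_int 8)) is computed as virtual-stack-vars + 8 first, and
   (reg 100) is added afterwards, so instantiation can fold the 8
   into the frame offset. */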
5042 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5043 && GET_CODE (XEXP (value, 0)) == PLUS
5044 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5045 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5046 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5047 {
5048 rtx temp = expand_binop (GET_MODE (value), binoptab,
5049 XEXP (XEXP (value, 0), 0), op2,
5050 subtarget, 0, OPTAB_LIB_WIDEN);
5051 return expand_binop (GET_MODE (value), binoptab, temp,
5052 force_operand (XEXP (XEXP (value, 0), 1), 0),
5053 target, 0, OPTAB_LIB_WIDEN);
5054 }
5055
tmp = force_operand (XEXP (value, 0), subtarget);
/* We give UNSIGNEDP = 0 to expand_binop
   because the only operations we are expanding here are signed ones. */
return expand_binop (GET_MODE (value), binoptab, tmp,
force_operand (op2, NULL_RTX),
target, 0, OPTAB_LIB_WIDEN);
}
5063 return value;
5064 }
5065 \f
5066 /* Subroutine of expand_expr:
5067 save the non-copied parts (LIST) of an expr (LHS), and return a list
5068 which can restore these values to their previous values,
5069 should something modify their storage. */
5070
5071 static tree
5072 save_noncopied_parts (lhs, list)
5073 tree lhs;
5074 tree list;
5075 {
5076 tree tail;
5077 tree parts = 0;
5078
5079 for (tail = list; tail; tail = TREE_CHAIN (tail))
5080 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5081 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5082 else
5083 {
5084 tree part = TREE_VALUE (tail);
5085 tree part_type = TREE_TYPE (part);
5086 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5087 rtx target = assign_temp (part_type, 0, 1, 1);
5088 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5089 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5090 parts = tree_cons (to_be_saved,
5091 build (RTL_EXPR, part_type, NULL_TREE,
5092 (tree) target),
5093 parts);
5094 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5095 }
5096 return parts;
5097 }
5098
5099 /* Subroutine of expand_expr:
5100 record the non-copied parts (LIST) of an expr (LHS), and return a list
5101 which specifies the initial values of these parts. */
5102
5103 static tree
5104 init_noncopied_parts (lhs, list)
5105 tree lhs;
5106 tree list;
5107 {
5108 tree tail;
5109 tree parts = 0;
5110
5111 for (tail = list; tail; tail = TREE_CHAIN (tail))
5112 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5113 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5114 else if (TREE_PURPOSE (tail))
5115 {
5116 tree part = TREE_VALUE (tail);
5117 tree part_type = TREE_TYPE (part);
5118 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5119 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5120 }
5121 return parts;
5122 }
5123
5124 /* Subroutine of expand_expr: return nonzero iff there is no way that
5125 EXP can reference X, which is being modified. TOP_P is nonzero if this
5126 call is going to be used to determine whether we need a temporary
5127 for EXP, as opposed to a recursive call to this function.
5128
5129 It is always safe for this routine to return zero since it merely
5130 searches for optimization opportunities. */
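/* For example, if X is a MEM and EXP contains a CALL_EXPR whose
   result rtx is not yet known, the call must be assumed to clobber
   all of memory, so we return 0 and the caller uses a temporary. */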
5131
5132 static int
5133 safe_from_p (x, exp, top_p)
5134 rtx x;
5135 tree exp;
5136 int top_p;
5137 {
5138 rtx exp_rtl = 0;
5139 int i, nops;
5140 static int save_expr_count;
5141 static int save_expr_size = 0;
5142 static tree *save_expr_rewritten;
5143 static tree save_expr_trees[256];
5144
5145 if (x == 0
5146 /* If EXP has varying size, we MUST use a target since we currently
5147 have no way of allocating temporaries of variable size
5148 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5149 So we assume here that something at a higher level has prevented a
5150 clash. This is somewhat bogus, but the best we can do. Only
5151 do this when X is BLKmode and when we are at the top level. */
5152 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5153 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5154 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5155 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5156 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5157 != INTEGER_CST)
5158 && GET_MODE (x) == BLKmode))
5159 return 1;
5160
5161 if (top_p && save_expr_size == 0)
5162 {
5163 int rtn;
5164
5165 save_expr_count = 0;
5166 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5167 save_expr_rewritten = &save_expr_trees[0];
5168
5169 rtn = safe_from_p (x, exp, 1);
5170
5171 for (i = 0; i < save_expr_count; ++i)
5172 {
5173 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5174 abort ();
5175 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5176 }
5177
5178 save_expr_size = 0;
5179
5180 return rtn;
5181 }
5182
5183 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5184 find the underlying pseudo. */
5185 if (GET_CODE (x) == SUBREG)
5186 {
5187 x = SUBREG_REG (x);
5188 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5189 return 0;
5190 }
5191
5192 /* If X is a location in the outgoing argument area, it is always safe. */
5193 if (GET_CODE (x) == MEM
5194 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5195 || (GET_CODE (XEXP (x, 0)) == PLUS
5196 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5197 return 1;
5198
5199 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5200 {
5201 case 'd':
5202 exp_rtl = DECL_RTL (exp);
5203 break;
5204
5205 case 'c':
5206 return 1;
5207
5208 case 'x':
5209 if (TREE_CODE (exp) == TREE_LIST)
5210 return ((TREE_VALUE (exp) == 0
5211 || safe_from_p (x, TREE_VALUE (exp), 0))
5212 && (TREE_CHAIN (exp) == 0
5213 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5214 else if (TREE_CODE (exp) == ERROR_MARK)
5215 return 1; /* An already-visited SAVE_EXPR? */
5216 else
5217 return 0;
5218
5219 case '1':
5220 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5221
5222 case '2':
5223 case '<':
5224 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5225 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5226
5227 case 'e':
5228 case 'r':
5229 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5230 the expression. If it is set, we conflict iff we are that rtx or
5231 both are in memory. Otherwise, we check all operands of the
5232 expression recursively. */
5233
5234 switch (TREE_CODE (exp))
5235 {
5236 case ADDR_EXPR:
5237 return (staticp (TREE_OPERAND (exp, 0))
5238 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5239 || TREE_STATIC (exp));
5240
5241 case INDIRECT_REF:
5242 if (GET_CODE (x) == MEM)
5243 return 0;
5244 break;
5245
5246 case CALL_EXPR:
5247 exp_rtl = CALL_EXPR_RTL (exp);
5248 if (exp_rtl == 0)
5249 {
5250 /* Assume that the call will clobber all hard registers and
5251 all of memory. */
5252 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5253 || GET_CODE (x) == MEM)
5254 return 0;
5255 }
5256
5257 break;
5258
5259 case RTL_EXPR:
5260 /* If a sequence exists, we would have to scan every instruction
5261 in the sequence to see if it was safe. This is probably not
5262 worthwhile. */
5263 if (RTL_EXPR_SEQUENCE (exp))
5264 return 0;
5265
5266 exp_rtl = RTL_EXPR_RTL (exp);
5267 break;
5268
5269 case WITH_CLEANUP_EXPR:
5270 exp_rtl = RTL_EXPR_RTL (exp);
5271 break;
5272
5273 case CLEANUP_POINT_EXPR:
5274 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5275
5276 case SAVE_EXPR:
5277 exp_rtl = SAVE_EXPR_RTL (exp);
5278 if (exp_rtl)
5279 break;
5280
5281 /* This SAVE_EXPR might appear many times in the top-level
5282 safe_from_p() expression, and if it has a complex
5283 subexpression, examining it multiple times could result
5284 in a combinatorial explosion. E.g. on an Alpha
5285 running at least 200MHz, a Fortran test case compiled with
5286 optimization took about 28 minutes to compile -- even though
5287 it was only a few lines long, and the complicated line causing
5288 so much time to be spent in the earlier version of safe_from_p()
5289 had only 293 or so unique nodes.
5290
5291 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5292 where it is so we can turn it back in the top-level safe_from_p()
5293 when we're done. */
5294
5295 /* For now, don't bother re-sizing the array. */
5296 if (save_expr_count >= save_expr_size)
5297 return 0;
5298 save_expr_rewritten[save_expr_count++] = exp;
5299
5300 nops = tree_code_length[(int) SAVE_EXPR];
5301 for (i = 0; i < nops; i++)
5302 {
5303 tree operand = TREE_OPERAND (exp, i);
5304 if (operand == NULL_TREE)
5305 continue;
5306 TREE_SET_CODE (exp, ERROR_MARK);
5307 if (!safe_from_p (x, operand, 0))
5308 return 0;
5309 TREE_SET_CODE (exp, SAVE_EXPR);
5310 }
5311 TREE_SET_CODE (exp, ERROR_MARK);
5312 return 1;
5313
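/* Illustrative sketch of why the ERROR_MARK rewrite above matters
   (hypothetical code, not part of the compiler): a node shared by both
   operands at every level of a DAG would otherwise be re-walked
   2**depth times.  The rewrite acts as a "visited" mark, so each node
   is examined only once: */
#if 0
struct node { struct node *kid[2]; int visited; };

static int
count_nodes (struct node *n)
{
  int i, count = 1;

  if (n == 0 || n->visited)
    return 0;
  n->visited = 1;	/* plays the role of TREE_SET_CODE (exp, ERROR_MARK) */
  for (i = 0; i < 2; i++)
    count += count_nodes (n->kid[i]);
  return count;
}
#endif
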
5314 case BIND_EXPR:
5315 /* The only operand we look at is operand 1. The rest aren't
5316 part of the expression. */
5317 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5318
5319 case METHOD_CALL_EXPR:
5320 /* This takes an rtx argument, but shouldn't appear here. */
5321 abort ();
5322
5323 default:
5324 break;
5325 }
5326
5327 /* If we have an rtx, we do not need to scan our operands. */
5328 if (exp_rtl)
5329 break;
5330
5331 nops = tree_code_length[(int) TREE_CODE (exp)];
5332 for (i = 0; i < nops; i++)
5333 if (TREE_OPERAND (exp, i) != 0
5334 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5335 return 0;
5336 }
5337
5338 /* If we have an rtl, find any enclosed object. Then see if we conflict
5339 with it. */
5340 if (exp_rtl)
5341 {
5342 if (GET_CODE (exp_rtl) == SUBREG)
5343 {
5344 exp_rtl = SUBREG_REG (exp_rtl);
5345 if (GET_CODE (exp_rtl) == REG
5346 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5347 return 0;
5348 }
5349
5350 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5351 are memory and EXP is not readonly. */
5352 return ! (rtx_equal_p (x, exp_rtl)
5353 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5354 && ! TREE_READONLY (exp)));
5355 }
5356
5357 /* If we reach here, it is safe. */
5358 return 1;
5359 }
5360
5361 /* Subroutine of expand_expr: return nonzero iff EXP is an
5362 expression whose type is statically determinable. */
5363
5364 static int
5365 fixed_type_p (exp)
5366 tree exp;
5367 {
5368 if (TREE_CODE (exp) == PARM_DECL
5369 || TREE_CODE (exp) == VAR_DECL
5370 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5371 || TREE_CODE (exp) == COMPONENT_REF
5372 || TREE_CODE (exp) == ARRAY_REF)
5373 return 1;
5374 return 0;
5375 }
5376
5377 /* Subroutine of expand_expr: return rtx if EXP is a
5378 variable or parameter; else return 0. */
5379
5380 static rtx
5381 var_rtx (exp)
5382 tree exp;
5383 {
5384 STRIP_NOPS (exp);
5385 switch (TREE_CODE (exp))
5386 {
5387 case PARM_DECL:
5388 case VAR_DECL:
5389 return DECL_RTL (exp);
5390 default:
5391 return 0;
5392 }
5393 }
5394
5395 #ifdef MAX_INTEGER_COMPUTATION_MODE
5396 void
5397 check_max_integer_computation_mode (exp)
5398 tree exp;
5399 {
5400 enum tree_code code;
5401 enum machine_mode mode;
5402
5403 /* Strip any NOPs that don't change the mode. */
5404 STRIP_NOPS (exp);
5405 code = TREE_CODE (exp);
5406
5407 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5408 if (code == NOP_EXPR
5409 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5410 return;
5411
5412 /* First check the type of the overall operation. We need only look at
5413 unary, binary and relational operations. */
5414 if (TREE_CODE_CLASS (code) == '1'
5415 || TREE_CODE_CLASS (code) == '2'
5416 || TREE_CODE_CLASS (code) == '<')
5417 {
5418 mode = TYPE_MODE (TREE_TYPE (exp));
5419 if (GET_MODE_CLASS (mode) == MODE_INT
5420 && mode > MAX_INTEGER_COMPUTATION_MODE)
5421 fatal ("unsupported wide integer operation");
5422 }
5423
5424 /* Check operand of a unary op. */
5425 if (TREE_CODE_CLASS (code) == '1')
5426 {
5427 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5428 if (GET_MODE_CLASS (mode) == MODE_INT
5429 && mode > MAX_INTEGER_COMPUTATION_MODE)
5430 fatal ("unsupported wide integer operation");
5431 }
5432
5433 /* Check operands of a binary/comparison op. */
5434 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5435 {
5436 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5437 if (GET_MODE_CLASS (mode) == MODE_INT
5438 && mode > MAX_INTEGER_COMPUTATION_MODE)
5439 fatal ("unsupported wide integer operation");
5440
5441 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5442 if (GET_MODE_CLASS (mode) == MODE_INT
5443 && mode > MAX_INTEGER_COMPUTATION_MODE)
5444 fatal ("unsupported wide integer operation");
5445 }
5446 }
5447 #endif
5448
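/* For illustration (hypothetical target where
   MAX_INTEGER_COMPUTATION_MODE is SImode): arithmetic wider than that
   limit is rejected by the checks above. */
#if 0
long long
f (long long a, long long b)
{
  return a + b;		/* fatal: "unsupported wide integer operation" */
}
#endif
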
5449 \f
5450 /* expand_expr: generate code for computing expression EXP.
5451 An rtx for the computed value is returned. The value is never null.
5452 In the case of a void EXP, const0_rtx is returned.
5453
5454 The value may be stored in TARGET if TARGET is nonzero.
5455 TARGET is just a suggestion; callers must assume that
5456 the rtx returned may not be the same as TARGET.
5457
5458 If TARGET is const0_rtx, it means that the value will be ignored.
5459
5460 If TMODE is not VOIDmode, it suggests generating the
5461 result in mode TMODE. But this is done only when convenient.
5462 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5463 TMODE is just a suggestion; callers must assume that
5464 the rtx returned may not have mode TMODE.
5465
5466 Note that TARGET may have neither TMODE nor MODE. In that case, it
5467 probably will not be used.
5468
5469 If MODIFIER is EXPAND_SUM then when EXP is an addition
5470 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5471 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5472 products as above, or REG or MEM, or constant.
5473 Ordinarily in such cases we would output mul or add instructions
5474 and then return a pseudo reg containing the sum.
5475
5476 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5477 it also marks a label as absolutely required (it can't be dead).
5478 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5479 This is used for outputting expressions used in initializers.
5480
5481 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5482 with a constant address even if that address is not normally legitimate.
5483 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
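
/* A typical internal call, for illustration (this calling pattern
   appears throughout this file); a minimal sketch, not a complete
   caller: */
#if 0
  temp = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  /* Passing const0_rtx as TARGET expands purely for side effects: */
  expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
#endif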
5484
5485 rtx
5486 expand_expr (exp, target, tmode, modifier)
5487 register tree exp;
5488 rtx target;
5489 enum machine_mode tmode;
5490 enum expand_modifier modifier;
5491 {
5492 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5493 This is static so it will be accessible to our recursive callees. */
5494 static tree placeholder_list = 0;
5495 register rtx op0, op1, temp;
5496 tree type = TREE_TYPE (exp);
5497 int unsignedp = TREE_UNSIGNED (type);
5498 register enum machine_mode mode;
5499 register enum tree_code code = TREE_CODE (exp);
5500 optab this_optab;
5501 rtx subtarget, original_target;
5502 int ignore;
5503 tree context;
5504 /* Used by check-memory-usage to make modifier read only. */
5505 enum expand_modifier ro_modifier;
5506
5507 /* Handle ERROR_MARK before anybody tries to access its type. */
5508 if (TREE_CODE (exp) == ERROR_MARK)
5509 {
5510 op0 = CONST0_RTX (tmode);
5511 if (op0 != 0)
5512 return op0;
5513 return const0_rtx;
5514 }
5515
5516 mode = TYPE_MODE (type);
5517 /* Use subtarget as the target for operand 0 of a binary operation. */
5518 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5519 original_target = target;
5520 ignore = (target == const0_rtx
5521 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5522 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5523 || code == COND_EXPR)
5524 && TREE_CODE (type) == VOID_TYPE));
5525
5526 /* Make a read-only version of the modifier. */
5527 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5528 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5529 ro_modifier = modifier;
5530 else
5531 ro_modifier = EXPAND_NORMAL;
5532
5533 /* Don't use hard regs as subtargets, because the combiner
5534 can only handle pseudo regs. */
5535 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5536 subtarget = 0;
5537 /* Avoid subtargets inside loops,
5538 since they hide some invariant expressions. */
5539 if (preserve_subexpressions_p ())
5540 subtarget = 0;
5541
5542 /* If we are going to ignore this result, we need only do something
5543 if there is a side-effect somewhere in the expression. If there
5544 is, short-circuit the most common cases here. Note that we must
5545 not call expand_expr with anything but const0_rtx in case this
5546 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5547
5548 if (ignore)
5549 {
5550 if (! TREE_SIDE_EFFECTS (exp))
5551 return const0_rtx;
5552
5553 /* Ensure we reference a volatile object even if value is ignored. */
5554 if (TREE_THIS_VOLATILE (exp)
5555 && TREE_CODE (exp) != FUNCTION_DECL
5556 && mode != VOIDmode && mode != BLKmode)
5557 {
5558 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5559 if (GET_CODE (temp) == MEM)
5560 temp = copy_to_reg (temp);
5561 return const0_rtx;
5562 }
5563
5564 if (TREE_CODE_CLASS (code) == '1')
5565 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5566 VOIDmode, ro_modifier);
5567 else if (TREE_CODE_CLASS (code) == '2'
5568 || TREE_CODE_CLASS (code) == '<')
5569 {
5570 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5571 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5572 return const0_rtx;
5573 }
5574 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5575 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5576 /* If the second operand has no side effects, just evaluate
5577 the first. */
5578 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5579 VOIDmode, ro_modifier);
5580
5581 target = 0;
5582 }
5583
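/* Source-level illustration of the short-circuiting above
   (hypothetical user code): when the result is ignored, operands are
   expanded only for their side effects, but a volatile object is
   still referenced. */
#if 0
void f (int a, int b) { (void) (a + b); }	/* expands to no code */
void g (volatile int *p) { (void) *p; }		/* the load is kept */
#endif
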
5584 #ifdef MAX_INTEGER_COMPUTATION_MODE
5585 /* Only check stuff here if the mode we want is different from the mode
5586 of the expression; if it's the same, check_max_integer_computation_mode
5587 will handle it. Do we really need to check this stuff at all? */
5588
5589 if (target
5590 && GET_MODE (target) != mode
5591 && TREE_CODE (exp) != INTEGER_CST
5592 && TREE_CODE (exp) != PARM_DECL
5593 && TREE_CODE (exp) != ARRAY_REF
5594 && TREE_CODE (exp) != COMPONENT_REF
5595 && TREE_CODE (exp) != BIT_FIELD_REF
5596 && TREE_CODE (exp) != INDIRECT_REF
5597 && TREE_CODE (exp) != CALL_EXPR
5598 && TREE_CODE (exp) != VAR_DECL
5599 && TREE_CODE (exp) != RTL_EXPR)
5600 {
5601 enum machine_mode mode = GET_MODE (target);
5602
5603 if (GET_MODE_CLASS (mode) == MODE_INT
5604 && mode > MAX_INTEGER_COMPUTATION_MODE)
5605 fatal ("unsupported wide integer operation");
5606 }
5607
5608 if (tmode != mode
5609 && TREE_CODE (exp) != INTEGER_CST
5610 && TREE_CODE (exp) != PARM_DECL
5611 && TREE_CODE (exp) != ARRAY_REF
5612 && TREE_CODE (exp) != COMPONENT_REF
5613 && TREE_CODE (exp) != BIT_FIELD_REF
5614 && TREE_CODE (exp) != INDIRECT_REF
5615 && TREE_CODE (exp) != VAR_DECL
5616 && TREE_CODE (exp) != CALL_EXPR
5617 && TREE_CODE (exp) != RTL_EXPR
5618 && GET_MODE_CLASS (tmode) == MODE_INT
5619 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5620 fatal ("unsupported wide integer operation");
5621
5622 check_max_integer_computation_mode (exp);
5623 #endif
5624
5625 /* If will do cse, generate all results into pseudo registers
5626 since 1) that allows cse to find more things
5627 and 2) otherwise cse could produce an insn the machine
5628 cannot support. */
5629
5630 if (! cse_not_expected && mode != BLKmode && target
5631 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5632 target = subtarget;
5633
5634 switch (code)
5635 {
5636 case LABEL_DECL:
5637 {
5638 tree function = decl_function_context (exp);
5639 /* Handle using a label in a containing function. */
5640 if (function != current_function_decl
5641 && function != inline_function_decl && function != 0)
5642 {
5643 struct function *p = find_function_data (function);
5644 /* Allocate in the memory associated with the function
5645 that the label is in. */
5646 push_obstacks (p->function_obstack,
5647 p->function_maybepermanent_obstack);
5648
5649 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5650 label_rtx (exp),
5651 p->forced_labels);
5652 pop_obstacks ();
5653 }
5654 else
5655 {
5656 if (modifier == EXPAND_INITIALIZER)
5657 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5658 label_rtx (exp),
5659 forced_labels);
5660 }
5661 temp = gen_rtx_MEM (FUNCTION_MODE,
5662 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5663 if (function != current_function_decl
5664 && function != inline_function_decl && function != 0)
5665 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5666 return temp;
5667 }
5668
5669 case PARM_DECL:
5670 if (DECL_RTL (exp) == 0)
5671 {
5672 error_with_decl (exp, "prior parameter's size depends on `%s'");
5673 return CONST0_RTX (mode);
5674 }
5675
5676 /* ... fall through ... */
5677
5678 case VAR_DECL:
5679 /* If a static var's type was incomplete when the decl was written,
5680 but the type is complete now, lay out the decl now. */
5681 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5682 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5683 {
5684 push_obstacks_nochange ();
5685 end_temporary_allocation ();
5686 layout_decl (exp, 0);
5687 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5688 pop_obstacks ();
5689 }
5690
5691 /* Although static-storage variables start off initialized, according to
5692 ANSI C, a memcpy could overwrite them with uninitialized values. So
5693 we check them too. This also lets us check for read-only variables
5694 accessed via a non-const declaration, in case it won't be detected
5695 any other way (e.g., in an embedded system or OS kernel without
5696 memory protection).
5697
5698 Aggregates are not checked here; they're handled elsewhere. */
5699 if (current_function_check_memory_usage && code == VAR_DECL
5700 && GET_CODE (DECL_RTL (exp)) == MEM
5701 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5702 {
5703 enum memory_use_mode memory_usage;
5704 memory_usage = get_memory_usage_from_modifier (modifier);
5705
5706 if (memory_usage != MEMORY_USE_DONT)
5707 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5708 XEXP (DECL_RTL (exp), 0), Pmode,
5709 GEN_INT (int_size_in_bytes (type)),
5710 TYPE_MODE (sizetype),
5711 GEN_INT (memory_usage),
5712 TYPE_MODE (integer_type_node));
5713 }
5714
5715 /* ... fall through ... */
5716
5717 case FUNCTION_DECL:
5718 case RESULT_DECL:
5719 if (DECL_RTL (exp) == 0)
5720 abort ();
5721
5722 /* Ensure the variable is marked as used even if it doesn't go through
5723 a parser. If it hasn't been used yet, write out an external
5724 definition. */
5725 if (! TREE_USED (exp))
5726 {
5727 assemble_external (exp);
5728 TREE_USED (exp) = 1;
5729 }
5730
5731 /* Show we haven't gotten RTL for this yet. */
5732 temp = 0;
5733
5734 /* Handle variables inherited from containing functions. */
5735 context = decl_function_context (exp);
5736
5737 /* We treat inline_function_decl as an alias for the current function
5738 because that is the inline function whose vars, types, etc.
5739 are being merged into the current function.
5740 See expand_inline_function. */
5741
5742 if (context != 0 && context != current_function_decl
5743 && context != inline_function_decl
5744 /* If var is static, we don't need a static chain to access it. */
5745 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5746 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5747 {
5748 rtx addr;
5749
5750 /* Mark as non-local and addressable. */
5751 DECL_NONLOCAL (exp) = 1;
5752 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5753 abort ();
5754 mark_addressable (exp);
5755 if (GET_CODE (DECL_RTL (exp)) != MEM)
5756 abort ();
5757 addr = XEXP (DECL_RTL (exp), 0);
5758 if (GET_CODE (addr) == MEM)
5759 addr = gen_rtx_MEM (Pmode,
5760 fix_lexical_addr (XEXP (addr, 0), exp));
5761 else
5762 addr = fix_lexical_addr (addr, exp);
5763 temp = change_address (DECL_RTL (exp), mode, addr);
5764 }
5765
5766 /* This is the case of an array whose size is to be determined
5767 from its initializer, while the initializer is still being parsed.
5768 See expand_decl. */
5769
5770 else if (GET_CODE (DECL_RTL (exp)) == MEM
5771 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5772 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5773 XEXP (DECL_RTL (exp), 0));
5774
5775 /* If DECL_RTL is memory, we are in the normal case and either
5776 the address is not valid or it is not a register and -fforce-addr
5777 is specified, get the address into a register. */
5778
5779 else if (GET_CODE (DECL_RTL (exp)) == MEM
5780 && modifier != EXPAND_CONST_ADDRESS
5781 && modifier != EXPAND_SUM
5782 && modifier != EXPAND_INITIALIZER
5783 && (! memory_address_p (DECL_MODE (exp),
5784 XEXP (DECL_RTL (exp), 0))
5785 || (flag_force_addr
5786 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5787 temp = change_address (DECL_RTL (exp), VOIDmode,
5788 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5789
5790 /* If we got something, return it. But first, set the alignment
5791 if the address is a register. */
5792 if (temp != 0)
5793 {
5794 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5795 mark_reg_pointer (XEXP (temp, 0),
5796 DECL_ALIGN (exp) / BITS_PER_UNIT);
5797
5798 return temp;
5799 }
5800
5801 /* If the mode of DECL_RTL does not match that of the decl, it
5802 must be a promoted value. We return a SUBREG of the wanted mode,
5803 but mark it so that we know that it was already extended. */
5804
5805 if (GET_CODE (DECL_RTL (exp)) == REG
5806 && GET_MODE (DECL_RTL (exp)) != mode)
5807 {
5808 /* Get the signedness used for this variable. Ensure we get the
5809 same mode we got when the variable was declared. */
5810 if (GET_MODE (DECL_RTL (exp))
5811 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5812 abort ();
5813
5814 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5815 SUBREG_PROMOTED_VAR_P (temp) = 1;
5816 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5817 return temp;
5818 }
5819
5820 return DECL_RTL (exp);
5821
5822 case INTEGER_CST:
5823 return immed_double_const (TREE_INT_CST_LOW (exp),
5824 TREE_INT_CST_HIGH (exp),
5825 mode);
5826
5827 case CONST_DECL:
5828 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5829 EXPAND_MEMORY_USE_BAD);
5830
5831 case REAL_CST:
5832 /* If optimized, generate immediate CONST_DOUBLE
5833 which will be turned into memory by reload if necessary.
5834
5835 We used to force a register so that loop.c could see it. But
5836 this does not allow gen_* patterns to perform optimizations with
5837 the constants. It also produces two insns in cases like "x = 1.0;".
5838 On most machines, floating-point constants are not permitted in
5839 many insns, so we'd end up copying it to a register in any case.
5840
5841 Now, we do the copying in expand_binop, if appropriate. */
5842 return immed_real_const (exp);
5843
5844 case COMPLEX_CST:
5845 case STRING_CST:
5846 if (! TREE_CST_RTL (exp))
5847 output_constant_def (exp);
5848
5849 /* TREE_CST_RTL probably contains a constant address.
5850 On RISC machines where a constant address isn't valid,
5851 make some insns to get that address into a register. */
5852 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5853 && modifier != EXPAND_CONST_ADDRESS
5854 && modifier != EXPAND_INITIALIZER
5855 && modifier != EXPAND_SUM
5856 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5857 || (flag_force_addr
5858 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5859 return change_address (TREE_CST_RTL (exp), VOIDmode,
5860 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5861 return TREE_CST_RTL (exp);
5862
5863 case EXPR_WITH_FILE_LOCATION:
5864 {
5865 rtx to_return;
5866 char *saved_input_filename = input_filename;
5867 int saved_lineno = lineno;
5868 input_filename = EXPR_WFL_FILENAME (exp);
5869 lineno = EXPR_WFL_LINENO (exp);
5870 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5871 emit_line_note (input_filename, lineno);
5872 /* Possibly avoid switching back and forth here. */
5873 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5874 input_filename = saved_input_filename;
5875 lineno = saved_lineno;
5876 return to_return;
5877 }
5878
5879 case SAVE_EXPR:
5880 context = decl_function_context (exp);
5881
5882 /* If this SAVE_EXPR was at global context, assume we are an
5883 initialization function and move it into our context. */
5884 if (context == 0)
5885 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5886
5887 /* We treat inline_function_decl as an alias for the current function
5888 because that is the inline function whose vars, types, etc.
5889 are being merged into the current function.
5890 See expand_inline_function. */
5891 if (context == current_function_decl || context == inline_function_decl)
5892 context = 0;
5893
5894 /* If this is non-local, handle it. */
5895 if (context)
5896 {
5897 /* The following call just exists to abort if the context is
5898 not of a containing function. */
5899 find_function_data (context);
5900
5901 temp = SAVE_EXPR_RTL (exp);
5902 if (temp && GET_CODE (temp) == REG)
5903 {
5904 put_var_into_stack (exp);
5905 temp = SAVE_EXPR_RTL (exp);
5906 }
5907 if (temp == 0 || GET_CODE (temp) != MEM)
5908 abort ();
5909 return change_address (temp, mode,
5910 fix_lexical_addr (XEXP (temp, 0), exp));
5911 }
5912 if (SAVE_EXPR_RTL (exp) == 0)
5913 {
5914 if (mode == VOIDmode)
5915 temp = const0_rtx;
5916 else
5917 temp = assign_temp (type, 3, 0, 0);
5918
5919 SAVE_EXPR_RTL (exp) = temp;
5920 if (!optimize && GET_CODE (temp) == REG)
5921 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5922 save_expr_regs);
5923
5924 /* If the mode of TEMP does not match that of the expression, it
5925 must be a promoted value. We pass store_expr a SUBREG of the
5926 wanted mode but mark it so that we know that it was already
5927 extended. Note that `unsignedp' was modified above in
5928 this case. */
5929
5930 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5931 {
5932 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5933 SUBREG_PROMOTED_VAR_P (temp) = 1;
5934 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5935 }
5936
5937 if (temp == const0_rtx)
5938 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5939 EXPAND_MEMORY_USE_BAD);
5940 else
5941 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5942
5943 TREE_USED (exp) = 1;
5944 }
5945
5946 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5947 must be a promoted value. We return a SUBREG of the wanted mode,
5948 but mark it so that we know that it was already extended. */
5949
5950 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5951 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5952 {
5953 /* Compute the signedness and make the proper SUBREG. */
5954 promote_mode (type, mode, &unsignedp, 0);
5955 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5956 SUBREG_PROMOTED_VAR_P (temp) = 1;
5957 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5958 return temp;
5959 }
5960
5961 return SAVE_EXPR_RTL (exp);
5962
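/* For illustration, a source construct that produces a SAVE_EXPR
   (hypothetical user code): the variable-length array size below must
   be computed exactly once, however often it is referred to. */
#if 0
void
f (int n)
{
  int vla[n + 1];		/* n + 1 is wrapped in a SAVE_EXPR */
  vla[0] = n;
}
#endif
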
5963 case UNSAVE_EXPR:
5964 {
5965 rtx temp;
5966 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5967 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5968 return temp;
5969 }
5970
5971 case PLACEHOLDER_EXPR:
5972 {
5973 tree placeholder_expr;
5974
5975 /* If there is an object on the head of the placeholder list,
5976 see if any object in it is of type TYPE or a pointer to it. For
5977 further information, see tree.def. */
5978 for (placeholder_expr = placeholder_list;
5979 placeholder_expr != 0;
5980 placeholder_expr = TREE_CHAIN (placeholder_expr))
5981 {
5982 tree need_type = TYPE_MAIN_VARIANT (type);
5983 tree object = 0;
5984 tree old_list = placeholder_list;
5985 tree elt;
5986
5987 /* Find the outermost reference that is of the type we want.
5988 If none, see if any object has a type that is a pointer to
5989 the type we want. */
5990 for (elt = TREE_PURPOSE (placeholder_expr);
5991 elt != 0 && object == 0;
5992 elt
5993 = ((TREE_CODE (elt) == COMPOUND_EXPR
5994 || TREE_CODE (elt) == COND_EXPR)
5995 ? TREE_OPERAND (elt, 1)
5996 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5997 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5998 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5999 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6000 ? TREE_OPERAND (elt, 0) : 0))
6001 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6002 object = elt;
6003
6004 for (elt = TREE_PURPOSE (placeholder_expr);
6005 elt != 0 && object == 0;
6006 elt
6007 = ((TREE_CODE (elt) == COMPOUND_EXPR
6008 || TREE_CODE (elt) == COND_EXPR)
6009 ? TREE_OPERAND (elt, 1)
6010 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6011 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6012 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6013 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6014 ? TREE_OPERAND (elt, 0) : 0))
6015 if (POINTER_TYPE_P (TREE_TYPE (elt))
6016 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6017 == need_type))
6018 object = build1 (INDIRECT_REF, need_type, elt);
6019
6020 if (object != 0)
6021 {
6022 /* Expand this object skipping the list entries before
6023 it was found in case it is also a PLACEHOLDER_EXPR.
6024 In that case, we want to translate it using subsequent
6025 entries. */
6026 placeholder_list = TREE_CHAIN (placeholder_expr);
6027 temp = expand_expr (object, original_target, tmode,
6028 ro_modifier);
6029 placeholder_list = old_list;
6030 return temp;
6031 }
6032 }
6033 }
6034
6035 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6036 abort ();
6037
6038 case WITH_RECORD_EXPR:
6039 /* Put the object on the placeholder list, expand our first operand,
6040 and pop the list. */
6041 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6042 placeholder_list);
6043 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6044 tmode, ro_modifier);
6045 placeholder_list = TREE_CHAIN (placeholder_list);
6046 return target;
6047
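/* Illustration of what WITH_RECORD_EXPR/PLACEHOLDER_EXPR express (a
   rough, hypothetical C analogue; see tree.def for the real
   definition): in Ada-like languages a field's size or position can
   depend on the enclosing object, so size expressions contain a
   placeholder that is rewritten in terms of whatever object is on the
   placeholder list above. */
#if 0
struct dyn
{
  int len;
  char data[1];		/* conceptually LEN bytes; the size expression
			   refers back to the object itself, which is
			   what PLACEHOLDER_EXPR marks */
};
#endif
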
6048 case GOTO_EXPR:
6049 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6050 expand_goto (TREE_OPERAND (exp, 0));
6051 else
6052 expand_computed_goto (TREE_OPERAND (exp, 0));
6053 return const0_rtx;
6054
6055 case EXIT_EXPR:
6056 expand_exit_loop_if_false (NULL_PTR,
6057 invert_truthvalue (TREE_OPERAND (exp, 0)));
6058 return const0_rtx;
6059
6060 case LABELED_BLOCK_EXPR:
6061 if (LABELED_BLOCK_BODY (exp))
6062 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6063 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6064 return const0_rtx;
6065
6066 case EXIT_BLOCK_EXPR:
6067 if (EXIT_BLOCK_RETURN (exp))
6068 sorry ("returned value in block_exit_expr");
6069 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6070 return const0_rtx;
6071
6072 case LOOP_EXPR:
6073 push_temp_slots ();
6074 expand_start_loop (1);
6075 expand_expr_stmt (TREE_OPERAND (exp, 0));
6076 expand_end_loop ();
6077 pop_temp_slots ();
6078
6079 return const0_rtx;
6080
6081 case BIND_EXPR:
6082 {
6083 tree vars = TREE_OPERAND (exp, 0);
6084 int vars_need_expansion = 0;
6085
6086 /* Need to open a binding contour here because
6087 if there are any cleanups they must be contained here. */
6088 expand_start_bindings (0);
6089
6090 /* Mark the corresponding BLOCK for output in its proper place. */
6091 if (TREE_OPERAND (exp, 2) != 0
6092 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6093 insert_block (TREE_OPERAND (exp, 2));
6094
6095 /* If VARS have not yet been expanded, expand them now. */
6096 while (vars)
6097 {
6098 if (DECL_RTL (vars) == 0)
6099 {
6100 vars_need_expansion = 1;
6101 expand_decl (vars);
6102 }
6103 expand_decl_init (vars);
6104 vars = TREE_CHAIN (vars);
6105 }
6106
6107 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6108
6109 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6110
6111 return temp;
6112 }
6113
6114 case RTL_EXPR:
6115 if (RTL_EXPR_SEQUENCE (exp))
6116 {
6117 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6118 abort ();
6119 emit_insns (RTL_EXPR_SEQUENCE (exp));
6120 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6121 }
6122 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6123 free_temps_for_rtl_expr (exp);
6124 return RTL_EXPR_RTL (exp);
6125
6126 case CONSTRUCTOR:
6127 /* If we don't need the result, just ensure we evaluate any
6128 subexpressions. */
6129 if (ignore)
6130 {
6131 tree elt;
6132 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6133 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6134 EXPAND_MEMORY_USE_BAD);
6135 return const0_rtx;
6136 }
6137
6138 /* All elts simple constants => refer to a constant in memory. But
6139 if this is a non-BLKmode mode, let it store a field at a time
6140 since that should make a CONST_INT or CONST_DOUBLE when we
6141 fold. Likewise, if we have a target we can use, it is best to
6142 store directly into the target unless the type is large enough
6143 that memcpy will be used. If we are making an initializer and
6144 all operands are constant, put it in memory as well. */
6145 else if ((TREE_STATIC (exp)
6146 && ((mode == BLKmode
6147 && ! (target != 0 && safe_from_p (target, exp, 1)))
6148 || TREE_ADDRESSABLE (exp)
6149 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6150 && (!MOVE_BY_PIECES_P
6151 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6152 TYPE_ALIGN (type) / BITS_PER_UNIT))
6153 && ! mostly_zeros_p (exp))))
6154 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6155 {
6156 rtx constructor = output_constant_def (exp);
6157 if (modifier != EXPAND_CONST_ADDRESS
6158 && modifier != EXPAND_INITIALIZER
6159 && modifier != EXPAND_SUM
6160 && (! memory_address_p (GET_MODE (constructor),
6161 XEXP (constructor, 0))
6162 || (flag_force_addr
6163 && GET_CODE (XEXP (constructor, 0)) != REG)))
6164 constructor = change_address (constructor, VOIDmode,
6165 XEXP (constructor, 0));
6166 return constructor;
6167 }
6168
6169 else
6170 {
6171 /* Handle calls that pass values in multiple non-contiguous
6172 locations. The Irix 6 ABI has examples of this. */
6173 if (target == 0 || ! safe_from_p (target, exp, 1)
6174 || GET_CODE (target) == PARALLEL)
6175 {
6176 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6177 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6178 else
6179 target = assign_temp (type, 0, 1, 1);
6180 }
6181
6182 if (TREE_READONLY (exp))
6183 {
6184 if (GET_CODE (target) == MEM)
6185 target = copy_rtx (target);
6186
6187 RTX_UNCHANGING_P (target) = 1;
6188 }
6189
6190 store_constructor (exp, target, 0);
6191 return target;
6192 }
6193
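/* Hypothetical user-level illustration of the strategies above: a
   static initializer full of constants is emitted as a constant in
   memory, while a small non-BLKmode value can be built a field at a
   time directly in the target. */
#if 0
struct pair { short a, b; };

static struct pair p = { 1, 2 };	/* constant in memory */

struct pair
make (short x)
{
  struct pair q = { x, 2 };		/* stored field by field */
  return q;
}
#endif
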
6194 case INDIRECT_REF:
6195 {
6196 tree exp1 = TREE_OPERAND (exp, 0);
6197 tree exp2;
6198 tree index;
6199 tree string = string_constant (exp1, &index);
6200 int i;
6201
6202 /* Try to optimize reads from const strings. */
6203 if (string
6204 && TREE_CODE (string) == STRING_CST
6205 && TREE_CODE (index) == INTEGER_CST
6206 && !TREE_INT_CST_HIGH (index)
6207 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6208 && GET_MODE_CLASS (mode) == MODE_INT
6209 && GET_MODE_SIZE (mode) == 1
6210 && modifier != EXPAND_MEMORY_USE_WO)
6211 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6212
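/* Hypothetical example of the const-string read optimized above: the
   load below folds to the character constant 'e' at compile time. */
#if 0
char f (void) { return *("hello" + 1); }
#endif
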
6213 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6214 op0 = memory_address (mode, op0);
6215
6216 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6217 {
6218 enum memory_use_mode memory_usage;
6219 memory_usage = get_memory_usage_from_modifier (modifier);
6220
6221 if (memory_usage != MEMORY_USE_DONT)
6222 {
6223 in_check_memory_usage = 1;
6224 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6225 op0, Pmode,
6226 GEN_INT (int_size_in_bytes (type)),
6227 TYPE_MODE (sizetype),
6228 GEN_INT (memory_usage),
6229 TYPE_MODE (integer_type_node));
6230 in_check_memory_usage = 0;
6231 }
6232 }
6233
6234 temp = gen_rtx_MEM (mode, op0);
6235 /* If address was computed by addition,
6236 mark this as an element of an aggregate. */
6237 if (TREE_CODE (exp1) == PLUS_EXPR
6238 || (TREE_CODE (exp1) == SAVE_EXPR
6239 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6240 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6241 || (TREE_CODE (exp1) == ADDR_EXPR
6242 && (exp2 = TREE_OPERAND (exp1, 0))
6243 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6244 MEM_SET_IN_STRUCT_P (temp, 1);
6245
6246 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6247 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6248
6249 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6250 here, because, in C and C++, the fact that a location is accessed
6251 through a pointer to const does not mean that the value there can
6252 never change. Languages where it can never change should
6253 also set TREE_STATIC. */
6254 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6255 return temp;
6256 }
6257
6258 case ARRAY_REF:
6259 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6260 abort ();
6261
6262 {
6263 tree array = TREE_OPERAND (exp, 0);
6264 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6265 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6266 tree index = TREE_OPERAND (exp, 1);
6267 tree index_type = TREE_TYPE (index);
6268 HOST_WIDE_INT i;
6269
6270 /* Optimize the special-case of a zero lower bound.
6271
6272 We convert the low_bound to sizetype to avoid some problems
6273 with constant folding. (E.g. suppose the lower bound is 1,
6274 and its mode is QI. Without the conversion, (ARRAY
6275 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6276 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6277
6278 But sizetype isn't quite right either (especially if
6279 the lowbound is negative). FIXME */
6280
6281 if (! integer_zerop (low_bound))
6282 index = fold (build (MINUS_EXPR, index_type, index,
6283 convert (sizetype, low_bound)));
6284
6285 /* Fold an expression like: "foo"[2].
6286 This is not done in fold so it won't happen inside &.
6287 Don't fold if this is for wide characters since it's too
6288 difficult to do correctly and this is a very rare case. */
6289
6290 if (TREE_CODE (array) == STRING_CST
6291 && TREE_CODE (index) == INTEGER_CST
6292 && !TREE_INT_CST_HIGH (index)
6293 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6294 && GET_MODE_CLASS (mode) == MODE_INT
6295 && GET_MODE_SIZE (mode) == 1)
6296 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6297
6298 /* If this is a constant index into a constant array,
6299 just get the value from the array. Handle both the cases when
6300 we have an explicit constructor and when our operand is a variable
6301 that was declared const. */
6302
6303 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6304 {
6305 if (TREE_CODE (index) == INTEGER_CST
6306 && TREE_INT_CST_HIGH (index) == 0)
6307 {
6308 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6309
6310 i = TREE_INT_CST_LOW (index);
6311 while (elem && i--)
6312 elem = TREE_CHAIN (elem);
6313 if (elem)
6314 return expand_expr (fold (TREE_VALUE (elem)), target,
6315 tmode, ro_modifier);
6316 }
6317 }
6318
6319 else if (optimize >= 1
6320 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6321 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6322 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6323 {
6324 if (TREE_CODE (index) == INTEGER_CST)
6325 {
6326 tree init = DECL_INITIAL (array);
6327
6328 i = TREE_INT_CST_LOW (index);
6329 if (TREE_CODE (init) == CONSTRUCTOR)
6330 {
6331 tree elem = CONSTRUCTOR_ELTS (init);
6332
6333 while (elem
6334 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6335 elem = TREE_CHAIN (elem);
6336 if (elem)
6337 return expand_expr (fold (TREE_VALUE (elem)), target,
6338 tmode, ro_modifier);
6339 }
6340 else if (TREE_CODE (init) == STRING_CST
6341 && TREE_INT_CST_HIGH (index) == 0
6342 && (TREE_INT_CST_LOW (index)
6343 < TREE_STRING_LENGTH (init)))
6344 return (GEN_INT
6345 (TREE_STRING_POINTER
6346 (init)[TREE_INT_CST_LOW (index)]));
6347 }
6348 }
6349 }
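
/* Hypothetical example of folding a constant index into a const
   array's DECL_INITIAL, as done above when optimizing: */
#if 0
static const int tbl[3] = { 10, 20, 30 };
int f (void) { return tbl[1]; }		/* folds to 20 at -O1 and above */
#endif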
6350
6351 /* ... fall through ... */
6352
6353 case COMPONENT_REF:
6354 case BIT_FIELD_REF:
6355 /* If the operand is a CONSTRUCTOR, we can just extract the
6356 appropriate field if it is present. Don't do this if we have
6357 already written the data since we want to refer to that copy
6358 and varasm.c assumes that's what we'll do. */
6359 if (code != ARRAY_REF
6360 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6361 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6362 {
6363 tree elt;
6364
6365 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6366 elt = TREE_CHAIN (elt))
6367 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6368 /* We can normally use the value of the field in the
6369 CONSTRUCTOR. However, if this is a bitfield in
6370 an integral mode that we can fit in a HOST_WIDE_INT,
6371 we must mask only the number of bits in the bitfield,
6372 since this is done implicitly by the constructor. If
6373 the bitfield does not meet either of those conditions,
6374 we can't do this optimization. */
6375 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6376 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6377 == MODE_INT)
6378 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6379 <= HOST_BITS_PER_WIDE_INT))))
6380 {
6381 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6382 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6383 {
6384 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6385
6386 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6387 {
6388 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6389 op0 = expand_and (op0, op1, target);
6390 }
6391 else
6392 {
6393 enum machine_mode imode
6394 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6395 tree count
6396 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6397 0);
6398
6399 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6400 target, 0);
6401 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6402 target, 0);
6403 }
6404 }
6405
6406 return op0;
6407 }
6408 }
6409
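/* Hypothetical illustration of the masking above: a bit-field value
   read straight out of a constructor must be reduced to BITSIZE bits;
   unsigned fields are masked, signed ones are sign-extended by the
   shift pair. */
#if 0
struct bf { unsigned u : 3; signed s : 3; };
int f (void) { return ((struct bf) { 5, -2 }).s; }	/* yields -2 */
#endif
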
6410 {
6411 enum machine_mode mode1;
6412 int bitsize;
6413 int bitpos;
6414 tree offset;
6415 int volatilep = 0;
6416 int alignment;
6417 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6418 &mode1, &unsignedp, &volatilep,
6419 &alignment);
6420
6421 /* If we got back the original object, something is wrong. Perhaps
6422 we are evaluating an expression too early. In any event, don't
6423 infinitely recurse. */
6424 if (tem == exp)
6425 abort ();
6426
6427 /* If TEM's type is a union of variable size, pass TARGET to the inner
6428 computation, since it will need a temporary and TARGET is known
6429 to suffice. This occurs in unchecked conversion in Ada. */
6430
6431 op0 = expand_expr (tem,
6432 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6433 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6434 != INTEGER_CST)
6435 ? target : NULL_RTX),
6436 VOIDmode,
6437 modifier == EXPAND_INITIALIZER
6438 ? modifier : EXPAND_NORMAL);
6439
6440 /* If this is a constant, put it into a register if it is a
6441 legitimate constant and memory if it isn't. */
6442 if (CONSTANT_P (op0))
6443 {
6444 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6445 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6446 op0 = force_reg (mode, op0);
6447 else
6448 op0 = validize_mem (force_const_mem (mode, op0));
6449 }
6450
6451 if (offset != 0)
6452 {
6453 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6454
6455 if (GET_CODE (op0) != MEM)
6456 abort ();
6457
6458 if (GET_MODE (offset_rtx) != ptr_mode)
6459 {
6460 #ifdef POINTERS_EXTEND_UNSIGNED
6461 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6462 #else
6463 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6464 #endif
6465 }
6466
6467 /* A constant address in OP0 can have VOIDmode; we must not try
6468 to call force_reg in that case. Avoid it. */
6469 if (GET_CODE (op0) == MEM
6470 && GET_MODE (op0) == BLKmode
6471 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6472 && bitsize
6473 && (bitpos % bitsize) == 0
6474 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6475 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6476 {
6477 rtx temp = change_address (op0, mode1,
6478 plus_constant (XEXP (op0, 0),
6479 (bitpos /
6480 BITS_PER_UNIT)));
6481 if (GET_CODE (XEXP (temp, 0)) == REG)
6482 op0 = temp;
6483 else
6484 op0 = change_address (op0, mode1,
6485 force_reg (GET_MODE (XEXP (temp, 0)),
6486 XEXP (temp, 0)));
6487 bitpos = 0;
6488 }
6489
6490
6491 op0 = change_address (op0, VOIDmode,
6492 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6493 force_reg (ptr_mode, offset_rtx)));
6494 }
6495
6496 /* Don't forget about volatility even if this is a bitfield. */
6497 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6498 {
6499 op0 = copy_rtx (op0);
6500 MEM_VOLATILE_P (op0) = 1;
6501 }
6502
6503 /* Check the access. */
6504 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6505 {
6506 enum memory_use_mode memory_usage;
6507 memory_usage = get_memory_usage_from_modifier (modifier);
6508
6509 if (memory_usage != MEMORY_USE_DONT)
6510 {
6511 rtx to;
6512 int size;
6513
6514 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6515 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6516
6517 /* Check the access right of the pointer. */
6518 if (size > BITS_PER_UNIT)
6519 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6520 to, Pmode,
6521 GEN_INT (size / BITS_PER_UNIT),
6522 TYPE_MODE (sizetype),
6523 GEN_INT (memory_usage),
6524 TYPE_MODE (integer_type_node));
6525 }
6526 }
6527
6528 /* In cases where an aligned union has an unaligned object
6529 as a field, we might be extracting a BLKmode value from
6530 an integer-mode (e.g., SImode) object. Handle this case
6531 by doing the extract into an object as wide as the field
6532 (which we know to be the width of a basic mode), then
6533 storing into memory, and changing the mode to BLKmode.
6534 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6535 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6536 if (mode1 == VOIDmode
6537 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6538 || (modifier != EXPAND_CONST_ADDRESS
6539 && modifier != EXPAND_INITIALIZER
6540 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6541 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6542 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6543 /* If the field isn't aligned enough to fetch as a memref,
6544 fetch it as a bit field. */
6545 || (SLOW_UNALIGNED_ACCESS
6546 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6547 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6548 {
6549 enum machine_mode ext_mode = mode;
6550
6551 if (ext_mode == BLKmode)
6552 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6553
6554 if (ext_mode == BLKmode)
6555 {
6556 /* In this case, BITPOS must start at a byte boundary and
6557 TARGET, if specified, must be a MEM. */
6558 if (GET_CODE (op0) != MEM
6559 || (target != 0 && GET_CODE (target) != MEM)
6560 || bitpos % BITS_PER_UNIT != 0)
6561 abort ();
6562
6563 op0 = change_address (op0, VOIDmode,
6564 plus_constant (XEXP (op0, 0),
6565 bitpos / BITS_PER_UNIT));
6566 if (target == 0)
6567 target = assign_temp (type, 0, 1, 1);
6568
6569 emit_block_move (target, op0,
6570 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6571 / BITS_PER_UNIT),
6572 1);
6573
6574 return target;
6575 }
6576
6577 op0 = validize_mem (op0);
6578
6579 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6580 mark_reg_pointer (XEXP (op0, 0), alignment);
6581
6582 op0 = extract_bit_field (op0, bitsize, bitpos,
6583 unsignedp, target, ext_mode, ext_mode,
6584 alignment,
6585 int_size_in_bytes (TREE_TYPE (tem)));
6586
6587 /* If the result is a record type and BITSIZE is narrower than
6588 the mode of OP0, an integral mode, and this is a big endian
6589 machine, we must put the field into the high-order bits. */
6590 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6591 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6592 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6593 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6594 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6595 - bitsize),
6596 op0, 1);
6597
6598 if (mode == BLKmode)
6599 {
6600 rtx new = assign_stack_temp (ext_mode,
6601 bitsize / BITS_PER_UNIT, 0);
6602
6603 emit_move_insn (new, op0);
6604 op0 = copy_rtx (new);
6605 PUT_MODE (op0, BLKmode);
6606 MEM_SET_IN_STRUCT_P (op0, 1);
6607 }
6608
6609 return op0;
6610 }
6611
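/* Hypothetical example of the unaligned-field path above: on a
   SLOW_UNALIGNED_ACCESS target the misaligned member below cannot be
   fetched as an ordinary memref, so it is extracted as a bit field. */
#if 0
struct packed_pair { char c; int i; } __attribute__ ((packed));
int get (struct packed_pair *p) { return p->i; }
#endif
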
6612 /* If the result is BLKmode, use that to access the object
6613 now as well. */
6614 if (mode == BLKmode)
6615 mode1 = BLKmode;
6616
6617 /* Get a reference to just this component. */
6618 if (modifier == EXPAND_CONST_ADDRESS
6619 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6620 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6621 (bitpos / BITS_PER_UNIT)));
6622 else
6623 op0 = change_address (op0, mode1,
6624 plus_constant (XEXP (op0, 0),
6625 (bitpos / BITS_PER_UNIT)));
6626
6627 if (GET_CODE (op0) == MEM)
6628 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6629
6630 if (GET_CODE (XEXP (op0, 0)) == REG)
6631 mark_reg_pointer (XEXP (op0, 0), alignment);
6632
6633 MEM_SET_IN_STRUCT_P (op0, 1);
6634 MEM_VOLATILE_P (op0) |= volatilep;
6635 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6636 || modifier == EXPAND_CONST_ADDRESS
6637 || modifier == EXPAND_INITIALIZER)
6638 return op0;
6639 else if (target == 0)
6640 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6641
6642 convert_move (target, op0, unsignedp);
6643 return target;
6644 }
6645
6646 /* Intended for a reference to a buffer of a file-object in Pascal.
6647 But it's not certain that a special tree code will really be
6648 necessary for these. INDIRECT_REF might work for them. */
6649 case BUFFER_REF:
6650 abort ();
6651
6652 case IN_EXPR:
6653 {
6654 /* Pascal set IN expression.
6655
6656 Algorithm:
6657 rlo = set_low - (set_low%bits_per_word);
6658 the_word = set [ (index - rlo)/bits_per_word ];
6659 bit_index = index % bits_per_word;
6660 bitmask = 1 << bit_index;
6661 return !!(the_word & bitmask); */
6662
6663 tree set = TREE_OPERAND (exp, 0);
6664 tree index = TREE_OPERAND (exp, 1);
6665 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6666 tree set_type = TREE_TYPE (set);
6667 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6668 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6669 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6670 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6671 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6672 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6673 rtx setaddr = XEXP (setval, 0);
6674 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6675 rtx rlow;
6676 rtx diff, quo, rem, addr, bit, result;
6677
6678 preexpand_calls (exp);
6679
6680 /* If domain is empty, answer is no. Likewise if index is constant
6681 and out of bounds. */
6682 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6683 && TREE_CODE (set_low_bound) == INTEGER_CST
6684 && tree_int_cst_lt (set_high_bound, set_low_bound))
6685 || (TREE_CODE (index) == INTEGER_CST
6686 && TREE_CODE (set_low_bound) == INTEGER_CST
6687 && tree_int_cst_lt (index, set_low_bound))
6688 || (TREE_CODE (set_high_bound) == INTEGER_CST
6689 && TREE_CODE (index) == INTEGER_CST
6690 && tree_int_cst_lt (set_high_bound, index))))
6691 return const0_rtx;
6692
6693 if (target == 0)
6694 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6695
6696 /* If we get here, we have to generate the code for both cases
6697 (in range and out of range). */
6698
6699 op0 = gen_label_rtx ();
6700 op1 = gen_label_rtx ();
6701
6702 if (! (GET_CODE (index_val) == CONST_INT
6703 && GET_CODE (lo_r) == CONST_INT))
6704 {
6705 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6706 GET_MODE (index_val), iunsignedp, 0, op1);
6707 }
6708
6709 if (! (GET_CODE (index_val) == CONST_INT
6710 && GET_CODE (hi_r) == CONST_INT))
6711 {
6712 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6713 GET_MODE (index_val), iunsignedp, 0, op1);
6714 }
6715
6716 /* Calculate the element number of bit zero in the first word
6717 of the set. */
6718 if (GET_CODE (lo_r) == CONST_INT)
6719 rlow = GEN_INT (INTVAL (lo_r)
6720 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
6721 else
6722 rlow = expand_binop (index_mode, and_optab, lo_r,
6723 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
6724 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6725
6726 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6727 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6728
6729 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6730 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6731 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6732 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6733
6734 addr = memory_address (byte_mode,
6735 expand_binop (index_mode, add_optab, diff,
6736 setaddr, NULL_RTX, iunsignedp,
6737 OPTAB_LIB_WIDEN));
6738
6739 /* Extract the bit we want to examine. */
6740 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6741 gen_rtx_MEM (byte_mode, addr),
6742 make_tree (TREE_TYPE (index), rem),
6743 NULL_RTX, 1);
6744 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6745 GET_MODE (target) == byte_mode ? target : 0,
6746 1, OPTAB_LIB_WIDEN);
6747
6748 if (result != target)
6749 convert_move (target, result, 1);
6750
6751 /* Output the code to handle the out-of-range case. */
6752 emit_jump (op0);
6753 emit_label (op1);
6754 emit_move_insn (target, const0_rtx);
6755 emit_label (op0);
6756 return target;
6757 }
6758
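/* Illustrative C rendering of the algorithm in the comment above
   (hypothetical helper, not compiler code; 8 stands in for
   bits_per_word). */
#if 0
static int
set_contains (const unsigned char *set, int set_low, int index)
{
  int rlo = set_low - (set_low % 8);
  unsigned char the_word = set[(index - rlo) / 8];
  int bit_index = index % 8;
  unsigned char bitmask = 1 << bit_index;

  return !!(the_word & bitmask);
}
#endif
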
6759 case WITH_CLEANUP_EXPR:
6760 if (RTL_EXPR_RTL (exp) == 0)
6761 {
6762 RTL_EXPR_RTL (exp)
6763 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6764 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6765
6766 /* That's it for this cleanup. */
6767 TREE_OPERAND (exp, 2) = 0;
6768 }
6769 return RTL_EXPR_RTL (exp);
6770
6771 case CLEANUP_POINT_EXPR:
6772 {
6773 /* Start a new binding layer that will keep track of all cleanup
6774 actions to be performed. */
6775 expand_start_bindings (0);
6776
6777 target_temp_slot_level = temp_slot_level;
6778
6779 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6780 /* If we're going to use this value, load it up now. */
6781 if (! ignore)
6782 op0 = force_not_mem (op0);
6783 preserve_temp_slots (op0);
6784 expand_end_bindings (NULL_TREE, 0, 0);
6785 }
6786 return op0;
6787
6788 case CALL_EXPR:
6789 /* Check for a built-in function. */
6790 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6791 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6792 == FUNCTION_DECL)
6793 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6794 return expand_builtin (exp, target, subtarget, tmode, ignore);
6795
6796 /* If this call was expanded already by preexpand_calls,
6797 just return the result we got. */
6798 if (CALL_EXPR_RTL (exp) != 0)
6799 return CALL_EXPR_RTL (exp);
6800
6801 return expand_call (exp, target, ignore);
6802
6803 case NON_LVALUE_EXPR:
6804 case NOP_EXPR:
6805 case CONVERT_EXPR:
6806 case REFERENCE_EXPR:
6807 if (TREE_CODE (type) == UNION_TYPE)
6808 {
6809 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6810 if (target == 0)
6811 {
6812 if (mode != BLKmode)
6813 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6814 else
6815 target = assign_temp (type, 0, 1, 1);
6816 }
6817
6818 if (GET_CODE (target) == MEM)
6819 /* Store data into beginning of memory target. */
6820 store_expr (TREE_OPERAND (exp, 0),
6821 change_address (target, TYPE_MODE (valtype), 0), 0);
6822
6823 else if (GET_CODE (target) == REG)
6824 /* Store this field into a union of the proper type. */
6825 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6826 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6827 VOIDmode, 0, 1,
6828 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6829 0);
6830 else
6831 abort ();
6832
6833 /* Return the entire union. */
6834 return target;
6835 }
6836
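/* Hypothetical example of a conversion to a union type (the GNU C
   cast-to-union extension), handled above by storing the operand at
   the beginning of the union: */
#if 0
union num { int i; float f; };
union num f (int x) { return (union num) x; }
#endif
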
6837 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6838 {
6839 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6840 ro_modifier);
6841
6842 /* If the signedness of the conversion differs and OP0 is
6843 a promoted SUBREG, clear that indication since we now
6844 have to do the proper extension. */
6845 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6846 && GET_CODE (op0) == SUBREG)
6847 SUBREG_PROMOTED_VAR_P (op0) = 0;
6848
6849 return op0;
6850 }
6851
6852 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6853 if (GET_MODE (op0) == mode)
6854 return op0;
6855
6856 /* If OP0 is a constant, just convert it into the proper mode. */
6857 if (CONSTANT_P (op0))
6858 return
6859 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6860 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6861
6862 if (modifier == EXPAND_INITIALIZER)
6863 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6864
6865 if (target == 0)
6866 return
6867 convert_to_mode (mode, op0,
6868 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6869 else
6870 convert_move (target, op0,
6871 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6872 return target;
6873
6874 case PLUS_EXPR:
6875 /* We come here from MINUS_EXPR when the second operand is a
6876 constant. */
6877 plus_expr:
6878 this_optab = add_optab;
6879
6880 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6881 something else, make sure we add the register to the constant and
6882 then to the other thing. This case can occur during strength
6883 reduction and doing it this way will produce better code if the
6884 frame pointer or argument pointer is eliminated.
6885
6886 fold-const.c will ensure that the constant is always in the inner
6887 PLUS_EXPR, so the only case we need to do anything about is if
6888 sp, ap, or fp is our second argument, in which case we must swap
6889 the innermost first argument and our second argument. */
6890
6891 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6892 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6893 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6894 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6895 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6896 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6897 {
6898 tree t = TREE_OPERAND (exp, 1);
6899
6900 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6901 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6902 }
6903
6904 /* If the result is to be ptr_mode and we are adding an integer to
6905 something, we might be forming a constant. So try to use
6906 plus_constant. If it produces a sum and we can't accept it,
6907 use force_operand. This allows P = &ARR[const] to generate
6908 efficient code on machines where a SYMBOL_REF is not a valid
6909 address.
6910
6911 If this is an EXPAND_SUM call, always return the sum. */
6912 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6913 || mode == ptr_mode)
6914 {
6915 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6916 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6917 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6918 {
6919 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6920 EXPAND_SUM);
6921 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6922 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6923 op1 = force_operand (op1, target);
6924 return op1;
6925 }
6926
6927 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6928 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6929 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6930 {
6931 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6932 EXPAND_SUM);
6933 if (! CONSTANT_P (op0))
6934 {
6935 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6936 VOIDmode, modifier);
6937 /* Don't go to both_summands if modifier
6938 says it's not right to return a PLUS. */
6939 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6940 goto binop2;
6941 goto both_summands;
6942 }
6943 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6944 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6945 op0 = force_operand (op0, target);
6946 return op0;
6947 }
6948 }
6949
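/* Hypothetical example of the plus_constant path above: the constant
   index folds into the address, so P = &ARR[const] needs no add
   instruction on most targets. */
#if 0
int arr[10];
int *f (void) { return &arr[3]; }
#endif
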
6950 /* No sense saving up arithmetic to be done
6951 if it's all in the wrong mode to form part of an address.
6952 And force_operand won't know whether to sign-extend or
6953 zero-extend. */
6954 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6955 || mode != ptr_mode)
6956 goto binop;
6957
6958 preexpand_calls (exp);
6959 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6960 subtarget = 0;
6961
6962 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6963 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6964
6965 both_summands:
6966 /* Make sure any term that's a sum with a constant comes last. */
6967 if (GET_CODE (op0) == PLUS
6968 && CONSTANT_P (XEXP (op0, 1)))
6969 {
6970 temp = op0;
6971 op0 = op1;
6972 op1 = temp;
6973 }
6974 /* If adding to a sum including a constant,
6975 associate it to put the constant outside. */
6976 if (GET_CODE (op1) == PLUS
6977 && CONSTANT_P (XEXP (op1, 1)))
6978 {
6979 rtx constant_term = const0_rtx;
6980
6981 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6982 if (temp != 0)
6983 op0 = temp;
6984 /* Ensure that MULT comes first if there is one. */
6985 else if (GET_CODE (op0) == MULT)
6986 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6987 else
6988 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6989
6990 /* Let's also eliminate constants from op0 if possible. */
6991 op0 = eliminate_constant_term (op0, &constant_term);
6992
6993 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6994 their sum should be a constant. Form it into OP1, since the
6995 result we want will then be OP0 + OP1. */
6996
6997 temp = simplify_binary_operation (PLUS, mode, constant_term,
6998 XEXP (op1, 1));
6999 if (temp != 0)
7000 op1 = temp;
7001 else
7002 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7003 }
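/* Sketch of the reassociation above (hypothetical operands): if OP0
   arrives as (plus (reg X) (const_int 4)) and OP1 as
   (plus (reg Y) (const_int 8)), the swap, eliminate_constant_term and
   the simplify_binary_operation calls leave OP0 as the sum of the two
   registers and OP1 as (const_int 12), so the constant ends up
   outermost where an addressing mode can absorb it.  */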
7004
7005 /* Put a constant term last and put a multiplication first. */
7006 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7007 temp = op1, op1 = op0, op0 = temp;
7008
7009 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7010 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7011
7012 case MINUS_EXPR:
7013 /* For the sake of an initializer, we are allowed to return a
7014 MINUS of two symbolic constants. Here we handle all cases when
7015 both operands are constant. */
7018 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7019 && really_constant_p (TREE_OPERAND (exp, 0))
7020 && really_constant_p (TREE_OPERAND (exp, 1)))
7021 {
7022 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7023 VOIDmode, ro_modifier);
7024 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7025 VOIDmode, ro_modifier);
7026
7027 /* If the last operand is a CONST_INT, use plus_constant of
7028 the negated constant. Else make the MINUS. */
7029 if (GET_CODE (op1) == CONST_INT)
7030 return plus_constant (op0, - INTVAL (op1));
7031 else
7032 return gen_rtx_MINUS (mode, op0, op1);
7033 }
7034 /* Convert A - const to A + (-const). */
7035 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7036 {
7037 tree negated = fold (build1 (NEGATE_EXPR, type,
7038 TREE_OPERAND (exp, 1)));
7039
7040 /* Deal with the case where we can't negate the constant
7041 in TYPE. */
7042 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7043 {
7044 tree newtype = signed_type (type);
7045 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7046 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7047 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7048
7049 if (! TREE_OVERFLOW (newneg))
7050 return expand_expr (convert (type,
7051 build (PLUS_EXPR, newtype,
7052 newop0, newneg)),
7053 target, tmode, ro_modifier);
7054 }
7055 else
7056 {
7057 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7058 goto plus_expr;
7059 }
7060 }
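/* For example, "x - 5" becomes "x + (-5)" and is expanded through the
   PLUS_EXPR path above.  If TYPE is unsigned (or negating the constant
   overflows), the rewrite is instead done in the corresponding signed
   type, since -5 is not representable in an unsigned TYPE.  */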
7061 this_optab = sub_optab;
7062 goto binop;
7063
7064 case MULT_EXPR:
7065 preexpand_calls (exp);
7066 /* If the first operand is constant, swap them.
7067 Thus the following special case checks need only
7068 check the second operand. */
7069 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7070 {
7071 register tree t1 = TREE_OPERAND (exp, 0);
7072 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7073 TREE_OPERAND (exp, 1) = t1;
7074 }
7075
7076 /* Attempt to return something suitable for generating an
7077 indexed address, for machines that support that. */
7078
7079 if (modifier == EXPAND_SUM && mode == ptr_mode
7080 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7081 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7082 {
7083 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7084 EXPAND_SUM);
7085
7086 /* Apply distributive law if OP0 is x+c. */
7087 if (GET_CODE (op0) == PLUS
7088 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7089 return gen_rtx_PLUS (mode,
7090 gen_rtx_MULT (mode, XEXP (op0, 0),
7091 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7092 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7093 * INTVAL (XEXP (op0, 1))));
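/* E.g., if OP0 is (plus (reg R) (const_int 4)) and the constant
   multiplier is 8, the return above builds
   (plus (mult (reg R) (const_int 8)) (const_int 32)), a shape that
   fits base + index*scale + displacement addressing on machines
   that have it.  */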
7094
7095 if (GET_CODE (op0) != REG)
7096 op0 = force_operand (op0, NULL_RTX);
7097 if (GET_CODE (op0) != REG)
7098 op0 = copy_to_mode_reg (mode, op0);
7099
7100 return gen_rtx_MULT (mode, op0,
7101 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7102 }
7103
7104 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7105 subtarget = 0;
7106
7107 /* Check for multiplying things that have been extended
7108 from a narrower type. If this machine supports multiplying
7109 in that narrower type with a result in the desired type,
7110 do it that way, and avoid the explicit type-conversion. */
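/* A sketch of the case being matched (hypothetical source): with
   16-bit shorts and 32-bit ints, the multiplication in

	short a, b;
	int c = (int) a * (int) b;

   can use a single widening 16x16->32 multiply, when the target
   provides one, instead of sign-extending both operands to SImode
   first.  */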
7111 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7112 && TREE_CODE (type) == INTEGER_TYPE
7113 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7114 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7115 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7116 && int_fits_type_p (TREE_OPERAND (exp, 1),
7117 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7118 /* Don't use a widening multiply if a shift will do. */
7119 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7120 > HOST_BITS_PER_WIDE_INT)
7121 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7122 ||
7123 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7124 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7125 ==
7126 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7127 /* If both operands are extended, they must either both
7128 be zero-extended or both be sign-extended. */
7129 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7130 ==
7131 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7132 {
7133 enum machine_mode innermode
7134 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7135 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7136 ? smul_widen_optab : umul_widen_optab);
7137 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7138 ? umul_widen_optab : smul_widen_optab);
7139 if (mode == GET_MODE_WIDER_MODE (innermode))
7140 {
7141 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7142 {
7143 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7144 NULL_RTX, VOIDmode, 0);
7145 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7146 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7147 VOIDmode, 0);
7148 else
7149 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7150 NULL_RTX, VOIDmode, 0);
7151 goto binop2;
7152 }
7153 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7154 && innermode == word_mode)
7155 {
7156 rtx htem;
7157 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7158 NULL_RTX, VOIDmode, 0);
7159 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7160 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7161 VOIDmode, 0);
7162 else
7163 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7164 NULL_RTX, VOIDmode, 0);
7165 temp = expand_binop (mode, other_optab, op0, op1, target,
7166 unsignedp, OPTAB_LIB_WIDEN);
7167 htem = expand_mult_highpart_adjust (innermode,
7168 gen_highpart (innermode, temp),
7169 op0, op1,
7170 gen_highpart (innermode, temp),
7171 unsignedp);
7172 emit_move_insn (gen_highpart (innermode, temp), htem);
7173 return temp;
7174 }
7175 }
7176 }
7177 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7178 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7179 return expand_mult (mode, op0, op1, target, unsignedp);
7180
7181 case TRUNC_DIV_EXPR:
7182 case FLOOR_DIV_EXPR:
7183 case CEIL_DIV_EXPR:
7184 case ROUND_DIV_EXPR:
7185 case EXACT_DIV_EXPR:
7186 preexpand_calls (exp);
7187 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7188 subtarget = 0;
7189 /* Possible optimization: compute the dividend with EXPAND_SUM;
7190 then, if the divisor is constant, we can optimize the case
7191 where some terms of the dividend have coefficients divisible by it. */
7192 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7193 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7194 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7195
7196 case RDIV_EXPR:
7197 this_optab = flodiv_optab;
7198 goto binop;
7199
7200 case TRUNC_MOD_EXPR:
7201 case FLOOR_MOD_EXPR:
7202 case CEIL_MOD_EXPR:
7203 case ROUND_MOD_EXPR:
7204 preexpand_calls (exp);
7205 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7206 subtarget = 0;
7207 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7208 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7209 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7210
7211 case FIX_ROUND_EXPR:
7212 case FIX_FLOOR_EXPR:
7213 case FIX_CEIL_EXPR:
7214 abort (); /* Not used for C. */
7215
7216 case FIX_TRUNC_EXPR:
7217 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7218 if (target == 0)
7219 target = gen_reg_rtx (mode);
7220 expand_fix (target, op0, unsignedp);
7221 return target;
7222
7223 case FLOAT_EXPR:
7224 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7225 if (target == 0)
7226 target = gen_reg_rtx (mode);
7227 /* expand_float can't figure out what to do if FROM has VOIDmode.
7228 So give it the correct mode. With -O, cse will optimize this. */
7229 if (GET_MODE (op0) == VOIDmode)
7230 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7231 op0);
7232 expand_float (target, op0,
7233 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7234 return target;
7235
7236 case NEGATE_EXPR:
7237 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7238 temp = expand_unop (mode, neg_optab, op0, target, 0);
7239 if (temp == 0)
7240 abort ();
7241 return temp;
7242
7243 case ABS_EXPR:
7244 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7245
7246 /* Handle complex values specially. */
7247 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7248 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7249 return expand_complex_abs (mode, op0, target, unsignedp);
7250
7251 /* Unsigned abs is simply the operand. Testing here means we don't
7252 risk generating incorrect code below. */
7253 if (TREE_UNSIGNED (type))
7254 return op0;
7255
7256 return expand_abs (mode, op0, target,
7257 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7258
7259 case MAX_EXPR:
7260 case MIN_EXPR:
7261 target = original_target;
7262 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7263 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7264 || GET_MODE (target) != mode
7265 || (GET_CODE (target) == REG
7266 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7267 target = gen_reg_rtx (mode);
7268 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7269 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7270
7271 /* First try to do it with a special MIN or MAX instruction.
7272 If that does not win, use a conditional jump to select the proper
7273 value. */
7274 this_optab = (TREE_UNSIGNED (type)
7275 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7276 : (code == MIN_EXPR ? smin_optab : smax_optab));
7277
7278 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7279 OPTAB_WIDEN);
7280 if (temp != 0)
7281 return temp;
7282
7283 /* At this point, a MEM target is no longer useful; we will get better
7284 code without it. */
7285
7286 if (GET_CODE (target) == MEM)
7287 target = gen_reg_rtx (mode);
7288
7289 if (target != op0)
7290 emit_move_insn (target, op0);
7291
7292 op0 = gen_label_rtx ();
7293
7294 /* If this mode is an integer too wide to compare properly,
7295 compare word by word. Rely on cse to optimize constant cases. */
7296 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7297 {
7298 if (code == MAX_EXPR)
7299 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7300 target, op1, NULL_RTX, op0);
7301 else
7302 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7303 op1, target, NULL_RTX, op0);
7304 emit_move_insn (target, op1);
7305 }
7306 else
7307 {
7308 if (code == MAX_EXPR)
7309 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7310 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7311 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7312 else
7313 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7314 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7315 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7316 if (temp == const0_rtx)
7317 emit_move_insn (target, op1);
7318 else if (temp != const_true_rtx)
7319 {
7320 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7321 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7322 else
7323 abort ();
7324 emit_move_insn (target, op1);
7325 }
7326 }
7327 emit_label (op0);
7328 return target;
7329
7330 case BIT_NOT_EXPR:
7331 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7332 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7333 if (temp == 0)
7334 abort ();
7335 return temp;
7336
7337 case FFS_EXPR:
7338 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7339 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7340 if (temp == 0)
7341 abort ();
7342 return temp;
7343
7344 /* ??? Can optimize bitwise operations with one arg constant.
7345 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7346 and (a bitwise1 b) bitwise2 b (etc)
7347 but that is probably not worthwhile. */
7348
7349 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7350 boolean values when we want in all cases to compute both of them. In
7351 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7352 as actual zero-or-1 values and then bitwise anding. In cases where
7353 there cannot be any side effects, better code would be made by
7354 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7355 how to recognize those cases. */
7356
7357 case TRUTH_AND_EXPR:
7358 case BIT_AND_EXPR:
7359 this_optab = and_optab;
7360 goto binop;
7361
7362 case TRUTH_OR_EXPR:
7363 case BIT_IOR_EXPR:
7364 this_optab = ior_optab;
7365 goto binop;
7366
7367 case TRUTH_XOR_EXPR:
7368 case BIT_XOR_EXPR:
7369 this_optab = xor_optab;
7370 goto binop;
7371
7372 case LSHIFT_EXPR:
7373 case RSHIFT_EXPR:
7374 case LROTATE_EXPR:
7375 case RROTATE_EXPR:
7376 preexpand_calls (exp);
7377 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7378 subtarget = 0;
7379 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7380 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7381 unsignedp);
7382
7383 /* Could determine the answer when only additive constants differ. Also,
7384 the addition of one can be handled by changing the condition. */
7385 case LT_EXPR:
7386 case LE_EXPR:
7387 case GT_EXPR:
7388 case GE_EXPR:
7389 case EQ_EXPR:
7390 case NE_EXPR:
7391 preexpand_calls (exp);
7392 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7393 if (temp != 0)
7394 return temp;
7395
7396 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7397 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7398 && original_target
7399 && GET_CODE (original_target) == REG
7400 && (GET_MODE (original_target)
7401 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7402 {
7403 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7404 VOIDmode, 0);
7405
7406 if (temp != original_target)
7407 temp = copy_to_reg (temp);
7408
7409 op1 = gen_label_rtx ();
7410 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7411 GET_MODE (temp), unsignedp, 0, op1);
7412 emit_move_insn (temp, const1_rtx);
7413 emit_label (op1);
7414 return temp;
7415 }
7416
7417 /* If no set-flag instruction, must generate a conditional
7418 store into a temporary variable. Drop through
7419 and handle this like && and ||. */
7420
7421 case TRUTH_ANDIF_EXPR:
7422 case TRUTH_ORIF_EXPR:
7423 if (! ignore
7424 && (target == 0 || ! safe_from_p (target, exp, 1)
7425 /* Make sure we don't have a hard reg (such as function's return
7426 value) live across basic blocks, if not optimizing. */
7427 || (!optimize && GET_CODE (target) == REG
7428 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7429 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7430
7431 if (target)
7432 emit_clr_insn (target);
7433
7434 op1 = gen_label_rtx ();
7435 jumpifnot (exp, op1);
7436
7437 if (target)
7438 emit_0_to_1_insn (target);
7439
7440 emit_label (op1);
7441 return ignore ? const0_rtx : target;
7442
7443 case TRUTH_NOT_EXPR:
7444 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7445 /* The parser is careful to generate TRUTH_NOT_EXPR
7446 only with operands that are always zero or one. */
7447 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7448 target, 1, OPTAB_LIB_WIDEN);
7449 if (temp == 0)
7450 abort ();
7451 return temp;
7452
7453 case COMPOUND_EXPR:
7454 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7455 emit_queue ();
7456 return expand_expr (TREE_OPERAND (exp, 1),
7457 (ignore ? const0_rtx : target),
7458 VOIDmode, 0);
7459
7460 case COND_EXPR:
7461 /* If we would have a "singleton" (see below) were it not for a
7462 conversion in each arm, bring that conversion back out. */
7463 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7464 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7465 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7466 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7467 {
7468 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7469 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7470
7471 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7472 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7473 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7474 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7475 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7476 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7477 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7478 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7479 return expand_expr (build1 (NOP_EXPR, type,
7480 build (COND_EXPR, TREE_TYPE (true),
7481 TREE_OPERAND (exp, 0),
7482 true, false)),
7483 target, tmode, modifier);
7484 }
7485
7486 {
7487 /* Note that COND_EXPRs whose type is a structure or union
7488 are required to be constructed to contain assignments of
7489 a temporary variable, so that we can evaluate them here
7490 for side effect only. If type is void, we must do likewise. */
7491
7492 /* If an arm of the branch requires a cleanup,
7493 only that cleanup is performed. */
7494
7495 tree singleton = 0;
7496 tree binary_op = 0, unary_op = 0;
7497
7498 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7499 convert it to our mode, if necessary. */
7500 if (integer_onep (TREE_OPERAND (exp, 1))
7501 && integer_zerop (TREE_OPERAND (exp, 2))
7502 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7503 {
7504 if (ignore)
7505 {
7506 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7507 ro_modifier);
7508 return const0_rtx;
7509 }
7510
7511 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7512 if (GET_MODE (op0) == mode)
7513 return op0;
7514
7515 if (target == 0)
7516 target = gen_reg_rtx (mode);
7517 convert_move (target, op0, unsignedp);
7518 return target;
7519 }
7520
7521 /* Check for X ? A + B : A. If we have this, we can copy A to the
7522 output and conditionally add B. Similarly for unary operations.
7523 Don't do this if X has side-effects because those side effects
7524 might affect A or B and the "?" operation is a sequence point in
7525 ANSI. (operand_equal_p tests for side effects.) */
7526
7527 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7528 && operand_equal_p (TREE_OPERAND (exp, 2),
7529 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7530 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7531 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7532 && operand_equal_p (TREE_OPERAND (exp, 1),
7533 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7534 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7535 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7536 && operand_equal_p (TREE_OPERAND (exp, 2),
7537 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7538 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7539 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7540 && operand_equal_p (TREE_OPERAND (exp, 1),
7541 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7542 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7543
7544 /* If we are not to produce a result, we have no target. Otherwise,
7545 if a target was specified use it; it will not be used as an
7546 intermediate target unless it is safe. If no target, use a
7547 temporary. */
7548
7549 if (ignore)
7550 temp = 0;
7551 else if (original_target
7552 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7553 || (singleton && GET_CODE (original_target) == REG
7554 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7555 && original_target == var_rtx (singleton)))
7556 && GET_MODE (original_target) == mode
7557 #ifdef HAVE_conditional_move
7558 && (! can_conditionally_move_p (mode)
7559 || GET_CODE (original_target) == REG
7560 || TREE_ADDRESSABLE (type))
7561 #endif
7562 && ! (GET_CODE (original_target) == MEM
7563 && MEM_VOLATILE_P (original_target)))
7564 temp = original_target;
7565 else if (TREE_ADDRESSABLE (type))
7566 abort ();
7567 else
7568 temp = assign_temp (type, 0, 0, 1);
7569
7570 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7571 do the test of X as a store-flag operation, do this as
7572 A + ((X != 0) << log C). Similarly for other simple binary
7573 operators. If BRANCH_COST is low, do this only for C == 1. */
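/* Sketch (hypothetical source): given a sufficiently high BRANCH_COST,
   "x ? a + 4 : a" with x a comparison emits roughly

	t = (x != 0);
	t = t << 2;
	result = a + t;

   (the shift count is log2 of the constant 4), with no conditional
   branch at all.  */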
7574 if (temp && singleton && binary_op
7575 && (TREE_CODE (binary_op) == PLUS_EXPR
7576 || TREE_CODE (binary_op) == MINUS_EXPR
7577 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7578 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7579 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7580 : integer_onep (TREE_OPERAND (binary_op, 1)))
7581 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7582 {
7583 rtx result;
7584 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7585 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7586 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7587 : xor_optab);
7588
7589 /* If we had X ? A : A + 1, do this as A + (X == 0).
7590
7591 We have to invert the truth value here and then put it
7592 back later if do_store_flag fails. We cannot simply copy
7593 TREE_OPERAND (exp, 0) to another variable and modify that
7594 because invert_truthvalue can modify the tree pointed to
7595 by its argument. */
7596 if (singleton == TREE_OPERAND (exp, 1))
7597 TREE_OPERAND (exp, 0)
7598 = invert_truthvalue (TREE_OPERAND (exp, 0));
7599
7600 result = do_store_flag (TREE_OPERAND (exp, 0),
7601 (safe_from_p (temp, singleton, 1)
7602 ? temp : NULL_RTX),
7603 mode, BRANCH_COST <= 1);
7604
7605 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7606 result = expand_shift (LSHIFT_EXPR, mode, result,
7607 build_int_2 (tree_log2
7608 (TREE_OPERAND
7609 (binary_op, 1)),
7610 0),
7611 (safe_from_p (temp, singleton, 1)
7612 ? temp : NULL_RTX), 0);
7613
7614 if (result)
7615 {
7616 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7617 return expand_binop (mode, boptab, op1, result, temp,
7618 unsignedp, OPTAB_LIB_WIDEN);
7619 }
7620 else if (singleton == TREE_OPERAND (exp, 1))
7621 TREE_OPERAND (exp, 0)
7622 = invert_truthvalue (TREE_OPERAND (exp, 0));
7623 }
7624
7625 do_pending_stack_adjust ();
7626 NO_DEFER_POP;
7627 op0 = gen_label_rtx ();
7628
7629 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7630 {
7631 if (temp != 0)
7632 {
7633 /* If the target conflicts with the other operand of the
7634 binary op, we can't use it. Also, we can't use the target
7635 if it is a hard register, because evaluating the condition
7636 might clobber it. */
7637 if ((binary_op
7638 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7639 || (GET_CODE (temp) == REG
7640 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7641 temp = gen_reg_rtx (mode);
7642 store_expr (singleton, temp, 0);
7643 }
7644 else
7645 expand_expr (singleton,
7646 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7647 if (singleton == TREE_OPERAND (exp, 1))
7648 jumpif (TREE_OPERAND (exp, 0), op0);
7649 else
7650 jumpifnot (TREE_OPERAND (exp, 0), op0);
7651
7652 start_cleanup_deferral ();
7653 if (binary_op && temp == 0)
7654 /* Just touch the other operand. */
7655 expand_expr (TREE_OPERAND (binary_op, 1),
7656 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7657 else if (binary_op)
7658 store_expr (build (TREE_CODE (binary_op), type,
7659 make_tree (type, temp),
7660 TREE_OPERAND (binary_op, 1)),
7661 temp, 0);
7662 else
7663 store_expr (build1 (TREE_CODE (unary_op), type,
7664 make_tree (type, temp)),
7665 temp, 0);
7666 op1 = op0;
7667 }
7668 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7669 comparison operator. If we have one of these cases, set the
7670 output to A, branch on A (cse will merge these two references),
7671 then set the output to FOO. */
7672 else if (temp
7673 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7674 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7675 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7676 TREE_OPERAND (exp, 1), 0)
7677 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7678 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7679 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7680 {
7681 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7682 temp = gen_reg_rtx (mode);
7683 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7684 jumpif (TREE_OPERAND (exp, 0), op0);
7685
7686 start_cleanup_deferral ();
7687 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7688 op1 = op0;
7689 }
7690 else if (temp
7691 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7692 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7693 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7694 TREE_OPERAND (exp, 2), 0)
7695 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7696 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7697 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7698 {
7699 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7700 temp = gen_reg_rtx (mode);
7701 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7702 jumpifnot (TREE_OPERAND (exp, 0), op0);
7703
7704 start_cleanup_deferral ();
7705 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7706 op1 = op0;
7707 }
7708 else
7709 {
7710 op1 = gen_label_rtx ();
7711 jumpifnot (TREE_OPERAND (exp, 0), op0);
7712
7713 start_cleanup_deferral ();
7714 if (temp != 0)
7715 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7716 else
7717 expand_expr (TREE_OPERAND (exp, 1),
7718 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7719 end_cleanup_deferral ();
7720 emit_queue ();
7721 emit_jump_insn (gen_jump (op1));
7722 emit_barrier ();
7723 emit_label (op0);
7724 start_cleanup_deferral ();
7725 if (temp != 0)
7726 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7727 else
7728 expand_expr (TREE_OPERAND (exp, 2),
7729 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7730 }
7731
7732 end_cleanup_deferral ();
7733
7734 emit_queue ();
7735 emit_label (op1);
7736 OK_DEFER_POP;
7737
7738 return temp;
7739 }
7740
7741 case TARGET_EXPR:
7742 {
7743 /* Something needs to be initialized, but we didn't know
7744 where that thing was when building the tree. For example,
7745 it could be the return value of a function, or a parameter
7746 to a function which is laid out in the stack, or a temporary
7747 variable which must be passed by reference.
7748
7749 We guarantee that the expression will either be constructed
7750 or copied into our original target. */
7751
7752 tree slot = TREE_OPERAND (exp, 0);
7753 tree cleanups = NULL_TREE;
7754 tree exp1;
7755
7756 if (TREE_CODE (slot) != VAR_DECL)
7757 abort ();
7758
7759 if (! ignore)
7760 target = original_target;
7761
7762 if (target == 0)
7763 {
7764 if (DECL_RTL (slot) != 0)
7765 {
7766 target = DECL_RTL (slot);
7767 /* If we have already expanded the slot, don't do
7768 it again. (mrs) */
7769 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7770 return target;
7771 }
7772 else
7773 {
7774 target = assign_temp (type, 2, 0, 1);
7775 /* All temp slots at this level must not conflict. */
7776 preserve_temp_slots (target);
7777 DECL_RTL (slot) = target;
7778 if (TREE_ADDRESSABLE (slot))
7779 {
7780 TREE_ADDRESSABLE (slot) = 0;
7781 mark_addressable (slot);
7782 }
7783
7784 /* Since SLOT is not known to the called function
7785 to belong to its stack frame, we must build an explicit
7786 cleanup. This case occurs when we must build up a reference
7787 to pass as an argument. In this case,
7788 it is very likely that such a reference need not be
7789 built here. */
7790
7791 if (TREE_OPERAND (exp, 2) == 0)
7792 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7793 cleanups = TREE_OPERAND (exp, 2);
7794 }
7795 }
7796 else
7797 {
7798 /* This case does occur when expanding a parameter which
7799 needs to be constructed on the stack. The target
7800 is the actual stack address that we want to initialize.
7801 The function we call will perform the cleanup in this case. */
7802
7803 /* If we have already assigned it space, use that space,
7804 not the target that we were passed in, since the target
7805 parameter is only a hint. */
7806 if (DECL_RTL (slot) != 0)
7807 {
7808 target = DECL_RTL (slot);
7809 /* If we have already expanded the slot, don't do
7810 it again. (mrs) */
7811 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7812 return target;
7813 }
7814 else
7815 {
7816 DECL_RTL (slot) = target;
7817 /* If we must have an addressable slot, then make sure that
7818 the RTL that we just stored in slot is OK. */
7819 if (TREE_ADDRESSABLE (slot))
7820 {
7821 TREE_ADDRESSABLE (slot) = 0;
7822 mark_addressable (slot);
7823 }
7824 }
7825 }
7826
7827 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7828 /* Mark it as expanded. */
7829 TREE_OPERAND (exp, 1) = NULL_TREE;
7830
7831 TREE_USED (slot) = 1;
7832 store_expr (exp1, target, 0);
7833
7834 expand_decl_cleanup (NULL_TREE, cleanups);
7835
7836 return target;
7837 }
7838
7839 case INIT_EXPR:
7840 {
7841 tree lhs = TREE_OPERAND (exp, 0);
7842 tree rhs = TREE_OPERAND (exp, 1);
7843 tree noncopied_parts = 0;
7844 tree lhs_type = TREE_TYPE (lhs);
7845
7846 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7847 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7848 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7849 TYPE_NONCOPIED_PARTS (lhs_type));
7850 while (noncopied_parts != 0)
7851 {
7852 expand_assignment (TREE_VALUE (noncopied_parts),
7853 TREE_PURPOSE (noncopied_parts), 0, 0);
7854 noncopied_parts = TREE_CHAIN (noncopied_parts);
7855 }
7856 return temp;
7857 }
7858
7859 case MODIFY_EXPR:
7860 {
7861 /* If lhs is complex, expand calls in rhs before computing it.
7862 That's so we don't compute a pointer and save it over a call.
7863 If lhs is simple, compute it first so we can give it as a
7864 target if the rhs is just a call. This avoids an extra temp and copy,
7865 and prevents a partial subsumption which makes bad code.
7866 Actually we could treat component_ref's of vars like vars. */
7867
7868 tree lhs = TREE_OPERAND (exp, 0);
7869 tree rhs = TREE_OPERAND (exp, 1);
7870 tree noncopied_parts = 0;
7871 tree lhs_type = TREE_TYPE (lhs);
7872
7873 temp = 0;
7874
7875 if (TREE_CODE (lhs) != VAR_DECL
7876 && TREE_CODE (lhs) != RESULT_DECL
7877 && TREE_CODE (lhs) != PARM_DECL
7878 && ! (TREE_CODE (lhs) == INDIRECT_REF
7879 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7880 preexpand_calls (exp);
7881
7882 /* Check for |= or &= of a bitfield of size one into another bitfield
7883 of size one. In this case (unless we need the result of the
7884 assignment) we can do this more efficiently with a
7885 test followed by an assignment, if necessary.
7886
7887 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7888 things change so we do, this code should be enhanced to
7889 support it. */
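/* Sketch (hypothetical source): for

	struct s { unsigned a : 1, b : 1; } x, y;
	...
	x.a |= y.b;

   whose value is not needed, this emits the equivalent of
   "if (y.b) x.a = 1;" -- a jump around a store of a constant --
   instead of a read-modify-write of the destination bitfield.  */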
7890 if (ignore
7891 && TREE_CODE (lhs) == COMPONENT_REF
7892 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7893 || TREE_CODE (rhs) == BIT_AND_EXPR)
7894 && TREE_OPERAND (rhs, 0) == lhs
7895 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7896 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7897 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7898 {
7899 rtx label = gen_label_rtx ();
7900
7901 do_jump (TREE_OPERAND (rhs, 1),
7902 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7903 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7904 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7905 (TREE_CODE (rhs) == BIT_IOR_EXPR
7906 ? integer_one_node
7907 : integer_zero_node)),
7908 0, 0);
7909 do_pending_stack_adjust ();
7910 emit_label (label);
7911 return const0_rtx;
7912 }
7913
7914 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7915 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7916 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7917 TYPE_NONCOPIED_PARTS (lhs_type));
7918
7919 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7920 while (noncopied_parts != 0)
7921 {
7922 expand_assignment (TREE_PURPOSE (noncopied_parts),
7923 TREE_VALUE (noncopied_parts), 0, 0);
7924 noncopied_parts = TREE_CHAIN (noncopied_parts);
7925 }
7926 return temp;
7927 }
7928
7929 case RETURN_EXPR:
7930 if (!TREE_OPERAND (exp, 0))
7931 expand_null_return ();
7932 else
7933 expand_return (TREE_OPERAND (exp, 0));
7934 return const0_rtx;
7935
7936 case PREINCREMENT_EXPR:
7937 case PREDECREMENT_EXPR:
7938 return expand_increment (exp, 0, ignore);
7939
7940 case POSTINCREMENT_EXPR:
7941 case POSTDECREMENT_EXPR:
7942 /* Faster to treat as pre-increment if result is not used. */
7943 return expand_increment (exp, ! ignore, ignore);
7944
7945 case ADDR_EXPR:
7946 /* If nonzero, TEMP will be set to the address of something that might
7947 be a MEM corresponding to a stack slot. */
7948 temp = 0;
7949
7950 /* Are we taking the address of a nested function? */
7951 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7952 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7953 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7954 && ! TREE_STATIC (exp))
7955 {
7956 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7957 op0 = force_operand (op0, target);
7958 }
7959 /* If we are taking the address of something erroneous, just
7960 return a zero. */
7961 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7962 return const0_rtx;
7963 else
7964 {
7965 /* We make sure to pass const0_rtx down if we came in with
7966 ignore set, to avoid running the cleanups twice. */
7967 op0 = expand_expr (TREE_OPERAND (exp, 0),
7968 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7969 (modifier == EXPAND_INITIALIZER
7970 ? modifier : EXPAND_CONST_ADDRESS));
7971
7972 /* If we are going to ignore the result, OP0 will have been set
7973 to const0_rtx, so just return it. Don't get confused and
7974 think we are taking the address of the constant. */
7975 if (ignore)
7976 return op0;
7977
7978 op0 = protect_from_queue (op0, 0);
7979
7980 /* We would like the object in memory. If it is a constant,
7981 we can have it be statically allocated into memory. For
7982 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7983 memory and store the value into it. */
7984
7985 if (CONSTANT_P (op0))
7986 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7987 op0);
7988 else if (GET_CODE (op0) == MEM)
7989 {
7990 mark_temp_addr_taken (op0);
7991 temp = XEXP (op0, 0);
7992 }
7993
7994 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7995 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7996 {
7997 /* If this object is in a register, it must not
7998 be BLKmode. */
7999 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8000 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8001
8002 mark_temp_addr_taken (memloc);
8003 emit_move_insn (memloc, op0);
8004 op0 = memloc;
8005 }
8006
8007 if (GET_CODE (op0) != MEM)
8008 abort ();
8009
8010 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8011 {
8012 temp = XEXP (op0, 0);
8013 #ifdef POINTERS_EXTEND_UNSIGNED
8014 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8015 && mode == ptr_mode)
8016 temp = convert_memory_address (ptr_mode, temp);
8017 #endif
8018 return temp;
8019 }
8020
8021 op0 = force_operand (XEXP (op0, 0), target);
8022 }
8023
8024 if (flag_force_addr && GET_CODE (op0) != REG)
8025 op0 = force_reg (Pmode, op0);
8026
8027 if (GET_CODE (op0) == REG
8028 && ! REG_USERVAR_P (op0))
8029 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8030
8031 /* If we might have had a temp slot, add an equivalent address
8032 for it. */
8033 if (temp != 0)
8034 update_temp_slot_address (temp, op0);
8035
8036 #ifdef POINTERS_EXTEND_UNSIGNED
8037 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8038 && mode == ptr_mode)
8039 op0 = convert_memory_address (ptr_mode, op0);
8040 #endif
8041
8042 return op0;
8043
8044 case ENTRY_VALUE_EXPR:
8045 abort ();
8046
8047 /* COMPLEX type for Extended Pascal & Fortran */
8048 case COMPLEX_EXPR:
8049 {
8050 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8051 rtx insns;
8052
8053 /* Get the rtx for the operands. */
8054 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8055 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8056
8057 if (! target)
8058 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8059
8060 start_sequence ();
8061
8062 /* Move the real (op0) and imaginary (op1) parts to their location. */
8063 emit_move_insn (gen_realpart (mode, target), op0);
8064 emit_move_insn (gen_imagpart (mode, target), op1);
8065
8066 insns = get_insns ();
8067 end_sequence ();
8068
8069 /* Complex construction should appear as a single unit. */
8070 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8071 each with a separate pseudo as destination.
8072 It's not correct for flow to treat them as a unit. */
8073 if (GET_CODE (target) != CONCAT)
8074 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8075 else
8076 emit_insns (insns);
8077
8078 return target;
8079 }
8080
8081 case REALPART_EXPR:
8082 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8083 return gen_realpart (mode, op0);
8084
8085 case IMAGPART_EXPR:
8086 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8087 return gen_imagpart (mode, op0);
8088
8089 case CONJ_EXPR:
8090 {
8091 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8092 rtx imag_t;
8093 rtx insns;
8094
8095 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8096
8097 if (! target)
8098 target = gen_reg_rtx (mode);
8099
8100 start_sequence ();
8101
8102 /* Store the realpart and the negated imagpart to target. */
8103 emit_move_insn (gen_realpart (partmode, target),
8104 gen_realpart (partmode, op0));
8105
8106 imag_t = gen_imagpart (partmode, target);
8107 temp = expand_unop (partmode, neg_optab,
8108 gen_imagpart (partmode, op0), imag_t, 0);
8109 if (temp != imag_t)
8110 emit_move_insn (imag_t, temp);
8111
8112 insns = get_insns ();
8113 end_sequence ();
8114
8115 /* Conjugate should appear as a single unit.
8116 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8117 each with a separate pseudo as destination.
8118 It's not correct for flow to treat them as a unit. */
8119 if (GET_CODE (target) != CONCAT)
8120 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8121 else
8122 emit_insns (insns);
8123
8124 return target;
8125 }
8126
8127 case TRY_CATCH_EXPR:
8128 {
8129 tree handler = TREE_OPERAND (exp, 1);
8130
8131 expand_eh_region_start ();
8132
8133 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8134
8135 expand_eh_region_end (handler);
8136
8137 return op0;
8138 }
8139
8140 case TRY_FINALLY_EXPR:
8141 {
8142 tree try_block = TREE_OPERAND (exp, 0);
8143 tree finally_block = TREE_OPERAND (exp, 1);
8144 rtx finally_label = gen_label_rtx ();
8145 rtx done_label = gen_label_rtx ();
8146 rtx return_link = gen_reg_rtx (Pmode);
8147 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8148 (tree) finally_label, (tree) return_link);
8149 TREE_SIDE_EFFECTS (cleanup) = 1;
8150
8151 /* Start a new binding layer that will keep track of all cleanup
8152 actions to be performed. */
8153 expand_start_bindings (0);
8154
8155 target_temp_slot_level = temp_slot_level;
8156
8157 expand_decl_cleanup (NULL_TREE, cleanup);
8158 op0 = expand_expr (try_block, target, tmode, modifier);
8159
8160 preserve_temp_slots (op0);
8161 expand_end_bindings (NULL_TREE, 0, 0);
8162 emit_jump (done_label);
8163 emit_label (finally_label);
8164 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8165 emit_indirect_jump (return_link);
8166 emit_label (done_label);
8167 return op0;
8168 }
8169
8170 case GOTO_SUBROUTINE_EXPR:
8171 {
8172 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8173 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8174 rtx return_address = gen_label_rtx ();
8175 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8176 emit_jump (subr);
8177 emit_label (return_address);
8178 return const0_rtx;
8179 }
8180
8181 case POPDCC_EXPR:
8182 {
8183 rtx dcc = get_dynamic_cleanup_chain ();
8184 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8185 return const0_rtx;
8186 }
8187
8188 case POPDHC_EXPR:
8189 {
8190 rtx dhc = get_dynamic_handler_chain ();
8191 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8192 return const0_rtx;
8193 }
8194
8195 default:
8196 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8197 }
8198
8199 /* Here to do an ordinary binary operator, generating an instruction
8200 from the optab already placed in `this_optab'. */
8201 binop:
8202 preexpand_calls (exp);
8203 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8204 subtarget = 0;
8205 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8206 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8207 binop2:
8208 temp = expand_binop (mode, this_optab, op0, op1, target,
8209 unsignedp, OPTAB_LIB_WIDEN);
8210 if (temp == 0)
8211 abort ();
8212 return temp;
8213 }
8214
8215
8216 \f
8217 /* Return the alignment in bits of EXP, a pointer-valued expression.
8218 But don't return more than MAX_ALIGN no matter what.
8219 The alignment returned is, by default, the alignment of the thing that
8220 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8221
8222 Otherwise, look at the expression to see if we can do better, i.e., if the
8223 expression is actually pointing at an object whose alignment is tighter. */
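/* For instance, given "double d;", the expression "(char *) &d" gets
   alignment DECL_ALIGN (d) (typically 64) by looking through the cast
   to the ADDR_EXPR, while "(char *) &d + 3" is capped at 8 by the
   PLUS_EXPR case below, since a 3-byte offset can promise no more
   than byte alignment.  */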
8224
8225 static int
8226 get_pointer_alignment (exp, max_align)
8227 tree exp;
8228 unsigned max_align;
8229 {
8230 unsigned align, inner;
8231
8232 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8233 return 0;
8234
8235 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8236 align = MIN (align, max_align);
8237
8238 while (1)
8239 {
8240 switch (TREE_CODE (exp))
8241 {
8242 case NOP_EXPR:
8243 case CONVERT_EXPR:
8244 case NON_LVALUE_EXPR:
8245 exp = TREE_OPERAND (exp, 0);
8246 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8247 return align;
8248 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8249 align = MIN (inner, max_align);
8250 break;
8251
8252 case PLUS_EXPR:
8253 /* If sum of pointer + int, restrict our maximum alignment to that
8254 imposed by the integer. If not, we can't do any better than
8255 ALIGN. */
8256 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8257 return align;
8258
8259 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8260 & (max_align - 1))
8261 != 0)
8262 max_align >>= 1;
8263
8264 exp = TREE_OPERAND (exp, 0);
8265 break;
8266
8267 case ADDR_EXPR:
8268 /* See what we are pointing at and look at its alignment. */
8269 exp = TREE_OPERAND (exp, 0);
8270 if (TREE_CODE (exp) == FUNCTION_DECL)
8271 align = FUNCTION_BOUNDARY;
8272 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8273 align = DECL_ALIGN (exp);
8274 #ifdef CONSTANT_ALIGNMENT
8275 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8276 align = CONSTANT_ALIGNMENT (exp, align);
8277 #endif
8278 return MIN (align, max_align);
8279
8280 default:
8281 return align;
8282 }
8283 }
8284 }
8285 \f
8286 /* Return the tree node and offset if a given argument corresponds to
8287 a string constant. */
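/* For example, for an argument equivalent to "hello" + 2 -- a
   PLUS_EXPR of an ADDR_EXPR of a STRING_CST and an offset -- this
   returns the STRING_CST and sets *PTR_OFFSET to the offset operand;
   for a plain "hello" the offset is integer_zero_node.  */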
8288
8289 static tree
8290 string_constant (arg, ptr_offset)
8291 tree arg;
8292 tree *ptr_offset;
8293 {
8294 STRIP_NOPS (arg);
8295
8296 if (TREE_CODE (arg) == ADDR_EXPR
8297 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8298 {
8299 *ptr_offset = integer_zero_node;
8300 return TREE_OPERAND (arg, 0);
8301 }
8302 else if (TREE_CODE (arg) == PLUS_EXPR)
8303 {
8304 tree arg0 = TREE_OPERAND (arg, 0);
8305 tree arg1 = TREE_OPERAND (arg, 1);
8306
8307 STRIP_NOPS (arg0);
8308 STRIP_NOPS (arg1);
8309
8310 if (TREE_CODE (arg0) == ADDR_EXPR
8311 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8312 {
8313 *ptr_offset = arg1;
8314 return TREE_OPERAND (arg0, 0);
8315 }
8316 else if (TREE_CODE (arg1) == ADDR_EXPR
8317 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8318 {
8319 *ptr_offset = arg0;
8320 return TREE_OPERAND (arg1, 0);
8321 }
8322 }
8323
8324 return 0;
8325 }
8326
8327 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8328 way, because it could contain a zero byte in the middle.
8329 TREE_STRING_LENGTH is the size of the character array, not the string.
8330
8331 Unfortunately, string_constant can't access the values of const char
8332 arrays with initializers, so neither can we here. */
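/* For example, c_strlen of "foo" yields size_int (3) and c_strlen of
   "foo" + 1 yields size_int (2), while c_strlen of "foo\0bar" + i with
   I not constant yields 0, since the answer depends on where I falls
   relative to the embedded zero byte.  */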
8333
8334 static tree
8335 c_strlen (src)
8336 tree src;
8337 {
8338 tree offset_node;
8339 int offset, max;
8340 char *ptr;
8341
8342 src = string_constant (src, &offset_node);
8343 if (src == 0)
8344 return 0;
8345 max = TREE_STRING_LENGTH (src);
8346 ptr = TREE_STRING_POINTER (src);
8347 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8348 {
8349 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8350 compute the offset to the following null if we don't know where to
8351 start searching for it. */
8352 int i;
8353 for (i = 0; i < max; i++)
8354 if (ptr[i] == 0)
8355 return 0;
8356 /* We don't know the starting offset, but we do know that the string
8357 has no internal zero bytes. We can assume that the offset falls
8358 within the bounds of the string; otherwise, the programmer deserves
8359 what he gets. Subtract the offset from the length of the string,
8360 and return that. */
8361 /* This would perhaps not be valid if we were dealing with named
8362 arrays in addition to literal string constants. */
8363 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8364 }
8365
8366 /* We have a known offset into the string. Start searching there for
8367 a null character. */
8368 if (offset_node == 0)
8369 offset = 0;
8370 else
8371 {
8372 /* Did we get a long long offset? If so, punt. */
8373 if (TREE_INT_CST_HIGH (offset_node) != 0)
8374 return 0;
8375 offset = TREE_INT_CST_LOW (offset_node);
8376 }
8377 /* If the offset is known to be out of bounds, warn, and call strlen at
8378 runtime. */
8379 if (offset < 0 || offset > max)
8380 {
8381 warning ("offset outside bounds of constant string");
8382 return 0;
8383 }
8384 /* Use strlen to search for the first zero byte. Since any strings
8385 constructed with build_string will have nulls appended, we win even
8386 if we get handed something like (char[4])"abcd".
8387
8388 Since OFFSET is our starting index into the string, no further
8389 calculation is needed. */
8390 return size_int (strlen (ptr + offset));
8391 }
8392
8393 rtx
8394 expand_builtin_return_addr (fndecl_code, count, tem)
8395 enum built_in_function fndecl_code;
8396 int count;
8397 rtx tem;
8398 {
8399 int i;
8400
8401 /* Some machines need special handling before we can access
8402 arbitrary frames. For example, on the sparc, we must first flush
8403 all register windows to the stack. */
8404 #ifdef SETUP_FRAME_ADDRESSES
8405 if (count > 0)
8406 SETUP_FRAME_ADDRESSES ();
8407 #endif
8408
8409 /* On the sparc, the return address is not in the frame, it is in a
8410 register. There is no way to access it off of the current frame
8411 pointer, but it can be accessed off the previous frame pointer by
8412 reading the value from the register window save area. */
8413 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8414 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8415 count--;
8416 #endif
8417
8418 /* Scan back COUNT frames to the specified frame. */
8419 for (i = 0; i < count; i++)
8420 {
8421 /* Assume the dynamic chain pointer is in the word that the
8422 frame address points to, unless otherwise specified. */
8423 #ifdef DYNAMIC_CHAIN_ADDRESS
8424 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8425 #endif
8426 tem = memory_address (Pmode, tem);
8427 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8428 }
8429
8430 /* For __builtin_frame_address, return what we've got. */
8431 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8432 return tem;
8433
8434 /* For __builtin_return_address, get the return address from that
8435 frame. */
8436 #ifdef RETURN_ADDR_RTX
8437 tem = RETURN_ADDR_RTX (count, tem);
8438 #else
8439 tem = memory_address (Pmode,
8440 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8441 tem = gen_rtx_MEM (Pmode, tem);
8442 #endif
8443 return tem;
8444 }
8445
8446 /* __builtin_setjmp is passed a pointer to an array of five words (not
8447 all will be used on all machines). It operates similarly to the C
8448 library function of the same name, but is more efficient. Much of
8449 the code below (and for longjmp) is copied from the handling of
8450 non-local gotos.
8451
8452 NOTE: This is intended for use by GNAT and the exception handling
8453 scheme in the compiler, and will only work in the manner in
8454 which they use it. */
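/* Layout of the buffer as used below (a sketch; each word is
   Pmode-sized):

	buf[0]		frame pointer (BUILTIN_SETJMP_FRAME_VALUE)
	buf[1]		address of the receiver label LAB1
	buf[2]...	machine-dependent stack save area

   expand_builtin_longjmp reads the words back at the same offsets.  */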
8455
8456 rtx
8457 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8458 rtx buf_addr;
8459 rtx target;
8460 rtx first_label, next_label;
8461 {
8462 rtx lab1 = gen_label_rtx ();
8463 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8464 enum machine_mode value_mode;
8465 rtx stack_save;
8466
8467 value_mode = TYPE_MODE (integer_type_node);
8468
8469 #ifdef POINTERS_EXTEND_UNSIGNED
8470 buf_addr = convert_memory_address (Pmode, buf_addr);
8471 #endif
8472
8473 buf_addr = force_reg (Pmode, buf_addr);
8474
8475 if (target == 0 || GET_CODE (target) != REG
8476 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8477 target = gen_reg_rtx (value_mode);
8478
8479 emit_queue ();
8480
8481 /* We store the frame pointer and the address of lab1 in the buffer
8482 and use the rest of it for the stack save area, which is
8483 machine-dependent. */
8484
8485 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8486 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8487 #endif
8488
8489 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8490 BUILTIN_SETJMP_FRAME_VALUE);
8491 emit_move_insn (validize_mem
8492 (gen_rtx_MEM (Pmode,
8493 plus_constant (buf_addr,
8494 GET_MODE_SIZE (Pmode)))),
8495 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));
8496
8497 stack_save = gen_rtx_MEM (sa_mode,
8498 plus_constant (buf_addr,
8499 2 * GET_MODE_SIZE (Pmode)));
8500 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8501
8502 /* If there is further processing to do, do it. */
8503 #ifdef HAVE_builtin_setjmp_setup
8504 if (HAVE_builtin_setjmp_setup)
8505 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8506 #endif
8507
8508 /* Set TARGET to zero and branch to the first-time-through label. */
8509 emit_move_insn (target, const0_rtx);
8510 emit_jump_insn (gen_jump (first_label));
8511 emit_barrier ();
8512 emit_label (lab1);
8513
8514 /* Tell flow about the strange goings on. Putting `lab1' on
8515 `nonlocal_goto_handler_labels' indicates that function
8516 calls may traverse the arc back to this label. */
8517
8518 current_function_has_nonlocal_label = 1;
8519 nonlocal_goto_handler_labels =
8520 gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);
8521
8522 /* The FP is clobbered when we get here, so we have to make sure
8523 it's marked as used by this function. */
8524 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8525
8526 /* Mark the static chain as clobbered here so life information
8527 doesn't get messed up for it. */
8528 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8529
8530 /* Now put in the code to restore the frame pointer, and argument
8531 pointer, if needed. The code below is from expand_end_bindings
8532 in stmt.c; see detailed documentation there. */
8533 #ifdef HAVE_nonlocal_goto
8534 if (! HAVE_nonlocal_goto)
8535 #endif
8536 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8537
8538 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8539 if (fixed_regs[ARG_POINTER_REGNUM])
8540 {
8541 #ifdef ELIMINABLE_REGS
8542 size_t i;
8543 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8544
8545 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8546 if (elim_regs[i].from == ARG_POINTER_REGNUM
8547 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8548 break;
8549
8550 if (i == sizeof elim_regs / sizeof elim_regs [0])
8551 #endif
8552 {
8553 /* Now restore our arg pointer from the address at which it
8554 was saved in our stack frame.
8555 If space hasn't been allocated for it yet, make
8556 some now. */
8557 if (arg_pointer_save_area == 0)
8558 arg_pointer_save_area
8559 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8560 emit_move_insn (virtual_incoming_args_rtx,
8561 copy_to_reg (arg_pointer_save_area));
8562 }
8563 }
8564 #endif
8565
8566 #ifdef HAVE_builtin_setjmp_receiver
8567 if (HAVE_builtin_setjmp_receiver)
8568 emit_insn (gen_builtin_setjmp_receiver (lab1));
8569 else
8570 #endif
8571 #ifdef HAVE_nonlocal_goto_receiver
8572 if (HAVE_nonlocal_goto_receiver)
8573 emit_insn (gen_nonlocal_goto_receiver ());
8574 else
8575 #endif
8576 {
8577 ; /* Nothing */
8578 }
8579
8580 /* Set TARGET, and branch to the next-time-through label. */
8581 emit_move_insn (target, const1_rtx);
8582 emit_jump_insn (gen_jump (next_label));
8583 emit_barrier ();
8584
8585 return target;
8586 }
8587
8588 void
8589 expand_builtin_longjmp (buf_addr, value)
8590 rtx buf_addr, value;
8591 {
8592 rtx fp, lab, stack;
8593 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8594
8595 #ifdef POINTERS_EXTEND_UNSIGNED
8596 buf_addr = convert_memory_address (Pmode, buf_addr);
8597 #endif
8598 buf_addr = force_reg (Pmode, buf_addr);
8599
8600 /* We used to store value in static_chain_rtx, but that fails if pointers
8601 are smaller than integers. We instead require that the user must pass
8602 a second argument of 1, because that is what builtin_setjmp will
8603 return. This also makes EH slightly more efficient, since we are no
8604 longer copying around a value that we don't care about. */
8605 if (value != const1_rtx)
8606 abort ();
8607
8608 #ifdef HAVE_builtin_longjmp
8609 if (HAVE_builtin_longjmp)
8610 emit_insn (gen_builtin_longjmp (buf_addr));
8611 else
8612 #endif
8613 {
8614 fp = gen_rtx_MEM (Pmode, buf_addr);
8615 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8616 GET_MODE_SIZE (Pmode)));
8617
8618 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8619 2 * GET_MODE_SIZE (Pmode)));
8620
8621 /* Pick up FP, label, and SP from the block and jump. This code is
8622 from expand_goto in stmt.c; see there for detailed comments. */
8623 #if HAVE_nonlocal_goto
8624 if (HAVE_nonlocal_goto)
8625 /* We have to pass a value to the nonlocal_goto pattern that will
8626 get copied into the static_chain pointer, but it does not matter
8627 what that value is, because builtin_setjmp does not use it. */
8628 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8629 else
8630 #endif
8631 {
8632 lab = copy_to_reg (lab);
8633
8634 emit_move_insn (hard_frame_pointer_rtx, fp);
8635 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8636
8637 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8638 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8639 emit_indirect_jump (lab);
8640 }
8641 }
8642 }
8643
8644 static rtx
8645 get_memory_rtx (exp)
8646 tree exp;
8647 {
8648 rtx mem;
8649 int is_aggregate;
8650
8651 mem = gen_rtx_MEM (BLKmode,
8652 memory_address (BLKmode,
8653 expand_expr (exp, NULL_RTX,
8654 ptr_mode, EXPAND_SUM)));
8655
8656 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8657
8658 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8659 if the value is the address of a structure or if the expression is
8660 cast to a pointer to structure type. */
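  /* E.g. a call such as memcpy ((struct s *) buf, src, n) arrives here
     with the destination wrapped in a NOP_EXPR cast to pointer-to-struct,
     which the loop below detects.  */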
8661 is_aggregate = 0;
8662
8663 while (TREE_CODE (exp) == NOP_EXPR)
8664 {
8665 tree cast_type = TREE_TYPE (exp);
8666 if (TREE_CODE (cast_type) == POINTER_TYPE
8667 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8668 {
8669 is_aggregate = 1;
8670 break;
8671 }
8672 exp = TREE_OPERAND (exp, 0);
8673 }
8674
8675 if (is_aggregate == 0)
8676 {
8677 tree type;
8678
8679 if (TREE_CODE (exp) == ADDR_EXPR)
8680 /* If this is the address of an object, check whether the
8681 object is an array. */
8682 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8683 else
8684 type = TREE_TYPE (TREE_TYPE (exp));
8685 is_aggregate = AGGREGATE_TYPE_P (type);
8686 }
8687
8688 MEM_SET_IN_STRUCT_P (mem, is_aggregate);
8689 return mem;
8690 }
8691
8692 \f
8693 /* Expand an expression EXP that calls a built-in function,
8694 with result going to TARGET if that's convenient
8695 (and in mode MODE if that's convenient).
8696 SUBTARGET may be used as the target for computing one of EXP's operands.
8697 IGNORE is nonzero if the value is to be ignored. */
8698
8699 #define CALLED_AS_BUILT_IN(NODE) \
8700 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
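/* E.g. a call spelled `__builtin_ffs (x)' is open-coded even at -O0,
   while a plain `ffs (x)' is only open-coded when optimizing; see the
   `!optimize && ! CALLED_AS_BUILT_IN' tests below.  */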
8701
8702 static rtx
8703 expand_builtin (exp, target, subtarget, mode, ignore)
8704 tree exp;
8705 rtx target;
8706 rtx subtarget;
8707 enum machine_mode mode;
8708 int ignore;
8709 {
8710 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8711 tree arglist = TREE_OPERAND (exp, 1);
8712 rtx op0;
8713 rtx lab1, insns;
8714 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8715 optab builtin_optab;
8716
8717 switch (DECL_FUNCTION_CODE (fndecl))
8718 {
8719 case BUILT_IN_ABS:
8720 case BUILT_IN_LABS:
8721 case BUILT_IN_FABS:
8722 /* build_function_call changes these into ABS_EXPR. */
8723 abort ();
8724
8725 case BUILT_IN_SIN:
8726 case BUILT_IN_COS:
8727 /* Treat these like sqrt, but only if the user asks for them. */
8728 if (! flag_fast_math)
8729 break;
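      /* Otherwise fall through and open-code these like sqrt.  */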
8730 case BUILT_IN_FSQRT:
8731 /* If not optimizing, call the library function. */
8732 if (! optimize)
8733 break;
8734
8735 if (arglist == 0
8736 /* Arg could be wrong type if user redeclared this fcn wrong. */
8737 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8738 break;
8739
8740 /* Stabilize and compute the argument. */
8741 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8742 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8743 {
8744 exp = copy_node (exp);
8745 arglist = copy_node (arglist);
8746 TREE_OPERAND (exp, 1) = arglist;
8747 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8748 }
8749 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8750
8751 /* Make a suitable register to place result in. */
8752 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8753
8754 emit_queue ();
8755 start_sequence ();
8756
8757 switch (DECL_FUNCTION_CODE (fndecl))
8758 {
8759 case BUILT_IN_SIN:
8760 builtin_optab = sin_optab; break;
8761 case BUILT_IN_COS:
8762 builtin_optab = cos_optab; break;
8763 case BUILT_IN_FSQRT:
8764 builtin_optab = sqrt_optab; break;
8765 default:
8766 abort ();
8767 }
8768
8769 /* Compute into TARGET.
8770 Set TARGET to wherever the result comes back. */
8771 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8772 builtin_optab, op0, target, 0);
8773
8774 /* If we were unable to expand via the builtin, stop the
8775 sequence (without outputting the insns) and break, causing
8776 a call to the library function. */
8777 if (target == 0)
8778 {
8779 end_sequence ();
8780 break;
8781 }
8782
8783 /* Check the results by default. But if flag_fast_math is turned on,
8784 then assume sqrt will always be called with valid arguments. */
8785
8786 if (flag_errno_math && ! flag_fast_math)
8787 {
8788 /* Don't define the builtin FP instructions
8789 if your machine is not IEEE. */
8790 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8791 abort ();
8792
8793 lab1 = gen_label_rtx ();
8794
8795 /* Test the result; if it is NaN, set errno=EDOM because
8796 the argument was not in the domain. */
8797 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
8798 0, 0, lab1);
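	  /* (The self-comparison relies on IEEE semantics: x == x is
	     false only when X is a NaN, so valid results jump past the
	     errno store below.)  */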
8799
8800 #ifdef TARGET_EDOM
8801 {
8802 #ifdef GEN_ERRNO_RTX
8803 rtx errno_rtx = GEN_ERRNO_RTX;
8804 #else
8805 rtx errno_rtx
8806 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8807 #endif
8808
8809 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8810 }
8811 #else
8812 /* We can't set errno=EDOM directly; let the library call do it.
8813 Pop the arguments right away in case the call gets deleted. */
8814 NO_DEFER_POP;
8815 expand_call (exp, target, 0);
8816 OK_DEFER_POP;
8817 #endif
8818
8819 emit_label (lab1);
8820 }
8821
8822 /* Output the entire sequence. */
8823 insns = get_insns ();
8824 end_sequence ();
8825 emit_insns (insns);
8826
8827 return target;
8828
8829 case BUILT_IN_FMOD:
8830 break;
8831
8832 /* __builtin_apply_args returns block of memory allocated on
8833 the stack into which is stored the arg pointer, structure
8834 value address, static chain, and all the registers that might
8835 possibly be used in performing a function call. The code is
8836 moved to the start of the function so the incoming values are
8837 saved. */
8838 case BUILT_IN_APPLY_ARGS:
8839 /* Don't do __builtin_apply_args more than once in a function.
8840 Save the result of the first call and reuse it. */
8841 if (apply_args_value != 0)
8842 return apply_args_value;
8843 {
8844 /* When this function is called, it means that registers must be
8845 saved on entry to this function. So we migrate the
8846 call to the first insn of this function. */
8847 rtx temp;
8848 rtx seq;
8849
8850 start_sequence ();
8851 temp = expand_builtin_apply_args ();
8852 seq = get_insns ();
8853 end_sequence ();
8854
8855 apply_args_value = temp;
8856
8857 /* Put the sequence after the NOTE that starts the function.
8858 If this is inside a SEQUENCE, make the outer-level insn
8859 chain current, so the code is placed at the start of the
8860 function. */
8861 push_topmost_sequence ();
8862 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8863 pop_topmost_sequence ();
8864 return temp;
8865 }
8866
8867 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8868 FUNCTION with a copy of the parameters described by
8869 ARGUMENTS, and ARGSIZE. It returns a block of memory
8870 allocated on the stack into which is stored all the registers
8871 that might possibly be used for returning the result of a
8872 function. ARGUMENTS is the value returned by
8873 __builtin_apply_args. ARGSIZE is the number of bytes of
8874 arguments that must be copied. ??? How should this value be
8875 computed? We'll also need a safe worst case value for varargs
8876 functions. */
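  /* For illustration, the classic use of this builtin family is a
     transparent forwarding function, roughly (a sketch only; `target'
     and the argsize guess of 256 are hypothetical, cf. the ??? above):

	 void *forward ()
	 {
	   void *args = __builtin_apply_args ();
	   void *result = __builtin_apply ((void (*) ()) target, args, 256);
	   __builtin_return (result);
	 }
  */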
8877 case BUILT_IN_APPLY:
8878 if (arglist == 0
8879 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8880 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8881 || TREE_CHAIN (arglist) == 0
8882 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8883 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8884 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8885 return const0_rtx;
8886 else
8887 {
8888 int i;
8889 tree t;
8890 rtx ops[3];
8891
8892 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8893 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8894
8895 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8896 }
8897
8898 /* __builtin_return (RESULT) causes the function to return the
8899 value described by RESULT. RESULT is address of the block of
8900 memory returned by __builtin_apply. */
8901 case BUILT_IN_RETURN:
8902 if (arglist
8903 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8904 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8905 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8906 NULL_RTX, VOIDmode, 0));
8907 return const0_rtx;
8908
8909 case BUILT_IN_SAVEREGS:
8910 /* Don't do __builtin_saveregs more than once in a function.
8911 Save the result of the first call and reuse it. */
8912 if (saveregs_value != 0)
8913 return saveregs_value;
8914 {
8915 /* When this function is called, it means that registers must be
8916 saved on entry to this function. So we migrate the
8917 call to the first insn of this function. */
8918 rtx temp;
8919 rtx seq;
8920
8921 /* Now really call the function. `expand_call' does not call
8922 expand_builtin, so there is no danger of infinite recursion here. */
8923 start_sequence ();
8924
8925 #ifdef EXPAND_BUILTIN_SAVEREGS
8926 /* Do whatever the machine needs done in this case. */
8927 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8928 #else
8929 /* The register where the function returns its value
8930 is likely to have something else in it, such as an argument.
8931 So preserve that register around the call. */
8932
8933 if (value_mode != VOIDmode)
8934 {
8935 rtx valreg = hard_libcall_value (value_mode);
8936 rtx saved_valreg = gen_reg_rtx (value_mode);
8937
8938 emit_move_insn (saved_valreg, valreg);
8939 temp = expand_call (exp, target, ignore);
8940 emit_move_insn (valreg, saved_valreg);
8941 }
8942 else
8943 /* Generate the call, putting the value in a pseudo. */
8944 temp = expand_call (exp, target, ignore);
8945 #endif
8946
8947 seq = get_insns ();
8948 end_sequence ();
8949
8950 saveregs_value = temp;
8951
8952 /* Put the sequence after the NOTE that starts the function.
8953 If this is inside a SEQUENCE, make the outer-level insn
8954 chain current, so the code is placed at the start of the
8955 function. */
8956 push_topmost_sequence ();
8957 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8958 pop_topmost_sequence ();
8959 return temp;
8960 }
8961
8962 /* __builtin_args_info (N) returns word N of the arg space info
8963 for the current function. The number and meanings of words
8964      are controlled by the definition of CUMULATIVE_ARGS.  */
8965 case BUILT_IN_ARGS_INFO:
8966 {
8967 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8968 int *word_ptr = (int *) &current_function_args_info;
8969 #if 0
8970 	/* These are used by the `#if 0'-ed code below.  */
8971 int i;
8972 tree type, elts, result;
8973 #endif
8974
8975 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8976 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8977 __FILE__, __LINE__);
8978
8979 if (arglist != 0)
8980 {
8981 tree arg = TREE_VALUE (arglist);
8982 if (TREE_CODE (arg) != INTEGER_CST)
8983 error ("argument of `__builtin_args_info' must be constant");
8984 else
8985 {
8986 int wordnum = TREE_INT_CST_LOW (arg);
8987
8988 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8989 error ("argument of `__builtin_args_info' out of range");
8990 else
8991 return GEN_INT (word_ptr[wordnum]);
8992 }
8993 }
8994 else
8995 error ("missing argument in `__builtin_args_info'");
8996
8997 return const0_rtx;
8998
8999 #if 0
9000 for (i = 0; i < nwords; i++)
9001 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
9002
9003 type = build_array_type (integer_type_node,
9004 build_index_type (build_int_2 (nwords, 0)));
9005 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
9006 TREE_CONSTANT (result) = 1;
9007 TREE_STATIC (result) = 1;
9008 result = build (INDIRECT_REF, build_pointer_type (type), result);
9009 TREE_CONSTANT (result) = 1;
9010 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
9011 #endif
9012 }
9013
9014 /* Return the address of the first anonymous stack arg. */
9015 case BUILT_IN_NEXT_ARG:
9016 {
9017 tree fntype = TREE_TYPE (current_function_decl);
9018
9019 if ((TYPE_ARG_TYPES (fntype) == 0
9020 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
9021 == void_type_node))
9022 && ! current_function_varargs)
9023 {
9024 error ("`va_start' used in function with fixed args");
9025 return const0_rtx;
9026 }
9027
9028 if (arglist)
9029 {
9030 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9031 tree arg = TREE_VALUE (arglist);
9032
9033 /* Strip off all nops for the sake of the comparison. This
9034 is not quite the same as STRIP_NOPS. It does more.
9035 	     We must also strip off INDIRECT_REF for C++ reference
9036 parameters. */
9037 while (TREE_CODE (arg) == NOP_EXPR
9038 || TREE_CODE (arg) == CONVERT_EXPR
9039 || TREE_CODE (arg) == NON_LVALUE_EXPR
9040 || TREE_CODE (arg) == INDIRECT_REF)
9041 arg = TREE_OPERAND (arg, 0);
9042 if (arg != last_parm)
9043 warning ("second parameter of `va_start' not last named argument");
9044 }
9045 else if (! current_function_varargs)
9046 	/* Evidently an out-of-date version of <stdarg.h>; can't validate
9047 va_start's second argument, but can still work as intended. */
9048 warning ("`__builtin_next_arg' called without an argument");
9049 }
9050
9051 return expand_binop (Pmode, add_optab,
9052 current_function_internal_arg_pointer,
9053 current_function_arg_offset_rtx,
9054 NULL_RTX, 0, OPTAB_LIB_WIDEN);
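    /* For example, a traditional <stdarg.h> defines va_start roughly as
       (an illustrative sketch, not any particular target's definition):

	   #define va_start(AP, LAST) \
	     ((AP) = (char *) __builtin_next_arg (LAST))
    */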
9055
9056 case BUILT_IN_CLASSIFY_TYPE:
9057 if (arglist != 0)
9058 {
9059 tree type = TREE_TYPE (TREE_VALUE (arglist));
9060 enum tree_code code = TREE_CODE (type);
9061 if (code == VOID_TYPE)
9062 return GEN_INT (void_type_class);
9063 if (code == INTEGER_TYPE)
9064 return GEN_INT (integer_type_class);
9065 if (code == CHAR_TYPE)
9066 return GEN_INT (char_type_class);
9067 if (code == ENUMERAL_TYPE)
9068 return GEN_INT (enumeral_type_class);
9069 if (code == BOOLEAN_TYPE)
9070 return GEN_INT (boolean_type_class);
9071 if (code == POINTER_TYPE)
9072 return GEN_INT (pointer_type_class);
9073 if (code == REFERENCE_TYPE)
9074 return GEN_INT (reference_type_class);
9075 if (code == OFFSET_TYPE)
9076 return GEN_INT (offset_type_class);
9077 if (code == REAL_TYPE)
9078 return GEN_INT (real_type_class);
9079 if (code == COMPLEX_TYPE)
9080 return GEN_INT (complex_type_class);
9081 if (code == FUNCTION_TYPE)
9082 return GEN_INT (function_type_class);
9083 if (code == METHOD_TYPE)
9084 return GEN_INT (method_type_class);
9085 if (code == RECORD_TYPE)
9086 return GEN_INT (record_type_class);
9087 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
9088 return GEN_INT (union_type_class);
9089 if (code == ARRAY_TYPE)
9090 {
9091 if (TYPE_STRING_FLAG (type))
9092 return GEN_INT (string_type_class);
9093 else
9094 return GEN_INT (array_type_class);
9095 }
9096 if (code == SET_TYPE)
9097 return GEN_INT (set_type_class);
9098 if (code == FILE_TYPE)
9099 return GEN_INT (file_type_class);
9100 if (code == LANG_TYPE)
9101 return GEN_INT (lang_type_class);
9102 }
9103 return GEN_INT (no_type_class);
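      /* E.g. __builtin_classify_type (1.5) folds to real_type_class, and
	 __builtin_classify_type ("") to pointer_type_class, since an array
	 argument decays to a pointer; the values come from typeclass.h.  */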
9104
9105 case BUILT_IN_CONSTANT_P:
9106 if (arglist == 0)
9107 return const0_rtx;
9108 else
9109 {
9110 tree arg = TREE_VALUE (arglist);
9111 rtx tmp;
9112
9113 /* We return 1 for a numeric type that's known to be a constant
9114 value at compile-time or for an aggregate type that's a
9115 literal constant. */
9116 STRIP_NOPS (arg);
9117
9118 /* If we know this is a constant, emit the constant of one. */
9119 if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9120 || (TREE_CODE (arg) == CONSTRUCTOR
9121 && TREE_CONSTANT (arg))
9122 || (TREE_CODE (arg) == ADDR_EXPR
9123 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9124 return const1_rtx;
9125
9126 	/* If we aren't going to be running CSE or this expression
9127 	   has side effects, show we don't know it to be a constant.
9128 	   Likewise if it's a pointer or aggregate type, since for those
9129 	   we only want literals; they are only optimized while
9130 	   generating RTL, not later.  */
9131 if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9132 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9133 || POINTER_TYPE_P (TREE_TYPE (arg)))
9134 return const0_rtx;
9135
9136 /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9137 chance to see if it can deduce whether ARG is constant. */
9138
9139 tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
9140 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
9141 return tmp;
9142 }
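      /* Examples of the three outcomes above:
	   __builtin_constant_p (3 * 7)   folds to a 'c'-class node, giving 1;
	   __builtin_constant_p (argv)    is pointer-typed, giving 0 at once;
	   __builtin_constant_p (x + 1)   with integer variable X becomes a
	     CONSTANT_P_RTX for CSE to resolve either way.  */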
9143
9144 case BUILT_IN_FRAME_ADDRESS:
9145 /* The argument must be a nonnegative integer constant.
9146 It counts the number of frames to scan up the stack.
9147 The value is the address of that frame. */
9148 case BUILT_IN_RETURN_ADDRESS:
9149 /* The argument must be a nonnegative integer constant.
9150 It counts the number of frames to scan up the stack.
9151 The value is the return address saved in that frame. */
9152 if (arglist == 0)
9153 /* Warning about missing arg was already issued. */
9154 return const0_rtx;
9155 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9156 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9157 {
9158 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9159 error ("invalid arg to `__builtin_frame_address'");
9160 else
9161 error ("invalid arg to `__builtin_return_address'");
9162 return const0_rtx;
9163 }
9164 else
9165 {
9166 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9167 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9168 hard_frame_pointer_rtx);
9169
9170 /* Some ports cannot access arbitrary stack frames. */
9171 if (tem == NULL)
9172 {
9173 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9174 warning ("unsupported arg to `__builtin_frame_address'");
9175 else
9176 warning ("unsupported arg to `__builtin_return_address'");
9177 return const0_rtx;
9178 }
9179
9180 /* For __builtin_frame_address, return what we've got. */
9181 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9182 return tem;
9183
9184 if (GET_CODE (tem) != REG
9185 && ! CONSTANT_P (tem))
9186 tem = copy_to_mode_reg (Pmode, tem);
9187 return tem;
9188 }
9189
9190       /* Returns the address of the area where the structure value is
9191 	 returned, or 0 if there is none.  */
9192 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9193 if (arglist != 0
9194 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9195 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9196 return const0_rtx;
9197 else
9198 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9199
9200 case BUILT_IN_ALLOCA:
9201 if (arglist == 0
9202 /* Arg could be non-integer if user redeclared this fcn wrong. */
9203 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9204 break;
9205
9206 /* Compute the argument. */
9207 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9208
9209 /* Allocate the desired space. */
9210 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9211
9212 case BUILT_IN_FFS:
9213 /* If not optimizing, call the library function. */
9214 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9215 break;
9216
9217 if (arglist == 0
9218 /* Arg could be non-integer if user redeclared this fcn wrong. */
9219 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9220 break;
9221
9222 /* Compute the argument. */
9223 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9224 /* Compute ffs, into TARGET if possible.
9225 Set TARGET to wherever the result comes back. */
9226 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9227 ffs_optab, op0, target, 1);
9228 if (target == 0)
9229 abort ();
9230 return target;
9231
9232 case BUILT_IN_STRLEN:
9233 /* If not optimizing, call the library function. */
9234 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9235 break;
9236
9237 if (arglist == 0
9238 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9239 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9240 break;
9241 else
9242 {
9243 tree src = TREE_VALUE (arglist);
9244 tree len = c_strlen (src);
9245
9246 int align
9247 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9248
9249 rtx result, src_rtx, char_rtx;
9250 enum machine_mode insn_mode = value_mode, char_mode;
9251 enum insn_code icode;
9252
9253 /* If the length is known, just return it. */
9254 if (len != 0)
9255 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
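	  /* (E.g. strlen ("abc") is folded to the constant 3 here,
	     generating no code at all.)  */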
9256
9257 /* If SRC is not a pointer type, don't do this operation inline. */
9258 if (align == 0)
9259 break;
9260
9261 /* Call a function if we can't compute strlen in the right mode. */
9262
9263 while (insn_mode != VOIDmode)
9264 {
9265 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9266 if (icode != CODE_FOR_nothing)
9267 break;
9268
9269 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9270 }
9271 if (insn_mode == VOIDmode)
9272 break;
9273
9274 /* Make a place to write the result of the instruction. */
9275 result = target;
9276 if (! (result != 0
9277 && GET_CODE (result) == REG
9278 && GET_MODE (result) == insn_mode
9279 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9280 result = gen_reg_rtx (insn_mode);
9281
9282 /* Make sure the operands are acceptable to the predicates. */
9283
9284 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9285 result = gen_reg_rtx (insn_mode);
9286 src_rtx = memory_address (BLKmode,
9287 expand_expr (src, NULL_RTX, ptr_mode,
9288 EXPAND_NORMAL));
9289
9290 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9291 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9292
9293 	/* Check that the string is readable and NUL-terminated.  */
9294 if (current_function_check_memory_usage)
9295 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9296 src_rtx, Pmode,
9297 GEN_INT (MEMORY_USE_RO),
9298 TYPE_MODE (integer_type_node));
9299
9300 char_rtx = const0_rtx;
9301 char_mode = insn_operand_mode[(int)icode][2];
9302 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9303 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9304
9305 emit_insn (GEN_FCN (icode) (result,
9306 gen_rtx_MEM (BLKmode, src_rtx),
9307 char_rtx, GEN_INT (align)));
9308
9309 /* Return the value in the proper mode for this function. */
9310 if (GET_MODE (result) == value_mode)
9311 return result;
9312 else if (target != 0)
9313 {
9314 convert_move (target, result, 0);
9315 return target;
9316 }
9317 else
9318 return convert_to_mode (value_mode, result, 0);
9319 }
9320
9321 case BUILT_IN_STRCPY:
9322 /* If not optimizing, call the library function. */
9323 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9324 break;
9325
9326 if (arglist == 0
9327 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9328 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9329 || TREE_CHAIN (arglist) == 0
9330 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9331 break;
9332 else
9333 {
9334 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9335
9336 if (len == 0)
9337 break;
9338
9339 len = size_binop (PLUS_EXPR, len, integer_one_node);
9340
9341 chainon (arglist, build_tree_list (NULL_TREE, len));
9342 }
9343
9344       /* Falls through to the memcpy case.  */
9345 case BUILT_IN_MEMCPY:
9346 /* If not optimizing, call the library function. */
9347 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9348 break;
9349
9350 if (arglist == 0
9351 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9352 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9353 || TREE_CHAIN (arglist) == 0
9354 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9355 != POINTER_TYPE)
9356 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9357 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9358 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9359 != INTEGER_TYPE))
9360 break;
9361 else
9362 {
9363 tree dest = TREE_VALUE (arglist);
9364 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9365 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9366
9367 int src_align
9368 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9369 int dest_align
9370 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9371 rtx dest_mem, src_mem, dest_addr, len_rtx;
9372
9373 /* If either SRC or DEST is not a pointer type, don't do
9374 this operation in-line. */
9375 if (src_align == 0 || dest_align == 0)
9376 {
9377 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9378 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9379 break;
9380 }
9381
9382 dest_mem = get_memory_rtx (dest);
9383 src_mem = get_memory_rtx (src);
9384 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9385
9386 	/* Just copy the memory-checker access rights of SRC to DEST.  */
9387 if (current_function_check_memory_usage)
9388 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9389 XEXP (dest_mem, 0), Pmode,
9390 XEXP (src_mem, 0), Pmode,
9391 len_rtx, TYPE_MODE (sizetype));
9392
9393 /* Copy word part most expediently. */
9394 dest_addr
9395 = emit_block_move (dest_mem, src_mem, len_rtx,
9396 MIN (src_align, dest_align));
9397
9398 if (dest_addr == 0)
9399 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9400
9401 return dest_addr;
9402 }
9403
9404 case BUILT_IN_MEMSET:
9405 /* If not optimizing, call the library function. */
9406 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9407 break;
9408
9409 if (arglist == 0
9410 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9411 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9412 || TREE_CHAIN (arglist) == 0
9413 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9414 != INTEGER_TYPE)
9415 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9416 	  || (TREE_CODE (TREE_TYPE
9417 			 (TREE_VALUE
9418 			  (TREE_CHAIN (TREE_CHAIN (arglist)))))
9419 	      != INTEGER_TYPE))
9420 break;
9421 else
9422 {
9423 tree dest = TREE_VALUE (arglist);
9424 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9425 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9426
9427 int dest_align
9428 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9429 rtx dest_mem, dest_addr, len_rtx;
9430
9431 /* If DEST is not a pointer type, don't do this
9432 operation in-line. */
9433 if (dest_align == 0)
9434 break;
9435
9436 /* If the arguments have side-effects, then we can only evaluate
9437 them at most once. The following code evaluates them twice if
9438 they are not constants because we break out to expand_call
9439 in that case. They can't be constants if they have side-effects
9440 so we can check for that first. Alternatively, we could call
9441 save_expr to make multiple evaluation safe. */
9442 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9443 break;
9444
9445 /* If VAL is not 0, don't do this operation in-line. */
9446 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9447 break;
9448
9449 /* If LEN does not expand to a constant, don't do this
9450 operation in-line. */
9451 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9452 if (GET_CODE (len_rtx) != CONST_INT)
9453 break;
9454
9455 dest_mem = get_memory_rtx (dest);
9456
9457 	/* Just check that DEST is writable, then mark it as readable.  */
9458 if (current_function_check_memory_usage)
9459 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9460 XEXP (dest_mem, 0), Pmode,
9461 len_rtx, TYPE_MODE (sizetype),
9462 GEN_INT (MEMORY_USE_WO),
9463 TYPE_MODE (integer_type_node));
9464
9465
9466 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9467
9468 if (dest_addr == 0)
9469 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9470
9471 return dest_addr;
9472 }
9473
9474 /* These comparison functions need an instruction that returns an actual
9475 index. An ordinary compare that just sets the condition codes
9476 is not enough. */
9477 #ifdef HAVE_cmpstrsi
9478 case BUILT_IN_STRCMP:
9479 /* If not optimizing, call the library function. */
9480 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9481 break;
9482
9483 /* If we need to check memory accesses, call the library function. */
9484 if (current_function_check_memory_usage)
9485 break;
9486
9487 if (arglist == 0
9488 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9489 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9490 || TREE_CHAIN (arglist) == 0
9491 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9492 break;
9493 else if (!HAVE_cmpstrsi)
9494 break;
9495 {
9496 tree arg1 = TREE_VALUE (arglist);
9497 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9498 tree len, len2;
9499
9500 len = c_strlen (arg1);
9501 if (len)
9502 len = size_binop (PLUS_EXPR, integer_one_node, len);
9503 len2 = c_strlen (arg2);
9504 if (len2)
9505 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9506
9507 /* If we don't have a constant length for the first, use the length
9508 of the second, if we know it. We don't require a constant for
9509 this case; some cost analysis could be done if both are available
9510 but neither is constant. For now, assume they're equally cheap.
9511
9512 If both strings have constant lengths, use the smaller. This
9513 	   could arise if optimization results in strcmp being called with
9514 two fixed strings, or if the code was machine-generated. We should
9515 add some code to the `memcmp' handler below to deal with such
9516 situations, someday. */
9517 if (!len || TREE_CODE (len) != INTEGER_CST)
9518 {
9519 if (len2)
9520 len = len2;
9521 else if (len == 0)
9522 break;
9523 }
9524 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9525 {
9526 if (tree_int_cst_lt (len2, len))
9527 len = len2;
9528 }
9529
9530 chainon (arglist, build_tree_list (NULL_TREE, len));
9531 }
9532
9533       /* Falls through to the memcmp case.  */
9534 case BUILT_IN_MEMCMP:
9535 /* If not optimizing, call the library function. */
9536 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9537 break;
9538
9539 /* If we need to check memory accesses, call the library function. */
9540 if (current_function_check_memory_usage)
9541 break;
9542
9543 if (arglist == 0
9544 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9545 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9546 || TREE_CHAIN (arglist) == 0
9547 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9548 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9549 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9550 break;
9551 else if (!HAVE_cmpstrsi)
9552 break;
9553 {
9554 tree arg1 = TREE_VALUE (arglist);
9555 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9556 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9557 rtx result;
9558
9559 int arg1_align
9560 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9561 int arg2_align
9562 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9563 enum machine_mode insn_mode
9564 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9565
9566 	/* If either argument isn't a pointer, punt to the library function.  */
9567 if (arg1_align == 0 || arg2_align == 0)
9568 {
9569 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9570 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9571 break;
9572 }
9573
9574 /* Make a place to write the result of the instruction. */
9575 result = target;
9576 if (! (result != 0
9577 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9578 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9579 result = gen_reg_rtx (insn_mode);
9580
9581 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9582 get_memory_rtx (arg2),
9583 expand_expr (len, NULL_RTX, VOIDmode, 0),
9584 GEN_INT (MIN (arg1_align, arg2_align))));
9585
9586 /* Return the value in the proper mode for this function. */
9587 mode = TYPE_MODE (TREE_TYPE (exp));
9588 if (GET_MODE (result) == mode)
9589 return result;
9590 else if (target != 0)
9591 {
9592 convert_move (target, result, 0);
9593 return target;
9594 }
9595 else
9596 return convert_to_mode (mode, result, 0);
9597 }
9598 #else
9599 case BUILT_IN_STRCMP:
9600 case BUILT_IN_MEMCMP:
9601 break;
9602 #endif
9603
9604 case BUILT_IN_SETJMP:
9605 if (arglist == 0
9606 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9607 break;
9608 else
9609 {
9610 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9611 VOIDmode, 0);
9612 rtx lab = gen_label_rtx ();
9613 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9614 emit_label (lab);
9615 return ret;
9616 }
9617
9618 /* __builtin_longjmp is passed a pointer to an array of five words.
9619 It's similar to the C library longjmp function but works with
9620 __builtin_setjmp above. */
9621 case BUILT_IN_LONGJMP:
9622 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9623 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9624 break;
9625 else
9626 {
9627 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9628 VOIDmode, 0);
9629 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9630 NULL_RTX, VOIDmode, 0);
9631
9632 if (value != const1_rtx)
9633 {
9634 error ("__builtin_longjmp second argument must be 1");
9635 return const0_rtx;
9636 }
9637
9638 expand_builtin_longjmp (buf_addr, value);
9639 return const0_rtx;
9640 }
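      /* The intended pairing, for illustration (a sketch; BUF is the
	 five-word buffer these builtins require, not a library jmp_buf,
	 and do_work/handle_resume are hypothetical; do_work may call
	 __builtin_longjmp (buf, 1) to resume at the else branch):

	     void *buf[5];
	     if (__builtin_setjmp (buf) == 0)
	       do_work ();
	     else
	       handle_resume ();
      */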
9641
9642 case BUILT_IN_TRAP:
9643 #ifdef HAVE_trap
9644 if (HAVE_trap)
9645 emit_insn (gen_trap ());
9646 else
9647 #endif
9648 error ("__builtin_trap not supported by this target");
9649 emit_barrier ();
9650 return const0_rtx;
9651
9652 /* Various hooks for the DWARF 2 __throw routine. */
9653 case BUILT_IN_UNWIND_INIT:
9654 expand_builtin_unwind_init ();
9655 return const0_rtx;
9656 case BUILT_IN_DWARF_CFA:
9657 return virtual_cfa_rtx;
9658 #ifdef DWARF2_UNWIND_INFO
9659 case BUILT_IN_DWARF_FP_REGNUM:
9660 return expand_builtin_dwarf_fp_regnum ();
9661 case BUILT_IN_DWARF_REG_SIZE:
9662 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9663 #endif
9664 case BUILT_IN_FROB_RETURN_ADDR:
9665 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9666 case BUILT_IN_EXTRACT_RETURN_ADDR:
9667 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9668 case BUILT_IN_EH_RETURN:
9669 expand_builtin_eh_return (TREE_VALUE (arglist),
9670 TREE_VALUE (TREE_CHAIN (arglist)),
9671 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9672 return const0_rtx;
9673
9674 default: /* just do library call, if unknown builtin */
9675 error ("built-in function `%s' not currently supported",
9676 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9677 }
9678
9679 /* The switch statement above can drop through to cause the function
9680 to be called normally. */
9681
9682 return expand_call (exp, target, ignore);
9683 }
9684 \f
9685 /* Built-in functions to perform an untyped call and return. */
9686
9687 /* For each register that may be used for calling a function, this
9688 gives a mode used to copy the register's value. VOIDmode indicates
9689 the register is not used for calling a function. If the machine
9690 has register windows, this gives only the outbound registers.
9691 INCOMING_REGNO gives the corresponding inbound register. */
9692 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9693
9694 /* For each register that may be used for returning values, this gives
9695 a mode used to copy the register's value. VOIDmode indicates the
9696 register is not used for returning values. If the machine has
9697 register windows, this gives only the outbound registers.
9698 INCOMING_REGNO gives the corresponding inbound register. */
9699 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9700
9701 /* For each register that may be used for calling a function, this
9702 gives the offset of that register into the block returned by
9703 __builtin_apply_args. 0 indicates that the register is not
9704 used for calling a function. */
9705 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9706
9707 /* Return the offset of register REGNO into the block returned by
9708 __builtin_apply_args. This is not declared static, since it is
9709 needed in objc-act.c. */
9710
9711 int
9712 apply_args_register_offset (regno)
9713 int regno;
9714 {
9715 apply_args_size ();
9716
9717 /* Arguments are always put in outgoing registers (in the argument
9718      block) when that makes sense.  */
9719 #ifdef OUTGOING_REGNO
9720   regno = OUTGOING_REGNO (regno);
9721 #endif
9722 return apply_args_reg_offset[regno];
9723 }
9724
9725 /* Return the size required for the block returned by __builtin_apply_args,
9726 and initialize apply_args_mode. */
9727
9728 static int
9729 apply_args_size ()
9730 {
9731 static int size = -1;
9732 int align, regno;
9733 enum machine_mode mode;
9734
9735 /* The values computed by this function never change. */
9736 if (size < 0)
9737 {
9738 /* The first value is the incoming arg-pointer. */
9739 size = GET_MODE_SIZE (Pmode);
9740
9741 /* The second value is the structure value address unless this is
9742 passed as an "invisible" first argument. */
9743 if (struct_value_rtx)
9744 size += GET_MODE_SIZE (Pmode);
9745
9746 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9747 if (FUNCTION_ARG_REGNO_P (regno))
9748 {
9749 /* Search for the proper mode for copying this register's
9750 value. I'm not sure this is right, but it works so far. */
9751 enum machine_mode best_mode = VOIDmode;
9752
9753 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9754 mode != VOIDmode;
9755 mode = GET_MODE_WIDER_MODE (mode))
9756 if (HARD_REGNO_MODE_OK (regno, mode)
9757 && HARD_REGNO_NREGS (regno, mode) == 1)
9758 best_mode = mode;
9759
9760 if (best_mode == VOIDmode)
9761 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9762 mode != VOIDmode;
9763 mode = GET_MODE_WIDER_MODE (mode))
9764 if (HARD_REGNO_MODE_OK (regno, mode)
9765 && (mov_optab->handlers[(int) mode].insn_code
9766 != CODE_FOR_nothing))
9767 best_mode = mode;
9768
9769 mode = best_mode;
9770 if (mode == VOIDmode)
9771 abort ();
9772
9773 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9774 if (size % align != 0)
9775 size = CEIL (size, align) * align;
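	    /* (E.g. a running SIZE of 6 with ALIGN 4 rounds up to
	       CEIL (6, 4) * 4 == 8.)  */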
9776 apply_args_reg_offset[regno] = size;
9777 size += GET_MODE_SIZE (mode);
9778 apply_args_mode[regno] = mode;
9779 }
9780 else
9781 {
9782 apply_args_mode[regno] = VOIDmode;
9783 apply_args_reg_offset[regno] = 0;
9784 }
9785 }
9786 return size;
9787 }
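/* For example, on a hypothetical 32-bit target with struct_value_rtx set
   and two 4-byte argument registers, the block is laid out as: offset 0
   the incoming arg pointer, offset 4 the structure value address, offsets
   8 and 12 the two registers; apply_args_size () then returns 16.  */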
9788
9789 /* Return the size required for the block returned by __builtin_apply,
9790 and initialize apply_result_mode. */
9791
9792 static int
9793 apply_result_size ()
9794 {
9795 static int size = -1;
9796 int align, regno;
9797 enum machine_mode mode;
9798
9799 /* The values computed by this function never change. */
9800 if (size < 0)
9801 {
9802 size = 0;
9803
9804 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9805 if (FUNCTION_VALUE_REGNO_P (regno))
9806 {
9807 /* Search for the proper mode for copying this register's
9808 value. I'm not sure this is right, but it works so far. */
9809 enum machine_mode best_mode = VOIDmode;
9810
9811 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9812 mode != TImode;
9813 mode = GET_MODE_WIDER_MODE (mode))
9814 if (HARD_REGNO_MODE_OK (regno, mode))
9815 best_mode = mode;
9816
9817 if (best_mode == VOIDmode)
9818 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9819 mode != VOIDmode;
9820 mode = GET_MODE_WIDER_MODE (mode))
9821 if (HARD_REGNO_MODE_OK (regno, mode)
9822 && (mov_optab->handlers[(int) mode].insn_code
9823 != CODE_FOR_nothing))
9824 best_mode = mode;
9825
9826 mode = best_mode;
9827 if (mode == VOIDmode)
9828 abort ();
9829
9830 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9831 if (size % align != 0)
9832 size = CEIL (size, align) * align;
9833 size += GET_MODE_SIZE (mode);
9834 apply_result_mode[regno] = mode;
9835 }
9836 else
9837 apply_result_mode[regno] = VOIDmode;
9838
9839 /* Allow targets that use untyped_call and untyped_return to override
9840 the size so that machine-specific information can be stored here. */
9841 #ifdef APPLY_RESULT_SIZE
9842 size = APPLY_RESULT_SIZE;
9843 #endif
9844 }
9845 return size;
9846 }
9847
9848 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9849 /* Create a vector describing the result block RESULT. If SAVEP is true,
9850 the result block is used to save the values; otherwise it is used to
9851 restore the values. */
9852
9853 static rtx
9854 result_vector (savep, result)
9855 int savep;
9856 rtx result;
9857 {
9858 int regno, size, align, nelts;
9859 enum machine_mode mode;
9860 rtx reg, mem;
9861 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9862
9863 size = nelts = 0;
9864 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9865 if ((mode = apply_result_mode[regno]) != VOIDmode)
9866 {
9867 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9868 if (size % align != 0)
9869 size = CEIL (size, align) * align;
9870 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9871 mem = change_address (result, mode,
9872 plus_constant (XEXP (result, 0), size));
9873 savevec[nelts++] = (savep
9874 ? gen_rtx_SET (VOIDmode, mem, reg)
9875 : gen_rtx_SET (VOIDmode, reg, mem));
9876 size += GET_MODE_SIZE (mode);
9877 }
9878 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9879 }
9880 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9881
9882 /* Save the state required to perform an untyped call with the same
9883 arguments as were passed to the current function. */
9884
9885 static rtx
9886 expand_builtin_apply_args ()
9887 {
9888 rtx registers;
9889 int size, align, regno;
9890 enum machine_mode mode;
9891
9892 /* Create a block where the arg-pointer, structure value address,
9893 and argument registers can be saved. */
9894 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9895
9896 /* Walk past the arg-pointer and structure value address. */
9897 size = GET_MODE_SIZE (Pmode);
9898 if (struct_value_rtx)
9899 size += GET_MODE_SIZE (Pmode);
9900
9901 /* Save each register used in calling a function to the block. */
9902 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9903 if ((mode = apply_args_mode[regno]) != VOIDmode)
9904 {
9905 rtx tem;
9906
9907 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9908 if (size % align != 0)
9909 size = CEIL (size, align) * align;
9910
9911 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9912
9913 #ifdef STACK_REGS
9914 	/* For reg-stack.c's stack register housekeeping.
9915 Compare with a similar piece of code in function.c. */
9916
9917 emit_insn (gen_rtx_USE (mode, tem));
9918 #endif
9919
9920 emit_move_insn (change_address (registers, mode,
9921 plus_constant (XEXP (registers, 0),
9922 size)),
9923 tem);
9924 size += GET_MODE_SIZE (mode);
9925 }
9926
9927 /* Save the arg pointer to the block. */
9928 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9929 copy_to_reg (virtual_incoming_args_rtx));
9930 size = GET_MODE_SIZE (Pmode);
9931
9932 /* Save the structure value address unless this is passed as an
9933 "invisible" first argument. */
9934 if (struct_value_incoming_rtx)
9935 {
9936 emit_move_insn (change_address (registers, Pmode,
9937 plus_constant (XEXP (registers, 0),
9938 size)),
9939 copy_to_reg (struct_value_incoming_rtx));
9940 size += GET_MODE_SIZE (Pmode);
9941 }
9942
9943 /* Return the address of the block. */
9944 return copy_addr_to_reg (XEXP (registers, 0));
9945 }
9946
9947 /* Perform an untyped call and save the state required to perform an
9948 untyped return of whatever value was returned by the given function. */
9949
9950 static rtx
9951 expand_builtin_apply (function, arguments, argsize)
9952 rtx function, arguments, argsize;
9953 {
9954 int size, align, regno;
9955 enum machine_mode mode;
9956 rtx incoming_args, result, reg, dest, call_insn;
9957 rtx old_stack_level = 0;
9958 rtx call_fusage = 0;
9959
9960 /* Create a block where the return registers can be saved. */
9961 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9962
9963 /* ??? The argsize value should be adjusted here. */
9964
9965 /* Fetch the arg pointer from the ARGUMENTS block. */
9966 incoming_args = gen_reg_rtx (Pmode);
9967 emit_move_insn (incoming_args,
9968 gen_rtx_MEM (Pmode, arguments));
9969 #ifndef STACK_GROWS_DOWNWARD
9970 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9971 incoming_args, 0, OPTAB_LIB_WIDEN);
9972 #endif
9973
9974 /* Perform postincrements before actually calling the function. */
9975 emit_queue ();
9976
9977 /* Push a new argument block and copy the arguments. */
9978 do_pending_stack_adjust ();
9979
9980   /* Save the stack with the nonlocal variant if available.  */
9981 #ifdef HAVE_save_stack_nonlocal
9982 if (HAVE_save_stack_nonlocal)
9983 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9984 else
9985 #endif
9986 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9987
9988 /* Push a block of memory onto the stack to store the memory arguments.
9989 Save the address in a register, and copy the memory arguments. ??? I
9990      haven't figured out how the calling convention macros affect this,
9991      but it's likely that the source and/or destination addresses in
9992      the block copy will need updating in machine-specific ways.  */
9993 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9994 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9995 gen_rtx_MEM (BLKmode, incoming_args),
9996 argsize,
9997 PARM_BOUNDARY / BITS_PER_UNIT);
9998
9999 /* Refer to the argument block. */
10000 apply_args_size ();
10001 arguments = gen_rtx_MEM (BLKmode, arguments);
10002
10003 /* Walk past the arg-pointer and structure value address. */
10004 size = GET_MODE_SIZE (Pmode);
10005 if (struct_value_rtx)
10006 size += GET_MODE_SIZE (Pmode);
10007
10008 /* Restore each of the registers previously saved. Make USE insns
10009 for each of these registers for use in making the call. */
10010 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10011 if ((mode = apply_args_mode[regno]) != VOIDmode)
10012 {
10013 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10014 if (size % align != 0)
10015 size = CEIL (size, align) * align;
10016 reg = gen_rtx_REG (mode, regno);
10017 emit_move_insn (reg,
10018 change_address (arguments, mode,
10019 plus_constant (XEXP (arguments, 0),
10020 size)));
10021
10022 use_reg (&call_fusage, reg);
10023 size += GET_MODE_SIZE (mode);
10024 }
10025
10026 /* Restore the structure value address unless this is passed as an
10027 "invisible" first argument. */
10028 size = GET_MODE_SIZE (Pmode);
10029 if (struct_value_rtx)
10030 {
10031 rtx value = gen_reg_rtx (Pmode);
10032 emit_move_insn (value,
10033 change_address (arguments, Pmode,
10034 plus_constant (XEXP (arguments, 0),
10035 size)));
10036 emit_move_insn (struct_value_rtx, value);
10037 if (GET_CODE (struct_value_rtx) == REG)
10038 use_reg (&call_fusage, struct_value_rtx);
10039 size += GET_MODE_SIZE (Pmode);
10040 }
10041
10042 /* All arguments and registers used for the call are set up by now! */
10043 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
10044
10045   /* Ensure address is valid.  SYMBOL_REF is already valid, so there is no need,
10046 and we don't want to load it into a register as an optimization,
10047 because prepare_call_address already did it if it should be done. */
10048 if (GET_CODE (function) != SYMBOL_REF)
10049 function = memory_address (FUNCTION_MODE, function);
10050
10051 /* Generate the actual call instruction and save the return value. */
10052 #ifdef HAVE_untyped_call
10053 if (HAVE_untyped_call)
10054 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
10055 result, result_vector (1, result)));
10056 else
10057 #endif
10058 #ifdef HAVE_call_value
10059 if (HAVE_call_value)
10060 {
10061 rtx valreg = 0;
10062
10063 /* Locate the unique return register. It is not possible to
10064 express a call that sets more than one return register using
10065 call_value; use untyped_call for that. In fact, untyped_call
10066 only needs to save the return registers in the given block. */
10067 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10068 if ((mode = apply_result_mode[regno]) != VOIDmode)
10069 {
10070 if (valreg)
10071 abort (); /* HAVE_untyped_call required. */
10072 valreg = gen_rtx_REG (mode, regno);
10073 }
10074
10075 emit_call_insn (gen_call_value (valreg,
10076 gen_rtx_MEM (FUNCTION_MODE, function),
10077 const0_rtx, NULL_RTX, const0_rtx));
10078
10079 emit_move_insn (change_address (result, GET_MODE (valreg),
10080 XEXP (result, 0)),
10081 valreg);
10082 }
10083 else
10084 #endif
10085 abort ();
10086
10087 /* Find the CALL insn we just emitted. */
10088 for (call_insn = get_last_insn ();
10089 call_insn && GET_CODE (call_insn) != CALL_INSN;
10090 call_insn = PREV_INSN (call_insn))
10091 ;
10092
10093 if (! call_insn)
10094 abort ();
10095
10096 /* Put the register usage information on the CALL. If there is already
10097 some usage information, put ours at the end. */
10098 if (CALL_INSN_FUNCTION_USAGE (call_insn))
10099 {
10100 rtx link;
10101
10102 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10103 link = XEXP (link, 1))
10104 ;
10105
10106 XEXP (link, 1) = call_fusage;
10107 }
10108 else
10109 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10110
10111 /* Restore the stack. */
10112 #ifdef HAVE_save_stack_nonlocal
10113 if (HAVE_save_stack_nonlocal)
10114 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10115 else
10116 #endif
10117 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10118
10119 /* Return the address of the result block. */
10120 return copy_addr_to_reg (XEXP (result, 0));
10121 }
10122
10123 /* Perform an untyped return. */
10124
10125 static void
10126 expand_builtin_return (result)
10127 rtx result;
10128 {
10129 int size, align, regno;
10130 enum machine_mode mode;
10131 rtx reg;
10132 rtx call_fusage = 0;
10133
10134 apply_result_size ();
10135 result = gen_rtx_MEM (BLKmode, result);
10136
10137 #ifdef HAVE_untyped_return
10138 if (HAVE_untyped_return)
10139 {
10140 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10141 emit_barrier ();
10142 return;
10143 }
10144 #endif
10145
10146 /* Restore the return value and note that each value is used. */
10147 size = 0;
10148 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10149 if ((mode = apply_result_mode[regno]) != VOIDmode)
10150 {
10151 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10152 if (size % align != 0)
10153 size = CEIL (size, align) * align;
10154 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10155 emit_move_insn (reg,
10156 change_address (result, mode,
10157 plus_constant (XEXP (result, 0),
10158 size)));
10159
10160 push_to_sequence (call_fusage);
10161 emit_insn (gen_rtx_USE (VOIDmode, reg));
10162 call_fusage = get_insns ();
10163 end_sequence ();
10164 size += GET_MODE_SIZE (mode);
10165 }
10166
10167 /* Put the USE insns before the return. */
10168 emit_insns (call_fusage);
10169
10170   /* Return whatever values were restored by jumping directly to the end
10171 of the function. */
10172 expand_null_return ();
10173 }
10174 \f
10175 /* Expand code for a post- or pre- increment or decrement
10176 and return the RTX for the result.
10177 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
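/* E.g. for `x++' (POST == 1) the caller receives a copy of the old value
   of X and the addition is queued, while for `++x' the result is the
   incremented X itself.  */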
10178
10179 static rtx
10180 expand_increment (exp, post, ignore)
10181 register tree exp;
10182 int post, ignore;
10183 {
10184 register rtx op0, op1;
10185 register rtx temp, value;
10186 register tree incremented = TREE_OPERAND (exp, 0);
10187 optab this_optab = add_optab;
10188 int icode;
10189 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10190 int op0_is_copy = 0;
10191 int single_insn = 0;
10192 /* 1 means we can't store into OP0 directly,
10193 because it is a subreg narrower than a word,
10194 and we don't dare clobber the rest of the word. */
10195 int bad_subreg = 0;
10196
10197 /* Stabilize any component ref that might need to be
10198 evaluated more than once below. */
10199 if (!post
10200 || TREE_CODE (incremented) == BIT_FIELD_REF
10201 || (TREE_CODE (incremented) == COMPONENT_REF
10202 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10203 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10204 incremented = stabilize_reference (incremented);
10205 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10206    ones into SAVE_EXPRs so that they don't accidentally get evaluated
10207 more than once by the code below. */
10208 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10209 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10210 incremented = save_expr (incremented);
10211
10212 /* Compute the operands as RTX.
10213 Note whether OP0 is the actual lvalue or a copy of it:
10214 I believe it is a copy iff it is a register or subreg
10215 and insns were generated in computing it. */
10216
10217 temp = get_last_insn ();
10218 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10219
10220 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10221 in place but instead must do sign- or zero-extension during assignment,
10222 so we copy it into a new register and let the code below use it as
10223 a copy.
10224
10225      Note that we can safely modify this SUBREG since it is known not to be
10226 shared (it was made by the expand_expr call above). */
10227
10228 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10229 {
10230 if (post)
10231 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10232 else
10233 bad_subreg = 1;
10234 }
10235 else if (GET_CODE (op0) == SUBREG
10236 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10237 {
10238 /* We cannot increment this SUBREG in place. If we are
10239 post-incrementing, get a copy of the old value. Otherwise,
10240 just mark that we cannot increment in place. */
10241 if (post)
10242 op0 = copy_to_reg (op0);
10243 else
10244 bad_subreg = 1;
10245 }
10246
10247 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10248 && temp != get_last_insn ());
10249 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10250 EXPAND_MEMORY_USE_BAD);
10251
10252 /* Decide whether incrementing or decrementing. */
10253 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10254 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10255 this_optab = sub_optab;
10256
10257 /* Convert decrement by a constant into a negative increment. */
10258 if (this_optab == sub_optab
10259 && GET_CODE (op1) == CONST_INT)
10260 {
10261 op1 = GEN_INT (- INTVAL (op1));
10262 this_optab = add_optab;
10263 }
10264
10265 /* For a preincrement, see if we can do this with a single instruction. */
10266 if (!post)
10267 {
10268 icode = (int) this_optab->handlers[(int) mode].insn_code;
10269 if (icode != (int) CODE_FOR_nothing
10270 /* Make sure that OP0 is valid for operands 0 and 1
10271 of the insn we want to queue. */
10272 && (*insn_operand_predicate[icode][0]) (op0, mode)
10273 && (*insn_operand_predicate[icode][1]) (op0, mode)
10274 && (*insn_operand_predicate[icode][2]) (op1, mode))
10275 single_insn = 1;
10276 }
10277
10278 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10279 then we cannot just increment OP0. We must therefore contrive to
10280 increment the original value. Then, for postincrement, we can return
10281 OP0 since it is a copy of the old value. For preincrement, expand here
10282 unless we can do it with a single insn.
10283
10284 Likewise if storing directly into OP0 would clobber high bits
10285 we need to preserve (bad_subreg). */
10286 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10287 {
10288 /* This is the easiest way to increment the value wherever it is.
10289 Problems with multiple evaluation of INCREMENTED are prevented
10290 because either (1) it is a component_ref or preincrement,
10291 in which case it was stabilized above, or (2) it is an array_ref
10292 with constant index in an array in a register, which is
10293 safe to reevaluate. */
10294 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10295 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10296 ? MINUS_EXPR : PLUS_EXPR),
10297 TREE_TYPE (exp),
10298 incremented,
10299 TREE_OPERAND (exp, 1));
10300
10301 while (TREE_CODE (incremented) == NOP_EXPR
10302 || TREE_CODE (incremented) == CONVERT_EXPR)
10303 {
10304 newexp = convert (TREE_TYPE (incremented), newexp);
10305 incremented = TREE_OPERAND (incremented, 0);
10306 }
10307
10308       temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10309 return post ? op0 : temp;
10310 }
10311
10312 if (post)
10313 {
10314 /* We have a true reference to the value in OP0.
10315 If there is an insn to add or subtract in this mode, queue it.
10316 Queueing the increment insn avoids the register shuffling
10317 that often results if we must increment now and first save
10318 the old value for subsequent use. */
10319
10320 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10321 op0 = stabilize (op0);
10322 #endif
10323
10324 icode = (int) this_optab->handlers[(int) mode].insn_code;
10325 if (icode != (int) CODE_FOR_nothing
10326 /* Make sure that OP0 is valid for operands 0 and 1
10327 of the insn we want to queue. */
10328 && (*insn_operand_predicate[icode][0]) (op0, mode)
10329 && (*insn_operand_predicate[icode][1]) (op0, mode))
10330 {
10331 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10332 op1 = force_reg (mode, op1);
10333
10334 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10335 }
10336 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10337 {
10338 rtx addr = (general_operand (XEXP (op0, 0), mode)
10339 ? force_reg (Pmode, XEXP (op0, 0))
10340 : copy_to_reg (XEXP (op0, 0)));
10341 rtx temp, result;
10342
10343 op0 = change_address (op0, VOIDmode, addr);
10344 temp = force_reg (GET_MODE (op0), op0);
10345 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10346 op1 = force_reg (mode, op1);
10347
10348 /* The increment queue is LIFO, thus we have to `queue'
10349 the instructions in reverse order. */
10350 enqueue_insn (op0, gen_move_insn (op0, temp));
10351 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10352 return result;
10353 }
10354 }
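  /* In the MEM case just above, the queue being LIFO means the store
     back into OP0 is enqueued first and the add into TEMP second, so
     that when the queue is emitted the add runs before the store;
     RESULT stands for the value TEMP had before the queued add.  */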
10355
10356   /* Preincrement, or a postincrement that could not be queued above.  */
10357 if (post)
10358 /* Save a copy of the value before inc or dec, to return it later. */
10359 temp = value = copy_to_reg (op0);
10360 else
10361 /* Arrange to return the incremented value. */
10362 /* Copy the rtx because expand_binop will protect from the queue,
10363 and the results of that would be invalid for us to return
10364 if our caller does emit_queue before using our result. */
10365 temp = copy_rtx (value = op0);
10366
10367 /* Increment however we can. */
10368 op1 = expand_binop (mode, this_optab, value, op1,
10369 current_function_check_memory_usage ? NULL_RTX : op0,
10370 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10371 /* Make sure the value is stored into OP0. */
10372 if (op1 != op0)
10373 emit_move_insn (op0, op1);
10374
10375 return temp;
10376 }
10377 \f
10378 /* Expand all function calls contained within EXP, innermost ones first.
10379 But don't look within expressions that have sequence points.
10380 For each CALL_EXPR, record the rtx for its value
10381 in the CALL_EXPR_RTL field. */
10382
10383 static void
10384 preexpand_calls (exp)
10385 tree exp;
10386 {
10387 register int nops, i;
10388 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10389
10390 if (! do_preexpand_calls)
10391 return;
10392
10393 /* Only expressions and references can contain calls. */
10394
10395 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10396 return;
10397
10398 switch (TREE_CODE (exp))
10399 {
10400 case CALL_EXPR:
10401 /* Do nothing if already expanded. */
10402 if (CALL_EXPR_RTL (exp) != 0
10403 /* Do nothing if the call returns a variable-sized object. */
10404 	|| TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
10405 /* Do nothing to built-in functions. */
10406 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10407 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10408 == FUNCTION_DECL)
10409 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10410 return;
10411
10412 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10413 return;
10414
10415 case COMPOUND_EXPR:
10416 case COND_EXPR:
10417 case TRUTH_ANDIF_EXPR:
10418 case TRUTH_ORIF_EXPR:
10419 /* If we find one of these, then we can be sure
10420 the adjust will be done for it (since it makes jumps).
10421 Do it now, so that if this is inside an argument
10422 of a function, we don't get the stack adjustment
10423 after some other args have already been pushed. */
10424 do_pending_stack_adjust ();
10425 return;
10426
10427 case BLOCK:
10428 case RTL_EXPR:
10429 case WITH_CLEANUP_EXPR:
10430 case CLEANUP_POINT_EXPR:
10431 case TRY_CATCH_EXPR:
10432 return;
10433
10434 case SAVE_EXPR:
10435 if (SAVE_EXPR_RTL (exp) != 0)
10436 return;
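      /* Not yet expanded: fall through to the generic operand scan
	 below, which will reach the SAVE_EXPR's operand.  */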
10437
10438 default:
10439 break;
10440 }
10441
10442 nops = tree_code_length[(int) TREE_CODE (exp)];
10443 for (i = 0; i < nops; i++)
10444 if (TREE_OPERAND (exp, i) != 0)
10445 {
10446 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10447 if (type == 'e' || type == '<' || type == '1' || type == '2'
10448 || type == 'r')
10449 preexpand_calls (TREE_OPERAND (exp, i));
10450 }
10451 }
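/* As an illustration, for a tree such as `a + f (x) * g (y)' the walk
   above descends through the PLUS_EXPR and MULT_EXPR, reaches the two
   CALL_EXPRs, and expands each call once, recording its value rtx in
   CALL_EXPR_RTL; when the containing expression is expanded later,
   the recorded rtx is reused instead of expanding the call again.  */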
10452 \f
10453 /* At the start of a function, record that we have no previously-pushed
10454 arguments waiting to be popped. */
10455
10456 void
10457 init_pending_stack_adjust ()
10458 {
10459 pending_stack_adjust = 0;
10460 }
10461
10462 /* When exiting from a function, if safe, clear out any pending stack adjust
10463 so the adjustment won't get done.
10464
10465 Note, if the current function calls alloca, then it must have a
10466 frame pointer regardless of the value of flag_omit_frame_pointer. */
10467
10468 void
10469 clear_pending_stack_adjust ()
10470 {
10471 #ifdef EXIT_IGNORE_STACK
10472 if (optimize > 0
10473 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10474 && EXIT_IGNORE_STACK
10475 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10476 && ! flag_inline_functions)
10477 pending_stack_adjust = 0;
10478 #endif
10479 }
10480
10481 /* Pop any previously-pushed arguments that have not been popped yet. */
10482
10483 void
10484 do_pending_stack_adjust ()
10485 {
10486 if (inhibit_defer_pop == 0)
10487 {
10488 if (pending_stack_adjust != 0)
10489 adjust_stack (GEN_INT (pending_stack_adjust));
10490 pending_stack_adjust = 0;
10491 }
10492 }
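/* Illustrative only: two consecutive caller-pops calls that each push
   8 bytes of arguments leave PENDING_STACK_ADJUST at 16, so the single
   adjustment emitted here (x86-style, purely for illustration)

	addl	$16,%esp

   replaces two separate 8-byte pops.  */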
10493 \f
10494 /* Expand conditional expressions. */
10495
10496 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10497 LABEL is an rtx of code CODE_LABEL, in this function and all the
10498 functions here. */
10499
10500 void
10501 jumpifnot (exp, label)
10502 tree exp;
10503 rtx label;
10504 {
10505 do_jump (exp, label, NULL_RTX);
10506 }
10507
10508 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10509
10510 void
10511 jumpif (exp, label)
10512 tree exp;
10513 rtx label;
10514 {
10515 do_jump (exp, NULL_RTX, label);
10516 }
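/* An illustrative (never compiled) sketch of how a front end might use
   the two routines above to expand `if (COND) STMT'; COND_TREE is a
   hypothetical name for the tree of COND.  */
#if 0
{
  rtx else_label = gen_label_rtx ();

  jumpifnot (cond_tree, else_label);	/* Skip the body if COND is zero.  */
  /* ... expand STMT here ... */
  emit_label (else_label);
}
#endif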
10517
10518 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10519 the result is zero, or IF_TRUE_LABEL if the result is one.
10520 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10521 meaning fall through in that case.
10522
10523 do_jump always does any pending stack adjust except when it does not
10524 actually perform a jump. An example where there is no jump
10525 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10526
10527 This function is responsible for optimizing cases such as
10528 &&, || and comparison operators in EXP. */
10529
10530 void
10531 do_jump (exp, if_false_label, if_true_label)
10532 tree exp;
10533 rtx if_false_label, if_true_label;
10534 {
10535 register enum tree_code code = TREE_CODE (exp);
10536 /* Some cases need to create a label to jump to
10537 in order to properly fall through.
10538 These cases set DROP_THROUGH_LABEL nonzero. */
10539 rtx drop_through_label = 0;
10540 rtx temp;
10541 rtx comparison = 0;
10542 int i;
10543 tree type;
10544 enum machine_mode mode;
10545
10546 #ifdef MAX_INTEGER_COMPUTATION_MODE
10547 check_max_integer_computation_mode (exp);
10548 #endif
10549
10550 emit_queue ();
10551
10552 switch (code)
10553 {
10554 case ERROR_MARK:
10555 break;
10556
10557 case INTEGER_CST:
10558 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10559 if (temp)
10560 emit_jump (temp);
10561 break;
10562
10563 #if 0
10564 /* This is not true with #pragma weak */
10565 case ADDR_EXPR:
10566 /* The address of something can never be zero. */
10567 if (if_true_label)
10568 emit_jump (if_true_label);
10569 break;
10570 #endif
10571
10572 case NOP_EXPR:
10573 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10574 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10575 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10576 goto normal;
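      /* Fall through.  */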
10577 case CONVERT_EXPR:
10578 /* If we are narrowing the operand, we have to do the compare in the
10579 narrower mode. */
10580 if ((TYPE_PRECISION (TREE_TYPE (exp))
10581 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10582 goto normal;
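      /* Fall through.  */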
10583 case NON_LVALUE_EXPR:
10584 case REFERENCE_EXPR:
10585 case ABS_EXPR:
10586 case NEGATE_EXPR:
10587 case LROTATE_EXPR:
10588 case RROTATE_EXPR:
10589 /* These cannot change zero->non-zero or vice versa. */
10590 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10591 break;
10592
10593 #if 0
10594   /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10595      a test, and can be longer if the test could have been eliminated.  */
10596 case PLUS_EXPR:
10597 /* Reduce to minus. */
10598 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10599 TREE_OPERAND (exp, 0),
10600 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10601 TREE_OPERAND (exp, 1))));
10602 /* Process as MINUS. */
10603 #endif
10604
10605 case MINUS_EXPR:
10606 /* Non-zero iff operands of minus differ. */
10607 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10608 TREE_OPERAND (exp, 0),
10609 TREE_OPERAND (exp, 1)),
10610 NE, NE);
10611 break;
10612
10613 case BIT_AND_EXPR:
10614 /* If we are AND'ing with a small constant, do this comparison in the
10615 smallest type that fits. If the machine doesn't have comparisons
10616 that small, it will be converted back to the wider comparison.
10617 This helps if we are testing the sign bit of a narrower object.
10618 combine can't do this for us because it can't know whether a
10619 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10620
10621 if (! SLOW_BYTE_ACCESS
10622 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10623 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10624 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10625 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10626 && (type = type_for_mode (mode, 1)) != 0
10627 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10628 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10629 != CODE_FOR_nothing))
10630 {
10631 do_jump (convert (type, exp), if_false_label, if_true_label);
10632 break;
10633 }
10634 goto normal;
10635
10636 case TRUTH_NOT_EXPR:
10637 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10638 break;
10639
10640 case TRUTH_ANDIF_EXPR:
10641 if (if_false_label == 0)
10642 if_false_label = drop_through_label = gen_label_rtx ();
10643 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10644 start_cleanup_deferral ();
10645 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10646 end_cleanup_deferral ();
10647 break;
10648
10649 case TRUTH_ORIF_EXPR:
10650 if (if_true_label == 0)
10651 if_true_label = drop_through_label = gen_label_rtx ();
10652 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10653 start_cleanup_deferral ();
10654 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10655 end_cleanup_deferral ();
10656 break;
10657
10658 case COMPOUND_EXPR:
10659 push_temp_slots ();
10660 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10661 preserve_temp_slots (NULL_RTX);
10662 free_temp_slots ();
10663 pop_temp_slots ();
10664 emit_queue ();
10665 do_pending_stack_adjust ();
10666 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10667 break;
10668
10669 case COMPONENT_REF:
10670 case BIT_FIELD_REF:
10671 case ARRAY_REF:
10672 {
10673 int bitsize, bitpos, unsignedp;
10674 enum machine_mode mode;
10675 tree type;
10676 tree offset;
10677 int volatilep = 0;
10678 int alignment;
10679
10680 /* Get description of this reference. We don't actually care
10681 about the underlying object here. */
10682 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10683 &mode, &unsignedp, &volatilep,
10684 &alignment);
10685
10686 type = type_for_size (bitsize, unsignedp);
10687 if (! SLOW_BYTE_ACCESS
10688 && type != 0 && bitsize >= 0
10689 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10690 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10691 != CODE_FOR_nothing))
10692 {
10693 do_jump (convert (type, exp), if_false_label, if_true_label);
10694 break;
10695 }
10696 goto normal;
10697 }
10698
10699 case COND_EXPR:
10700 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10701 if (integer_onep (TREE_OPERAND (exp, 1))
10702 && integer_zerop (TREE_OPERAND (exp, 2)))
10703 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10704
10705 else if (integer_zerop (TREE_OPERAND (exp, 1))
10706 && integer_onep (TREE_OPERAND (exp, 2)))
10707 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10708
10709 else
10710 {
10711 register rtx label1 = gen_label_rtx ();
10712 drop_through_label = gen_label_rtx ();
10713
10714 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10715
10716 start_cleanup_deferral ();
10717 /* Now the THEN-expression. */
10718 do_jump (TREE_OPERAND (exp, 1),
10719 if_false_label ? if_false_label : drop_through_label,
10720 if_true_label ? if_true_label : drop_through_label);
10721 /* In case the do_jump just above never jumps. */
10722 do_pending_stack_adjust ();
10723 emit_label (label1);
10724
10725 /* Now the ELSE-expression. */
10726 do_jump (TREE_OPERAND (exp, 2),
10727 if_false_label ? if_false_label : drop_through_label,
10728 if_true_label ? if_true_label : drop_through_label);
10729 end_cleanup_deferral ();
10730 }
10731 break;
10732
10733 case EQ_EXPR:
10734 {
10735 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10736
10737 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10738 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10739 {
10740 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10741 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10742 do_jump
10743 (fold
10744 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10745 fold (build (EQ_EXPR, TREE_TYPE (exp),
10746 fold (build1 (REALPART_EXPR,
10747 TREE_TYPE (inner_type),
10748 exp0)),
10749 fold (build1 (REALPART_EXPR,
10750 TREE_TYPE (inner_type),
10751 exp1)))),
10752 fold (build (EQ_EXPR, TREE_TYPE (exp),
10753 fold (build1 (IMAGPART_EXPR,
10754 TREE_TYPE (inner_type),
10755 exp0)),
10756 fold (build1 (IMAGPART_EXPR,
10757 TREE_TYPE (inner_type),
10758 exp1)))))),
10759 if_false_label, if_true_label);
10760 }
10761
10762 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10763 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10764
10765 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10766 && !can_compare_p (TYPE_MODE (inner_type)))
10767 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10768 else
10769 comparison = compare (exp, EQ, EQ);
10770 break;
10771 }
10772
10773 case NE_EXPR:
10774 {
10775 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10776
10777 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10778 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10779 {
10780 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10781 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10782 do_jump
10783 (fold
10784 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10785 fold (build (NE_EXPR, TREE_TYPE (exp),
10786 fold (build1 (REALPART_EXPR,
10787 TREE_TYPE (inner_type),
10788 exp0)),
10789 fold (build1 (REALPART_EXPR,
10790 TREE_TYPE (inner_type),
10791 exp1)))),
10792 fold (build (NE_EXPR, TREE_TYPE (exp),
10793 fold (build1 (IMAGPART_EXPR,
10794 TREE_TYPE (inner_type),
10795 exp0)),
10796 fold (build1 (IMAGPART_EXPR,
10797 TREE_TYPE (inner_type),
10798 exp1)))))),
10799 if_false_label, if_true_label);
10800 }
10801
10802 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10803 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10804
10805 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10806 && !can_compare_p (TYPE_MODE (inner_type)))
10807 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10808 else
10809 comparison = compare (exp, NE, NE);
10810 break;
10811 }
10812
10813 case LT_EXPR:
10814 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10815 == MODE_INT)
10816 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10817 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10818 else
10819 comparison = compare (exp, LT, LTU);
10820 break;
10821
10822 case LE_EXPR:
10823 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10824 == MODE_INT)
10825 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10826 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10827 else
10828 comparison = compare (exp, LE, LEU);
10829 break;
10830
10831 case GT_EXPR:
10832 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10833 == MODE_INT)
10834 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10835 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10836 else
10837 comparison = compare (exp, GT, GTU);
10838 break;
10839
10840 case GE_EXPR:
10841 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10842 == MODE_INT)
10843 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10844 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10845 else
10846 comparison = compare (exp, GE, GEU);
10847 break;
10848
10849 default:
10850 normal:
10851 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10852 #if 0
10853   /* This is no longer needed and causes poor code, since it makes
10854      comparisons and tests from non-SI objects have different code
10855      sequences.  */
10856 /* Copy to register to avoid generating bad insns by cse
10857 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10858 if (!cse_not_expected && GET_CODE (temp) == MEM)
10859 temp = copy_to_reg (temp);
10860 #endif
10861 do_pending_stack_adjust ();
10862 if (GET_CODE (temp) == CONST_INT)
10863 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10864 else if (GET_CODE (temp) == LABEL_REF)
10865 comparison = const_true_rtx;
10866 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10867 && !can_compare_p (GET_MODE (temp)))
10868 /* Note swapping the labels gives us not-equal. */
10869 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10870 else if (GET_MODE (temp) != VOIDmode)
10871 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10872 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10873 GET_MODE (temp), NULL_RTX, 0);
10874 else
10875 abort ();
10876 }
10877
10878 /* Do any postincrements in the expression that was tested. */
10879 emit_queue ();
10880
10881 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10882 straight into a conditional jump instruction as the jump condition.
10883 Otherwise, all the work has been done already. */
10884
10885 if (comparison == const_true_rtx)
10886 {
10887 if (if_true_label)
10888 emit_jump (if_true_label);
10889 }
10890 else if (comparison == const0_rtx)
10891 {
10892 if (if_false_label)
10893 emit_jump (if_false_label);
10894 }
10895 else if (comparison)
10896 do_jump_for_compare (comparison, if_false_label, if_true_label);
10897
10898 if (drop_through_label)
10899 {
10900 /* If do_jump produces code that might be jumped around,
10901 do any stack adjusts from that code, before the place
10902 where control merges in. */
10903 do_pending_stack_adjust ();
10904 emit_label (drop_through_label);
10905 }
10906 }
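/* As an example of the short-circuiting above, do_jump on `a && b'
   with a false label F emits a jump to F as soon as `a' is known to be
   zero and only then evaluates `b', likewise jumping to F if it is
   zero; no boolean value for `a && b' is ever materialized.  */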
10907 \f
10908 /* Given a comparison expression EXP for values too wide to be compared
10909 with one insn, test the comparison and jump to the appropriate label.
10910 The code of EXP is ignored; we always test GT if SWAP is 0,
10911 and LT if SWAP is 1. */
10912
10913 static void
10914 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10915 tree exp;
10916 int swap;
10917 rtx if_false_label, if_true_label;
10918 {
10919 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10920 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10921 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10922 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10923 rtx drop_through_label = 0;
10924 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10925 int i;
10926
10927 if (! if_true_label || ! if_false_label)
10928 drop_through_label = gen_label_rtx ();
10929 if (! if_true_label)
10930 if_true_label = drop_through_label;
10931 if (! if_false_label)
10932 if_false_label = drop_through_label;
10933
10934 /* Compare a word at a time, high order first. */
10935 for (i = 0; i < nwords; i++)
10936 {
10937 rtx comp;
10938 rtx op0_word, op1_word;
10939
10940 if (WORDS_BIG_ENDIAN)
10941 {
10942 op0_word = operand_subword_force (op0, i, mode);
10943 op1_word = operand_subword_force (op1, i, mode);
10944 }
10945 else
10946 {
10947 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10948 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10949 }
10950
10951 /* All but high-order word must be compared as unsigned. */
10952 comp = compare_from_rtx (op0_word, op1_word,
10953 (unsignedp || i > 0) ? GTU : GT,
10954 unsignedp, word_mode, NULL_RTX, 0);
10955 if (comp == const_true_rtx)
10956 emit_jump (if_true_label);
10957 else if (comp != const0_rtx)
10958 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10959
10960 /* Consider lower words only if these are equal. */
10961 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10962 NULL_RTX, 0);
10963 if (comp == const_true_rtx)
10964 emit_jump (if_false_label);
10965 else if (comp != const0_rtx)
10966 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10967 }
10968
10969 if (if_false_label)
10970 emit_jump (if_false_label);
10971 if (drop_through_label)
10972 emit_label (drop_through_label);
10973 }
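/* For instance, comparing two unsigned 64-bit values A and B on a
   32-bit target amounts to (with SWAP == 0, hence testing GT):

	if (A.high > B.high) goto if_true_label;
	if (A.high != B.high) goto if_false_label;
	if (A.low > B.low) goto if_true_label;    (unsigned compare)
	goto if_false_label;

   which is the loop above unrolled for NWORDS == 2.  */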
10974
10975 /* Compare OP0 with OP1, word at a time, in mode MODE.
10976 UNSIGNEDP says to do unsigned comparison.
10977 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10978
10979 void
10980 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10981 enum machine_mode mode;
10982 int unsignedp;
10983 rtx op0, op1;
10984 rtx if_false_label, if_true_label;
10985 {
10986 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10987 rtx drop_through_label = 0;
10988 int i;
10989
10990 if (! if_true_label || ! if_false_label)
10991 drop_through_label = gen_label_rtx ();
10992 if (! if_true_label)
10993 if_true_label = drop_through_label;
10994 if (! if_false_label)
10995 if_false_label = drop_through_label;
10996
10997 /* Compare a word at a time, high order first. */
10998 for (i = 0; i < nwords; i++)
10999 {
11000 rtx comp;
11001 rtx op0_word, op1_word;
11002
11003 if (WORDS_BIG_ENDIAN)
11004 {
11005 op0_word = operand_subword_force (op0, i, mode);
11006 op1_word = operand_subword_force (op1, i, mode);
11007 }
11008 else
11009 {
11010 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11011 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11012 }
11013
11014 /* All but high-order word must be compared as unsigned. */
11015 comp = compare_from_rtx (op0_word, op1_word,
11016 (unsignedp || i > 0) ? GTU : GT,
11017 unsignedp, word_mode, NULL_RTX, 0);
11018 if (comp == const_true_rtx)
11019 emit_jump (if_true_label);
11020 else if (comp != const0_rtx)
11021 do_jump_for_compare (comp, NULL_RTX, if_true_label);
11022
11023 /* Consider lower words only if these are equal. */
11024 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
11025 NULL_RTX, 0);
11026 if (comp == const_true_rtx)
11027 emit_jump (if_false_label);
11028 else if (comp != const0_rtx)
11029 do_jump_for_compare (comp, NULL_RTX, if_false_label);
11030 }
11031
11032 if (if_false_label)
11033 emit_jump (if_false_label);
11034 if (drop_through_label)
11035 emit_label (drop_through_label);
11036 }
11037
11038 /* Given an EQ_EXPR expression EXP for values too wide to be compared
11039 with one insn, test the comparison and jump to the appropriate label. */
11040
11041 static void
11042 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
11043 tree exp;
11044 rtx if_false_label, if_true_label;
11045 {
11046 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11047 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11048 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
11049 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11050 int i;
11051 rtx drop_through_label = 0;
11052
11053 if (! if_false_label)
11054 drop_through_label = if_false_label = gen_label_rtx ();
11055
11056 for (i = 0; i < nwords; i++)
11057 {
11058 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
11059 operand_subword_force (op1, i, mode),
11060 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
11061 word_mode, NULL_RTX, 0);
11062 if (comp == const_true_rtx)
11063 emit_jump (if_false_label);
11064 else if (comp != const0_rtx)
11065 do_jump_for_compare (comp, if_false_label, NULL_RTX);
11066 }
11067
11068 if (if_true_label)
11069 emit_jump (if_true_label);
11070 if (drop_through_label)
11071 emit_label (drop_through_label);
11072 }
11073 \f
11074 /* Jump according to whether OP0 is 0.
11075 We assume that OP0 has an integer mode that is too wide
11076 for the available compare insns. */
11077
11078 void
11079 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
11080 rtx op0;
11081 rtx if_false_label, if_true_label;
11082 {
11083 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
11084 rtx part;
11085 int i;
11086 rtx drop_through_label = 0;
11087
11088 /* The fastest way of doing this comparison on almost any machine is to
11089 "or" all the words and compare the result. If all have to be loaded
11090      from memory and the item is very wide, this may be slower, but
11091      that's highly unlikely.  */
11092
11093 part = gen_reg_rtx (word_mode);
11094 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
11095 for (i = 1; i < nwords && part != 0; i++)
11096 part = expand_binop (word_mode, ior_optab, part,
11097 operand_subword_force (op0, i, GET_MODE (op0)),
11098 part, 1, OPTAB_WIDEN);
11099
11100 if (part != 0)
11101 {
11102 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
11103 NULL_RTX, 0);
11104
11105 if (comp == const_true_rtx)
11106 emit_jump (if_false_label);
11107 else if (comp == const0_rtx)
11108 emit_jump (if_true_label);
11109 else
11110 do_jump_for_compare (comp, if_false_label, if_true_label);
11111
11112 return;
11113 }
11114
11115 /* If we couldn't do the "or" simply, do this with a series of compares. */
11116 if (! if_false_label)
11117 drop_through_label = if_false_label = gen_label_rtx ();
11118
11119 for (i = 0; i < nwords; i++)
11120 {
11121 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
11122 GET_MODE (op0)),
11123 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
11124 if (comp == const_true_rtx)
11125 emit_jump (if_false_label);
11126 else if (comp != const0_rtx)
11127 do_jump_for_compare (comp, if_false_label, NULL_RTX);
11128 }
11129
11130 if (if_true_label)
11131 emit_jump (if_true_label);
11132
11133 if (drop_through_label)
11134 emit_label (drop_through_label);
11135 }
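/* E.g. testing a 64-bit OP0 against zero on a 32-bit target normally
   becomes

	(OP0.low | OP0.high) == 0

   one ior plus a single word-sized comparison, rather than two
   separate compare-and-branch sequences.  */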
11136
11137 /* Given a comparison expression in rtl form, output conditional branches to
11138 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
11139
11140 static void
11141 do_jump_for_compare (comparison, if_false_label, if_true_label)
11142 rtx comparison, if_false_label, if_true_label;
11143 {
11144 if (if_true_label)
11145 {
11146 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11147 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11148 (if_true_label));
11149 else
11150 abort ();
11151
11152 if (if_false_label)
11153 emit_jump (if_false_label);
11154 }
11155 else if (if_false_label)
11156 {
11157 rtx first = get_last_insn (), insn, branch;
11158 int br_count;
11159
11160 /* Output the branch with the opposite condition. Then try to invert
11161 what is generated. If more than one insn is a branch, or if the
11162 branch is not the last insn written, abort. If we can't invert
11163 	 the branch, make a true label, redirect this jump to that,
11164 emit a jump to the false label and define the true label. */
11165 /* ??? Note that we wouldn't have to do any of this nonsense if
11166 we passed both labels into a combined compare-and-branch.
11167 Ah well, jump threading does a good job of repairing the damage. */
11168
11169 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11170 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11171 (if_false_label));
11172 else
11173 abort ();
11174
11175 /* Here we get the first insn that was just emitted. It used to be the
11176 case that, on some machines, emitting the branch would discard
11177 the previous compare insn and emit a replacement. This isn't
11178 done anymore, but abort if we see that FIRST is deleted. */
11179
11180 if (first == 0)
11181 first = get_insns ();
11182 else if (INSN_DELETED_P (first))
11183 abort ();
11184 else
11185 first = NEXT_INSN (first);
11186
11187 /* Look for multiple branches in this sequence, as might be generated
11188 for a multi-word integer comparison. */
11189
11190 br_count = 0;
11191 branch = NULL_RTX;
11192 for (insn = first; insn ; insn = NEXT_INSN (insn))
11193 if (GET_CODE (insn) == JUMP_INSN)
11194 {
11195 branch = insn;
11196 br_count += 1;
11197 }
11198
11199 /* If we've got one branch at the end of the sequence,
11200 we can try to reverse it. */
11201
11202 if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
11203 {
11204 rtx insn_label;
11205 insn_label = XEXP (condjump_label (branch), 0);
11206 JUMP_LABEL (branch) = insn_label;
11207
11208 if (insn_label != if_false_label)
11209 abort ();
11210
11211 if (invert_jump (branch, if_false_label))
11212 return;
11213 }
11214
11215       /* Multiple branches, or reversal failed.  Convert to branches
11216 	 around an unconditional jump.  */
11217
11218 if_true_label = gen_label_rtx ();
11219 for (insn = first; insn; insn = NEXT_INSN (insn))
11220 if (GET_CODE (insn) == JUMP_INSN)
11221 {
11222 rtx insn_label;
11223 insn_label = XEXP (condjump_label (insn), 0);
11224 JUMP_LABEL (insn) = insn_label;
11225
11226 if (insn_label == if_false_label)
11227 redirect_jump (insn, if_true_label);
11228 }
11229 emit_jump (if_false_label);
11230 emit_label (if_true_label);
11231 }
11232 }
11233 \f
11234 /* Generate code for a comparison expression EXP
11235 (including code to compute the values to be compared)
11236 and set (CC0) according to the result.
11237 SIGNED_CODE should be the rtx operation for this comparison for
11238 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
11239
11240 We force a stack adjustment unless there are currently
11241 things pushed on the stack that aren't yet used. */
11242
11243 static rtx
11244 compare (exp, signed_code, unsigned_code)
11245 register tree exp;
11246 enum rtx_code signed_code, unsigned_code;
11247 {
11248 register rtx op0, op1;
11249 register tree type;
11250 register enum machine_mode mode;
11251 int unsignedp;
11252 enum rtx_code code;
11253
11254 /* Don't crash if the comparison was erroneous. */
11255 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11256 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
11257 return op0;
11258
11259 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11260 type = TREE_TYPE (TREE_OPERAND (exp, 0));
11261 mode = TYPE_MODE (type);
11262 unsignedp = TREE_UNSIGNED (type);
11263 code = unsignedp ? unsigned_code : signed_code;
11264
11265 #ifdef HAVE_canonicalize_funcptr_for_compare
11266 /* If function pointers need to be "canonicalized" before they can
11267 be reliably compared, then canonicalize them. */
11268 if (HAVE_canonicalize_funcptr_for_compare
11269 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11270 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11271 == FUNCTION_TYPE))
11272 {
11273 rtx new_op0 = gen_reg_rtx (mode);
11274
11275 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
11276 op0 = new_op0;
11277 }
11278
11279 if (HAVE_canonicalize_funcptr_for_compare
11280 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11281 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11282 == FUNCTION_TYPE))
11283 {
11284 rtx new_op1 = gen_reg_rtx (mode);
11285
11286 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
11287 op1 = new_op1;
11288 }
11289 #endif
11290
11291 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11292 ((mode == BLKmode)
11293 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11294 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
11295 }
11296
11297 /* Like compare but expects the values to compare as two rtx's.
11298 The decision as to signed or unsigned comparison must be made by the caller.
11299
11300 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11301 compared.
11302
11303 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11304 size of MODE should be used. */
11305
11306 rtx
11307 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11308 register rtx op0, op1;
11309 enum rtx_code code;
11310 int unsignedp;
11311 enum machine_mode mode;
11312 rtx size;
11313 int align;
11314 {
11315 rtx tem;
11316
11317 /* If one operand is constant, make it the second one. Only do this
11318 if the other operand is not constant as well. */
11319
11320 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11321 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
11322 {
11323 tem = op0;
11324 op0 = op1;
11325 op1 = tem;
11326 code = swap_condition (code);
11327 }
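  /* E.g. `1 < x' arrives here with the constant first, as (LT 1 x),
     and leaves as the equivalent (GT x 1).  */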
11328
11329 if (flag_force_mem)
11330 {
11331 op0 = force_not_mem (op0);
11332 op1 = force_not_mem (op1);
11333 }
11334
11335 do_pending_stack_adjust ();
11336
11337 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11338 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
11339 return tem;
11340
11341 #if 0
11342 /* There's no need to do this now that combine.c can eliminate lots of
11343 sign extensions. This can be less efficient in certain cases on other
11344 machines. */
11345
11346 /* If this is a signed equality comparison, we can do it as an
11347 unsigned comparison since zero-extension is cheaper than sign
11348 extension and comparisons with zero are done as unsigned. This is
11349 the case even on machines that can do fast sign extension, since
11350 zero-extension is easier to combine with other operations than
11351 sign-extension is. If we are comparing against a constant, we must
11352 convert it to what it would look like unsigned. */
11353 if ((code == EQ || code == NE) && ! unsignedp
11354 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
11355 {
11356 if (GET_CODE (op1) == CONST_INT
11357 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11358 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
11359 unsignedp = 1;
11360 }
11361 #endif
11362
11363 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11364
11365 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11366 }
11367 \f
11368 /* Generate code to calculate EXP using a store-flag instruction
11369 and return an rtx for the result. EXP is either a comparison
11370 or a TRUTH_NOT_EXPR whose operand is a comparison.
11371
11372 If TARGET is nonzero, store the result there if convenient.
11373
11374 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11375 cheap.
11376
11377 Return zero if there is no suitable set-flag instruction
11378 available on this machine.
11379
11380 Once expand_expr has been called on the arguments of the comparison,
11381 we are committed to doing the store flag, since it is not safe to
11382 re-evaluate the expression. We emit the store-flag insn by calling
11383 emit_store_flag, but only expand the arguments if we have a reason
11384 to believe that emit_store_flag will be successful. If we think that
11385 it will, but it isn't, we have to simulate the store-flag with a
11386 set/jump/set sequence. */
11387
11388 static rtx
11389 do_store_flag (exp, target, mode, only_cheap)
11390 tree exp;
11391 rtx target;
11392 enum machine_mode mode;
11393 int only_cheap;
11394 {
11395 enum rtx_code code;
11396 tree arg0, arg1, type;
11397 tree tem;
11398 enum machine_mode operand_mode;
11399 int invert = 0;
11400 int unsignedp;
11401 rtx op0, op1;
11402 enum insn_code icode;
11403 rtx subtarget = target;
11404 rtx result, label;
11405
11406 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11407 result at the end. We can't simply invert the test since it would
11408 have already been inverted if it were valid. This case occurs for
11409 some floating-point comparisons. */
11410
11411 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11412 invert = 1, exp = TREE_OPERAND (exp, 0);
11413
11414 arg0 = TREE_OPERAND (exp, 0);
11415 arg1 = TREE_OPERAND (exp, 1);
11416 type = TREE_TYPE (arg0);
11417 operand_mode = TYPE_MODE (type);
11418 unsignedp = TREE_UNSIGNED (type);
11419
11420 /* We won't bother with BLKmode store-flag operations because it would mean
11421 passing a lot of information to emit_store_flag. */
11422 if (operand_mode == BLKmode)
11423 return 0;
11424
11425 /* We won't bother with store-flag operations involving function pointers
11426 when function pointers must be canonicalized before comparisons. */
11427 #ifdef HAVE_canonicalize_funcptr_for_compare
11428 if (HAVE_canonicalize_funcptr_for_compare
11429 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11430 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11431 == FUNCTION_TYPE))
11432 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11433 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11434 == FUNCTION_TYPE))))
11435 return 0;
11436 #endif
11437
11438 STRIP_NOPS (arg0);
11439 STRIP_NOPS (arg1);
11440
11441 /* Get the rtx comparison code to use. We know that EXP is a comparison
11442 operation of some type. Some comparisons against 1 and -1 can be
11443 converted to comparisons with zero. Do so here so that the tests
11444 below will be aware that we have a comparison with zero. These
11445 tests will not catch constants in the first operand, but constants
11446 are rarely passed as the first operand. */
11447
11448 switch (TREE_CODE (exp))
11449 {
11450 case EQ_EXPR:
11451 code = EQ;
11452 break;
11453 case NE_EXPR:
11454 code = NE;
11455 break;
11456 case LT_EXPR:
11457 if (integer_onep (arg1))
11458 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11459 else
11460 code = unsignedp ? LTU : LT;
11461 break;
11462 case LE_EXPR:
11463 if (! unsignedp && integer_all_onesp (arg1))
11464 arg1 = integer_zero_node, code = LT;
11465 else
11466 code = unsignedp ? LEU : LE;
11467 break;
11468 case GT_EXPR:
11469 if (! unsignedp && integer_all_onesp (arg1))
11470 arg1 = integer_zero_node, code = GE;
11471 else
11472 code = unsignedp ? GTU : GT;
11473 break;
11474 case GE_EXPR:
11475 if (integer_onep (arg1))
11476 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11477 else
11478 code = unsignedp ? GEU : GE;
11479 break;
11480 default:
11481 abort ();
11482 }
11483
11484 /* Put a constant second. */
11485 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11486 {
11487 tem = arg0; arg0 = arg1; arg1 = tem;
11488 code = swap_condition (code);
11489 }
11490
11491 /* If this is an equality or inequality test of a single bit, we can
11492 do this by shifting the bit being tested to the low-order bit and
11493 masking the result with the constant 1. If the condition was EQ,
11494 we xor it with 1. This does not require an scc insn and is faster
11495 than an scc insn even if we have it. */
11496
11497 if ((code == NE || code == EQ)
11498 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11499 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11500 {
11501 tree inner = TREE_OPERAND (arg0, 0);
11502 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11503 int ops_unsignedp;
11504
11505       /* If INNER is a right shift by a constant, and that shift count plus
11506 	 BITNUM is less than the type precision, adjust BITNUM and INNER.  */
11507
11508 if (TREE_CODE (inner) == RSHIFT_EXPR
11509 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11510 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11511 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11512 < TYPE_PRECISION (type)))
11513 {
11514 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11515 inner = TREE_OPERAND (inner, 0);
11516 }
11517
11518 /* If we are going to be able to omit the AND below, we must do our
11519 operations as unsigned. If we must use the AND, we have a choice.
11520 Normally unsigned is faster, but for some machines signed is. */
11521 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11522 #ifdef LOAD_EXTEND_OP
11523 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11524 #else
11525 : 1
11526 #endif
11527 );
11528
11529 if (subtarget == 0 || GET_CODE (subtarget) != REG
11530 || GET_MODE (subtarget) != operand_mode
11531 || ! safe_from_p (subtarget, inner, 1))
11532 subtarget = 0;
11533
11534 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11535
11536 if (bitnum != 0)
11537 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11538 size_int (bitnum), subtarget, ops_unsignedp);
11539
11540 if (GET_MODE (op0) != mode)
11541 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11542
11543 if ((code == EQ && ! invert) || (code == NE && invert))
11544 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11545 ops_unsignedp, OPTAB_LIB_WIDEN);
11546
11547 /* Put the AND last so it can combine with more things. */
11548 if (bitnum != TYPE_PRECISION (type) - 1)
11549 op0 = expand_and (op0, const1_rtx, subtarget);
11550
11551 return op0;
11552 }
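  /* The net effect of the case just above: `(x & 8) != 0' is computed
     as `(x >> 3) & 1', and `(x & 8) == 0' as `((x >> 3) ^ 1) & 1',
     with no scc insn and no conditional branch.  */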
11553
11554 /* Now see if we are likely to be able to do this. Return if not. */
11555 if (! can_compare_p (operand_mode))
11556 return 0;
11557 icode = setcc_gen_code[(int) code];
11558 if (icode == CODE_FOR_nothing
11559 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11560 {
11561 /* We can only do this if it is one of the special cases that
11562 can be handled without an scc insn. */
11563 if ((code == LT && integer_zerop (arg1))
11564 || (! only_cheap && code == GE && integer_zerop (arg1)))
11565 ;
11566 else if (BRANCH_COST >= 0
11567 && ! only_cheap && (code == NE || code == EQ)
11568 && TREE_CODE (type) != REAL_TYPE
11569 && ((abs_optab->handlers[(int) operand_mode].insn_code
11570 != CODE_FOR_nothing)
11571 || (ffs_optab->handlers[(int) operand_mode].insn_code
11572 != CODE_FOR_nothing)))
11573 ;
11574 else
11575 return 0;
11576 }
11577
11578 preexpand_calls (exp);
11579 if (subtarget == 0 || GET_CODE (subtarget) != REG
11580 || GET_MODE (subtarget) != operand_mode
11581 || ! safe_from_p (subtarget, arg1, 1))
11582 subtarget = 0;
11583
11584 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11585 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11586
11587 if (target == 0)
11588 target = gen_reg_rtx (mode);
11589
11590   /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
11591      because, if emit_store_flag does anything at all, it will succeed and
11592      OP0 and OP1 will not be used subsequently.  */
11593
11594 result = emit_store_flag (target, code,
11595 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11596 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11597 operand_mode, unsignedp, 1);
11598
11599 if (result)
11600 {
11601 if (invert)
11602 result = expand_binop (mode, xor_optab, result, const1_rtx,
11603 result, 0, OPTAB_LIB_WIDEN);
11604 return result;
11605 }
11606
11607 /* If this failed, we have to do this with set/compare/jump/set code. */
11608 if (GET_CODE (target) != REG
11609 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11610 target = gen_reg_rtx (GET_MODE (target));
11611
11612 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11613 result = compare_from_rtx (op0, op1, code, unsignedp,
11614 operand_mode, NULL_RTX, 0);
11615 if (GET_CODE (result) == CONST_INT)
11616 return (((result == const0_rtx && ! invert)
11617 || (result != const0_rtx && invert))
11618 ? const0_rtx : const1_rtx);
11619
11620 label = gen_label_rtx ();
11621 if (bcc_gen_fctn[(int) code] == 0)
11622 abort ();
11623
11624 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11625 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11626 emit_label (label);
11627
11628 return target;
11629 }
11630 \f
11631 /* Generate a tablejump instruction (used for switch statements). */
11632
11633 #ifdef HAVE_tablejump
11634
11635 /* INDEX is the value being switched on, with the lowest value
11636 in the table already subtracted.
11637 MODE is its expected mode (needed if INDEX is constant).
11638 RANGE is the length of the jump table.
11639 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11640
11641 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11642 index value is out of range. */
11643
11644 void
11645 do_tablejump (index, mode, range, table_label, default_label)
11646 rtx index, range, table_label, default_label;
11647 enum machine_mode mode;
11648 {
11649 register rtx temp, vector;
11650
11651 /* Do an unsigned comparison (in the proper mode) between the index
11652 expression and the value which represents the length of the range.
11653 Since we just finished subtracting the lower bound of the range
11654 from the index expression, this comparison allows us to simultaneously
11655 check that the original index expression value is both greater than
11656 or equal to the minimum value of the range and less than or equal to
11657 the maximum value of the range. */
11658
11659 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11660 0, default_label);
11661
11662 /* If index is in range, it must fit in Pmode.
11663 Convert to Pmode so we can index with it. */
11664 if (mode != Pmode)
11665 index = convert_to_mode (Pmode, index, 1);
11666
11667   /* Don't let a MEM slip through, because then INDEX that comes
11668 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11669 and break_out_memory_refs will go to work on it and mess it up. */
11670 #ifdef PIC_CASE_VECTOR_ADDRESS
11671 if (flag_pic && GET_CODE (index) != REG)
11672 index = copy_to_mode_reg (Pmode, index);
11673 #endif
11674
11675 /* If flag_force_addr were to affect this address
11676 it could interfere with the tricky assumptions made
11677 about addresses that contain label-refs,
11678 which may be valid only very near the tablejump itself. */
11679 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11680 GET_MODE_SIZE, because this indicates how large insns are. The other
11681 uses should all be Pmode, because they are addresses. This code
11682 could fail if addresses and insns are not the same size. */
11683 index = gen_rtx_PLUS (Pmode,
11684 gen_rtx_MULT (Pmode, index,
11685 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11686 gen_rtx_LABEL_REF (Pmode, table_label));
11687 #ifdef PIC_CASE_VECTOR_ADDRESS
11688 if (flag_pic)
11689 index = PIC_CASE_VECTOR_ADDRESS (index);
11690 else
11691 #endif
11692 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11693 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11694 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11695 RTX_UNCHANGING_P (vector) = 1;
11696 convert_move (temp, vector, 0);
11697
11698 emit_jump_insn (gen_tablejump (temp, table_label));
11699
11700 /* If we are generating PIC code or if the table is PC-relative, the
11701 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11702 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11703 emit_barrier ();
11704 }
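/* As a concrete illustration, `switch (v)' with case labels 3 through 6
   is lowered by the caller to INDEX = v - 3 and RANGE = 3; the single
   unsigned comparison above sends any v outside 3..6 to DEFAULT_LABEL,
   and otherwise the jump goes through the table entry at address
   TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE).  */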
11705
11706 #endif /* HAVE_tablejump */