gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
47
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
50
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
53
54 #ifdef PUSH_ROUNDING
55
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first. */
58 #endif
59
60 #endif
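
/* For example (illustrative), on a target where STACK_GROWS_DOWNWARD is
   defined but ARGS_GROW_DOWNWARD is not, exactly one of the two macros is
   defined, the != test above succeeds, and PUSH_ARGS_REVERSED is defined:
   arguments are then pushed last to first.  */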
61
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
69
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
74
75 /* Hook called by safe_from_p for language-specific tree codes. It is
76 up to the language front-end to install a hook if it has any such
77    codes that safe_from_p needs to know about.  Since safe_from_p will
78 recursively explore the TREE_OPERANDs of an expression, this hook
79 should not reexamine those pieces. This routine may recursively
80 call safe_from_p; it should always pass `0' as the TOP_P
81 parameter. */
82 int (*lang_safe_from_p) PARAMS ((rtx, tree));
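
/* For illustration only: a front end with a language-specific tree code
   (here the hypothetical LANG_FOO_EXPR) might install a hook roughly like
   this, forwarding to safe_from_p with `0' for TOP_P as required above:

	static int
	foo_safe_from_p (x, exp)
	     rtx x;
	     tree exp;
	{
	  if (TREE_CODE (exp) == LANG_FOO_EXPR)
	    return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
	  return 1;
	}

	...  lang_safe_from_p = foo_safe_from_p;  ...  */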
83
84 /* If this is nonzero, we do not bother generating VOLATILE
85 around volatile memory references, and we are willing to
86 output indirect addresses. If cse is to follow, we reject
87 indirect addresses so a useful potential cse is generated;
88 if it is used only once, instruction combination will produce
89 the same indirect address eventually. */
90 int cse_not_expected;
91
92 /* Don't check memory usage, since the code being emitted is itself a memory
93    usage check.  Used when current_function_check_memory_usage is true, to avoid
94 infinite recursion. */
95 static int in_check_memory_usage;
96
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 static tree placeholder_list = 0;
99
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
115 };
116
117 /* This structure is used by store_by_pieces to describe the store to
118 be performed. */
119
120 struct store_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 PTR constfundata;
130 int reverse;
131 };
132
133 extern struct obstack permanent_obstack;
134
135 static rtx get_push_address PARAMS ((int));
136
137 static rtx enqueue_insn PARAMS ((rtx, rtx));
138 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
139 PARAMS ((unsigned HOST_WIDE_INT,
140 unsigned int));
141 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
142 struct move_by_pieces *));
143 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
144 enum machine_mode));
145 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
146 unsigned int));
147 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
148 unsigned int));
149 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
150 enum machine_mode,
151 struct store_by_pieces *));
152 static rtx get_subtarget PARAMS ((rtx));
153 static int is_zeros_p PARAMS ((tree));
154 static int mostly_zeros_p PARAMS ((tree));
155 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
156 HOST_WIDE_INT, enum machine_mode,
157 tree, tree, int, int));
158 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
159 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
160 HOST_WIDE_INT, enum machine_mode,
161 tree, enum machine_mode, int,
162 HOST_WIDE_INT, int));
163 static enum memory_use_mode
164 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
165 static rtx var_rtx PARAMS ((tree));
166 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
167 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
168 static rtx expand_increment PARAMS ((tree, int, int));
169 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
170 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
171 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
172 rtx, rtx));
173 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
174 #ifdef PUSH_ROUNDING
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
176 #endif
177 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
178
179 /* Record for each mode whether we can move a register directly to or
180 from an object of that mode in memory. If we can't, we won't try
181 to use that mode directly when accessing a field of that mode. */
182
183 static char direct_load[NUM_MACHINE_MODES];
184 static char direct_store[NUM_MACHINE_MODES];
185
186 /* If a memory-to-memory move would take MOVE_RATIO or more simple
187 move-instruction sequences, we will do a movstr or libcall instead. */
188
189 #ifndef MOVE_RATIO
190 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
191 #define MOVE_RATIO 2
192 #else
193 /* If we are optimizing for space (-Os), cut down the default move ratio. */
194 #define MOVE_RATIO (optimize_size ? 3 : 15)
195 #endif
196 #endif
197
198 /* This macro is used to determine whether move_by_pieces should be called
199 to perform a structure copy. */
200 #ifndef MOVE_BY_PIECES_P
201 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
202 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
203 #endif
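
/* A worked example of the defaults above (illustrative, assuming a 32-bit
   target with no movstr patterns, so MOVE_RATIO is 15, or 3 with -Os):
   copying a word-aligned 32-byte structure takes 32/4 = 8 SImode moves;
   8 < 15, so MOVE_BY_PIECES_P is true and the copy is expanded inline,
   whereas with -Os we have 8 >= 3 and a block-move call is used instead.  */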
204
205 /* This array records the insn_code of insns to perform block moves. */
206 enum insn_code movstr_optab[NUM_MACHINE_MODES];
207
208 /* This array records the insn_code of insns to perform block clears. */
209 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
210
211 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
212
213 #ifndef SLOW_UNALIGNED_ACCESS
214 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
215 #endif
216 \f
217 /* This is run once per compilation to set up which modes can be used
218 directly in memory and to initialize the block move optab. */
219
220 void
221 init_expr_once ()
222 {
223 rtx insn, pat;
224 enum machine_mode mode;
225 int num_clobbers;
226 rtx mem, mem1;
227
228 start_sequence ();
229
230 /* Try indexing by frame ptr and try by stack ptr.
231 It is known that on the Convex the stack ptr isn't a valid index.
232 With luck, one or the other is valid on any machine. */
233 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
234 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
235
236 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
237 pat = PATTERN (insn);
238
239 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
240 mode = (enum machine_mode) ((int) mode + 1))
241 {
242 int regno;
243 rtx reg;
244
245 direct_load[(int) mode] = direct_store[(int) mode] = 0;
246 PUT_MODE (mem, mode);
247 PUT_MODE (mem1, mode);
248
249 /* See if there is some register that can be used in this mode and
250 directly loaded or stored from memory. */
251
252 if (mode != VOIDmode && mode != BLKmode)
253 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
254 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
255 regno++)
256 {
257 if (! HARD_REGNO_MODE_OK (regno, mode))
258 continue;
259
260 reg = gen_rtx_REG (mode, regno);
261
262 SET_SRC (pat) = mem;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
266
267 SET_SRC (pat) = mem1;
268 SET_DEST (pat) = reg;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_load[(int) mode] = 1;
271
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
276
277 SET_SRC (pat) = reg;
278 SET_DEST (pat) = mem1;
279 if (recog (pat, insn, &num_clobbers) >= 0)
280 direct_store[(int) mode] = 1;
281 }
282 }
283
284 end_sequence ();
285 }
286
287 /* This is run at the start of compiling a function. */
288
289 void
290 init_expr ()
291 {
292 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
293
294 pending_chain = 0;
295 pending_stack_adjust = 0;
296 stack_pointer_delta = 0;
297 inhibit_defer_pop = 0;
298 saveregs_value = 0;
299 apply_args_value = 0;
300 forced_labels = 0;
301 }
302
303 void
304 mark_expr_status (p)
305 struct expr_status *p;
306 {
307 if (p == NULL)
308 return;
309
310 ggc_mark_rtx (p->x_saveregs_value);
311 ggc_mark_rtx (p->x_apply_args_value);
312 ggc_mark_rtx (p->x_forced_labels);
313 }
314
315 void
316 free_expr_status (f)
317 struct function *f;
318 {
319 free (f->expr);
320 f->expr = NULL;
321 }
322
323 /* Small sanity check that the queue is empty at the end of a function. */
324
325 void
326 finish_expr_for_function ()
327 {
328 if (pending_chain)
329 abort ();
330 }
331 \f
332 /* Manage the queue of increment instructions to be output
333 for POSTINCREMENT_EXPR expressions, etc. */
334
335 /* Queue up to increment (or change) VAR later. BODY says how:
336 BODY should be the same thing you would pass to emit_insn
337 to increment right away. It will go to emit_insn later on.
338
339 The value is a QUEUED expression to be used in place of VAR
340 where you want to guarantee the pre-incrementation value of VAR. */
341
342 static rtx
343 enqueue_insn (var, body)
344 rtx var, body;
345 {
346 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
347 body, pending_chain);
348 return pending_chain;
349 }
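
/* Purely illustrative sketch: expanding something like `a[i++]' might queue
   the increment of `i' roughly as follows (I_RTX being the rtx for `i'):

	rtx inc = gen_rtx_PLUS (GET_MODE (i_rtx), i_rtx, const1_rtx);
	rtx q = enqueue_insn (i_rtx, gen_move_insn (i_rtx, inc));

   Q is a QUEUED rtx that stands for the pre-increment value of `i' and is
   what the address arithmetic should use; the increment itself is emitted
   later, when emit_queue is called.  */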
350
351 /* Use protect_from_queue to convert a QUEUED expression
352 into something that you can put immediately into an instruction.
353 If the queued incrementation has not happened yet,
354 protect_from_queue returns the variable itself.
355 If the incrementation has happened, protect_from_queue returns a temp
356 that contains a copy of the old value of the variable.
357
358 Any time an rtx which might possibly be a QUEUED is to be put
359 into an instruction, it must be passed through protect_from_queue first.
360 QUEUED expressions are not meaningful in instructions.
361
362 Do not pass a value through protect_from_queue and then hold
363 on to it for a while before putting it in an instruction!
364 If the queue is flushed in between, incorrect code will result. */
365
366 rtx
367 protect_from_queue (x, modify)
368 rtx x;
369 int modify;
370 {
371 RTX_CODE code = GET_CODE (x);
372
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain == 0)
376 return x;
377 #endif
378
379 if (code != QUEUED)
380 {
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode. Don't modify X in place since it might be
385 shared. */
386 if (code == MEM && GET_MODE (x) != BLKmode
387 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
388 {
389 rtx y = XEXP (x, 0);
390 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
391
392 if (QUEUED_INSN (y))
393 {
394 rtx temp = gen_reg_rtx (GET_MODE (x));
395
396 emit_insn_before (gen_move_insn (temp, new),
397 QUEUED_INSN (y));
398 return temp;
399 }
400
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
404 }
405
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
408 if (code == MEM)
409 {
410 rtx tem = protect_from_queue (XEXP (x, 0), 0);
411 if (tem != XEXP (x, 0))
412 {
413 x = copy_rtx (x);
414 XEXP (x, 0) = tem;
415 }
416 }
417 else if (code == PLUS || code == MULT)
418 {
419 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
420 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
421 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
422 {
423 x = copy_rtx (x);
424 XEXP (x, 0) = new0;
425 XEXP (x, 1) = new1;
426 }
427 }
428 return x;
429 }
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to
432 emit_queue. */
433 if (QUEUED_INSN (x) == 0)
434 return copy_to_reg (QUEUED_VAR (x));
435 /* If the increment has happened and a pre-increment copy exists,
436 use that copy. */
437 if (QUEUED_COPY (x) != 0)
438 return QUEUED_COPY (x);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
443 QUEUED_INSN (x));
444 return QUEUED_COPY (x);
445 }
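
/* A minimal usage sketch of the rule above (illustrative only):

	from = protect_from_queue (from, 0);
	to = protect_from_queue (to, 1);
	emit_move_insn (to, from);

   The results are used immediately; per the warning above, nothing that
   might flush the queue (such as emit_queue) is called between the calls
   to protect_from_queue and the insn that uses their results.  */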
446
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
451
452 int
453 queued_subexp_p (x)
454 rtx x;
455 {
456 enum rtx_code code = GET_CODE (x);
457 switch (code)
458 {
459 case QUEUED:
460 return 1;
461 case MEM:
462 return queued_subexp_p (XEXP (x, 0));
463 case MULT:
464 case PLUS:
465 case MINUS:
466 return (queued_subexp_p (XEXP (x, 0))
467 || queued_subexp_p (XEXP (x, 1)));
468 default:
469 return 0;
470 }
471 }
472
473 /* Perform all the pending incrementations. */
474
475 void
476 emit_queue ()
477 {
478 rtx p;
479 while ((p = pending_chain))
480 {
481 rtx body = QUEUED_BODY (p);
482
483 if (GET_CODE (body) == SEQUENCE)
484 {
485 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
486 emit_insn (QUEUED_BODY (p));
487 }
488 else
489 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
490 pending_chain = QUEUED_NEXT (p);
491 }
492 }
493 \f
494 /* Copy data from FROM to TO, where the machine modes are not the same.
495 Both modes may be integer, or both may be floating.
496 UNSIGNEDP should be nonzero if FROM is an unsigned type.
497 This causes zero-extension instead of sign-extension. */
498
499 void
500 convert_move (to, from, unsignedp)
501 rtx to, from;
502 int unsignedp;
503 {
504 enum machine_mode to_mode = GET_MODE (to);
505 enum machine_mode from_mode = GET_MODE (from);
506 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
507 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
508 enum insn_code code;
509 rtx libcall;
510
511 /* rtx code for making an equivalent value. */
512 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
513
514 to = protect_from_queue (to, 1);
515 from = protect_from_queue (from, 0);
516
517 if (to_real != from_real)
518 abort ();
519
520 /* If FROM is a SUBREG that indicates that we have already done at least
521 the required extension, strip it. We don't handle such SUBREGs as
522 TO here. */
523
524 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
525 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
526 >= GET_MODE_SIZE (to_mode))
527 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
528 from = gen_lowpart (to_mode, from), from_mode = to_mode;
529
530 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
531 abort ();
532
533 if (to_mode == from_mode
534 || (from_mode == VOIDmode && CONSTANT_P (from)))
535 {
536 emit_move_insn (to, from);
537 return;
538 }
539
540 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
541 {
542 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
543 abort ();
544
545 if (VECTOR_MODE_P (to_mode))
546 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
547 else
548 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
549
550 emit_move_insn (to, from);
551 return;
552 }
553
554 if (to_real != from_real)
555 abort ();
556
557 if (to_real)
558 {
559 rtx value, insns;
560
561 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
562 {
563 /* Try converting directly if the insn is supported. */
564 if ((code = can_extend_p (to_mode, from_mode, 0))
565 != CODE_FOR_nothing)
566 {
567 emit_unop_insn (code, to, from, UNKNOWN);
568 return;
569 }
570 }
571
572 #ifdef HAVE_trunchfqf2
573 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
574 {
575 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
576 return;
577 }
578 #endif
579 #ifdef HAVE_trunctqfqf2
580 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_truncsfqf2
587 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593 #ifdef HAVE_truncdfqf2
594 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
595 {
596 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
597 return;
598 }
599 #endif
600 #ifdef HAVE_truncxfqf2
601 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
602 {
603 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
604 return;
605 }
606 #endif
607 #ifdef HAVE_trunctfqf2
608 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
609 {
610 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
611 return;
612 }
613 #endif
614
615 #ifdef HAVE_trunctqfhf2
616 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
617 {
618 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622 #ifdef HAVE_truncsfhf2
623 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629 #ifdef HAVE_truncdfhf2
630 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
631 {
632 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
633 return;
634 }
635 #endif
636 #ifdef HAVE_truncxfhf2
637 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
638 {
639 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
640 return;
641 }
642 #endif
643 #ifdef HAVE_trunctfhf2
644 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
645 {
646 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
647 return;
648 }
649 #endif
650
651 #ifdef HAVE_truncsftqf2
652 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
653 {
654 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658 #ifdef HAVE_truncdftqf2
659 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
660 {
661 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665 #ifdef HAVE_truncxftqf2
666 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
667 {
668 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
669 return;
670 }
671 #endif
672 #ifdef HAVE_trunctftqf2
673 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
674 {
675 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
676 return;
677 }
678 #endif
679
680 #ifdef HAVE_truncdfsf2
681 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_truncxfsf2
688 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
689 {
690 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_trunctfsf2
695 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
696 {
697 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701 #ifdef HAVE_truncxfdf2
702 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
703 {
704 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
705 return;
706 }
707 #endif
708 #ifdef HAVE_trunctfdf2
709 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
710 {
711 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
712 return;
713 }
714 #endif
715
716 libcall = (rtx) 0;
717 switch (from_mode)
718 {
719 case SFmode:
720 switch (to_mode)
721 {
722 case DFmode:
723 libcall = extendsfdf2_libfunc;
724 break;
725
726 case XFmode:
727 libcall = extendsfxf2_libfunc;
728 break;
729
730 case TFmode:
731 libcall = extendsftf2_libfunc;
732 break;
733
734 default:
735 break;
736 }
737 break;
738
739 case DFmode:
740 switch (to_mode)
741 {
742 case SFmode:
743 libcall = truncdfsf2_libfunc;
744 break;
745
746 case XFmode:
747 libcall = extenddfxf2_libfunc;
748 break;
749
750 case TFmode:
751 libcall = extenddftf2_libfunc;
752 break;
753
754 default:
755 break;
756 }
757 break;
758
759 case XFmode:
760 switch (to_mode)
761 {
762 case SFmode:
763 libcall = truncxfsf2_libfunc;
764 break;
765
766 case DFmode:
767 libcall = truncxfdf2_libfunc;
768 break;
769
770 default:
771 break;
772 }
773 break;
774
775 case TFmode:
776 switch (to_mode)
777 {
778 case SFmode:
779 libcall = trunctfsf2_libfunc;
780 break;
781
782 case DFmode:
783 libcall = trunctfdf2_libfunc;
784 break;
785
786 default:
787 break;
788 }
789 break;
790
791 default:
792 break;
793 }
794
795 if (libcall == (rtx) 0)
796 /* This conversion is not implemented yet. */
797 abort ();
798
799 start_sequence ();
800 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
801 1, from, from_mode);
802 insns = get_insns ();
803 end_sequence ();
804 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
805 from));
806 return;
807 }
808
809 /* Now both modes are integers. */
810
811 /* Handle expanding beyond a word. */
812 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
813 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
814 {
815 rtx insns;
816 rtx lowpart;
817 rtx fill_value;
818 rtx lowfrom;
819 int i;
820 enum machine_mode lowpart_mode;
821 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
822
823 /* Try converting directly if the insn is supported. */
824 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
825 != CODE_FOR_nothing)
826 {
827 /* If FROM is a SUBREG, put it into a register. Do this
828 so that we always generate the same set of insns for
829 better cse'ing; if an intermediate assignment occurred,
830 we won't be doing the operation directly on the SUBREG. */
831 if (optimize > 0 && GET_CODE (from) == SUBREG)
832 from = force_reg (from_mode, from);
833 emit_unop_insn (code, to, from, equiv_code);
834 return;
835 }
836 /* Next, try converting via full word. */
837 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
838 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
839 != CODE_FOR_nothing))
840 {
841 if (GET_CODE (to) == REG)
842 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
843 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
844 emit_unop_insn (code, to,
845 gen_lowpart (word_mode, to), equiv_code);
846 return;
847 }
848
849 /* No special multiword conversion insn; do it by hand. */
850 start_sequence ();
851
852 /* Since we will turn this into a no conflict block, we must ensure
853 that the source does not overlap the target. */
854
855 if (reg_overlap_mentioned_p (to, from))
856 from = force_reg (from_mode, from);
857
858 /* Get a copy of FROM widened to a word, if necessary. */
859 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
860 lowpart_mode = word_mode;
861 else
862 lowpart_mode = from_mode;
863
864 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
865
866 lowpart = gen_lowpart (lowpart_mode, to);
867 emit_move_insn (lowpart, lowfrom);
868
869 /* Compute the value to put in each remaining word. */
870 if (unsignedp)
871 fill_value = const0_rtx;
872 else
873 {
874 #ifdef HAVE_slt
875 if (HAVE_slt
876 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
877 && STORE_FLAG_VALUE == -1)
878 {
879 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
880 lowpart_mode, 0, 0);
881 fill_value = gen_reg_rtx (word_mode);
882 emit_insn (gen_slt (fill_value));
883 }
884 else
885 #endif
886 {
887 fill_value
888 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
889 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
890 NULL_RTX, 0);
891 fill_value = convert_to_mode (word_mode, fill_value, 1);
892 }
893 }
894
895 /* Fill the remaining words. */
896 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
897 {
898 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
899 rtx subword = operand_subword (to, index, 1, to_mode);
900
901 if (subword == 0)
902 abort ();
903
904 if (fill_value != subword)
905 emit_move_insn (subword, fill_value);
906 }
907
908 insns = get_insns ();
909 end_sequence ();
910
911 emit_no_conflict_block (insns, to, from, NULL_RTX,
912 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
913 return;
914 }
915
916 /* Truncating multi-word to a word or less. */
917 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
918 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
919 {
920 if (!((GET_CODE (from) == MEM
921 && ! MEM_VOLATILE_P (from)
922 && direct_load[(int) to_mode]
923 && ! mode_dependent_address_p (XEXP (from, 0)))
924 || GET_CODE (from) == REG
925 || GET_CODE (from) == SUBREG))
926 from = force_reg (from_mode, from);
927 convert_move (to, gen_lowpart (word_mode, from), 0);
928 return;
929 }
930
931 /* Handle pointer conversion. */ /* SPEE 900220. */
932 if (to_mode == PQImode)
933 {
934 if (from_mode != QImode)
935 from = convert_to_mode (QImode, from, unsignedp);
936
937 #ifdef HAVE_truncqipqi2
938 if (HAVE_truncqipqi2)
939 {
940 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
941 return;
942 }
943 #endif /* HAVE_truncqipqi2 */
944 abort ();
945 }
946
947 if (from_mode == PQImode)
948 {
949 if (to_mode != QImode)
950 {
951 from = convert_to_mode (QImode, from, unsignedp);
952 from_mode = QImode;
953 }
954 else
955 {
956 #ifdef HAVE_extendpqiqi2
957 if (HAVE_extendpqiqi2)
958 {
959 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
960 return;
961 }
962 #endif /* HAVE_extendpqiqi2 */
963 abort ();
964 }
965 }
966
967 if (to_mode == PSImode)
968 {
969 if (from_mode != SImode)
970 from = convert_to_mode (SImode, from, unsignedp);
971
972 #ifdef HAVE_truncsipsi2
973 if (HAVE_truncsipsi2)
974 {
975 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
976 return;
977 }
978 #endif /* HAVE_truncsipsi2 */
979 abort ();
980 }
981
982 if (from_mode == PSImode)
983 {
984 if (to_mode != SImode)
985 {
986 from = convert_to_mode (SImode, from, unsignedp);
987 from_mode = SImode;
988 }
989 else
990 {
991 #ifdef HAVE_extendpsisi2
992 if (! unsignedp && HAVE_extendpsisi2)
993 {
994 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
995 return;
996 }
997 #endif /* HAVE_extendpsisi2 */
998 #ifdef HAVE_zero_extendpsisi2
999 if (unsignedp && HAVE_zero_extendpsisi2)
1000 {
1001 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1002 return;
1003 }
1004 #endif /* HAVE_zero_extendpsisi2 */
1005 abort ();
1006 }
1007 }
1008
1009 if (to_mode == PDImode)
1010 {
1011 if (from_mode != DImode)
1012 from = convert_to_mode (DImode, from, unsignedp);
1013
1014 #ifdef HAVE_truncdipdi2
1015 if (HAVE_truncdipdi2)
1016 {
1017 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1018 return;
1019 }
1020 #endif /* HAVE_truncdipdi2 */
1021 abort ();
1022 }
1023
1024 if (from_mode == PDImode)
1025 {
1026 if (to_mode != DImode)
1027 {
1028 from = convert_to_mode (DImode, from, unsignedp);
1029 from_mode = DImode;
1030 }
1031 else
1032 {
1033 #ifdef HAVE_extendpdidi2
1034 if (HAVE_extendpdidi2)
1035 {
1036 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1037 return;
1038 }
1039 #endif /* HAVE_extendpdidi2 */
1040 abort ();
1041 }
1042 }
1043
1044 /* Now follow all the conversions between integers
1045 no more than a word long. */
1046
1047 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1048 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1049 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1050 GET_MODE_BITSIZE (from_mode)))
1051 {
1052 if (!((GET_CODE (from) == MEM
1053 && ! MEM_VOLATILE_P (from)
1054 && direct_load[(int) to_mode]
1055 && ! mode_dependent_address_p (XEXP (from, 0)))
1056 || GET_CODE (from) == REG
1057 || GET_CODE (from) == SUBREG))
1058 from = force_reg (from_mode, from);
1059 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1060 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1061 from = copy_to_reg (from);
1062 emit_move_insn (to, gen_lowpart (to_mode, from));
1063 return;
1064 }
1065
1066 /* Handle extension. */
1067 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1068 {
1069 /* Convert directly if that works. */
1070 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1071 != CODE_FOR_nothing)
1072 {
1073 emit_unop_insn (code, to, from, equiv_code);
1074 return;
1075 }
1076 else
1077 {
1078 enum machine_mode intermediate;
1079 rtx tmp;
1080 tree shift_amount;
1081
1082 /* Search for a mode to convert via. */
1083 for (intermediate = from_mode; intermediate != VOIDmode;
1084 intermediate = GET_MODE_WIDER_MODE (intermediate))
1085 if (((can_extend_p (to_mode, intermediate, unsignedp)
1086 != CODE_FOR_nothing)
1087 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1088 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1089 GET_MODE_BITSIZE (intermediate))))
1090 && (can_extend_p (intermediate, from_mode, unsignedp)
1091 != CODE_FOR_nothing))
1092 {
1093 convert_move (to, convert_to_mode (intermediate, from,
1094 unsignedp), unsignedp);
1095 return;
1096 }
1097
1098 /* No suitable intermediate mode.
1099 Generate what we need with shifts. */
1100 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1101 - GET_MODE_BITSIZE (from_mode), 0);
1102 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1103 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1104 to, unsignedp);
1105 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1106 to, unsignedp);
1107 if (tmp != to)
1108 emit_move_insn (to, tmp);
1109 return;
1110 }
1111 }
1112
1113 /* Support special truncate insns for certain modes. */
1114
1115 if (from_mode == DImode && to_mode == SImode)
1116 {
1117 #ifdef HAVE_truncdisi2
1118 if (HAVE_truncdisi2)
1119 {
1120 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1121 return;
1122 }
1123 #endif
1124 convert_move (to, force_reg (from_mode, from), unsignedp);
1125 return;
1126 }
1127
1128 if (from_mode == DImode && to_mode == HImode)
1129 {
1130 #ifdef HAVE_truncdihi2
1131 if (HAVE_truncdihi2)
1132 {
1133 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1134 return;
1135 }
1136 #endif
1137 convert_move (to, force_reg (from_mode, from), unsignedp);
1138 return;
1139 }
1140
1141 if (from_mode == DImode && to_mode == QImode)
1142 {
1143 #ifdef HAVE_truncdiqi2
1144 if (HAVE_truncdiqi2)
1145 {
1146 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1147 return;
1148 }
1149 #endif
1150 convert_move (to, force_reg (from_mode, from), unsignedp);
1151 return;
1152 }
1153
1154 if (from_mode == SImode && to_mode == HImode)
1155 {
1156 #ifdef HAVE_truncsihi2
1157 if (HAVE_truncsihi2)
1158 {
1159 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1160 return;
1161 }
1162 #endif
1163 convert_move (to, force_reg (from_mode, from), unsignedp);
1164 return;
1165 }
1166
1167 if (from_mode == SImode && to_mode == QImode)
1168 {
1169 #ifdef HAVE_truncsiqi2
1170 if (HAVE_truncsiqi2)
1171 {
1172 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1173 return;
1174 }
1175 #endif
1176 convert_move (to, force_reg (from_mode, from), unsignedp);
1177 return;
1178 }
1179
1180 if (from_mode == HImode && to_mode == QImode)
1181 {
1182 #ifdef HAVE_trunchiqi2
1183 if (HAVE_trunchiqi2)
1184 {
1185 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1186 return;
1187 }
1188 #endif
1189 convert_move (to, force_reg (from_mode, from), unsignedp);
1190 return;
1191 }
1192
1193 if (from_mode == TImode && to_mode == DImode)
1194 {
1195 #ifdef HAVE_trunctidi2
1196 if (HAVE_trunctidi2)
1197 {
1198 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1199 return;
1200 }
1201 #endif
1202 convert_move (to, force_reg (from_mode, from), unsignedp);
1203 return;
1204 }
1205
1206 if (from_mode == TImode && to_mode == SImode)
1207 {
1208 #ifdef HAVE_trunctisi2
1209 if (HAVE_trunctisi2)
1210 {
1211 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1212 return;
1213 }
1214 #endif
1215 convert_move (to, force_reg (from_mode, from), unsignedp);
1216 return;
1217 }
1218
1219 if (from_mode == TImode && to_mode == HImode)
1220 {
1221 #ifdef HAVE_trunctihi2
1222 if (HAVE_trunctihi2)
1223 {
1224 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1225 return;
1226 }
1227 #endif
1228 convert_move (to, force_reg (from_mode, from), unsignedp);
1229 return;
1230 }
1231
1232 if (from_mode == TImode && to_mode == QImode)
1233 {
1234 #ifdef HAVE_trunctiqi2
1235 if (HAVE_trunctiqi2)
1236 {
1237 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1238 return;
1239 }
1240 #endif
1241 convert_move (to, force_reg (from_mode, from), unsignedp);
1242 return;
1243 }
1244
1245 /* Handle truncation of volatile memrefs, and so on;
1246 the things that couldn't be truncated directly,
1247 and for which there was no special instruction. */
1248 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1249 {
1250 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1251 emit_move_insn (to, temp);
1252 return;
1253 }
1254
1255 /* Mode combination is not recognized. */
1256 abort ();
1257 }
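
/* A typical use of the conversion routines (illustrative only): to widen a
   HImode value HI_REG into a fresh SImode pseudo with sign extension, one
   can write

	rtx si_reg = gen_reg_rtx (SImode);
	convert_move (si_reg, hi_reg, 0);

   or, equivalently, si_reg = convert_to_mode (SImode, hi_reg, 0); passing
   1 for UNSIGNEDP requests zero extension instead.  */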
1258
1259 /* Return an rtx for a value that would result
1260 from converting X to mode MODE.
1261 Both X and MODE may be floating, or both integer.
1262 UNSIGNEDP is nonzero if X is an unsigned value.
1263 This can be done by referring to a part of X in place
1264 or by copying to a new temporary with conversion.
1265
1266 This function *must not* call protect_from_queue
1267 except when putting X into an insn (in which case convert_move does it). */
1268
1269 rtx
1270 convert_to_mode (mode, x, unsignedp)
1271 enum machine_mode mode;
1272 rtx x;
1273 int unsignedp;
1274 {
1275 return convert_modes (mode, VOIDmode, x, unsignedp);
1276 }
1277
1278 /* Return an rtx for a value that would result
1279 from converting X from mode OLDMODE to mode MODE.
1280 Both modes may be floating, or both integer.
1281 UNSIGNEDP is nonzero if X is an unsigned value.
1282
1283 This can be done by referring to a part of X in place
1284 or by copying to a new temporary with conversion.
1285
1286 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1287
1288 This function *must not* call protect_from_queue
1289 except when putting X into an insn (in which case convert_move does it). */
1290
1291 rtx
1292 convert_modes (mode, oldmode, x, unsignedp)
1293 enum machine_mode mode, oldmode;
1294 rtx x;
1295 int unsignedp;
1296 {
1297 rtx temp;
1298
1299 /* If FROM is a SUBREG that indicates that we have already done at least
1300 the required extension, strip it. */
1301
1302 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1303 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1304 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1305 x = gen_lowpart (mode, x);
1306
1307 if (GET_MODE (x) != VOIDmode)
1308 oldmode = GET_MODE (x);
1309
1310 if (mode == oldmode)
1311 return x;
1312
1313 /* There is one case that we must handle specially: If we are converting
1314 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1315 we are to interpret the constant as unsigned, gen_lowpart will do
1316 the wrong if the constant appears negative. What we want to do is
1317 make the high-order word of the constant zero, not all ones. */
1318
1319 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1320 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1321 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1322 {
1323 HOST_WIDE_INT val = INTVAL (x);
1324
1325 if (oldmode != VOIDmode
1326 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1327 {
1328 int width = GET_MODE_BITSIZE (oldmode);
1329
1330 /* We need to zero extend VAL. */
1331 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1332 }
1333
1334 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1335 }
1336
1337 /* We can do this with a gen_lowpart if both desired and current modes
1338 are integer, and this is either a constant integer, a register, or a
1339 non-volatile MEM. Except for the constant case where MODE is no
1340 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1341
1342 if ((GET_CODE (x) == CONST_INT
1343 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1344 || (GET_MODE_CLASS (mode) == MODE_INT
1345 && GET_MODE_CLASS (oldmode) == MODE_INT
1346 && (GET_CODE (x) == CONST_DOUBLE
1347 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1348 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1349 && direct_load[(int) mode])
1350 || (GET_CODE (x) == REG
1351 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1352 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1353 {
1354 /* ?? If we don't know OLDMODE, we have to assume here that
1355 X does not need sign- or zero-extension. This may not be
1356 the case, but it's the best we can do. */
1357 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1358 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1359 {
1360 HOST_WIDE_INT val = INTVAL (x);
1361 int width = GET_MODE_BITSIZE (oldmode);
1362
1363 /* We must sign or zero-extend in this case. Start by
1364 zero-extending, then sign extend if we need to. */
1365 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1366 if (! unsignedp
1367 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1368 val |= (HOST_WIDE_INT) (-1) << width;
1369
1370 return GEN_INT (trunc_int_for_mode (val, mode));
1371 }
1372
1373 return gen_lowpart (mode, x);
1374 }
1375
1376 temp = gen_reg_rtx (mode);
1377 convert_move (temp, x, unsignedp);
1378 return temp;
1379 }
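
/* Worked example of the CONST_INT handling above (illustrative): converting
   the QImode constant (const_int -1) with
   convert_modes (SImode, QImode, GEN_INT (-1), 1) zero-extends it to
   (const_int 255), while the same call with UNSIGNEDP == 0 sign-extends
   and yields (const_int -1).  */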
1380 \f
1381 /* This macro determines the largest unit size that move_by_pieces
1382    can use.  */
1383
1384 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1385 move efficiently, as opposed to MOVE_MAX which is the maximum
1386 number of bytes we can move with a single instruction. */
1387
1388 #ifndef MOVE_MAX_PIECES
1389 #define MOVE_MAX_PIECES MOVE_MAX
1390 #endif
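
/* For example (illustrative), a 64-bit target whose full-width moves are
   slow in this context might define MOVE_MAX_PIECES as 4 even though
   MOVE_MAX is 8, so that move_by_pieces never uses a mode wider than
   SImode.  */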
1391
1392 /* Generate several move instructions to copy LEN bytes from block FROM to
1393 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1394 and TO through protect_from_queue before calling.
1395
1396 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1397 used to push FROM to the stack.
1398
1399 ALIGN is maximum alignment we can assume. */
1400
1401 void
1402 move_by_pieces (to, from, len, align)
1403 rtx to, from;
1404 unsigned HOST_WIDE_INT len;
1405 unsigned int align;
1406 {
1407 struct move_by_pieces data;
1408 rtx to_addr, from_addr = XEXP (from, 0);
1409 unsigned int max_size = MOVE_MAX_PIECES + 1;
1410 enum machine_mode mode = VOIDmode, tmode;
1411 enum insn_code icode;
1412
1413 data.offset = 0;
1414 data.from_addr = from_addr;
1415 if (to)
1416 {
1417 to_addr = XEXP (to, 0);
1418 data.to = to;
1419 data.autinc_to
1420 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1421 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1422 data.reverse
1423 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1424 }
1425 else
1426 {
1427 to_addr = NULL_RTX;
1428 data.to = NULL_RTX;
1429 data.autinc_to = 1;
1430 #ifdef STACK_GROWS_DOWNWARD
1431 data.reverse = 1;
1432 #else
1433 data.reverse = 0;
1434 #endif
1435 }
1436 data.to_addr = to_addr;
1437 data.from = from;
1438 data.autinc_from
1439 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1440 || GET_CODE (from_addr) == POST_INC
1441 || GET_CODE (from_addr) == POST_DEC);
1442
1443 data.explicit_inc_from = 0;
1444 data.explicit_inc_to = 0;
1445 if (data.reverse) data.offset = len;
1446 data.len = len;
1447
1448 /* If copying requires more than two move insns,
1449 copy addresses to registers (to make displacements shorter)
1450 and use post-increment if available. */
1451 if (!(data.autinc_from && data.autinc_to)
1452 && move_by_pieces_ninsns (len, align) > 2)
1453 {
1454 /* Find the mode of the largest move... */
1455 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1456 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1457 if (GET_MODE_SIZE (tmode) < max_size)
1458 mode = tmode;
1459
1460 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1461 {
1462 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1463 data.autinc_from = 1;
1464 data.explicit_inc_from = -1;
1465 }
1466 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1467 {
1468 data.from_addr = copy_addr_to_reg (from_addr);
1469 data.autinc_from = 1;
1470 data.explicit_inc_from = 1;
1471 }
1472 if (!data.autinc_from && CONSTANT_P (from_addr))
1473 data.from_addr = copy_addr_to_reg (from_addr);
1474 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1475 {
1476 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1477 data.autinc_to = 1;
1478 data.explicit_inc_to = -1;
1479 }
1480 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1481 {
1482 data.to_addr = copy_addr_to_reg (to_addr);
1483 data.autinc_to = 1;
1484 data.explicit_inc_to = 1;
1485 }
1486 if (!data.autinc_to && CONSTANT_P (to_addr))
1487 data.to_addr = copy_addr_to_reg (to_addr);
1488 }
1489
1490 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1491 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1492 align = MOVE_MAX * BITS_PER_UNIT;
1493
1494 /* First move what we can in the largest integer mode, then go to
1495 successively smaller modes. */
1496
1497 while (max_size > 1)
1498 {
1499 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1500 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1501 if (GET_MODE_SIZE (tmode) < max_size)
1502 mode = tmode;
1503
1504 if (mode == VOIDmode)
1505 break;
1506
1507 icode = mov_optab->handlers[(int) mode].insn_code;
1508 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1509 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1510
1511 max_size = GET_MODE_SIZE (mode);
1512 }
1513
1514 /* The code above should have handled everything. */
1515 if (data.len > 0)
1516 abort ();
1517 }
1518
1519 /* Return number of insns required to move L bytes by pieces.
1520 ALIGN (in bits) is maximum alignment we can assume. */
1521
1522 static unsigned HOST_WIDE_INT
1523 move_by_pieces_ninsns (l, align)
1524 unsigned HOST_WIDE_INT l;
1525 unsigned int align;
1526 {
1527 unsigned HOST_WIDE_INT n_insns = 0;
1528 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1529
1530 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1531 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1532 align = MOVE_MAX * BITS_PER_UNIT;
1533
1534 while (max_size > 1)
1535 {
1536 enum machine_mode mode = VOIDmode, tmode;
1537 enum insn_code icode;
1538
1539 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1540 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1541 if (GET_MODE_SIZE (tmode) < max_size)
1542 mode = tmode;
1543
1544 if (mode == VOIDmode)
1545 break;
1546
1547 icode = mov_optab->handlers[(int) mode].insn_code;
1548 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1549 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1550
1551 max_size = GET_MODE_SIZE (mode);
1552 }
1553
1554 if (l)
1555 abort ();
1556 return n_insns;
1557 }
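
/* Worked example (illustrative, assuming MOVE_MAX == 8, fast unaligned
   access, and move patterns for all integer modes): for L == 11 the loop
   counts 11/8 = 1 DImode move leaving 3 bytes, 3/4 = 0 SImode moves,
   3/2 = 1 HImode move leaving 1 byte, and 1/1 = 1 QImode move, for a
   total of 3 insns.  */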
1558
1559 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1560 with move instructions for mode MODE. GENFUN is the gen_... function
1561 to make a move insn for that mode. DATA has all the other info. */
1562
1563 static void
1564 move_by_pieces_1 (genfun, mode, data)
1565 rtx (*genfun) PARAMS ((rtx, ...));
1566 enum machine_mode mode;
1567 struct move_by_pieces *data;
1568 {
1569 unsigned int size = GET_MODE_SIZE (mode);
1570 rtx to1 = NULL_RTX, from1;
1571
1572 while (data->len >= size)
1573 {
1574 if (data->reverse)
1575 data->offset -= size;
1576
1577 if (data->to)
1578 {
1579 if (data->autinc_to)
1580 {
1581 to1 = replace_equiv_address (data->to, data->to_addr);
1582 to1 = adjust_address (to1, mode, 0);
1583 }
1584 else
1585 to1 = adjust_address (data->to, mode, data->offset);
1586 }
1587
1588 if (data->autinc_from)
1589 {
1590 from1 = replace_equiv_address (data->from, data->from_addr);
1591 from1 = adjust_address (from1, mode, 0);
1592 }
1593 else
1594 from1 = adjust_address (data->from, mode, data->offset);
1595
1596 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1597 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1598 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1599 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1600
1601 if (data->to)
1602 emit_insn ((*genfun) (to1, from1));
1603 else
1604 {
1605 #ifdef PUSH_ROUNDING
1606 emit_single_push_insn (mode, from1, NULL);
1607 #else
1608 abort ();
1609 #endif
1610 }
1611
1612 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1613 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1614 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1615 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1616
1617 if (! data->reverse)
1618 data->offset += size;
1619
1620 data->len -= size;
1621 }
1622 }
1623 \f
1624 /* Emit code to move a block Y to a block X.
1625 This may be done with string-move instructions,
1626 with multiple scalar move instructions, or with a library call.
1627
1628 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1629 with mode BLKmode.
1630 SIZE is an rtx that says how long they are.
1631 ALIGN is the maximum alignment we can assume they have.
1632
1633 Return the address of the new block, if memcpy is called and returns it,
1634 0 otherwise. */
1635
1636 rtx
1637 emit_block_move (x, y, size)
1638 rtx x, y;
1639 rtx size;
1640 {
1641 rtx retval = 0;
1642 #ifdef TARGET_MEM_FUNCTIONS
1643 static tree fn;
1644 tree call_expr, arg_list;
1645 #endif
1646 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1647
1648 if (GET_MODE (x) != BLKmode)
1649 abort ();
1650
1651 if (GET_MODE (y) != BLKmode)
1652 abort ();
1653
1654 x = protect_from_queue (x, 1);
1655 y = protect_from_queue (y, 0);
1656 size = protect_from_queue (size, 0);
1657
1658 if (GET_CODE (x) != MEM)
1659 abort ();
1660 if (GET_CODE (y) != MEM)
1661 abort ();
1662 if (size == 0)
1663 abort ();
1664
1665 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1666 move_by_pieces (x, y, INTVAL (size), align);
1667 else
1668 {
1669 /* Try the most limited insn first, because there's no point
1670 including more than one in the machine description unless
1671 the more limited one has some advantage. */
1672
1673 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1674 enum machine_mode mode;
1675
1676 /* Since this is a move insn, we don't care about volatility. */
1677 volatile_ok = 1;
1678
1679 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1680 mode = GET_MODE_WIDER_MODE (mode))
1681 {
1682 enum insn_code code = movstr_optab[(int) mode];
1683 insn_operand_predicate_fn pred;
1684
1685 if (code != CODE_FOR_nothing
1686 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1687 here because if SIZE is less than the mode mask, as it is
1688 returned by the macro, it will definitely be less than the
1689 actual mode mask. */
1690 && ((GET_CODE (size) == CONST_INT
1691 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1692 <= (GET_MODE_MASK (mode) >> 1)))
1693 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1694 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1695 || (*pred) (x, BLKmode))
1696 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1697 || (*pred) (y, BLKmode))
1698 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1699 || (*pred) (opalign, VOIDmode)))
1700 {
1701 rtx op2;
1702 rtx last = get_last_insn ();
1703 rtx pat;
1704
1705 op2 = convert_to_mode (mode, size, 1);
1706 pred = insn_data[(int) code].operand[2].predicate;
1707 if (pred != 0 && ! (*pred) (op2, mode))
1708 op2 = copy_to_mode_reg (mode, op2);
1709
1710 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1711 if (pat)
1712 {
1713 emit_insn (pat);
1714 volatile_ok = 0;
1715 return 0;
1716 }
1717 else
1718 delete_insns_since (last);
1719 }
1720 }
1721
1722 volatile_ok = 0;
1723
1724 /* X, Y, or SIZE may have been passed through protect_from_queue.
1725
1726 It is unsafe to save the value generated by protect_from_queue
1727 and reuse it later. Consider what happens if emit_queue is
1728 called before the return value from protect_from_queue is used.
1729
1730 Expansion of the CALL_EXPR below will call emit_queue before
1731 we are finished emitting RTL for argument setup. So if we are
1732 not careful we could get the wrong value for an argument.
1733
1734 To avoid this problem we go ahead and emit code to copy X, Y &
1735 SIZE into new pseudos. We can then place those new pseudos
1736 into an RTL_EXPR and use them later, even after a call to
1737 emit_queue.
1738
1739 Note this is not strictly needed for library calls since they
1740 do not call emit_queue before loading their arguments. However,
1741 we may need to have library calls call emit_queue in the future
1742 since failing to do so could cause problems for targets which
1743 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1744 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1745 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1746
1747 #ifdef TARGET_MEM_FUNCTIONS
1748 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1749 #else
1750 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1751 TREE_UNSIGNED (integer_type_node));
1752 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1753 #endif
1754
1755 #ifdef TARGET_MEM_FUNCTIONS
1756 /* It is incorrect to use the libcall calling conventions to call
1757 memcpy in this context.
1758
1759 This could be a user call to memcpy and the user may wish to
1760 examine the return value from memcpy.
1761
1762 For targets where libcalls and normal calls have different conventions
1763 for returning pointers, we could end up generating incorrect code.
1764
1765 So instead of using a libcall sequence we build up a suitable
1766 CALL_EXPR and expand the call in the normal fashion. */
1767 if (fn == NULL_TREE)
1768 {
1769 tree fntype;
1770
1771 /* This was copied from except.c, I don't know if all this is
1772 necessary in this context or not. */
1773 fn = get_identifier ("memcpy");
1774 fntype = build_pointer_type (void_type_node);
1775 fntype = build_function_type (fntype, NULL_TREE);
1776 fn = build_decl (FUNCTION_DECL, fn, fntype);
1777 ggc_add_tree_root (&fn, 1);
1778 DECL_EXTERNAL (fn) = 1;
1779 TREE_PUBLIC (fn) = 1;
1780 DECL_ARTIFICIAL (fn) = 1;
1781 TREE_NOTHROW (fn) = 1;
1782 make_decl_rtl (fn, NULL);
1783 assemble_external (fn);
1784 }
1785
1786 /* We need to make an argument list for the function call.
1787
1788 memcpy has three arguments, the first two are void * addresses and
1789 the last is a size_t byte count for the copy. */
1790 arg_list
1791 = build_tree_list (NULL_TREE,
1792 make_tree (build_pointer_type (void_type_node), x));
1793 TREE_CHAIN (arg_list)
1794 = build_tree_list (NULL_TREE,
1795 make_tree (build_pointer_type (void_type_node), y));
1796 TREE_CHAIN (TREE_CHAIN (arg_list))
1797 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1798 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1799
1800 /* Now we have to build up the CALL_EXPR itself. */
1801 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1802 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1803 call_expr, arg_list, NULL_TREE);
1804 TREE_SIDE_EFFECTS (call_expr) = 1;
1805
1806 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1807 #else
1808 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1809 VOIDmode, 3, y, Pmode, x, Pmode,
1810 convert_to_mode (TYPE_MODE (integer_type_node), size,
1811 TREE_UNSIGNED (integer_type_node)),
1812 TYPE_MODE (integer_type_node));
1813 #endif
1814
1815 /* If we are initializing a readonly value, show the above call
1816 clobbered it. Otherwise, a load from it may erroneously be hoisted
1817 from a loop. */
1818 if (RTX_UNCHANGING_P (x))
1819 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1820 }
1821
1822 return retval;
1823 }
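
/* Typical use (illustrative only): to copy a BLKmode object of known size,
   callers do roughly

	emit_block_move (dst_mem, src_mem, GEN_INT (size));

   where DST_MEM and SRC_MEM are BLKmode MEMs whose MEM_ALIGN has been set
   (e.g. by set_mem_attributes).  The strategies above are tried in order:
   move_by_pieces for small constant sizes, then a movstr pattern, then a
   call to memcpy or bcopy.  */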
1824 \f
1825 /* Copy all or part of a value X into registers starting at REGNO.
1826 The number of registers to be filled is NREGS. */
1827
1828 void
1829 move_block_to_reg (regno, x, nregs, mode)
1830 int regno;
1831 rtx x;
1832 int nregs;
1833 enum machine_mode mode;
1834 {
1835 int i;
1836 #ifdef HAVE_load_multiple
1837 rtx pat;
1838 rtx last;
1839 #endif
1840
1841 if (nregs == 0)
1842 return;
1843
1844 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1845 x = validize_mem (force_const_mem (mode, x));
1846
1847 /* See if the machine can do this with a load multiple insn. */
1848 #ifdef HAVE_load_multiple
1849 if (HAVE_load_multiple)
1850 {
1851 last = get_last_insn ();
1852 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1853 GEN_INT (nregs));
1854 if (pat)
1855 {
1856 emit_insn (pat);
1857 return;
1858 }
1859 else
1860 delete_insns_since (last);
1861 }
1862 #endif
1863
1864 for (i = 0; i < nregs; i++)
1865 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1866 operand_subword_force (x, i, mode));
1867 }
1868
1869 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1870 The number of registers to be filled is NREGS. SIZE indicates the number
1871 of bytes in the object X. */
1872
1873 void
1874 move_block_from_reg (regno, x, nregs, size)
1875 int regno;
1876 rtx x;
1877 int nregs;
1878 int size;
1879 {
1880 int i;
1881 #ifdef HAVE_store_multiple
1882 rtx pat;
1883 rtx last;
1884 #endif
1885 enum machine_mode mode;
1886
1887 if (nregs == 0)
1888 return;
1889
1890 /* If SIZE is that of a mode no bigger than a word, just use that
1891 mode's store operation. */
1892 if (size <= UNITS_PER_WORD
1893 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1894 {
1895 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1896 return;
1897 }
1898
1899 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1900 to the left before storing to memory. Note that the previous test
1901 doesn't handle all cases (e.g. SIZE == 3). */
1902 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1903 {
1904 rtx tem = operand_subword (x, 0, 1, BLKmode);
1905 rtx shift;
1906
1907 if (tem == 0)
1908 abort ();
1909
1910 shift = expand_shift (LSHIFT_EXPR, word_mode,
1911 gen_rtx_REG (word_mode, regno),
1912 build_int_2 ((UNITS_PER_WORD - size)
1913 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1914 emit_move_insn (tem, shift);
1915 return;
1916 }
1917
1918 /* See if the machine can do this with a store multiple insn. */
1919 #ifdef HAVE_store_multiple
1920 if (HAVE_store_multiple)
1921 {
1922 last = get_last_insn ();
1923 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1924 GEN_INT (nregs));
1925 if (pat)
1926 {
1927 emit_insn (pat);
1928 return;
1929 }
1930 else
1931 delete_insns_since (last);
1932 }
1933 #endif
1934
1935 for (i = 0; i < nregs; i++)
1936 {
1937 rtx tem = operand_subword (x, i, 1, BLKmode);
1938
1939 if (tem == 0)
1940 abort ();
1941
1942 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1943 }
1944 }
1945
1946 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1947 registers represented by a PARALLEL. SSIZE represents the total size of
1948 block SRC in bytes, or -1 if not known. */
1949 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1950 the balance will be in what would be the low-order memory addresses, i.e.
1951 left justified for big endian, right justified for little endian. This
1952 happens to be true for the targets currently using this support. If this
1953 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1954 would be needed. */
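/* For illustration only (registers, modes and offsets are arbitrary):
   a DST of the kind this function expects might print as

	(parallel [(expr_list (reg:DI 4) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   where each element pairs a destination register, XEXP (elt, 0), with
   the byte offset of its piece within SRC, XEXP (elt, 1), exactly as
   the loop below reads them.  */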
1955
1956 void
1957 emit_group_load (dst, orig_src, ssize)
1958 rtx dst, orig_src;
1959 int ssize;
1960 {
1961 rtx *tmps, src;
1962 int start, i;
1963
1964 if (GET_CODE (dst) != PARALLEL)
1965 abort ();
1966
1967 /* Check for a NULL entry, used to indicate that the parameter goes
1968 both on the stack and in registers. */
1969 if (XEXP (XVECEXP (dst, 0, 0), 0))
1970 start = 0;
1971 else
1972 start = 1;
1973
1974 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1975
1976 /* Process the pieces. */
1977 for (i = start; i < XVECLEN (dst, 0); i++)
1978 {
1979 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1980 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1981 unsigned int bytelen = GET_MODE_SIZE (mode);
1982 int shift = 0;
1983
1984 /* Handle trailing fragments that run over the size of the struct. */
1985 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1986 {
1987 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1988 bytelen = ssize - bytepos;
1989 if (bytelen <= 0)
1990 abort ();
1991 }
1992
1993 /* If we won't be loading directly from memory, protect the real source
1994 from strange tricks we might play; but make sure that the source can
1995 be loaded directly into the destination. */
1996 src = orig_src;
1997 if (GET_CODE (orig_src) != MEM
1998 && (!CONSTANT_P (orig_src)
1999 || (GET_MODE (orig_src) != mode
2000 && GET_MODE (orig_src) != VOIDmode)))
2001 {
2002 if (GET_MODE (orig_src) == VOIDmode)
2003 src = gen_reg_rtx (mode);
2004 else
2005 src = gen_reg_rtx (GET_MODE (orig_src));
2006
2007 emit_move_insn (src, orig_src);
2008 }
2009
2010 /* Optimize the access just a bit. */
2011 if (GET_CODE (src) == MEM
2012 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2013 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2014 && bytelen == GET_MODE_SIZE (mode))
2015 {
2016 tmps[i] = gen_reg_rtx (mode);
2017 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2018 }
2019 else if (GET_CODE (src) == CONCAT)
2020 {
2021 if (bytepos == 0
2022 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2023 tmps[i] = XEXP (src, 0);
2024 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2025 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2026 tmps[i] = XEXP (src, 1);
2027 else if (bytepos == 0)
2028 {
2029 rtx mem = assign_stack_temp (GET_MODE (src),
2030 GET_MODE_SIZE (GET_MODE (src)), 0);
2031 emit_move_insn (mem, src);
2032 tmps[i] = adjust_address (mem, mode, 0);
2033 }
2034 else
2035 abort ();
2036 }
2037 else if (CONSTANT_P (src)
2038 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2039 tmps[i] = src;
2040 else
2041 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2042 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2043 mode, mode, ssize);
2044
2045 if (BYTES_BIG_ENDIAN && shift)
2046 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2047 tmps[i], 0, OPTAB_WIDEN);
2048 }
2049
2050 emit_queue ();
2051
2052 /* Copy the extracted pieces into the proper (probable) hard regs. */
2053 for (i = start; i < XVECLEN (dst, 0); i++)
2054 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2055 }
2056
2057 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2058 registers represented by a PARALLEL. SSIZE represents the total size of
2059 block DST, or -1 if not known. */
2060
2061 void
2062 emit_group_store (orig_dst, src, ssize)
2063 rtx orig_dst, src;
2064 int ssize;
2065 {
2066 rtx *tmps, dst;
2067 int start, i;
2068
2069 if (GET_CODE (src) != PARALLEL)
2070 abort ();
2071
2072 /* Check for a NULL entry, used to indicate that the parameter goes
2073 both on the stack and in registers. */
2074 if (XEXP (XVECEXP (src, 0, 0), 0))
2075 start = 0;
2076 else
2077 start = 1;
2078
2079 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2080
2081 /* Copy the (probable) hard regs into pseudos. */
2082 for (i = start; i < XVECLEN (src, 0); i++)
2083 {
2084 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2085 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2086 emit_move_insn (tmps[i], reg);
2087 }
2088 emit_queue ();
2089
2090 /* If we won't be storing directly into memory, protect the real destination
2091 from strange tricks we might play. */
2092 dst = orig_dst;
2093 if (GET_CODE (dst) == PARALLEL)
2094 {
2095 rtx temp;
2096
2097 /* We can get a PARALLEL dst if there is a conditional expression in
2098 a return statement. In that case, the dst and src are the same,
2099 so no action is necessary. */
2100 if (rtx_equal_p (dst, src))
2101 return;
2102
2103 /* It is unclear if we can ever reach here, but we may as well handle
2104 it. Allocate a temporary, and split this into a store/load to/from
2105 the temporary. */
2106
2107 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2108 emit_group_store (temp, src, ssize);
2109 emit_group_load (dst, temp, ssize);
2110 return;
2111 }
2112 else if (GET_CODE (dst) != MEM)
2113 {
2114 dst = gen_reg_rtx (GET_MODE (orig_dst));
2115 /* Make life a bit easier for combine. */
2116 emit_move_insn (dst, const0_rtx);
2117 }
2118
2119 /* Process the pieces. */
2120 for (i = start; i < XVECLEN (src, 0); i++)
2121 {
2122 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2123 enum machine_mode mode = GET_MODE (tmps[i]);
2124 unsigned int bytelen = GET_MODE_SIZE (mode);
2125
2126 /* Handle trailing fragments that run over the size of the struct. */
2127 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2128 {
2129 if (BYTES_BIG_ENDIAN)
2130 {
2131 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2132 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2133 tmps[i], 0, OPTAB_WIDEN);
2134 }
2135 bytelen = ssize - bytepos;
2136 }
2137
2138 /* Optimize the access just a bit. */
2139 if (GET_CODE (dst) == MEM
2140 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2141 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2142 && bytelen == GET_MODE_SIZE (mode))
2143 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2144 else
2145 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2146 mode, tmps[i], ssize);
2147 }
2148
2149 emit_queue ();
2150
2151 /* Copy from the pseudo into the (probable) hard reg. */
2152 if (GET_CODE (dst) == REG)
2153 emit_move_insn (orig_dst, dst);
2154 }
2155
2156 /* Generate code to copy a BLKmode object of TYPE out of a
2157 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2158 is null, a stack temporary is created. TGTBLK is returned.
2159
2160 The primary purpose of this routine is to handle functions
2161 that return BLKmode structures in registers. Some machines
2162 (the PA for example) want to return all small structures
2163 in registers regardless of the structure's alignment. */
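/* A hypothetical caller (names illustrative): after expanding a call
   whose value arrives in the hard return register VALREG, one might
   write

	target = copy_blkmode_from_reg (NULL_RTX, valreg, TREE_TYPE (exp));

   letting this routine allocate the stack temporary because TGTBLK is
   null.  */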
2164
2165 rtx
2166 copy_blkmode_from_reg (tgtblk, srcreg, type)
2167 rtx tgtblk;
2168 rtx srcreg;
2169 tree type;
2170 {
2171 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2172 rtx src = NULL, dst = NULL;
2173 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2174 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2175
2176 if (tgtblk == 0)
2177 {
2178 tgtblk = assign_temp (build_qualified_type (type,
2179 (TYPE_QUALS (type)
2180 | TYPE_QUAL_CONST)),
2181 0, 1, 1);
2182 preserve_temp_slots (tgtblk);
2183 }
2184
2185 /* This code assumes srcreg is at least a full word. If it isn't,
2186 copy it into a new pseudo which is a full word. */
2187 if (GET_MODE (srcreg) != BLKmode
2188 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2189 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2190
2191 /* Structures whose size is not a multiple of a word are aligned
2192 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2193 machine, this means we must skip the empty high order bytes when
2194 calculating the bit offset. */
2195 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2196 big_endian_correction
2197 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2198
2199 /* Copy the structure BITSIZE bits at a time.
2200
2201 We could probably emit more efficient code for machines which do not use
2202 strict alignment, but it doesn't seem worth the effort at the current
2203 time. */
2204 for (bitpos = 0, xbitpos = big_endian_correction;
2205 bitpos < bytes * BITS_PER_UNIT;
2206 bitpos += bitsize, xbitpos += bitsize)
2207 {
2208 /* We need a new source operand each time xbitpos is on a
2209 word boundary and when xbitpos == big_endian_correction
2210 (the first time through). */
2211 if (xbitpos % BITS_PER_WORD == 0
2212 || xbitpos == big_endian_correction)
2213 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2214 GET_MODE (srcreg));
2215
2216 /* We need a new destination operand each time bitpos is on
2217 a word boundary. */
2218 if (bitpos % BITS_PER_WORD == 0)
2219 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2220
2221 /* Use xbitpos for the source extraction (right justified) and
2222 bitpos for the destination store (left justified). */
2223 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2224 extract_bit_field (src, bitsize,
2225 xbitpos % BITS_PER_WORD, 1,
2226 NULL_RTX, word_mode, word_mode,
2227 BITS_PER_WORD),
2228 BITS_PER_WORD);
2229 }
2230
2231 return tgtblk;
2232 }
2233
2234 /* Add a USE expression for REG to the (possibly empty) list pointed
2235 to by CALL_FUSAGE. REG must denote a hard register. */
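/* A sketch of typical use (the register chosen is illustrative):

	rtx call_fusage = NULL_RTX;
	use_reg (&call_fusage, gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM));

   the accumulated list ends up in CALL_INSN_FUNCTION_USAGE of the
   emitted call insn.  */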
2236
2237 void
2238 use_reg (call_fusage, reg)
2239 rtx *call_fusage, reg;
2240 {
2241 if (GET_CODE (reg) != REG
2242 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2243 abort ();
2244
2245 *call_fusage
2246 = gen_rtx_EXPR_LIST (VOIDmode,
2247 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2248 }
2249
2250 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2251 starting at REGNO. All of these registers must be hard registers. */
2252
2253 void
2254 use_regs (call_fusage, regno, nregs)
2255 rtx *call_fusage;
2256 int regno;
2257 int nregs;
2258 {
2259 int i;
2260
2261 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2262 abort ();
2263
2264 for (i = 0; i < nregs; i++)
2265 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2266 }
2267
2268 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2269 PARALLEL REGS. This is for calls that pass values in multiple
2270 non-contiguous locations. The Irix 6 ABI has examples of this. */
2271
2272 void
2273 use_group_regs (call_fusage, regs)
2274 rtx *call_fusage;
2275 rtx regs;
2276 {
2277 int i;
2278
2279 for (i = 0; i < XVECLEN (regs, 0); i++)
2280 {
2281 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2282
2283 /* A NULL entry means the parameter goes both on the stack and in
2284 registers. This can also be a MEM for targets that pass values
2285 partially on the stack and partially in registers. */
2286 if (reg != 0 && GET_CODE (reg) == REG)
2287 use_reg (call_fusage, reg);
2288 }
2289 }
2290 \f
2291
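/* Determine whether the LEN bytes generated by CONSTFUN can be stored
   to memory using several move instructions.  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call;
   ALIGN is maximum alignment we can assume.  Return nonzero if a call
   to store_by_pieces should succeed.  */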
2292 int
2293 can_store_by_pieces (len, constfun, constfundata, align)
2294 unsigned HOST_WIDE_INT len;
2295 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2296 PTR constfundata;
2297 unsigned int align;
2298 {
2299 unsigned HOST_WIDE_INT max_size, l;
2300 HOST_WIDE_INT offset = 0;
2301 enum machine_mode mode, tmode;
2302 enum insn_code icode;
2303 int reverse;
2304 rtx cst;
2305
2306 if (! MOVE_BY_PIECES_P (len, align))
2307 return 0;
2308
2309 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2310 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2311 align = MOVE_MAX * BITS_PER_UNIT;
2312
2313 /* We would first store what we can in the largest integer mode, then go to
2314 successively smaller modes. */
2315
2316 for (reverse = 0;
2317 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2318 reverse++)
2319 {
2320 l = len;
2321 mode = VOIDmode;
2322 max_size = MOVE_MAX_PIECES + 1;
2323 while (max_size > 1)
2324 {
2325 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2326 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2327 if (GET_MODE_SIZE (tmode) < max_size)
2328 mode = tmode;
2329
2330 if (mode == VOIDmode)
2331 break;
2332
2333 icode = mov_optab->handlers[(int) mode].insn_code;
2334 if (icode != CODE_FOR_nothing
2335 && align >= GET_MODE_ALIGNMENT (mode))
2336 {
2337 unsigned int size = GET_MODE_SIZE (mode);
2338
2339 while (l >= size)
2340 {
2341 if (reverse)
2342 offset -= size;
2343
2344 cst = (*constfun) (constfundata, offset, mode);
2345 if (!LEGITIMATE_CONSTANT_P (cst))
2346 return 0;
2347
2348 if (!reverse)
2349 offset += size;
2350
2351 l -= size;
2352 }
2353 }
2354
2355 max_size = GET_MODE_SIZE (mode);
2356 }
2357
2358 /* The code above should have handled everything. */
2359 if (l != 0)
2360 abort ();
2361 }
2362
2363 return 1;
2364 }
2365
2366 /* Generate several move instructions to store LEN bytes generated by
2367 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2368 pointer which will be passed as argument in every CONSTFUN call.
2369 ALIGN is maximum alignment we can assume. */
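/* A minimal usage sketch (the callback name is hypothetical, not part
   of this file): with a helper shaped like clear_by_pieces_1 below,

	static rtx
	fill_ff_1 (data, offset, mode)
	     PTR data ATTRIBUTE_UNUSED;
	     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
	     enum machine_mode mode ATTRIBUTE_UNUSED;
	{
	  return constm1_rtx;
	}

   a caller could fill the BLKmode MEM in TO with all-ones bytes via

	if (can_store_by_pieces (len, fill_ff_1, NULL, MEM_ALIGN (to)))
	  store_by_pieces (to, len, fill_ff_1, NULL, MEM_ALIGN (to));

   and fall back to a library call otherwise.  */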
2370
2371 void
2372 store_by_pieces (to, len, constfun, constfundata, align)
2373 rtx to;
2374 unsigned HOST_WIDE_INT len;
2375 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2376 PTR constfundata;
2377 unsigned int align;
2378 {
2379 struct store_by_pieces data;
2380
2381 if (! MOVE_BY_PIECES_P (len, align))
2382 abort ();
2383 to = protect_from_queue (to, 1);
2384 data.constfun = constfun;
2385 data.constfundata = constfundata;
2386 data.len = len;
2387 data.to = to;
2388 store_by_pieces_1 (&data, align);
2389 }
2390
2391 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2392 rtx with BLKmode). The caller must pass TO through protect_from_queue
2393 before calling. ALIGN is maximum alignment we can assume. */
2394
2395 static void
2396 clear_by_pieces (to, len, align)
2397 rtx to;
2398 unsigned HOST_WIDE_INT len;
2399 unsigned int align;
2400 {
2401 struct store_by_pieces data;
2402
2403 data.constfun = clear_by_pieces_1;
2404 data.constfundata = NULL;
2405 data.len = len;
2406 data.to = to;
2407 store_by_pieces_1 (&data, align);
2408 }
2409
2410 /* Callback routine for clear_by_pieces.
2411 Return const0_rtx unconditionally. */
2412
2413 static rtx
2414 clear_by_pieces_1 (data, offset, mode)
2415 PTR data ATTRIBUTE_UNUSED;
2416 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2417 enum machine_mode mode ATTRIBUTE_UNUSED;
2418 {
2419 return const0_rtx;
2420 }
2421
2422 /* Subroutine of clear_by_pieces and store_by_pieces.
2423 Generate several move instructions to store LEN bytes of block TO. (A MEM
2424 rtx with BLKmode). The caller must pass TO through protect_from_queue
2425 before calling. ALIGN is maximum alignment we can assume. */
2426
2427 static void
2428 store_by_pieces_1 (data, align)
2429 struct store_by_pieces *data;
2430 unsigned int align;
2431 {
2432 rtx to_addr = XEXP (data->to, 0);
2433 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2434 enum machine_mode mode = VOIDmode, tmode;
2435 enum insn_code icode;
2436
2437 data->offset = 0;
2438 data->to_addr = to_addr;
2439 data->autinc_to
2440 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2441 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2442
2443 data->explicit_inc_to = 0;
2444 data->reverse
2445 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2446 if (data->reverse)
2447 data->offset = data->len;
2448
2449 /* If storing requires more than two move insns,
2450 copy addresses to registers (to make displacements shorter)
2451 and use post-increment if available. */
2452 if (!data->autinc_to
2453 && move_by_pieces_ninsns (data->len, align) > 2)
2454 {
2455 /* Determine the main mode we'll be using. */
2456 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2457 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2458 if (GET_MODE_SIZE (tmode) < max_size)
2459 mode = tmode;
2460
2461 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2462 {
2463 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2464 data->autinc_to = 1;
2465 data->explicit_inc_to = -1;
2466 }
2467
2468 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2469 && ! data->autinc_to)
2470 {
2471 data->to_addr = copy_addr_to_reg (to_addr);
2472 data->autinc_to = 1;
2473 data->explicit_inc_to = 1;
2474 }
2475
2476 if ( !data->autinc_to && CONSTANT_P (to_addr))
2477 data->to_addr = copy_addr_to_reg (to_addr);
2478 }
2479
2480 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2481 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2482 align = MOVE_MAX * BITS_PER_UNIT;
2483
2484 /* First store what we can in the largest integer mode, then go to
2485 successively smaller modes. */
2486
2487 while (max_size > 1)
2488 {
2489 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2490 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2491 if (GET_MODE_SIZE (tmode) < max_size)
2492 mode = tmode;
2493
2494 if (mode == VOIDmode)
2495 break;
2496
2497 icode = mov_optab->handlers[(int) mode].insn_code;
2498 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2499 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2500
2501 max_size = GET_MODE_SIZE (mode);
2502 }
2503
2504 /* The code above should have handled everything. */
2505 if (data->len != 0)
2506 abort ();
2507 }
2508
2509 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2510 with move instructions for mode MODE. GENFUN is the gen_... function
2511 to make a move insn for that mode. DATA has all the other info. */
2512
2513 static void
2514 store_by_pieces_2 (genfun, mode, data)
2515 rtx (*genfun) PARAMS ((rtx, ...));
2516 enum machine_mode mode;
2517 struct store_by_pieces *data;
2518 {
2519 unsigned int size = GET_MODE_SIZE (mode);
2520 rtx to1, cst;
2521
2522 while (data->len >= size)
2523 {
2524 if (data->reverse)
2525 data->offset -= size;
2526
2527 if (data->autinc_to)
2528 {
2529 to1 = replace_equiv_address (data->to, data->to_addr);
2530 to1 = adjust_address (to1, mode, 0);
2531 }
2532 else
2533 to1 = adjust_address (data->to, mode, data->offset);
2534
2535 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2536 emit_insn (gen_add2_insn (data->to_addr,
2537 GEN_INT (-(HOST_WIDE_INT) size)));
2538
2539 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2540 emit_insn ((*genfun) (to1, cst));
2541
2542 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2543 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2544
2545 if (! data->reverse)
2546 data->offset += size;
2547
2548 data->len -= size;
2549 }
2550 }
2551 \f
2552 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2553 its length in bytes. */
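/* For instance, zeroing a 16-byte BLKmode stack temporary (the size is
   illustrative):

	rtx mem = assign_stack_temp (BLKmode, 16, 0);
	clear_storage (mem, GEN_INT (16));

   The return value is non-null only on the TARGET_MEM_FUNCTIONS path
   below, where it is the rtx for memset's return value.  */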
2554
2555 rtx
2556 clear_storage (object, size)
2557 rtx object;
2558 rtx size;
2559 {
2560 #ifdef TARGET_MEM_FUNCTIONS
2561 static tree fn;
2562 tree call_expr, arg_list;
2563 #endif
2564 rtx retval = 0;
2565 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2566 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2567
2568 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2569 just move a zero. Otherwise, do this a piece at a time. */
2570 if (GET_MODE (object) != BLKmode
2571 && GET_CODE (size) == CONST_INT
2572 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2573 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2574 else
2575 {
2576 object = protect_from_queue (object, 1);
2577 size = protect_from_queue (size, 0);
2578
2579 if (GET_CODE (size) == CONST_INT
2580 && MOVE_BY_PIECES_P (INTVAL (size), align))
2581 clear_by_pieces (object, INTVAL (size), align);
2582 else
2583 {
2584 /* Try the most limited insn first, because there's no point
2585 including more than one in the machine description unless
2586 the more limited one has some advantage. */
2587
2588 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2589 enum machine_mode mode;
2590
2591 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2592 mode = GET_MODE_WIDER_MODE (mode))
2593 {
2594 enum insn_code code = clrstr_optab[(int) mode];
2595 insn_operand_predicate_fn pred;
2596
2597 if (code != CODE_FOR_nothing
2598 /* We don't need MODE to be narrower than
2599 HOST_BITS_PER_WIDE_INT here because if SIZE is less than
2600 the mode mask, as it is returned by the macro, it will
2601 definitely be less than the actual mode mask. */
2602 && ((GET_CODE (size) == CONST_INT
2603 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2604 <= (GET_MODE_MASK (mode) >> 1)))
2605 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2606 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2607 || (*pred) (object, BLKmode))
2608 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2609 || (*pred) (opalign, VOIDmode)))
2610 {
2611 rtx op1;
2612 rtx last = get_last_insn ();
2613 rtx pat;
2614
2615 op1 = convert_to_mode (mode, size, 1);
2616 pred = insn_data[(int) code].operand[1].predicate;
2617 if (pred != 0 && ! (*pred) (op1, mode))
2618 op1 = copy_to_mode_reg (mode, op1);
2619
2620 pat = GEN_FCN ((int) code) (object, op1, opalign);
2621 if (pat)
2622 {
2623 emit_insn (pat);
2624 return 0;
2625 }
2626 else
2627 delete_insns_since (last);
2628 }
2629 }
2630
2631 /* OBJECT or SIZE may have been passed through protect_from_queue.
2632
2633 It is unsafe to save the value generated by protect_from_queue
2634 and reuse it later. Consider what happens if emit_queue is
2635 called before the return value from protect_from_queue is used.
2636
2637 Expansion of the CALL_EXPR below will call emit_queue before
2638 we are finished emitting RTL for argument setup. So if we are
2639 not careful we could get the wrong value for an argument.
2640
2641 To avoid this problem we go ahead and emit code to copy OBJECT
2642 and SIZE into new pseudos. We can then place those new pseudos
2643 into an RTL_EXPR and use them later, even after a call to
2644 emit_queue.
2645
2646 Note this is not strictly needed for library calls since they
2647 do not call emit_queue before loading their arguments. However,
2648 we may need to have library calls call emit_queue in the future
2649 since failing to do so could cause problems for targets which
2650 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2651 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2652
2653 #ifdef TARGET_MEM_FUNCTIONS
2654 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2655 #else
2656 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2657 TREE_UNSIGNED (integer_type_node));
2658 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2659 #endif
2660
2661 #ifdef TARGET_MEM_FUNCTIONS
2662 /* It is incorrect to use the libcall calling conventions to call
2663 memset in this context.
2664
2665 This could be a user call to memset and the user may wish to
2666 examine the return value from memset.
2667
2668 For targets where libcalls and normal calls have different
2669 conventions for returning pointers, we could end up generating
2670 incorrect code.
2671
2672 So instead of using a libcall sequence we build up a suitable
2673 CALL_EXPR and expand the call in the normal fashion. */
2674 if (fn == NULL_TREE)
2675 {
2676 tree fntype;
2677
2678 /* This was copied from except.c; I don't know whether all of this
2679 is necessary in this context or not. */
2680 fn = get_identifier ("memset");
2681 fntype = build_pointer_type (void_type_node);
2682 fntype = build_function_type (fntype, NULL_TREE);
2683 fn = build_decl (FUNCTION_DECL, fn, fntype);
2684 ggc_add_tree_root (&fn, 1);
2685 DECL_EXTERNAL (fn) = 1;
2686 TREE_PUBLIC (fn) = 1;
2687 DECL_ARTIFICIAL (fn) = 1;
2688 TREE_NOTHROW (fn) = 1;
2689 make_decl_rtl (fn, NULL);
2690 assemble_external (fn);
2691 }
2692
2693 /* We need to make an argument list for the function call.
2694
2695 memset has three arguments: the first is a void * address, the
2696 second an integer with the initialization value, and the last is a
2697 size_t byte count. */
2698 arg_list
2699 = build_tree_list (NULL_TREE,
2700 make_tree (build_pointer_type (void_type_node),
2701 object));
2702 TREE_CHAIN (arg_list)
2703 = build_tree_list (NULL_TREE,
2704 make_tree (integer_type_node, const0_rtx));
2705 TREE_CHAIN (TREE_CHAIN (arg_list))
2706 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2707 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2708
2709 /* Now we have to build up the CALL_EXPR itself. */
2710 call_expr = build1 (ADDR_EXPR,
2711 build_pointer_type (TREE_TYPE (fn)), fn);
2712 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2713 call_expr, arg_list, NULL_TREE);
2714 TREE_SIDE_EFFECTS (call_expr) = 1;
2715
2716 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2717 #else
2718 emit_library_call (bzero_libfunc, LCT_NORMAL,
2719 VOIDmode, 2, object, Pmode, size,
2720 TYPE_MODE (integer_type_node));
2721 #endif
2722
2723 /* If we are initializing a readonly value, show the above call
2724 clobbered it. Otherwise, a load from it may erroneously be
2725 hoisted from a loop. */
2726 if (RTX_UNCHANGING_P (object))
2727 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2728 }
2729 }
2730
2731 return retval;
2732 }
2733
2734 /* Generate code to copy Y into X.
2735 Both Y and X must have the same mode, except that
2736 Y can be a constant with VOIDmode.
2737 This mode cannot be BLKmode; use emit_block_move for that.
2738
2739 Return the last instruction emitted. */
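/* The simplest use is loading a constant into a fresh pseudo, e.g.
   (mode and value illustrative):

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, GEN_INT (42));

   Constants that fail LEGITIMATE_CONSTANT_P are forced into the
   constant pool first, as the code below shows.  */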
2740
2741 rtx
2742 emit_move_insn (x, y)
2743 rtx x, y;
2744 {
2745 enum machine_mode mode = GET_MODE (x);
2746 rtx y_cst = NULL_RTX;
2747 rtx last_insn;
2748
2749 x = protect_from_queue (x, 1);
2750 y = protect_from_queue (y, 0);
2751
2752 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2753 abort ();
2754
2755 /* Never force constant_p_rtx to memory. */
2756 if (GET_CODE (y) == CONSTANT_P_RTX)
2757 ;
2758 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2759 {
2760 y_cst = y;
2761 y = force_const_mem (mode, y);
2762 }
2763
2764 /* If X or Y are memory references, verify that their addresses are valid
2765 for the machine. */
2766 if (GET_CODE (x) == MEM
2767 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2768 && ! push_operand (x, GET_MODE (x)))
2769 || (flag_force_addr
2770 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2771 x = validize_mem (x);
2772
2773 if (GET_CODE (y) == MEM
2774 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2775 || (flag_force_addr
2776 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2777 y = validize_mem (y);
2778
2779 if (mode == BLKmode)
2780 abort ();
2781
2782 last_insn = emit_move_insn_1 (x, y);
2783
2784 if (y_cst && GET_CODE (x) == REG)
2785 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2786
2787 return last_insn;
2788 }
2789
2790 /* Low level part of emit_move_insn.
2791 Called just like emit_move_insn, but assumes X and Y
2792 are basically valid. */
2793
2794 rtx
2795 emit_move_insn_1 (x, y)
2796 rtx x, y;
2797 {
2798 enum machine_mode mode = GET_MODE (x);
2799 enum machine_mode submode;
2800 enum mode_class class = GET_MODE_CLASS (mode);
2801 unsigned int i;
2802
2803 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2804 abort ();
2805
2806 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2807 return
2808 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2809
2810 /* Expand complex moves by moving real part and imag part, if possible. */
2811 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2812 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2813 * BITS_PER_UNIT),
2814 (class == MODE_COMPLEX_INT
2815 ? MODE_INT : MODE_FLOAT),
2816 0))
2817 && (mov_optab->handlers[(int) submode].insn_code
2818 != CODE_FOR_nothing))
2819 {
2820 /* Don't split destination if it is a stack push. */
2821 int stack = push_operand (x, GET_MODE (x));
2822
2823 #ifdef PUSH_ROUNDING
2824 /* In case we output to the stack, but the size is smaller than the
2825 machine can push exactly, we need to use move instructions. */
2826 if (stack
2827 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2828 {
2829 rtx temp;
2830 int offset1, offset2;
2831
2832 /* Do not use anti_adjust_stack, since we don't want to update
2833 stack_pointer_delta. */
2834 temp = expand_binop (Pmode,
2835 #ifdef STACK_GROWS_DOWNWARD
2836 sub_optab,
2837 #else
2838 add_optab,
2839 #endif
2840 stack_pointer_rtx,
2841 GEN_INT
2842 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2843 stack_pointer_rtx,
2844 0,
2845 OPTAB_LIB_WIDEN);
2846 if (temp != stack_pointer_rtx)
2847 emit_move_insn (stack_pointer_rtx, temp);
2848 #ifdef STACK_GROWS_DOWNWARD
2849 offset1 = 0;
2850 offset2 = GET_MODE_SIZE (submode);
2851 #else
2852 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2853 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2854 + GET_MODE_SIZE (submode));
2855 #endif
2856 emit_move_insn (change_address (x, submode,
2857 gen_rtx_PLUS (Pmode,
2858 stack_pointer_rtx,
2859 GEN_INT (offset1))),
2860 gen_realpart (submode, y));
2861 emit_move_insn (change_address (x, submode,
2862 gen_rtx_PLUS (Pmode,
2863 stack_pointer_rtx,
2864 GEN_INT (offset2))),
2865 gen_imagpart (submode, y));
2866 }
2867 else
2868 #endif
2869 /* If this is a stack push, push the highpart first, so it
2870 will be in the argument order.
2871
2872 In that case, change_address is used only to convert
2873 the mode, not to change the address. */
2874 if (stack)
2875 {
2876 /* Note that the real part always precedes the imag part in memory
2877 regardless of machine's endianness. */
2878 #ifdef STACK_GROWS_DOWNWARD
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (gen_rtx_MEM (submode, XEXP (x, 0)),
2881 gen_imagpart (submode, y)));
2882 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2883 (gen_rtx_MEM (submode, XEXP (x, 0)),
2884 gen_realpart (submode, y)));
2885 #else
2886 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2887 (gen_rtx_MEM (submode, XEXP (x, 0)),
2888 gen_realpart (submode, y)));
2889 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2890 (gen_rtx_MEM (submode, XEXP (x, 0)),
2891 gen_imagpart (submode, y)));
2892 #endif
2893 }
2894 else
2895 {
2896 rtx realpart_x, realpart_y;
2897 rtx imagpart_x, imagpart_y;
2898
2899 /* If this is a complex value with each part being smaller than a
2900 word, the usual calling sequence will likely pack the pieces into
2901 a single register. Unfortunately, SUBREG of hard registers only
2902 deals in terms of words, so we have a problem converting input
2903 arguments to the CONCAT of two registers that is used elsewhere
2904 for complex values. If this is before reload, we can copy it into
2905 memory and reload. FIXME, we should see about using extract and
2906 insert on integer registers, but complex short and complex char
2907 variables should be rarely used. */
2908 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2909 && (reload_in_progress | reload_completed) == 0)
2910 {
2911 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2912 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2913
2914 if (packed_dest_p || packed_src_p)
2915 {
2916 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2917 ? MODE_FLOAT : MODE_INT);
2918
2919 enum machine_mode reg_mode
2920 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2921
2922 if (reg_mode != BLKmode)
2923 {
2924 rtx mem = assign_stack_temp (reg_mode,
2925 GET_MODE_SIZE (mode), 0);
2926 rtx cmem = adjust_address (mem, mode, 0);
2927
2928 cfun->cannot_inline
2929 = N_("function using short complex types cannot be inline");
2930
2931 if (packed_dest_p)
2932 {
2933 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2934 emit_move_insn_1 (cmem, y);
2935 return emit_move_insn_1 (sreg, mem);
2936 }
2937 else
2938 {
2939 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2940 emit_move_insn_1 (mem, sreg);
2941 return emit_move_insn_1 (x, cmem);
2942 }
2943 }
2944 }
2945 }
2946
2947 realpart_x = gen_realpart (submode, x);
2948 realpart_y = gen_realpart (submode, y);
2949 imagpart_x = gen_imagpart (submode, x);
2950 imagpart_y = gen_imagpart (submode, y);
2951
2952 /* Show the output dies here. This is necessary for SUBREGs
2953 of pseudos since we cannot track their lifetimes correctly;
2954 hard regs shouldn't appear here except as return values.
2955 We never want to emit such a clobber after reload. */
2956 if (x != y
2957 && ! (reload_in_progress || reload_completed)
2958 && (GET_CODE (realpart_x) == SUBREG
2959 || GET_CODE (imagpart_x) == SUBREG))
2960 {
2961 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2962 }
2963
2964 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2965 (realpart_x, realpart_y));
2966 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2967 (imagpart_x, imagpart_y));
2968 }
2969
2970 return get_last_insn ();
2971 }
2972
2973 /* This will handle any multi-word mode that lacks a move_insn pattern.
2974 However, you will get better code if you define such patterns,
2975 even if they must turn into multiple assembler instructions. */
2976 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2977 {
2978 rtx last_insn = 0;
2979 rtx seq, inner;
2980 int need_clobber;
2981
2982 #ifdef PUSH_ROUNDING
2983
2984 /* If X is a push on the stack, do the push now and replace
2985 X with a reference to the stack pointer. */
2986 if (push_operand (x, GET_MODE (x)))
2987 {
2988 rtx temp;
2989 enum rtx_code code;
2990
2991 /* Do not use anti_adjust_stack, since we don't want to update
2992 stack_pointer_delta. */
2993 temp = expand_binop (Pmode,
2994 #ifdef STACK_GROWS_DOWNWARD
2995 sub_optab,
2996 #else
2997 add_optab,
2998 #endif
2999 stack_pointer_rtx,
3000 GEN_INT
3001 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
3002 stack_pointer_rtx,
3003 0,
3004 OPTAB_LIB_WIDEN);
3005 if (temp != stack_pointer_rtx)
3006 emit_move_insn (stack_pointer_rtx, temp);
3007
3008 code = GET_CODE (XEXP (x, 0));
3009 /* Just hope that small offsets off SP are OK. */
3010 if (code == POST_INC)
3011 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3012 GEN_INT (-(HOST_WIDE_INT)
3013 GET_MODE_SIZE (GET_MODE (x))));
3014 else if (code == POST_DEC)
3015 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3016 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3017 else
3018 temp = stack_pointer_rtx;
3019
3020 x = change_address (x, VOIDmode, temp);
3021 }
3022 #endif
3023
3024 /* If we are in reload, see if either operand is a MEM whose address
3025 is scheduled for replacement. */
3026 if (reload_in_progress && GET_CODE (x) == MEM
3027 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3028 x = replace_equiv_address_nv (x, inner);
3029 if (reload_in_progress && GET_CODE (y) == MEM
3030 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3031 y = replace_equiv_address_nv (y, inner);
3032
3033 start_sequence ();
3034
3035 need_clobber = 0;
3036 for (i = 0;
3037 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3038 i++)
3039 {
3040 rtx xpart = operand_subword (x, i, 1, mode);
3041 rtx ypart = operand_subword (y, i, 1, mode);
3042
3043 /* If we can't get a part of Y, put Y into memory if it is a
3044 constant. Otherwise, force it into a register. If we still
3045 can't get a part of Y, abort. */
3046 if (ypart == 0 && CONSTANT_P (y))
3047 {
3048 y = force_const_mem (mode, y);
3049 ypart = operand_subword (y, i, 1, mode);
3050 }
3051 else if (ypart == 0)
3052 ypart = operand_subword_force (y, i, mode);
3053
3054 if (xpart == 0 || ypart == 0)
3055 abort ();
3056
3057 need_clobber |= (GET_CODE (xpart) == SUBREG);
3058
3059 last_insn = emit_move_insn (xpart, ypart);
3060 }
3061
3062 seq = gen_sequence ();
3063 end_sequence ();
3064
3065 /* Show the output dies here. This is necessary for SUBREGs
3066 of pseudos since we cannot track their lifetimes correctly;
3067 hard regs shouldn't appear here except as return values.
3068 We never want to emit such a clobber after reload. */
3069 if (x != y
3070 && ! (reload_in_progress || reload_completed)
3071 && need_clobber != 0)
3072 {
3073 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3074 }
3075
3076 emit_insn (seq);
3077
3078 return last_insn;
3079 }
3080 else
3081 abort ();
3082 }
3083 \f
3084 /* Pushing data onto the stack. */
3085
3086 /* Push a block of length SIZE (perhaps variable)
3087 and return an rtx to address the beginning of the block.
3088 Note that it is not possible for the value returned to be a QUEUED.
3089 The value may be virtual_outgoing_args_rtx.
3090
3091 EXTRA is the number of bytes of padding to push in addition to SIZE.
3092 BELOW nonzero means this padding comes at low addresses;
3093 otherwise, the padding comes at high addresses. */
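/* For instance, to reserve 32 bytes of stack and obtain a MEM for the
   block (size illustrative, no extra padding):

	rtx addr = push_block (GEN_INT (32), 0, 0);
	rtx blk = gen_rtx_MEM (BLKmode, addr);

   after which BLK can be filled by the caller.  */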
3094
3095 rtx
3096 push_block (size, extra, below)
3097 rtx size;
3098 int extra, below;
3099 {
3100 rtx temp;
3101
3102 size = convert_modes (Pmode, ptr_mode, size, 1);
3103 if (CONSTANT_P (size))
3104 anti_adjust_stack (plus_constant (size, extra));
3105 else if (GET_CODE (size) == REG && extra == 0)
3106 anti_adjust_stack (size);
3107 else
3108 {
3109 temp = copy_to_mode_reg (Pmode, size);
3110 if (extra != 0)
3111 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3112 temp, 0, OPTAB_LIB_WIDEN);
3113 anti_adjust_stack (temp);
3114 }
3115
3116 #ifndef STACK_GROWS_DOWNWARD
3117 if (0)
3118 #else
3119 if (1)
3120 #endif
3121 {
3122 temp = virtual_outgoing_args_rtx;
3123 if (extra != 0 && below)
3124 temp = plus_constant (temp, extra);
3125 }
3126 else
3127 {
3128 if (GET_CODE (size) == CONST_INT)
3129 temp = plus_constant (virtual_outgoing_args_rtx,
3130 -INTVAL (size) - (below ? 0 : extra));
3131 else if (extra != 0 && !below)
3132 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3133 negate_rtx (Pmode, plus_constant (size, extra)));
3134 else
3135 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3136 negate_rtx (Pmode, size));
3137 }
3138
3139 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3140 }
3141
3142
3143 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3144 block of SIZE bytes. */
3145
3146 static rtx
3147 get_push_address (size)
3148 int size;
3149 {
3150 rtx temp;
3151
3152 if (STACK_PUSH_CODE == POST_DEC)
3153 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3154 else if (STACK_PUSH_CODE == POST_INC)
3155 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3156 else
3157 temp = stack_pointer_rtx;
3158
3159 return copy_to_reg (temp);
3160 }
3161
3162 #ifdef PUSH_ROUNDING
3163
3164 /* Emit a single push insn. */
3165
3166 static void
3167 emit_single_push_insn (mode, x, type)
3168 rtx x;
3169 enum machine_mode mode;
3170 tree type;
3171 {
3172 rtx dest_addr;
3173 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3174 rtx dest;
3175 enum insn_code icode;
3176 insn_operand_predicate_fn pred;
3177
3178 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3179 /* If there is a push pattern, use it. Otherwise try the old way of
3180 throwing a MEM representing the push operation at the move expander. */
3181 icode = push_optab->handlers[(int) mode].insn_code;
3182 if (icode != CODE_FOR_nothing)
3183 {
3184 if (((pred = insn_data[(int) icode].operand[0].predicate)
3185 && !((*pred) (x, mode))))
3186 x = force_reg (mode, x);
3187 emit_insn (GEN_FCN (icode) (x));
3188 return;
3189 }
3190 if (GET_MODE_SIZE (mode) == rounded_size)
3191 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3192 else
3193 {
3194 #ifdef STACK_GROWS_DOWNWARD
3195 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3196 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3197 #else
3198 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3199 GEN_INT (rounded_size));
3200 #endif
3201 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3202 }
3203
3204 dest = gen_rtx_MEM (mode, dest_addr);
3205
3206 if (type != 0)
3207 {
3208 set_mem_attributes (dest, type, 1);
3209 /* Function incoming arguments may overlap with sibling call
3210 outgoing arguments and we cannot allow reordering of reads
3211 from function arguments with stores to outgoing arguments
3212 of sibling calls. */
3213 set_mem_alias_set (dest, 0);
3214 }
3215 emit_move_insn (dest, x);
3216 }
3217 #endif
3218
3219 /* Generate code to push X onto the stack, assuming it has mode MODE and
3220 type TYPE.
3221 MODE is redundant except when X is a CONST_INT (since they don't
3222 carry mode info).
3223 SIZE is an rtx for the size of data to be copied (in bytes),
3224 needed only if X is BLKmode.
3225
3226 ALIGN (in bits) is maximum alignment we can assume.
3227
3228 If PARTIAL and REG are both nonzero, then copy that many of the first
3229 words of X into registers starting with REG, and push the rest of X.
3230 The amount of space pushed is decreased by PARTIAL words,
3231 rounded *down* to a multiple of PARM_BOUNDARY.
3232 REG must be a hard register in this case.
3233 If REG is zero but PARTIAL is not, take all other actions for an
3234 argument partially in registers, but do not actually load any
3235 registers.
3236
3237 EXTRA is the amount in bytes of extra space to leave next to this arg.
3238 This is ignored if an argument block has already been allocated.
3239
3240 On a machine that lacks real push insns, ARGS_ADDR is the address of
3241 the bottom of the argument block for this call. We use indexing off there
3242 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3243 argument block has not been preallocated.
3244
3245 ARGS_SO_FAR is the size of args previously pushed for this call.
3246
3247 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3248 for arguments passed in registers. If nonzero, it will be the number
3249 of bytes required. */
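/* A minimal sketch of pushing one SImode scalar with no partial
   register part and no preallocated argument block (each 0/NULL below
   means "unused" in the sense documented above; the alignment is
   illustrative):

	emit_push_insn (val, SImode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
			0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);

   Real callers pass the values computed by the argument-layout code
   instead.  */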
3250
3251 void
3252 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3253 args_addr, args_so_far, reg_parm_stack_space,
3254 alignment_pad)
3255 rtx x;
3256 enum machine_mode mode;
3257 tree type;
3258 rtx size;
3259 unsigned int align;
3260 int partial;
3261 rtx reg;
3262 int extra;
3263 rtx args_addr;
3264 rtx args_so_far;
3265 int reg_parm_stack_space;
3266 rtx alignment_pad;
3267 {
3268 rtx xinner;
3269 enum direction stack_direction
3270 #ifdef STACK_GROWS_DOWNWARD
3271 = downward;
3272 #else
3273 = upward;
3274 #endif
3275
3276 /* Decide where to pad the argument: `downward' for below,
3277 `upward' for above, or `none' for don't pad it.
3278 Default is below for small data on big-endian machines; else above. */
3279 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3280
3281 /* Invert direction if stack is post-decrement.
3282 FIXME: why? */
3283 if (STACK_PUSH_CODE == POST_DEC)
3284 if (where_pad != none)
3285 where_pad = (where_pad == downward ? upward : downward);
3286
3287 xinner = x = protect_from_queue (x, 0);
3288
3289 if (mode == BLKmode)
3290 {
3291 /* Copy a block into the stack, entirely or partially. */
3292
3293 rtx temp;
3294 int used = partial * UNITS_PER_WORD;
3295 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3296 int skip;
3297
3298 if (size == 0)
3299 abort ();
3300
3301 used -= offset;
3302
3303 /* USED is now the # of bytes we need not copy to the stack
3304 because registers will take care of them. */
3305
3306 if (partial != 0)
3307 xinner = adjust_address (xinner, BLKmode, used);
3308
3309 /* If the partial register-part of the arg counts in its stack size,
3310 skip the part of stack space corresponding to the registers.
3311 Otherwise, start copying to the beginning of the stack space,
3312 by setting SKIP to 0. */
3313 skip = (reg_parm_stack_space == 0) ? 0 : used;
3314
3315 #ifdef PUSH_ROUNDING
3316 /* Do it with several push insns if that doesn't take lots of insns
3317 and if there is no difficulty with push insns that skip bytes
3318 on the stack for alignment purposes. */
3319 if (args_addr == 0
3320 && PUSH_ARGS
3321 && GET_CODE (size) == CONST_INT
3322 && skip == 0
3323 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3324 /* Here we avoid the case of a structure whose weak alignment
3325 forces many pushes of a small amount of data,
3326 and such small pushes do rounding that causes trouble. */
3327 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3328 || align >= BIGGEST_ALIGNMENT
3329 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3330 == (align / BITS_PER_UNIT)))
3331 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3332 {
3333 /* Push padding now if padding above and stack grows down,
3334 or if padding below and stack grows up.
3335 But if space already allocated, this has already been done. */
3336 if (extra && args_addr == 0
3337 && where_pad != none && where_pad != stack_direction)
3338 anti_adjust_stack (GEN_INT (extra));
3339
3340 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3341
3342 if (current_function_check_memory_usage && ! in_check_memory_usage)
3343 {
3344 rtx temp;
3345
3346 in_check_memory_usage = 1;
3347 temp = get_push_address (INTVAL (size) - used);
3348 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3349 emit_library_call (chkr_copy_bitmap_libfunc,
3350 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3351 Pmode, XEXP (xinner, 0), Pmode,
3352 GEN_INT (INTVAL (size) - used),
3353 TYPE_MODE (sizetype));
3354 else
3355 emit_library_call (chkr_set_right_libfunc,
3356 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3357 Pmode, GEN_INT (INTVAL (size) - used),
3358 TYPE_MODE (sizetype),
3359 GEN_INT (MEMORY_USE_RW),
3360 TYPE_MODE (integer_type_node));
3361 in_check_memory_usage = 0;
3362 }
3363 }
3364 else
3365 #endif /* PUSH_ROUNDING */
3366 {
3367 rtx target;
3368
3369 /* Otherwise make space on the stack and copy the data
3370 to the address of that space. */
3371
3372 /* Deduct words put into registers from the size we must copy. */
3373 if (partial != 0)
3374 {
3375 if (GET_CODE (size) == CONST_INT)
3376 size = GEN_INT (INTVAL (size) - used);
3377 else
3378 size = expand_binop (GET_MODE (size), sub_optab, size,
3379 GEN_INT (used), NULL_RTX, 0,
3380 OPTAB_LIB_WIDEN);
3381 }
3382
3383 /* Get the address of the stack space.
3384 In this case, we do not deal with EXTRA separately.
3385 A single stack adjust will do. */
3386 if (! args_addr)
3387 {
3388 temp = push_block (size, extra, where_pad == downward);
3389 extra = 0;
3390 }
3391 else if (GET_CODE (args_so_far) == CONST_INT)
3392 temp = memory_address (BLKmode,
3393 plus_constant (args_addr,
3394 skip + INTVAL (args_so_far)));
3395 else
3396 temp = memory_address (BLKmode,
3397 plus_constant (gen_rtx_PLUS (Pmode,
3398 args_addr,
3399 args_so_far),
3400 skip));
3401 if (current_function_check_memory_usage && ! in_check_memory_usage)
3402 {
3403 in_check_memory_usage = 1;
3404 target = copy_to_reg (temp);
3405 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3406 emit_library_call (chkr_copy_bitmap_libfunc,
3407 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3408 target, Pmode,
3409 XEXP (xinner, 0), Pmode,
3410 size, TYPE_MODE (sizetype));
3411 else
3412 emit_library_call (chkr_set_right_libfunc,
3413 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3414 target, Pmode,
3415 size, TYPE_MODE (sizetype),
3416 GEN_INT (MEMORY_USE_RW),
3417 TYPE_MODE (integer_type_node));
3418 in_check_memory_usage = 0;
3419 }
3420
3421 target = gen_rtx_MEM (BLKmode, temp);
3422
3423 if (type != 0)
3424 {
3425 set_mem_attributes (target, type, 1);
3426 /* Function incoming arguments may overlap with sibling call
3427 outgoing arguments and we cannot allow reordering of reads
3428 from function arguments with stores to outgoing arguments
3429 of sibling calls. */
3430 set_mem_alias_set (target, 0);
3431 }
3432 else
3433 set_mem_align (target, align);
3434
3435 /* TEMP is the address of the block. Copy the data there. */
3436 if (GET_CODE (size) == CONST_INT
3437 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3438 {
3439 move_by_pieces (target, xinner, INTVAL (size), align);
3440 goto ret;
3441 }
3442 else
3443 {
3444 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3445 enum machine_mode mode;
3446
3447 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3448 mode != VOIDmode;
3449 mode = GET_MODE_WIDER_MODE (mode))
3450 {
3451 enum insn_code code = movstr_optab[(int) mode];
3452 insn_operand_predicate_fn pred;
3453
3454 if (code != CODE_FOR_nothing
3455 && ((GET_CODE (size) == CONST_INT
3456 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3457 <= (GET_MODE_MASK (mode) >> 1)))
3458 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3459 && (!(pred = insn_data[(int) code].operand[0].predicate)
3460 || ((*pred) (target, BLKmode)))
3461 && (!(pred = insn_data[(int) code].operand[1].predicate)
3462 || ((*pred) (xinner, BLKmode)))
3463 && (!(pred = insn_data[(int) code].operand[3].predicate)
3464 || ((*pred) (opalign, VOIDmode))))
3465 {
3466 rtx op2 = convert_to_mode (mode, size, 1);
3467 rtx last = get_last_insn ();
3468 rtx pat;
3469
3470 pred = insn_data[(int) code].operand[2].predicate;
3471 if (pred != 0 && ! (*pred) (op2, mode))
3472 op2 = copy_to_mode_reg (mode, op2);
3473
3474 pat = GEN_FCN ((int) code) (target, xinner,
3475 op2, opalign);
3476 if (pat)
3477 {
3478 emit_insn (pat);
3479 goto ret;
3480 }
3481 else
3482 delete_insns_since (last);
3483 }
3484 }
3485 }
3486
3487 if (!ACCUMULATE_OUTGOING_ARGS)
3488 {
3489 /* If the source is referenced relative to the stack pointer,
3490 copy it to another register to stabilize it. We do not need
3491 to do this if we know that we won't be changing sp. */
3492
3493 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3494 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3495 temp = copy_to_reg (temp);
3496 }
3497
3498 /* Make inhibit_defer_pop nonzero around the library call
3499 to force it to pop the bcopy-arguments right away. */
3500 NO_DEFER_POP;
3501 #ifdef TARGET_MEM_FUNCTIONS
3502 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3503 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3504 convert_to_mode (TYPE_MODE (sizetype),
3505 size, TREE_UNSIGNED (sizetype)),
3506 TYPE_MODE (sizetype));
3507 #else
3508 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3509 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3510 convert_to_mode (TYPE_MODE (integer_type_node),
3511 size,
3512 TREE_UNSIGNED (integer_type_node)),
3513 TYPE_MODE (integer_type_node));
3514 #endif
3515 OK_DEFER_POP;
3516 }
3517 }
3518 else if (partial > 0)
3519 {
3520 /* Scalar partly in registers. */
3521
3522 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3523 int i;
3524 int not_stack;
3525 /* # words of start of argument
3526 that we must make space for but need not store. */
3527 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3528 int args_offset = INTVAL (args_so_far);
3529 int skip;
3530
3531 /* Push padding now if padding above and stack grows down,
3532 or if padding below and stack grows up.
3533 But if space already allocated, this has already been done. */
3534 if (extra && args_addr == 0
3535 && where_pad != none && where_pad != stack_direction)
3536 anti_adjust_stack (GEN_INT (extra));
3537
3538 /* If we make space by pushing it, we might as well push
3539 the real data. Otherwise, we can leave OFFSET nonzero
3540 and leave the space uninitialized. */
3541 if (args_addr == 0)
3542 offset = 0;
3543
3544 /* Now NOT_STACK gets the number of words that we don't need to
3545 allocate on the stack. */
3546 not_stack = partial - offset;
3547
3548 /* If the partial register-part of the arg counts in its stack size,
3549 skip the part of stack space corresponding to the registers.
3550 Otherwise, start copying to the beginning of the stack space,
3551 by setting SKIP to 0. */
3552 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3553
3554 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3555 x = validize_mem (force_const_mem (mode, x));
3556
3557 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3558 SUBREGs of such registers are not allowed. */
3559 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3560 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3561 x = copy_to_reg (x);
3562
3563 /* Loop over all the words allocated on the stack for this arg. */
3564 /* We can do it by words, because any scalar bigger than a word
3565 has a size a multiple of a word. */
3566 #ifndef PUSH_ARGS_REVERSED
3567 for (i = not_stack; i < size; i++)
3568 #else
3569 for (i = size - 1; i >= not_stack; i--)
3570 #endif
3571 if (i >= not_stack + offset)
3572 emit_push_insn (operand_subword_force (x, i, mode),
3573 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3574 0, args_addr,
3575 GEN_INT (args_offset + ((i - not_stack + skip)
3576 * UNITS_PER_WORD)),
3577 reg_parm_stack_space, alignment_pad);
3578 }
3579 else
3580 {
3581 rtx addr;
3582 rtx target = NULL_RTX;
3583 rtx dest;
3584
3585 /* Push padding now if padding above and stack grows down,
3586 or if padding below and stack grows up.
3587 But if space already allocated, this has already been done. */
3588 if (extra && args_addr == 0
3589 && where_pad != none && where_pad != stack_direction)
3590 anti_adjust_stack (GEN_INT (extra));
3591
3592 #ifdef PUSH_ROUNDING
3593 if (args_addr == 0 && PUSH_ARGS)
3594 emit_single_push_insn (mode, x, type);
3595 else
3596 #endif
3597 {
3598 if (GET_CODE (args_so_far) == CONST_INT)
3599 addr
3600 = memory_address (mode,
3601 plus_constant (args_addr,
3602 INTVAL (args_so_far)));
3603 else
3604 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3605 args_so_far));
3606 target = addr;
3607 dest = gen_rtx_MEM (mode, addr);
3608 if (type != 0)
3609 {
3610 set_mem_attributes (dest, type, 1);
3611 /* Function incoming arguments may overlap with sibling call
3612 outgoing arguments and we cannot allow reordering of reads
3613 from function arguments with stores to outgoing arguments
3614 of sibling calls. */
3615 set_mem_alias_set (dest, 0);
3616 }
3617
3618 emit_move_insn (dest, x);
3619
3620 }
3621
3622 if (current_function_check_memory_usage && ! in_check_memory_usage)
3623 {
3624 in_check_memory_usage = 1;
3625 if (target == 0)
3626 target = get_push_address (GET_MODE_SIZE (mode));
3627
3628 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3629 emit_library_call (chkr_copy_bitmap_libfunc,
3630 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3631 Pmode, XEXP (x, 0), Pmode,
3632 GEN_INT (GET_MODE_SIZE (mode)),
3633 TYPE_MODE (sizetype));
3634 else
3635 emit_library_call (chkr_set_right_libfunc,
3636 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3637 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3638 TYPE_MODE (sizetype),
3639 GEN_INT (MEMORY_USE_RW),
3640 TYPE_MODE (integer_type_node));
3641 in_check_memory_usage = 0;
3642 }
3643 }
3644
3645 ret:
3646 /* If part should go in registers, copy that part
3647 into the appropriate registers. Do this now, at the end,
3648 since mem-to-mem copies above may do function calls. */
3649 if (partial > 0 && reg != 0)
3650 {
3651 /* Handle calls that pass values in multiple non-contiguous locations.
3652 The Irix 6 ABI has examples of this. */
3653 if (GET_CODE (reg) == PARALLEL)
3654 emit_group_load (reg, x, -1); /* ??? size? */
3655 else
3656 move_block_to_reg (REGNO (reg), x, partial, mode);
3657 }
3658
3659 if (extra && args_addr == 0 && where_pad == stack_direction)
3660 anti_adjust_stack (GEN_INT (extra));
3661
3662 if (alignment_pad && args_addr == 0)
3663 anti_adjust_stack (alignment_pad);
3664 }
3665 \f
3666 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3667 operations. */
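/* (A return value of 0 tells the caller to let expand_expr pick its own
   temporary for intermediate results instead.)  */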
3668
3669 static rtx
3670 get_subtarget (x)
3671 rtx x;
3672 {
3673 return ((x == 0
3674 /* Only registers can be subtargets. */
3675 || GET_CODE (x) != REG
3676 /* If the register is readonly, it can't be set more than once. */
3677 || RTX_UNCHANGING_P (x)
3678 /* Don't use hard regs to avoid extending their life. */
3679 || REGNO (x) < FIRST_PSEUDO_REGISTER
3680 /* Avoid subtargets inside loops,
3681 since they hide some invariant expressions. */
3682 || preserve_subexpressions_p ())
3683 ? 0 : x);
3684 }
3685
3686 /* Expand an assignment that stores the value of FROM into TO.
3687 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3688 (This may contain a QUEUED rtx;
3689 if the value is constant, this rtx is a constant.)
3690 Otherwise, the returned value is NULL_RTX.
3691
3692 SUGGEST_REG is no longer actually used.
3693 It used to mean, copy the value through a register
3694 and return that register, if that is possible.
3695 We now use WANT_VALUE to decide whether to do this. */
3696
3697 rtx
3698 expand_assignment (to, from, want_value, suggest_reg)
3699 tree to, from;
3700 int want_value;
3701 int suggest_reg ATTRIBUTE_UNUSED;
3702 {
3703 rtx to_rtx = 0;
3704 rtx result;
3705
3706 /* Don't crash if the lhs of the assignment was erroneous. */
3707
3708 if (TREE_CODE (to) == ERROR_MARK)
3709 {
3710 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3711 return want_value ? result : NULL_RTX;
3712 }
3713
3714 /* Assignment of a structure component needs special treatment
3715 if the structure component's rtx is not simply a MEM.
3716 Assignment of an array element at a constant index, and assignment of
3717 an array element in an unaligned packed structure field, has the same
3718 problem. */
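/* For example, an assignment such as `s.b = 1' where `b' is a bit-field,
   or `a[2] = x' into a packed array element, takes this path rather than
   the general code further below.  */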
3719
3720 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3721 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3722 {
3723 enum machine_mode mode1;
3724 HOST_WIDE_INT bitsize, bitpos;
3725 tree offset;
3726 int unsignedp;
3727 int volatilep = 0;
3728 tree tem;
3729 unsigned int alignment;
3730
3731 push_temp_slots ();
3732 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3733 &unsignedp, &volatilep, &alignment);
3734
3735 /* If we are going to use store_bit_field and extract_bit_field,
3736 make sure to_rtx will be safe for multiple use. */
3737
3738 if (mode1 == VOIDmode && want_value)
3739 tem = stabilize_reference (tem);
3740
3741 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3742 if (offset != 0)
3743 {
3744 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3745
3746 if (GET_CODE (to_rtx) != MEM)
3747 abort ();
3748
3749 if (GET_MODE (offset_rtx) != ptr_mode)
3750 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3751
3752 #ifdef POINTERS_EXTEND_UNSIGNED
3753 if (GET_MODE (offset_rtx) != Pmode)
3754 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3755 #endif
3756
3757 /* A constant address in TO_RTX can have VOIDmode; we must not try
3758 to call force_reg in that case, so avoid it. */
3759 if (GET_CODE (to_rtx) == MEM
3760 && GET_MODE (to_rtx) == BLKmode
3761 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3762 && bitsize
3763 && (bitpos % bitsize) == 0
3764 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3765 && alignment == GET_MODE_ALIGNMENT (mode1))
3766 {
3767 rtx temp
3768 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3769
3770 if (GET_CODE (XEXP (temp, 0)) == REG)
3771 to_rtx = temp;
3772 else
3773 to_rtx = (replace_equiv_address
3774 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3775 XEXP (temp, 0))));
3776 bitpos = 0;
3777 }
3778
3779 to_rtx = offset_address (to_rtx, offset_rtx,
3780 highest_pow2_factor (offset));
3781 }
3782
3783 if (volatilep)
3784 {
3785 if (GET_CODE (to_rtx) == MEM)
3786 {
3787 /* When the offset is zero, to_rtx is the address of the
3788 structure we are storing into, and hence may be shared.
3789 We must make a new MEM before setting the volatile bit. */
3790 if (offset == 0)
3791 to_rtx = copy_rtx (to_rtx);
3792
3793 MEM_VOLATILE_P (to_rtx) = 1;
3794 }
3795 #if 0 /* This was turned off because, when a field is volatile
3796 in an object which is not volatile, the object may be in a register,
3797 and then we would abort over here. */
3798 else
3799 abort ();
3800 #endif
3801 }
3802
3803 if (TREE_CODE (to) == COMPONENT_REF
3804 && TREE_READONLY (TREE_OPERAND (to, 1)))
3805 {
3806 if (offset == 0)
3807 to_rtx = copy_rtx (to_rtx);
3808
3809 RTX_UNCHANGING_P (to_rtx) = 1;
3810 }
3811
3812 /* Check the access. */
3813 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3814 {
3815 rtx to_addr;
3816 int size;
3817 int best_mode_size;
3818 enum machine_mode best_mode;
3819
3820 best_mode = get_best_mode (bitsize, bitpos,
3821 TYPE_ALIGN (TREE_TYPE (tem)),
3822 mode1, volatilep);
3823 if (best_mode == VOIDmode)
3824 best_mode = QImode;
3825
3826 best_mode_size = GET_MODE_BITSIZE (best_mode);
3827 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3828 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3829 size *= GET_MODE_SIZE (best_mode);
3830
3831 /* Check the access right of the pointer. */
3832 in_check_memory_usage = 1;
3833 if (size)
3834 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3835 VOIDmode, 3, to_addr, Pmode,
3836 GEN_INT (size), TYPE_MODE (sizetype),
3837 GEN_INT (MEMORY_USE_WO),
3838 TYPE_MODE (integer_type_node));
3839 in_check_memory_usage = 0;
3840 }
3841
3842 /* If this is a varying-length object, we must get the address of
3843 the source and do an explicit block move. */
3844 if (bitsize < 0)
3845 {
3846 unsigned int from_align;
3847 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3848 rtx inner_to_rtx
3849 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3850
3851 emit_block_move (inner_to_rtx, from_rtx, expr_size (from));
3852
3853 free_temp_slots ();
3854 pop_temp_slots ();
3855 return to_rtx;
3856 }
3857 else
3858 {
3859 if (! can_address_p (to))
3860 {
3861 to_rtx = copy_rtx (to_rtx);
3862 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3863 }
3864
3865 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3866 (want_value
3867 /* Spurious cast for HPUX compiler. */
3868 ? ((enum machine_mode)
3869 TYPE_MODE (TREE_TYPE (to)))
3870 : VOIDmode),
3871 unsignedp, int_size_in_bytes (TREE_TYPE (tem)),
3872 get_alias_set (to));
3873
3874 preserve_temp_slots (result);
3875 free_temp_slots ();
3876 pop_temp_slots ();
3877
3878 /* If the value is meaningful, convert RESULT to the proper mode.
3879 Otherwise, return nothing. */
3880 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3881 TYPE_MODE (TREE_TYPE (from)),
3882 result,
3883 TREE_UNSIGNED (TREE_TYPE (to)))
3884 : NULL_RTX);
3885 }
3886 }
3887
3888 /* If the rhs is a function call and its value is not an aggregate,
3889 call the function before we start to compute the lhs.
3890 This is needed for correct code for cases such as
3891 val = setjmp (buf) on machines where reference to val
3892 requires loading up part of an address in a separate insn.
3893
3894 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3895 since it might be a promoted variable where the zero- or sign-extension
3896 needs to be done. Handling this in the normal way is safe because no
3897 computation is done before the call. */
3898 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3899 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3900 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3901 && GET_CODE (DECL_RTL (to)) == REG))
3902 {
3903 rtx value;
3904
3905 push_temp_slots ();
3906 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3907 if (to_rtx == 0)
3908 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3909
3910 /* Handle calls that return values in multiple non-contiguous locations.
3911 The Irix 6 ABI has examples of this. */
3912 if (GET_CODE (to_rtx) == PARALLEL)
3913 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3914 else if (GET_MODE (to_rtx) == BLKmode)
3915 emit_block_move (to_rtx, value, expr_size (from));
3916 else
3917 {
3918 #ifdef POINTERS_EXTEND_UNSIGNED
3919 if (POINTER_TYPE_P (TREE_TYPE (to))
3920 && GET_MODE (to_rtx) != GET_MODE (value))
3921 value = convert_memory_address (GET_MODE (to_rtx), value);
3922 #endif
3923 emit_move_insn (to_rtx, value);
3924 }
3925 preserve_temp_slots (to_rtx);
3926 free_temp_slots ();
3927 pop_temp_slots ();
3928 return want_value ? to_rtx : NULL_RTX;
3929 }
3930
3931 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3932 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3933
3934 if (to_rtx == 0)
3935 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3936
3937 /* Don't move directly into a return register. */
3938 if (TREE_CODE (to) == RESULT_DECL
3939 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3940 {
3941 rtx temp;
3942
3943 push_temp_slots ();
3944 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3945
3946 if (GET_CODE (to_rtx) == PARALLEL)
3947 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3948 else
3949 emit_move_insn (to_rtx, temp);
3950
3951 preserve_temp_slots (to_rtx);
3952 free_temp_slots ();
3953 pop_temp_slots ();
3954 return want_value ? to_rtx : NULL_RTX;
3955 }
3956
3957 /* In case we are returning the contents of an object which overlaps
3958 the place the value is being stored, use a safe function when copying
3959 a value through a pointer into a structure value return block. */
3960 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3961 && current_function_returns_struct
3962 && !current_function_returns_pcc_struct)
3963 {
3964 rtx from_rtx, size;
3965
3966 push_temp_slots ();
3967 size = expr_size (from);
3968 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3969 EXPAND_MEMORY_USE_DONT);
3970
3971 /* Copy the rights of the bitmap. */
3972 if (current_function_check_memory_usage)
3973 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3974 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3975 XEXP (from_rtx, 0), Pmode,
3976 convert_to_mode (TYPE_MODE (sizetype),
3977 size, TREE_UNSIGNED (sizetype)),
3978 TYPE_MODE (sizetype));
3979
3980 #ifdef TARGET_MEM_FUNCTIONS
3981 emit_library_call (memmove_libfunc, LCT_NORMAL,
3982 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3983 XEXP (from_rtx, 0), Pmode,
3984 convert_to_mode (TYPE_MODE (sizetype),
3985 size, TREE_UNSIGNED (sizetype)),
3986 TYPE_MODE (sizetype));
3987 #else
3988 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3989 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3990 XEXP (to_rtx, 0), Pmode,
3991 convert_to_mode (TYPE_MODE (integer_type_node),
3992 size, TREE_UNSIGNED (integer_type_node)),
3993 TYPE_MODE (integer_type_node));
3994 #endif
3995
3996 preserve_temp_slots (to_rtx);
3997 free_temp_slots ();
3998 pop_temp_slots ();
3999 return want_value ? to_rtx : NULL_RTX;
4000 }
4001
4002 /* Compute FROM and store the value in the rtx we got. */
4003
4004 push_temp_slots ();
4005 result = store_expr (from, to_rtx, want_value);
4006 preserve_temp_slots (result);
4007 free_temp_slots ();
4008 pop_temp_slots ();
4009 return want_value ? result : NULL_RTX;
4010 }
4011
4012 /* Generate code for computing expression EXP,
4013 and storing the value into TARGET.
4014 TARGET may contain a QUEUED rtx.
4015
4016 If WANT_VALUE is nonzero, return a copy of the value
4017 not in TARGET, so that we can be sure to use the proper
4018 value in a containing expression even if TARGET has something
4019 else stored in it. If possible, we copy the value through a pseudo
4020 and return that pseudo. Or, if the value is constant, we try to
4021 return the constant. In some cases, we return a pseudo
4022 copied *from* TARGET.
4023
4024 If the mode is BLKmode then we may return TARGET itself.
4025 It turns out that in BLKmode it doesn't cause a problem,
4026 because C has no operators that could combine two different
4027 assignments into the same BLKmode object with different values
4028 with no sequence point. Will other languages need this to
4029 be more thorough?
4030
4031 If WANT_VALUE is 0, we return NULL, to make sure
4032 to catch quickly any cases where the caller uses the value
4033 and fails to set WANT_VALUE. */
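/* For example, in `a = b = c' the inner assignment is expanded with
   WANT_VALUE nonzero so that its result can feed the outer store, whereas
   a plain statement-level assignment passes WANT_VALUE of zero.  */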
4034
4035 rtx
4036 store_expr (exp, target, want_value)
4037 tree exp;
4038 rtx target;
4039 int want_value;
4040 {
4041 rtx temp;
4042 int dont_return_target = 0;
4043 int dont_store_target = 0;
4044
4045 if (TREE_CODE (exp) == COMPOUND_EXPR)
4046 {
4047 /* Perform first part of compound expression, then assign from second
4048 part. */
4049 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4050 emit_queue ();
4051 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4052 }
4053 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4054 {
4055 /* For conditional expression, get safe form of the target. Then
4056 test the condition, doing the appropriate assignment on either
4057 side. This avoids the creation of unnecessary temporaries.
4058 For non-BLKmode, it is more efficient not to do this. */
4059
4060 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4061
4062 emit_queue ();
4063 target = protect_from_queue (target, 1);
4064
4065 do_pending_stack_adjust ();
4066 NO_DEFER_POP;
4067 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4068 start_cleanup_deferral ();
4069 store_expr (TREE_OPERAND (exp, 1), target, 0);
4070 end_cleanup_deferral ();
4071 emit_queue ();
4072 emit_jump_insn (gen_jump (lab2));
4073 emit_barrier ();
4074 emit_label (lab1);
4075 start_cleanup_deferral ();
4076 store_expr (TREE_OPERAND (exp, 2), target, 0);
4077 end_cleanup_deferral ();
4078 emit_queue ();
4079 emit_label (lab2);
4080 OK_DEFER_POP;
4081
4082 return want_value ? target : NULL_RTX;
4083 }
4084 else if (queued_subexp_p (target))
4085 /* If target contains a postincrement, let's not risk
4086 using it as the place to generate the rhs. */
4087 {
4088 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4089 {
4090 /* Expand EXP into a new pseudo. */
4091 temp = gen_reg_rtx (GET_MODE (target));
4092 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4093 }
4094 else
4095 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4096
4097 /* If target is volatile, ANSI requires accessing the value
4098 *from* the target, if it is accessed. So make that happen.
4099 In no case return the target itself. */
4100 if (! MEM_VOLATILE_P (target) && want_value)
4101 dont_return_target = 1;
4102 }
4103 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4104 && GET_MODE (target) != BLKmode)
4105 /* If target is in memory and caller wants value in a register instead,
4106 arrange that. Pass TARGET as target for expand_expr so that,
4107 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4108 We know expand_expr will not use the target in that case.
4109 Don't do this if TARGET is volatile because we are supposed
4110 to write it and then read it. */
4111 {
4112 temp = expand_expr (exp, target, GET_MODE (target), 0);
4113 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4114 {
4115 /* If TEMP is already in the desired TARGET, only copy it from
4116 memory and don't store it there again. */
4117 if (temp == target
4118 || (rtx_equal_p (temp, target)
4119 && ! side_effects_p (temp) && ! side_effects_p (target)))
4120 dont_store_target = 1;
4121 temp = copy_to_reg (temp);
4122 }
4123 dont_return_target = 1;
4124 }
4125 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4126 /* If this is a scalar in a register that is stored in a wider mode
4127 than the declared mode, compute the result into its declared mode
4128 and then convert to the wider mode. Our value is the computed
4129 expression. */
4130 {
4131 /* If we don't want a value, we can do the conversion inside EXP,
4132 which will often result in some optimizations. Do the conversion
4133 in two steps: first change the signedness, if needed, then
4134 the extend. But don't do this if the type of EXP is a subtype
4135 of something else since then the conversion might involve
4136 more than just converting modes. */
4137 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4138 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4139 {
4140 if (TREE_UNSIGNED (TREE_TYPE (exp))
4141 != SUBREG_PROMOTED_UNSIGNED_P (target))
4142 exp
4143 = convert
4144 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4145 TREE_TYPE (exp)),
4146 exp);
4147
4148 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4149 SUBREG_PROMOTED_UNSIGNED_P (target)),
4150 exp);
4151 }
4152
4153 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4154
4155 /* If TEMP is a volatile MEM and we want a result value, make
4156 the access now so it gets done only once. Likewise if
4157 it contains TARGET. */
4158 if (GET_CODE (temp) == MEM && want_value
4159 && (MEM_VOLATILE_P (temp)
4160 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4161 temp = copy_to_reg (temp);
4162
4163 /* If TEMP is a VOIDmode constant, use convert_modes to make
4164 sure that we properly convert it. */
4165 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4166 {
4167 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4168 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4169 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4170 GET_MODE (target), temp,
4171 SUBREG_PROMOTED_UNSIGNED_P (target));
4172 }
4173
4174 convert_move (SUBREG_REG (target), temp,
4175 SUBREG_PROMOTED_UNSIGNED_P (target));
4176
4177 /* If we promoted a constant, change the mode back down to match
4178 target. Otherwise, the caller might get confused by a result whose
4179 mode is larger than expected. */
4180
4181 if (want_value && GET_MODE (temp) != GET_MODE (target)
4182 && GET_MODE (temp) != VOIDmode)
4183 {
4184 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4185 SUBREG_PROMOTED_VAR_P (temp) = 1;
4186 SUBREG_PROMOTED_UNSIGNED_P (temp)
4187 = SUBREG_PROMOTED_UNSIGNED_P (target);
4188 }
4189
4190 return want_value ? temp : NULL_RTX;
4191 }
4192 else
4193 {
4194 temp = expand_expr (exp, target, GET_MODE (target), 0);
4195 /* Return TARGET if it's a specified hardware register.
4196 If TARGET is a volatile mem ref, either return TARGET
4197 or return a reg copied *from* TARGET; ANSI requires this.
4198
4199 Otherwise, if TEMP is not TARGET, return TEMP
4200 if it is constant (for efficiency),
4201 or if we really want the correct value. */
4202 if (!(target && GET_CODE (target) == REG
4203 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4204 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4205 && ! rtx_equal_p (temp, target)
4206 && (CONSTANT_P (temp) || want_value))
4207 dont_return_target = 1;
4208 }
4209
4210 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4211 the same as that of TARGET, adjust the constant. This is needed, for
4212 example, in case it is a CONST_DOUBLE and we want only a word-sized
4213 value. */
4214 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4215 && TREE_CODE (exp) != ERROR_MARK
4216 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4217 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4218 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4219
4220 if (current_function_check_memory_usage
4221 && GET_CODE (target) == MEM
4222 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4223 {
4224 in_check_memory_usage = 1;
4225 if (GET_CODE (temp) == MEM)
4226 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4227 VOIDmode, 3, XEXP (target, 0), Pmode,
4228 XEXP (temp, 0), Pmode,
4229 expr_size (exp), TYPE_MODE (sizetype));
4230 else
4231 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4232 VOIDmode, 3, XEXP (target, 0), Pmode,
4233 expr_size (exp), TYPE_MODE (sizetype),
4234 GEN_INT (MEMORY_USE_WO),
4235 TYPE_MODE (integer_type_node));
4236 in_check_memory_usage = 0;
4237 }
4238
4239 /* If value was not generated in the target, store it there.
4240 Convert the value to TARGET's type first if necessary. */
4241 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4242 one or both of them are volatile memory refs, we have to distinguish
4243 two cases:
4244 - expand_expr has used TARGET. In this case, we must not generate
4245 another copy. This can be detected by TARGET being equal according
4246 to == .
4247 - expand_expr has not used TARGET - that means that the source just
4248 happens to have the same RTX form. Since temp will have been created
4249 by expand_expr, it will compare unequal according to == .
4250 We must generate a copy in this case, to reach the correct number
4251 of volatile memory references. */
4252
4253 if ((! rtx_equal_p (temp, target)
4254 || (temp != target && (side_effects_p (temp)
4255 || side_effects_p (target))))
4256 && TREE_CODE (exp) != ERROR_MARK
4257 && ! dont_store_target)
4258 {
4259 target = protect_from_queue (target, 1);
4260 if (GET_MODE (temp) != GET_MODE (target)
4261 && GET_MODE (temp) != VOIDmode)
4262 {
4263 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4264 if (dont_return_target)
4265 {
4266 /* In this case, we will return TEMP,
4267 so make sure it has the proper mode.
4268 But don't forget to store the value into TARGET. */
4269 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4270 emit_move_insn (target, temp);
4271 }
4272 else
4273 convert_move (target, temp, unsignedp);
4274 }
4275
4276 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4277 {
4278 /* Handle copying a string constant into an array.
4279 The string constant may be shorter than the array.
4280 So copy just the string's actual length, and clear the rest. */
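/* For example, `char buf[10] = "hi"' copies the three bytes of the
   string constant (including its terminating null) and clears the
   remaining seven.  */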
4281 rtx size;
4282 rtx addr;
4283
4284 /* Get the size of the data type of the string,
4285 which is actually the size of the target. */
4286 size = expr_size (exp);
4287 if (GET_CODE (size) == CONST_INT
4288 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4289 emit_block_move (target, temp, size);
4290 else
4291 {
4292 /* Compute the size of the data to copy from the string. */
4293 tree copy_size
4294 = size_binop (MIN_EXPR,
4295 make_tree (sizetype, size),
4296 size_int (TREE_STRING_LENGTH (exp)));
4297 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4298 VOIDmode, 0);
4299 rtx label = 0;
4300
4301 /* Copy that much. */
4302 emit_block_move (target, temp, copy_size_rtx);
4303
4304 /* Figure out how much is left in TARGET that we have to clear.
4305 Do all calculations in ptr_mode. */
4306
4307 addr = XEXP (target, 0);
4308 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4309
4310 if (GET_CODE (copy_size_rtx) == CONST_INT)
4311 {
4312 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4313 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4314 }
4315 else
4316 {
4317 addr = force_reg (ptr_mode, addr);
4318 addr = expand_binop (ptr_mode, add_optab, addr,
4319 copy_size_rtx, NULL_RTX, 0,
4320 OPTAB_LIB_WIDEN);
4321
4322 size = expand_binop (ptr_mode, sub_optab, size,
4323 copy_size_rtx, NULL_RTX, 0,
4324 OPTAB_LIB_WIDEN);
4325
4326 label = gen_label_rtx ();
4327 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4328 GET_MODE (size), 0, 0, label);
4329 }
4330
4331 if (size != const0_rtx)
4332 {
4333 rtx dest = gen_rtx_MEM (BLKmode, addr);
4334
4335 MEM_COPY_ATTRIBUTES (dest, target);
4336
4337 /* Be sure we can write on ADDR. */
4338 in_check_memory_usage = 1;
4339 if (current_function_check_memory_usage)
4340 emit_library_call (chkr_check_addr_libfunc,
4341 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4342 addr, Pmode,
4343 size, TYPE_MODE (sizetype),
4344 GEN_INT (MEMORY_USE_WO),
4345 TYPE_MODE (integer_type_node));
4346 in_check_memory_usage = 0;
4347 clear_storage (dest, size);
4348 }
4349
4350 if (label)
4351 emit_label (label);
4352 }
4353 }
4354 /* Handle calls that return values in multiple non-contiguous locations.
4355 The Irix 6 ABI has examples of this. */
4356 else if (GET_CODE (target) == PARALLEL)
4357 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4358 else if (GET_MODE (temp) == BLKmode)
4359 emit_block_move (target, temp, expr_size (exp));
4360 else
4361 emit_move_insn (target, temp);
4362 }
4363
4364 /* If we don't want a value, return NULL_RTX. */
4365 if (! want_value)
4366 return NULL_RTX;
4367
4368 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4369 ??? The latter test doesn't seem to make sense. */
4370 else if (dont_return_target && GET_CODE (temp) != MEM)
4371 return temp;
4372
4373 /* Return TARGET itself if it is a hard register. */
4374 else if (want_value && GET_MODE (target) != BLKmode
4375 && ! (GET_CODE (target) == REG
4376 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4377 return copy_to_reg (target);
4378
4379 else
4380 return target;
4381 }
4382 \f
4383 /* Return 1 if EXP just contains zeros. */
4384
4385 static int
4386 is_zeros_p (exp)
4387 tree exp;
4388 {
4389 tree elt;
4390
4391 switch (TREE_CODE (exp))
4392 {
4393 case CONVERT_EXPR:
4394 case NOP_EXPR:
4395 case NON_LVALUE_EXPR:
4396 return is_zeros_p (TREE_OPERAND (exp, 0));
4397
4398 case INTEGER_CST:
4399 return integer_zerop (exp);
4400
4401 case COMPLEX_CST:
4402 return
4403 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4404
4405 case REAL_CST:
4406 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4407
4408 case CONSTRUCTOR:
4409 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4410 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4411 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4412 if (! is_zeros_p (TREE_VALUE (elt)))
4413 return 0;
4414
4415 return 1;
4416
4417 default:
4418 return 0;
4419 }
4420 }
4421
4422 /* Return 1 if EXP contains mostly (3/4) zeros. */
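/* For example, the constructor { 0, 0, 0, 5 } has three zeros out of
   four elements, so it qualifies.  */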
4423
4424 static int
4425 mostly_zeros_p (exp)
4426 tree exp;
4427 {
4428 if (TREE_CODE (exp) == CONSTRUCTOR)
4429 {
4430 int elts = 0, zeros = 0;
4431 tree elt = CONSTRUCTOR_ELTS (exp);
4432 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4433 {
4434 /* If there are no ranges of true bits, it is all zero. */
4435 return elt == NULL_TREE;
4436 }
4437 for (; elt; elt = TREE_CHAIN (elt))
4438 {
4439 /* We do not handle the case where the index is a RANGE_EXPR,
4440 so the statistic will be somewhat inaccurate.
4441 We do make a more accurate count in store_constructor itself,
4442 so since this function is only used for nested array elements,
4443 this should be close enough. */
4444 if (mostly_zeros_p (TREE_VALUE (elt)))
4445 zeros++;
4446 elts++;
4447 }
4448
4449 return 4 * zeros >= 3 * elts;
4450 }
4451
4452 return is_zeros_p (exp);
4453 }
4454 \f
4455 /* Helper function for store_constructor.
4456 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4457 TYPE is the type of the CONSTRUCTOR, not the element type.
4458 CLEARED is as for store_constructor.
4459 ALIAS_SET is the alias set to use for any stores.
4460
4461 This provides a recursive shortcut back to store_constructor when it isn't
4462 necessary to go through store_field. This is so that we can pass through
4463 the cleared field to let store_constructor know that we may not have to
4464 clear a substructure if the outer structure has already been cleared. */
4465
4466 static void
4467 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4468 alias_set)
4469 rtx target;
4470 unsigned HOST_WIDE_INT bitsize;
4471 HOST_WIDE_INT bitpos;
4472 enum machine_mode mode;
4473 tree exp, type;
4474 int cleared;
4475 int alias_set;
4476 {
4477 if (TREE_CODE (exp) == CONSTRUCTOR
4478 && bitpos % BITS_PER_UNIT == 0
4479 /* If we have a non-zero bitpos for a register target, then we just
4480 let store_field do the bitfield handling. This is unlikely to
4481 generate unnecessary clear instructions anyway. */
4482 && (bitpos == 0 || GET_CODE (target) == MEM))
4483 {
4484 if (GET_CODE (target) == MEM)
4485 target
4486 = adjust_address (target,
4487 GET_MODE (target) == BLKmode
4488 || 0 != (bitpos
4489 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4490 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4491
4492
4493 /* Update the alias set, if required. */
4494 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4495 && MEM_ALIAS_SET (target) != 0)
4496 {
4497 target = copy_rtx (target);
4498 set_mem_alias_set (target, alias_set);
4499 }
4500
4501 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4502 }
4503 else
4504 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4505 int_size_in_bytes (type), alias_set);
4506 }
4507
4508 /* Store the value of constructor EXP into the rtx TARGET.
4509 TARGET is either a REG or a MEM; we know it cannot conflict, since
4510 safe_from_p has been called.
4511 CLEARED is true if TARGET is known to have been zero'd.
4512 SIZE is the number of bytes of TARGET we are allowed to modify: this
4513 may not be the same as the size of EXP if we are assigning to a field
4514 which has been packed to exclude padding bits. */
4515
4516 static void
4517 store_constructor (exp, target, cleared, size)
4518 tree exp;
4519 rtx target;
4520 int cleared;
4521 HOST_WIDE_INT size;
4522 {
4523 tree type = TREE_TYPE (exp);
4524 #ifdef WORD_REGISTER_OPERATIONS
4525 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4526 #endif
4527
4528 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4529 || TREE_CODE (type) == QUAL_UNION_TYPE)
4530 {
4531 tree elt;
4532
4533 /* We either clear the aggregate or indicate the value is dead. */
4534 if ((TREE_CODE (type) == UNION_TYPE
4535 || TREE_CODE (type) == QUAL_UNION_TYPE)
4536 && ! cleared
4537 && ! CONSTRUCTOR_ELTS (exp))
4538 /* If the constructor is empty, clear the union. */
4539 {
4540 clear_storage (target, expr_size (exp));
4541 cleared = 1;
4542 }
4543
4544 /* If we are building a static constructor into a register,
4545 set the initial value as zero so we can fold the value into
4546 a constant. But if more than one register is involved,
4547 this probably loses. */
4548 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4549 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4550 {
4551 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4552 cleared = 1;
4553 }
4554
4555 /* If the constructor has fewer fields than the structure
4556 or if we are initializing the structure to mostly zeros,
4557 clear the whole structure first. Don't do this if TARGET is a
4558 register whose mode size isn't equal to SIZE since clear_storage
4559 can't handle this case. */
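/* Clearing everything up front lets the element loop below skip any
   fields whose initializer is all zeros (see the is_zeros_p test).  */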
4560 else if (! cleared && size > 0
4561 && ((list_length (CONSTRUCTOR_ELTS (exp))
4562 != fields_length (type))
4563 || mostly_zeros_p (exp))
4564 && (GET_CODE (target) != REG
4565 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4566 == size)))
4567 {
4568 clear_storage (target, GEN_INT (size));
4569 cleared = 1;
4570 }
4571
4572 if (! cleared)
4573 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4574
4575 /* Store each element of the constructor into
4576 the corresponding field of TARGET. */
4577
4578 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4579 {
4580 tree field = TREE_PURPOSE (elt);
4581 #ifdef WORD_REGISTER_OPERATIONS
4582 tree value = TREE_VALUE (elt);
4583 #endif
4584 enum machine_mode mode;
4585 HOST_WIDE_INT bitsize;
4586 HOST_WIDE_INT bitpos = 0;
4587 int unsignedp;
4588 tree offset;
4589 rtx to_rtx = target;
4590
4591 /* Just ignore missing fields.
4592 We cleared the whole structure, above,
4593 if any fields are missing. */
4594 if (field == 0)
4595 continue;
4596
4597 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4598 continue;
4599
4600 if (host_integerp (DECL_SIZE (field), 1))
4601 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4602 else
4603 bitsize = -1;
4604
4605 unsignedp = TREE_UNSIGNED (field);
4606 mode = DECL_MODE (field);
4607 if (DECL_BIT_FIELD (field))
4608 mode = VOIDmode;
4609
4610 offset = DECL_FIELD_OFFSET (field);
4611 if (host_integerp (offset, 0)
4612 && host_integerp (bit_position (field), 0))
4613 {
4614 bitpos = int_bit_position (field);
4615 offset = 0;
4616 }
4617 else
4618 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4619
4620 if (offset)
4621 {
4622 rtx offset_rtx;
4623
4624 if (contains_placeholder_p (offset))
4625 offset = build (WITH_RECORD_EXPR, sizetype,
4626 offset, make_tree (TREE_TYPE (exp), target));
4627
4628 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4629 if (GET_CODE (to_rtx) != MEM)
4630 abort ();
4631
4632 if (GET_MODE (offset_rtx) != ptr_mode)
4633 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4634
4635 #ifdef POINTERS_EXTEND_UNSIGNED
4636 if (GET_MODE (offset_rtx) != Pmode)
4637 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4638 #endif
4639
4640 to_rtx = offset_address (to_rtx, offset_rtx,
4641 highest_pow2_factor (offset));
4642 }
4643
4644 if (TREE_READONLY (field))
4645 {
4646 if (GET_CODE (to_rtx) == MEM)
4647 to_rtx = copy_rtx (to_rtx);
4648
4649 RTX_UNCHANGING_P (to_rtx) = 1;
4650 }
4651
4652 #ifdef WORD_REGISTER_OPERATIONS
4653 /* If this initializes a field that is smaller than a word, at the
4654 start of a word, try to widen it to a full word.
4655 This special case allows us to output C++ member function
4656 initializations in a form that the optimizers can understand. */
4657 if (GET_CODE (target) == REG
4658 && bitsize < BITS_PER_WORD
4659 && bitpos % BITS_PER_WORD == 0
4660 && GET_MODE_CLASS (mode) == MODE_INT
4661 && TREE_CODE (value) == INTEGER_CST
4662 && exp_size >= 0
4663 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4664 {
4665 tree type = TREE_TYPE (value);
4666
4667 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4668 {
4669 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4670 value = convert (type, value);
4671 }
4672
4673 if (BYTES_BIG_ENDIAN)
4674 value
4675 = fold (build (LSHIFT_EXPR, type, value,
4676 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4677 bitsize = BITS_PER_WORD;
4678 mode = word_mode;
4679 }
4680 #endif
4681
4682 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4683 && DECL_NONADDRESSABLE_P (field))
4684 {
4685 to_rtx = copy_rtx (to_rtx);
4686 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4687 }
4688
4689 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4690 TREE_VALUE (elt), type, cleared,
4691 get_alias_set (TREE_TYPE (field)));
4692 }
4693 }
4694 else if (TREE_CODE (type) == ARRAY_TYPE)
4695 {
4696 tree elt;
4697 int i;
4698 int need_to_clear;
4699 tree domain = TYPE_DOMAIN (type);
4700 tree elttype = TREE_TYPE (type);
4701 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4702 && TYPE_MAX_VALUE (domain)
4703 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4704 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4705 HOST_WIDE_INT minelt = 0;
4706 HOST_WIDE_INT maxelt = 0;
4707
4708 /* If we have constant bounds for the range of the type, get them. */
4709 if (const_bounds_p)
4710 {
4711 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4712 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4713 }
4714
4715 /* If the constructor has fewer elements than the array,
4716 clear the whole array first. Similarly if this is
4717 a static constructor of a non-BLKmode object. */
4718 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4719 need_to_clear = 1;
4720 else
4721 {
4722 HOST_WIDE_INT count = 0, zero_count = 0;
4723 need_to_clear = ! const_bounds_p;
4724
4725 /* This loop is a more accurate version of the loop in
4726 mostly_zeros_p (it handles RANGE_EXPR in an index).
4727 It is also needed to check for missing elements. */
4728 for (elt = CONSTRUCTOR_ELTS (exp);
4729 elt != NULL_TREE && ! need_to_clear;
4730 elt = TREE_CHAIN (elt))
4731 {
4732 tree index = TREE_PURPOSE (elt);
4733 HOST_WIDE_INT this_node_count;
4734
4735 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4736 {
4737 tree lo_index = TREE_OPERAND (index, 0);
4738 tree hi_index = TREE_OPERAND (index, 1);
4739
4740 if (! host_integerp (lo_index, 1)
4741 || ! host_integerp (hi_index, 1))
4742 {
4743 need_to_clear = 1;
4744 break;
4745 }
4746
4747 this_node_count = (tree_low_cst (hi_index, 1)
4748 - tree_low_cst (lo_index, 1) + 1);
4749 }
4750 else
4751 this_node_count = 1;
4752
4753 count += this_node_count;
4754 if (mostly_zeros_p (TREE_VALUE (elt)))
4755 zero_count += this_node_count;
4756 }
4757
4758 /* Clear the entire array first if there are any missing elements,
4759 or if the incidence of zero elements is >= 75%. */
4760 if (! need_to_clear
4761 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4762 need_to_clear = 1;
4763 }
4764
4765 if (need_to_clear && size > 0)
4766 {
4767 if (! cleared)
4768 clear_storage (target, GEN_INT (size));
4769 cleared = 1;
4770 }
4771 else if (REG_P (target))
4772 /* Inform later passes that the old value is dead. */
4773 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4774
4775 /* Store each element of the constructor into
4776 the corresponding element of TARGET, determined
4777 by counting the elements. */
4778 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4779 elt;
4780 elt = TREE_CHAIN (elt), i++)
4781 {
4782 enum machine_mode mode;
4783 HOST_WIDE_INT bitsize;
4784 HOST_WIDE_INT bitpos;
4785 int unsignedp;
4786 tree value = TREE_VALUE (elt);
4787 tree index = TREE_PURPOSE (elt);
4788 rtx xtarget = target;
4789
4790 if (cleared && is_zeros_p (value))
4791 continue;
4792
4793 unsignedp = TREE_UNSIGNED (elttype);
4794 mode = TYPE_MODE (elttype);
4795 if (mode == BLKmode)
4796 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4797 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4798 : -1);
4799 else
4800 bitsize = GET_MODE_BITSIZE (mode);
4801
4802 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4803 {
4804 tree lo_index = TREE_OPERAND (index, 0);
4805 tree hi_index = TREE_OPERAND (index, 1);
4806 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4807 struct nesting *loop;
4808 HOST_WIDE_INT lo, hi, count;
4809 tree position;
4810
4811 /* If the range is constant and "small", unroll the loop. */
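/* "Small" means the unrolled data totals at most 40 bytes; TYPE_SIZE
   below is in bits, hence the 40 * 8 bound.  A non-MEM target, or a
   count of at most two, is always unrolled.  */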
4812 if (const_bounds_p
4813 && host_integerp (lo_index, 0)
4814 && host_integerp (hi_index, 0)
4815 && (lo = tree_low_cst (lo_index, 0),
4816 hi = tree_low_cst (hi_index, 0),
4817 count = hi - lo + 1,
4818 (GET_CODE (target) != MEM
4819 || count <= 2
4820 || (host_integerp (TYPE_SIZE (elttype), 1)
4821 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4822 <= 40 * 8)))))
4823 {
4824 lo -= minelt; hi -= minelt;
4825 for (; lo <= hi; lo++)
4826 {
4827 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4828
4829 if (GET_CODE (target) == MEM
4830 && !MEM_KEEP_ALIAS_SET_P (target)
4831 && TYPE_NONALIASED_COMPONENT (type))
4832 {
4833 target = copy_rtx (target);
4834 MEM_KEEP_ALIAS_SET_P (target) = 1;
4835 }
4836
4837 store_constructor_field
4838 (target, bitsize, bitpos, mode, value, type, cleared,
4839 get_alias_set (elttype));
4840 }
4841 }
4842 else
4843 {
4844 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4845 loop_top = gen_label_rtx ();
4846 loop_end = gen_label_rtx ();
4847
4848 unsignedp = TREE_UNSIGNED (domain);
4849
4850 index = build_decl (VAR_DECL, NULL_TREE, domain);
4851
4852 index_r
4853 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4854 &unsignedp, 0));
4855 SET_DECL_RTL (index, index_r);
4856 if (TREE_CODE (value) == SAVE_EXPR
4857 && SAVE_EXPR_RTL (value) == 0)
4858 {
4859 /* Make sure value gets expanded once before the
4860 loop. */
4861 expand_expr (value, const0_rtx, VOIDmode, 0);
4862 emit_queue ();
4863 }
4864 store_expr (lo_index, index_r, 0);
4865 loop = expand_start_loop (0);
4866
4867 /* Assign value to element index. */
4868 position
4869 = convert (ssizetype,
4870 fold (build (MINUS_EXPR, TREE_TYPE (index),
4871 index, TYPE_MIN_VALUE (domain))));
4872 position = size_binop (MULT_EXPR, position,
4873 convert (ssizetype,
4874 TYPE_SIZE_UNIT (elttype)));
4875
4876 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4877 xtarget = offset_address (target, pos_rtx,
4878 highest_pow2_factor (position));
4879 xtarget = adjust_address (xtarget, mode, 0);
4880 if (TREE_CODE (value) == CONSTRUCTOR)
4881 store_constructor (value, xtarget, cleared,
4882 bitsize / BITS_PER_UNIT);
4883 else
4884 store_expr (value, xtarget, 0);
4885
4886 expand_exit_loop_if_false (loop,
4887 build (LT_EXPR, integer_type_node,
4888 index, hi_index));
4889
4890 expand_increment (build (PREINCREMENT_EXPR,
4891 TREE_TYPE (index),
4892 index, integer_one_node), 0, 0);
4893 expand_end_loop ();
4894 emit_label (loop_end);
4895 }
4896 }
4897 else if ((index != 0 && ! host_integerp (index, 0))
4898 || ! host_integerp (TYPE_SIZE (elttype), 1))
4899 {
4900 tree position;
4901
4902 if (index == 0)
4903 index = ssize_int (1);
4904
4905 if (minelt)
4906 index = convert (ssizetype,
4907 fold (build (MINUS_EXPR, TREE_TYPE (index),
4908 index, TYPE_MIN_VALUE (domain))));
4909
4910 position = size_binop (MULT_EXPR, index,
4911 convert (ssizetype,
4912 TYPE_SIZE_UNIT (elttype)));
4913 xtarget = offset_address (target,
4914 expand_expr (position, 0, VOIDmode, 0),
4915 highest_pow2_factor (position));
4916 xtarget = adjust_address (xtarget, mode, 0);
4917 store_expr (value, xtarget, 0);
4918 }
4919 else
4920 {
4921 if (index != 0)
4922 bitpos = ((tree_low_cst (index, 0) - minelt)
4923 * tree_low_cst (TYPE_SIZE (elttype), 1));
4924 else
4925 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4926
4927 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4928 && TYPE_NONALIASED_COMPONENT (type))
4929 {
4930 target = copy_rtx (target);
4931 MEM_KEEP_ALIAS_SET_P (target) = 1;
4932 }
4933
4934 store_constructor_field (target, bitsize, bitpos, mode, value,
4935 type, cleared, get_alias_set (elttype));
4936
4937 }
4938 }
4939 }
4940
4941 /* Set constructor assignments. */
4942 else if (TREE_CODE (type) == SET_TYPE)
4943 {
4944 tree elt = CONSTRUCTOR_ELTS (exp);
4945 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4946 tree domain = TYPE_DOMAIN (type);
4947 tree domain_min, domain_max, bitlength;
4948
4949 /* The default implementation strategy is to extract the constant
4950 parts of the constructor, use that to initialize the target,
4951 and then "or" in whatever non-constant ranges we need in addition.
4952
4953 If a large set is all zero or all ones, it is
4954 probably better to set it using memset (if available) or bzero.
4955 Also, if a large set has just a single range, it may also be
4956 better to first clear the set (using bzero/memset), and
4957 then set the bits we want. */
4958
4959 /* Check for all zeros. */
4960 if (elt == NULL_TREE && size > 0)
4961 {
4962 if (!cleared)
4963 clear_storage (target, GEN_INT (size));
4964 return;
4965 }
4966
4967 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4968 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4969 bitlength = size_binop (PLUS_EXPR,
4970 size_diffop (domain_max, domain_min),
4971 ssize_int (1));
4972
4973 nbits = tree_low_cst (bitlength, 1);
4974
4975 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4976 are "complicated" (more than one range), initialize (the
4977 constant parts) by copying from a constant. */
4978 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4979 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4980 {
4981 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4982 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4983 char *bit_buffer = (char *) alloca (nbits);
4984 HOST_WIDE_INT word = 0;
4985 unsigned int bit_pos = 0;
4986 unsigned int ibit = 0;
4987 unsigned int offset = 0; /* In bytes from beginning of set. */
4988
4989 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4990 for (;;)
4991 {
4992 if (bit_buffer[ibit])
4993 {
4994 if (BYTES_BIG_ENDIAN)
4995 word |= (1 << (set_word_size - 1 - bit_pos));
4996 else
4997 word |= 1 << bit_pos;
4998 }
4999
5000 bit_pos++; ibit++;
5001 if (bit_pos >= set_word_size || ibit == nbits)
5002 {
5003 if (word != 0 || ! cleared)
5004 {
5005 rtx datum = GEN_INT (word);
5006 rtx to_rtx;
5007
5008 /* A multi-word set must be in memory so that each word can be
5009 addressed; a register target is usable only when the whole
5010 set fits in the first word (offset == 0); otherwise we abort. */
5011 if (GET_CODE (target) == MEM)
5012 to_rtx = adjust_address (target, mode, offset);
5013 else if (offset == 0)
5014 to_rtx = target;
5015 else
5016 abort ();
5017 emit_move_insn (to_rtx, datum);
5018 }
5019
5020 if (ibit == nbits)
5021 break;
5022 word = 0;
5023 bit_pos = 0;
5024 offset += set_word_size / BITS_PER_UNIT;
5025 }
5026 }
5027 }
5028 else if (!cleared)
5029 /* Don't bother clearing storage if the set is all ones. */
5030 if (TREE_CHAIN (elt) != NULL_TREE
5031 || (TREE_PURPOSE (elt) == NULL_TREE
5032 ? nbits != 1
5033 : ( ! host_integerp (TREE_VALUE (elt), 0)
5034 || ! host_integerp (TREE_PURPOSE (elt), 0)
5035 || (tree_low_cst (TREE_VALUE (elt), 0)
5036 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5037 != (HOST_WIDE_INT) nbits))))
5038 clear_storage (target, expr_size (exp));
5039
5040 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5041 {
5042 /* Start of range of element or NULL. */
5043 tree startbit = TREE_PURPOSE (elt);
5044 /* End of range of element, or element value. */
5045 tree endbit = TREE_VALUE (elt);
5046 #ifdef TARGET_MEM_FUNCTIONS
5047 HOST_WIDE_INT startb, endb;
5048 #endif
5049 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5050
5051 bitlength_rtx = expand_expr (bitlength,
5052 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5053
5054 /* Handle non-range tuple element like [ expr ]. */
5055 if (startbit == NULL_TREE)
5056 {
5057 startbit = save_expr (endbit);
5058 endbit = startbit;
5059 }
5060
5061 startbit = convert (sizetype, startbit);
5062 endbit = convert (sizetype, endbit);
5063 if (! integer_zerop (domain_min))
5064 {
5065 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5066 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5067 }
5068 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5069 EXPAND_CONST_ADDRESS);
5070 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5071 EXPAND_CONST_ADDRESS);
5072
5073 if (REG_P (target))
5074 {
5075 targetx
5076 = assign_temp
5077 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5078 TYPE_QUAL_CONST)),
5079 0, 1, 1);
5080 emit_move_insn (targetx, target);
5081 }
5082
5083 else if (GET_CODE (target) == MEM)
5084 targetx = target;
5085 else
5086 abort ();
5087
5088 #ifdef TARGET_MEM_FUNCTIONS
5089 /* Optimization: If startbit and endbit are
5090 constants divisible by BITS_PER_UNIT,
5091 call memset instead. */
5092 if (TREE_CODE (startbit) == INTEGER_CST
5093 && TREE_CODE (endbit) == INTEGER_CST
5094 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5095 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5096 {
5097 emit_library_call (memset_libfunc, LCT_NORMAL,
5098 VOIDmode, 3,
5099 plus_constant (XEXP (targetx, 0),
5100 startb / BITS_PER_UNIT),
5101 Pmode,
5102 constm1_rtx, TYPE_MODE (integer_type_node),
5103 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5104 TYPE_MODE (sizetype));
5105 }
5106 else
5107 #endif
5108 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5109 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5110 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5111 startbit_rtx, TYPE_MODE (sizetype),
5112 endbit_rtx, TYPE_MODE (sizetype));
5113
5114 if (REG_P (target))
5115 emit_move_insn (target, targetx);
5116 }
5117 }
5118
5119 else
5120 abort ();
5121 }
5122
5123 /* Store the value of EXP (an expression tree)
5124 into a subfield of TARGET which has mode MODE and occupies
5125 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5126 If MODE is VOIDmode, it means that we are storing into a bit-field.
5127
5128 If VALUE_MODE is VOIDmode, return nothing in particular.
5129 UNSIGNEDP is not used in this case.
5130
5131 Otherwise, return an rtx for the value stored. This rtx
5132 has mode VALUE_MODE if that is convenient to do.
5133 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5134
5135 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5136
5137 ALIAS_SET is the alias set for the destination. This value will
5138 (in general) be different from that for TARGET, since TARGET is a
5139 reference to the containing structure. */
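/* For example, store_constructor_field funnels each CONSTRUCTOR element
   through here whenever it cannot recurse into store_constructor
   directly.  */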
5140
5141 static rtx
5142 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp,
5143 total_size, alias_set)
5144 rtx target;
5145 HOST_WIDE_INT bitsize;
5146 HOST_WIDE_INT bitpos;
5147 enum machine_mode mode;
5148 tree exp;
5149 enum machine_mode value_mode;
5150 int unsignedp;
5151 HOST_WIDE_INT total_size;
5152 int alias_set;
5153 {
5154 HOST_WIDE_INT width_mask = 0;
5155
5156 if (TREE_CODE (exp) == ERROR_MARK)
5157 return const0_rtx;
5158
5159 /* If we have nothing to store, do nothing unless the expression has
5160 side-effects. */
5161 if (bitsize == 0)
5162 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5163
5164 if (bitsize < HOST_BITS_PER_WIDE_INT)
5165 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5166
5167 /* If we are storing into an unaligned field of an aligned union that is
5168 in a register, we may have the mode of TARGET being an integer mode but
5169 MODE == BLKmode. In that case, get an aligned object whose size and
5170 alignment are the same as TARGET and store TARGET into it (we can avoid
5171 the store if the field being stored is the entire width of TARGET). Then
5172 call ourselves recursively to store the field into a BLKmode version of
5173 that object. Finally, load from the object into TARGET. This is not
5174 very efficient in general, but should only be slightly more expensive
5175 than the otherwise-required unaligned accesses. Perhaps this can be
5176 cleaned up later. */
5177
5178 if (mode == BLKmode
5179 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5180 {
5181 rtx object
5182 = assign_temp
5183 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5184 TYPE_QUAL_CONST),
5185 0, 1, 1);
5186 rtx blk_object = copy_rtx (object);
5187
5188 PUT_MODE (blk_object, BLKmode);
5189 set_mem_alias_set (blk_object, 0);
5190
5191 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5192 emit_move_insn (object, target);
5193
5194 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5195 total_size, alias_set);
5196
5197 /* Even though we aren't returning target, we need to
5198 give it the updated value. */
5199 emit_move_insn (target, object);
5200
5201 return blk_object;
5202 }
5203
5204 if (GET_CODE (target) == CONCAT)
5205 {
5206 /* We're storing into a struct containing a single __complex. */
5207
5208 if (bitpos != 0)
5209 abort ();
5210 return store_expr (exp, target, 0);
5211 }
5212
5213 /* If the structure is in a register or if the component
5214 is a bit field, we cannot use addressing to access it.
5215 Use bit-field techniques or SUBREG to store in it. */
5216
5217 if (mode == VOIDmode
5218 || (mode != BLKmode && ! direct_store[(int) mode]
5219 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5220 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5221 || GET_CODE (target) == REG
5222 || GET_CODE (target) == SUBREG
5223 /* If the field isn't aligned enough to store as an ordinary memref,
5224 store it as a bit field. */
5225 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5226 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5227 || bitpos % GET_MODE_ALIGNMENT (mode)))
5228 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5229 && (TYPE_ALIGN (TREE_TYPE (exp)) > MEM_ALIGN (target)
5230 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5231 /* If the RHS and field are a constant size and the size of the
5232 RHS isn't the same size as the bitfield, we must use bitfield
5233 operations. */
5234 || (bitsize >= 0
5235 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5236 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5237 {
5238 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5239
5240 /* If BITSIZE is narrower than the size of the type of EXP
5241 we will be narrowing TEMP. Normally, what's wanted are the
5242 low-order bits. However, if EXP's type is a record and this is
5243 big-endian machine, we want the upper BITSIZE bits. */
5244 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5245 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5246 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5247 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5248 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5249 - bitsize),
5250 temp, 1);
5251
5252 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5253 MODE. */
5254 if (mode != VOIDmode && mode != BLKmode
5255 && mode != TYPE_MODE (TREE_TYPE (exp)))
5256 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5257
5258 /* If the modes of TARGET and TEMP are both BLKmode, both
5259 must be in memory and BITPOS must be aligned on a byte
5260 boundary. If so, we simply do a block copy. */
5261 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5262 {
5263 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5264 || bitpos % BITS_PER_UNIT != 0)
5265 abort ();
5266
5267 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5268 emit_block_move (target, temp,
5269 bitsize == -1 ? expr_size (exp)
5270 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5271 / BITS_PER_UNIT));
5272
5273 return value_mode == VOIDmode ? const0_rtx : target;
5274 }
5275
5276 /* Store the value in the bitfield. */
5277 store_bit_field (target, bitsize, bitpos, mode, temp, total_size);
5278 if (value_mode != VOIDmode)
5279 {
5280 /* The caller wants an rtx for the value.
5281 If possible, avoid refetching from the bitfield itself. */
5282 if (width_mask != 0
5283 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5284 {
5285 tree count;
5286 enum machine_mode tmode;
5287
5288 if (unsignedp)
5289 return expand_and (temp,
5290 GEN_INT
5291 (trunc_int_for_mode
5292 (width_mask,
5293 GET_MODE (temp) == VOIDmode
5294 ? value_mode
5295 : GET_MODE (temp))), NULL_RTX);
5296
5297 tmode = GET_MODE (temp);
5298 if (tmode == VOIDmode)
5299 tmode = value_mode;
5300 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5301 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5302 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5303 }
5304
5305 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5306 NULL_RTX, value_mode, VOIDmode,
5307 total_size);
5308 }
5309 return const0_rtx;
5310 }
5311 else
5312 {
5313 rtx addr = XEXP (target, 0);
5314 rtx to_rtx;
5315
5316 /* If a value is wanted, it must be the lhs;
5317 so make the address stable for multiple use. */
5318
5319 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5320 && ! CONSTANT_ADDRESS_P (addr)
5321 /* A frame-pointer reference is already stable. */
5322 && ! (GET_CODE (addr) == PLUS
5323 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5324 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5325 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5326 target = replace_equiv_address (target, copy_to_reg (addr));
5327
5328 /* Now build a reference to just the desired component. */
5329
5330 to_rtx = copy_rtx (adjust_address (target, mode,
5331 bitpos / BITS_PER_UNIT));
5332
5333 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5334 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5335 {
5336 to_rtx = copy_rtx (to_rtx);
5337 set_mem_alias_set (to_rtx, alias_set);
5338 }
5339
5340 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5341 }
5342 }
5343 \f
5344 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5345 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5346 codes and find the ultimate containing object, which we return.
5347
5348 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5349 bit position, and *PUNSIGNEDP to the signedness of the field.
5350 If the position of the field is variable, we store a tree
5351 giving the variable offset (in units) in *POFFSET.
5352 This offset is in addition to the bit position.
5353 If the position is not variable, we store 0 in *POFFSET.
5354 We set *PALIGNMENT to the alignment of the address that will be
5355 computed. This is the alignment of the thing we return if *POFFSET
5356 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5357
5358 If any of the extraction expressions is volatile,
5359 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5360
5361 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5362 is a mode that can be used to access the field. In that case, *PBITSIZE
5363 is redundant.
5364
5365 If the field describes a variable-sized object, *PMODE is set to
5366 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5367 this case, but the address of the object can be found. */
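/* As an illustrative sketch (the local variable names here are only
   examples), a typical caller decomposes a reference expression like so:

	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	enum machine_mode mode;
	int unsignedp, volatilep = 0;
	unsigned int align;
	tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					  &mode, &unsignedp, &volatilep,
					  &align);

   On return INNER is the ultimate containing object and the remaining
   variables describe where the referenced bits live within it.  */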
5368
5369 tree
5370 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5371 punsignedp, pvolatilep, palignment)
5372 tree exp;
5373 HOST_WIDE_INT *pbitsize;
5374 HOST_WIDE_INT *pbitpos;
5375 tree *poffset;
5376 enum machine_mode *pmode;
5377 int *punsignedp;
5378 int *pvolatilep;
5379 unsigned int *palignment;
5380 {
5381 tree size_tree = 0;
5382 enum machine_mode mode = VOIDmode;
5383 tree offset = size_zero_node;
5384 tree bit_offset = bitsize_zero_node;
5385 unsigned int alignment = BIGGEST_ALIGNMENT;
5386 tree placeholder_ptr = 0;
5387 tree tem;
5388
5389 /* First get the mode, signedness, and size. We do this from just the
5390 outermost expression. */
5391 if (TREE_CODE (exp) == COMPONENT_REF)
5392 {
5393 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5394 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5395 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5396
5397 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5398 }
5399 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5400 {
5401 size_tree = TREE_OPERAND (exp, 1);
5402 *punsignedp = TREE_UNSIGNED (exp);
5403 }
5404 else
5405 {
5406 mode = TYPE_MODE (TREE_TYPE (exp));
5407 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5408
5409 if (mode == BLKmode)
5410 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5411 else
5412 *pbitsize = GET_MODE_BITSIZE (mode);
5413 }
5414
5415 if (size_tree != 0)
5416 {
5417 if (! host_integerp (size_tree, 1))
5418 mode = BLKmode, *pbitsize = -1;
5419 else
5420 *pbitsize = tree_low_cst (size_tree, 1);
5421 }
5422
5423 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5424 and find the ultimate containing object. */
5425 while (1)
5426 {
5427 if (TREE_CODE (exp) == BIT_FIELD_REF)
5428 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5429 else if (TREE_CODE (exp) == COMPONENT_REF)
5430 {
5431 tree field = TREE_OPERAND (exp, 1);
5432 tree this_offset = DECL_FIELD_OFFSET (field);
5433
5434 /* If this field hasn't been filled in yet, don't go
5435 past it. This should only happen when folding expressions
5436 made during type construction. */
5437 if (this_offset == 0)
5438 break;
5439 else if (! TREE_CONSTANT (this_offset)
5440 && contains_placeholder_p (this_offset))
5441 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5442
5443 offset = size_binop (PLUS_EXPR, offset, this_offset);
5444 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5445 DECL_FIELD_BIT_OFFSET (field));
5446
5447 if (! host_integerp (offset, 0))
5448 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5449 }
5450
5451 else if (TREE_CODE (exp) == ARRAY_REF
5452 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5453 {
5454 tree index = TREE_OPERAND (exp, 1);
5455 tree array = TREE_OPERAND (exp, 0);
5456 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5457 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5458 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5459
5460 /* We assume all arrays have sizes that are a multiple of a byte.
5461 First subtract the lower bound, if any, in the type of the
5462 index, then convert to sizetype and multiply by the size of the
5463 array element. */
5464 if (low_bound != 0 && ! integer_zerop (low_bound))
5465 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5466 index, low_bound));
5467
5468 /* If the index has a self-referential type, pass it to a
5469 WITH_RECORD_EXPR; if the component size is self-referential,
5470 pass our component to one. */
5471 if (! TREE_CONSTANT (index)
5472 && contains_placeholder_p (index))
5473 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5474 if (! TREE_CONSTANT (unit_size)
5475 && contains_placeholder_p (unit_size))
5476 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5477
5478 offset = size_binop (PLUS_EXPR, offset,
5479 size_binop (MULT_EXPR,
5480 convert (sizetype, index),
5481 unit_size));
5482 }
5483
5484 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5485 {
5486 tree new = find_placeholder (exp, &placeholder_ptr);
5487
5488 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5489 We might have been called from tree optimization where we
5490 haven't set up an object yet. */
5491 if (new == 0)
5492 break;
5493 else
5494 exp = new;
5495
5496 continue;
5497 }
5498 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5499 && ! ((TREE_CODE (exp) == NOP_EXPR
5500 || TREE_CODE (exp) == CONVERT_EXPR)
5501 && (TYPE_MODE (TREE_TYPE (exp))
5502 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5503 break;
5504
5505 /* If any reference in the chain is volatile, the effect is volatile. */
5506 if (TREE_THIS_VOLATILE (exp))
5507 *pvolatilep = 1;
5508
5509 /* If the offset is non-constant already, then we can't assume any
5510 alignment more than the alignment here. */
5511 if (! TREE_CONSTANT (offset))
5512 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5513
5514 exp = TREE_OPERAND (exp, 0);
5515 }
5516
5517 if (DECL_P (exp))
5518 alignment = MIN (alignment, DECL_ALIGN (exp));
5519 else if (TREE_TYPE (exp) != 0)
5520 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5521
5522 /* If OFFSET is constant, see if we can return the whole thing as a
5523 constant bit position. Otherwise, split it up. */
5524 if (host_integerp (offset, 0)
5525 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5526 bitsize_unit_node))
5527 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5528 && host_integerp (tem, 0))
5529 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5530 else
5531 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5532
5533 *pmode = mode;
5534 *palignment = alignment;
5535 return exp;
5536 }
5537
5538 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5539
5540 static enum memory_use_mode
5541 get_memory_usage_from_modifier (modifier)
5542 enum expand_modifier modifier;
5543 {
5544 switch (modifier)
5545 {
5546 case EXPAND_NORMAL:
5547 case EXPAND_SUM:
5548 return MEMORY_USE_RO;
5549 break;
5550 case EXPAND_MEMORY_USE_WO:
5551 return MEMORY_USE_WO;
5552 break;
5553 case EXPAND_MEMORY_USE_RW:
5554 return MEMORY_USE_RW;
5555 break;
5556 case EXPAND_MEMORY_USE_DONT:
5557 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5558 MEMORY_USE_DONT, because they are modifiers to a call of
5559 expand_expr in the ADDR_EXPR case of expand_expr. */
5560 case EXPAND_CONST_ADDRESS:
5561 case EXPAND_INITIALIZER:
5562 return MEMORY_USE_DONT;
5563 case EXPAND_MEMORY_USE_BAD:
5564 default:
5565 abort ();
5566 }
5567 }
5568 \f
5569 /* Given an rtx VALUE that may contain additions and multiplications, return
5570 an equivalent value that just refers to a register, memory, or constant.
5571 This is done by generating instructions to perform the arithmetic and
5572 returning a pseudo-register containing the value.
5573
5574 The returned value may be a REG, SUBREG, MEM or constant. */
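/* For illustration only (ADDR here is a hypothetical rtx): a caller that
   has built an address such as (plus (reg ...) (const_int 4)) and needs it
   as a simple operand might write

	rtx op = force_operand (addr, NULL_RTX);

   and get back either ADDR itself, if it already refers to a register,
   memory, or constant, or a pseudo register holding the computed value.  */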
5575
5576 rtx
5577 force_operand (value, target)
5578 rtx value, target;
5579 {
5580 optab binoptab = 0;
5581 /* Use a temporary to force order of execution of calls to
5582 `force_operand'. */
5583 rtx tmp;
5584 rtx op2;
5585 /* Use subtarget as the target for operand 0 of a binary operation. */
5586 rtx subtarget = get_subtarget (target);
5587
5588 /* Check for a PIC address load. */
5589 if (flag_pic
5590 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5591 && XEXP (value, 0) == pic_offset_table_rtx
5592 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5593 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5594 || GET_CODE (XEXP (value, 1)) == CONST))
5595 {
5596 if (!subtarget)
5597 subtarget = gen_reg_rtx (GET_MODE (value));
5598 emit_move_insn (subtarget, value);
5599 return subtarget;
5600 }
5601
5602 if (GET_CODE (value) == PLUS)
5603 binoptab = add_optab;
5604 else if (GET_CODE (value) == MINUS)
5605 binoptab = sub_optab;
5606 else if (GET_CODE (value) == MULT)
5607 {
5608 op2 = XEXP (value, 1);
5609 if (!CONSTANT_P (op2)
5610 && !(GET_CODE (op2) == REG && op2 != subtarget))
5611 subtarget = 0;
5612 tmp = force_operand (XEXP (value, 0), subtarget);
5613 return expand_mult (GET_MODE (value), tmp,
5614 force_operand (op2, NULL_RTX),
5615 target, 1);
5616 }
5617
5618 if (binoptab)
5619 {
5620 op2 = XEXP (value, 1);
5621 if (!CONSTANT_P (op2)
5622 && !(GET_CODE (op2) == REG && op2 != subtarget))
5623 subtarget = 0;
5624 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5625 {
5626 binoptab = add_optab;
5627 op2 = negate_rtx (GET_MODE (value), op2);
5628 }
5629
5630 /* Check for an addition with OP2 a constant integer and our first
5631 operand a PLUS of a virtual register and something else. In that
5632 case, we want to emit the sum of the virtual register and the
5633 constant first and then add the other value. This allows virtual
5634 register instantiation to simply modify the constant rather than
5635 creating another one around this addition. */
5636 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5637 && GET_CODE (XEXP (value, 0)) == PLUS
5638 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5639 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5640 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5641 {
5642 rtx temp = expand_binop (GET_MODE (value), binoptab,
5643 XEXP (XEXP (value, 0), 0), op2,
5644 subtarget, 0, OPTAB_LIB_WIDEN);
5645 return expand_binop (GET_MODE (value), binoptab, temp,
5646 force_operand (XEXP (XEXP (value, 0), 1), 0),
5647 target, 0, OPTAB_LIB_WIDEN);
5648 }
5649
5650 tmp = force_operand (XEXP (value, 0), subtarget);
5651 return expand_binop (GET_MODE (value), binoptab, tmp,
5652 force_operand (op2, NULL_RTX),
5653 target, 0, OPTAB_LIB_WIDEN);
5654 /* We give UNSIGNEDP = 0 to expand_binop
5655 because the only operations we are expanding here are signed ones. */
5656 }
5657
5658 #ifdef INSN_SCHEDULING
5659 /* On machines that have insn scheduling, we want all memory references to be
5660 explicit, so we need to deal with such paradoxical SUBREGs. */
5661 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5662 && (GET_MODE_SIZE (GET_MODE (value))
5663 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5664 value
5665 = simplify_gen_subreg (GET_MODE (value),
5666 force_reg (GET_MODE (SUBREG_REG (value)),
5667 force_operand (SUBREG_REG (value),
5668 NULL_RTX)),
5669 GET_MODE (SUBREG_REG (value)),
5670 SUBREG_BYTE (value));
5671 #endif
5672
5673 return value;
5674 }
5675 \f
5676 /* Subroutine of expand_expr: return nonzero iff there is no way that
5677 EXP can reference X, which is being modified. TOP_P is nonzero if this
5678 call is going to be used to determine whether we need a temporary
5679 for EXP, as opposed to a recursive call to this function.
5680
5681 It is always safe for this routine to return zero since it merely
5682 searches for optimization opportunities. */
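/* A minimal sketch of the usual call pattern (mirroring uses later in this
   file): before reusing TARGET while expanding EXP, a caller checks

	if (target != 0 && safe_from_p (target, exp, 1))
	  ... compute into TARGET ...

   A zero result only means that a conflict could not be ruled out.  */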
5683
5684 int
5685 safe_from_p (x, exp, top_p)
5686 rtx x;
5687 tree exp;
5688 int top_p;
5689 {
5690 rtx exp_rtl = 0;
5691 int i, nops;
5692 static tree save_expr_list;
5693
5694 if (x == 0
5695 /* If EXP has varying size, we MUST use a target since we currently
5696 have no way of allocating temporaries of variable size
5697 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5698 So we assume here that something at a higher level has prevented a
5699 clash. This is somewhat bogus, but the best we can do. Only
5700 do this when X is BLKmode and when we are at the top level. */
5701 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5702 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5703 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5704 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5705 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5706 != INTEGER_CST)
5707 && GET_MODE (x) == BLKmode)
5708 /* If X is in the outgoing argument area, it is always safe. */
5709 || (GET_CODE (x) == MEM
5710 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5711 || (GET_CODE (XEXP (x, 0)) == PLUS
5712 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5713 return 1;
5714
5715 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5716 find the underlying pseudo. */
5717 if (GET_CODE (x) == SUBREG)
5718 {
5719 x = SUBREG_REG (x);
5720 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5721 return 0;
5722 }
5723
5724 /* A SAVE_EXPR might appear many times in the expression passed to the
5725 top-level safe_from_p call, and if it has a complex subexpression,
5726 examining it multiple times could result in a combinatorial explosion.
5727 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5728 with optimization took about 28 minutes to compile -- even though it was
5729 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5730 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5731 we have processed. Note that the only test of top_p was above. */
5732
5733 if (top_p)
5734 {
5735 int rtn;
5736 tree t;
5737
5738 save_expr_list = 0;
5739
5740 rtn = safe_from_p (x, exp, 0);
5741
5742 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5743 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5744
5745 return rtn;
5746 }
5747
5748 /* Now look at our tree code and possibly recurse. */
5749 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5750 {
5751 case 'd':
5752 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5753 break;
5754
5755 case 'c':
5756 return 1;
5757
5758 case 'x':
5759 if (TREE_CODE (exp) == TREE_LIST)
5760 return ((TREE_VALUE (exp) == 0
5761 || safe_from_p (x, TREE_VALUE (exp), 0))
5762 && (TREE_CHAIN (exp) == 0
5763 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5764 else if (TREE_CODE (exp) == ERROR_MARK)
5765 return 1; /* An already-visited SAVE_EXPR? */
5766 else
5767 return 0;
5768
5769 case '1':
5770 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5771
5772 case '2':
5773 case '<':
5774 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5775 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5776
5777 case 'e':
5778 case 'r':
5779 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5780 the expression. If it is set, we conflict iff we are that rtx or
5781 both are in memory. Otherwise, we check all operands of the
5782 expression recursively. */
5783
5784 switch (TREE_CODE (exp))
5785 {
5786 case ADDR_EXPR:
5787 /* If the operand is static or we are static, we can't conflict.
5788 Likewise if we don't conflict with the operand at all. */
5789 if (staticp (TREE_OPERAND (exp, 0))
5790 || TREE_STATIC (exp)
5791 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5792 return 1;
5793
5794 /* Otherwise, the only way this can conflict is if we are taking
5795 the address of a DECL and that address is part of X, which is
5796 very rare. */
5797 exp = TREE_OPERAND (exp, 0);
5798 if (DECL_P (exp))
5799 {
5800 if (!DECL_RTL_SET_P (exp)
5801 || GET_CODE (DECL_RTL (exp)) != MEM)
5802 return 0;
5803 else
5804 exp_rtl = XEXP (DECL_RTL (exp), 0);
5805 }
5806 break;
5807
5808 case INDIRECT_REF:
5809 if (GET_CODE (x) == MEM
5810 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5811 get_alias_set (exp)))
5812 return 0;
5813 break;
5814
5815 case CALL_EXPR:
5816 /* Assume that the call will clobber all hard registers and
5817 all of memory. */
5818 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5819 || GET_CODE (x) == MEM)
5820 return 0;
5821 break;
5822
5823 case RTL_EXPR:
5824 /* If a sequence exists, we would have to scan every instruction
5825 in the sequence to see if it was safe. This is probably not
5826 worthwhile. */
5827 if (RTL_EXPR_SEQUENCE (exp))
5828 return 0;
5829
5830 exp_rtl = RTL_EXPR_RTL (exp);
5831 break;
5832
5833 case WITH_CLEANUP_EXPR:
5834 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5835 break;
5836
5837 case CLEANUP_POINT_EXPR:
5838 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5839
5840 case SAVE_EXPR:
5841 exp_rtl = SAVE_EXPR_RTL (exp);
5842 if (exp_rtl)
5843 break;
5844
5845 /* If we've already scanned this, don't do it again. Otherwise,
5846 show we've scanned it and record it so the flag can be cleared
5847 when we're done. */
5848 if (TREE_PRIVATE (exp))
5849 return 1;
5850
5851 TREE_PRIVATE (exp) = 1;
5852 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5853 {
5854 TREE_PRIVATE (exp) = 0;
5855 return 0;
5856 }
5857
5858 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5859 return 1;
5860
5861 case BIND_EXPR:
5862 /* The only operand we look at is operand 1. The rest aren't
5863 part of the expression. */
5864 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5865
5866 case METHOD_CALL_EXPR:
5867 /* This takes an rtx argument, but shouldn't appear here. */
5868 abort ();
5869
5870 default:
5871 break;
5872 }
5873
5874 /* If we have an rtx, we do not need to scan our operands. */
5875 if (exp_rtl)
5876 break;
5877
5878 nops = first_rtl_op (TREE_CODE (exp));
5879 for (i = 0; i < nops; i++)
5880 if (TREE_OPERAND (exp, i) != 0
5881 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5882 return 0;
5883
5884 /* If this is a language-specific tree code, it may require
5885 special handling. */
5886 if ((unsigned int) TREE_CODE (exp)
5887 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5888 && lang_safe_from_p
5889 && !(*lang_safe_from_p) (x, exp))
5890 return 0;
5891 }
5892
5893 /* If we have an rtl, find any enclosed object. Then see if we conflict
5894 with it. */
5895 if (exp_rtl)
5896 {
5897 if (GET_CODE (exp_rtl) == SUBREG)
5898 {
5899 exp_rtl = SUBREG_REG (exp_rtl);
5900 if (GET_CODE (exp_rtl) == REG
5901 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5902 return 0;
5903 }
5904
5905 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5906 are memory and they conflict. */
5907 return ! (rtx_equal_p (x, exp_rtl)
5908 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5909 && true_dependence (exp_rtl, GET_MODE (x), x,
5910 rtx_addr_varies_p)));
5911 }
5912
5913 /* If we reach here, it is safe. */
5914 return 1;
5915 }
5916
5917 /* Subroutine of expand_expr: return rtx if EXP is a
5918 variable or parameter; else return 0. */
5919
5920 static rtx
5921 var_rtx (exp)
5922 tree exp;
5923 {
5924 STRIP_NOPS (exp);
5925 switch (TREE_CODE (exp))
5926 {
5927 case PARM_DECL:
5928 case VAR_DECL:
5929 return DECL_RTL (exp);
5930 default:
5931 return 0;
5932 }
5933 }
5934
5935 #ifdef MAX_INTEGER_COMPUTATION_MODE
5936
5937 void
5938 check_max_integer_computation_mode (exp)
5939 tree exp;
5940 {
5941 enum tree_code code;
5942 enum machine_mode mode;
5943
5944 /* Strip any NOPs that don't change the mode. */
5945 STRIP_NOPS (exp);
5946 code = TREE_CODE (exp);
5947
5948 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5949 if (code == NOP_EXPR
5950 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5951 return;
5952
5953 /* First check the type of the overall operation. We need only look at
5954 unary, binary and relational operations. */
5955 if (TREE_CODE_CLASS (code) == '1'
5956 || TREE_CODE_CLASS (code) == '2'
5957 || TREE_CODE_CLASS (code) == '<')
5958 {
5959 mode = TYPE_MODE (TREE_TYPE (exp));
5960 if (GET_MODE_CLASS (mode) == MODE_INT
5961 && mode > MAX_INTEGER_COMPUTATION_MODE)
5962 internal_error ("unsupported wide integer operation");
5963 }
5964
5965 /* Check operand of a unary op. */
5966 if (TREE_CODE_CLASS (code) == '1')
5967 {
5968 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5969 if (GET_MODE_CLASS (mode) == MODE_INT
5970 && mode > MAX_INTEGER_COMPUTATION_MODE)
5971 internal_error ("unsupported wide integer operation");
5972 }
5973
5974 /* Check operands of a binary/comparison op. */
5975 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5976 {
5977 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5978 if (GET_MODE_CLASS (mode) == MODE_INT
5979 && mode > MAX_INTEGER_COMPUTATION_MODE)
5980 internal_error ("unsupported wide integer operation");
5981
5982 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5983 if (GET_MODE_CLASS (mode) == MODE_INT
5984 && mode > MAX_INTEGER_COMPUTATION_MODE)
5985 internal_error ("unsupported wide integer operation");
5986 }
5987 }
5988 #endif
5989 \f
5990 /* Return the highest power of two that EXP is known to be a multiple of.
5991 This is used in updating alignment of MEMs in array references. */
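/* A couple of worked examples (illustrative only): for the constant 24 this
   returns 8, its lowest set bit; for an offset expression such as I * 12
   with I not constant, the I operand contributes 1 and the constant
   contributes 4, so the result is 4.  */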
5992
5993 static HOST_WIDE_INT
5994 highest_pow2_factor (exp)
5995 tree exp;
5996 {
5997 HOST_WIDE_INT c0, c1;
5998
5999 switch (TREE_CODE (exp))
6000 {
6001 case INTEGER_CST:
6002 /* If the integer is expressible in a HOST_WIDE_INT, we can find the
6003 lowest bit that's a one. If the result is zero, pessimize by
6004 returning 1. This is overly-conservative, but such things should not
6005 happen in the offset expressions that we are called with. */
6006 if (host_integerp (exp, 0))
6007 {
6008 c0 = tree_low_cst (exp, 0);
6009 c0 = c0 < 0 ? - c0 : c0;
6010 return c0 != 0 ? c0 & -c0 : 1;
6011 }
6012 break;
6013
6014 case PLUS_EXPR: case MINUS_EXPR:
6015 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6016 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6017 return MIN (c0, c1);
6018
6019 case MULT_EXPR:
6020 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6021 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6022 return c0 * c1;
6023
6024 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6025 case CEIL_DIV_EXPR:
6026 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6027 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6028 return MAX (1, c0 / c1);
6029
6030 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6031 case COMPOUND_EXPR: case SAVE_EXPR:
6032 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6033
6034 case COND_EXPR:
6035 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6036 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6037 return MIN (c0, c1);
6038
6039 default:
6040 break;
6041 }
6042
6043 return 1;
6044 }
6045 \f
6046 /* Return an object on the placeholder list that matches EXP, a
6047 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6048 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6049 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6050 is the address of a starting point in the placeholder list (zero
6051 meaning the start of the list); on success, a pointer to the list
6052 entry at which the object was found is stored through it. */
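/* As an illustrative sketch (mirroring the PLACEHOLDER_EXPR case of
   expand_expr below), a typical call looks like

	tree placeholder_expr = 0;
	tree object = find_placeholder (exp, &placeholder_expr);

   where a zero OBJECT means that nothing on the placeholder list matched.  */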
6053
6054 tree
6055 find_placeholder (exp, plist)
6056 tree exp;
6057 tree *plist;
6058 {
6059 tree type = TREE_TYPE (exp);
6060 tree placeholder_expr;
6061
6062 for (placeholder_expr
6063 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6064 placeholder_expr != 0;
6065 placeholder_expr = TREE_CHAIN (placeholder_expr))
6066 {
6067 tree need_type = TYPE_MAIN_VARIANT (type);
6068 tree elt;
6069
6070 /* Find the outermost reference that is of the type we want. If none,
6071 see if any object has a type that is a pointer to the type we
6072 want. */
6073 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6074 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6075 || TREE_CODE (elt) == COND_EXPR)
6076 ? TREE_OPERAND (elt, 1)
6077 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6078 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6079 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6080 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6081 ? TREE_OPERAND (elt, 0) : 0))
6082 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6083 {
6084 if (plist)
6085 *plist = placeholder_expr;
6086 return elt;
6087 }
6088
6089 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6090 elt
6091 = ((TREE_CODE (elt) == COMPOUND_EXPR
6092 || TREE_CODE (elt) == COND_EXPR)
6093 ? TREE_OPERAND (elt, 1)
6094 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6095 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6096 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6097 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6098 ? TREE_OPERAND (elt, 0) : 0))
6099 if (POINTER_TYPE_P (TREE_TYPE (elt))
6100 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6101 == need_type))
6102 {
6103 if (plist)
6104 *plist = placeholder_expr;
6105 return build1 (INDIRECT_REF, need_type, elt);
6106 }
6107 }
6108
6109 return 0;
6110 }
6111 \f
6112 /* expand_expr: generate code for computing expression EXP.
6113 An rtx for the computed value is returned. The value is never null.
6114 In the case of a void EXP, const0_rtx is returned.
6115
6116 The value may be stored in TARGET if TARGET is nonzero.
6117 TARGET is just a suggestion; callers must assume that
6118 the rtx returned may not be the same as TARGET.
6119
6120 If TARGET is CONST0_RTX, it means that the value will be ignored.
6121
6122 If TMODE is not VOIDmode, it suggests generating the
6123 result in mode TMODE. But this is done only when convenient.
6124 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6125 TMODE is just a suggestion; callers must assume that
6126 the rtx returned may not have mode TMODE.
6127
6128 Note that TARGET may have neither TMODE nor MODE. In that case, it
6129 probably will not be used.
6130
6131 If MODIFIER is EXPAND_SUM then when EXP is an addition
6132 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6133 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6134 products as above, or REG or MEM, or constant.
6135 Ordinarily in such cases we would output mul or add instructions
6136 and then return a pseudo reg containing the sum.
6137
6138 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6139 it also marks a label as absolutely required (it can't be dead).
6140 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6141 This is used for outputting expressions used in initializers.
6142
6143 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6144 with a constant address even if that address is not normally legitimate.
6145 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
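/* Two common call patterns, as an illustrative sketch: asking for the value
   of EXP in its natural mode,

	rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   and expanding EXP only for its side effects, discarding the value:

	expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);  */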
6146
6147 rtx
6148 expand_expr (exp, target, tmode, modifier)
6149 tree exp;
6150 rtx target;
6151 enum machine_mode tmode;
6152 enum expand_modifier modifier;
6153 {
6154 rtx op0, op1, temp;
6155 tree type = TREE_TYPE (exp);
6156 int unsignedp = TREE_UNSIGNED (type);
6157 enum machine_mode mode;
6158 enum tree_code code = TREE_CODE (exp);
6159 optab this_optab;
6160 rtx subtarget, original_target;
6161 int ignore;
6162 tree context;
6163 /* Used by check-memory-usage to make modifier read only. */
6164 enum expand_modifier ro_modifier;
6165
6166 /* Handle ERROR_MARK before anybody tries to access its type. */
6167 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6168 {
6169 op0 = CONST0_RTX (tmode);
6170 if (op0 != 0)
6171 return op0;
6172 return const0_rtx;
6173 }
6174
6175 mode = TYPE_MODE (type);
6176 /* Use subtarget as the target for operand 0 of a binary operation. */
6177 subtarget = get_subtarget (target);
6178 original_target = target;
6179 ignore = (target == const0_rtx
6180 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6181 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6182 || code == COND_EXPR)
6183 && TREE_CODE (type) == VOID_TYPE));
6184
6185 /* Make a read-only version of the modifier. */
6186 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6187 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6188 ro_modifier = modifier;
6189 else
6190 ro_modifier = EXPAND_NORMAL;
6191
6192 /* If we are going to ignore this result, we need only do something
6193 if there is a side-effect somewhere in the expression. If there
6194 is, short-circuit the most common cases here. Note that we must
6195 not call expand_expr with anything but const0_rtx in case this
6196 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6197
6198 if (ignore)
6199 {
6200 if (! TREE_SIDE_EFFECTS (exp))
6201 return const0_rtx;
6202
6203 /* Ensure we reference a volatile object even if value is ignored, but
6204 don't do this if all we are doing is taking its address. */
6205 if (TREE_THIS_VOLATILE (exp)
6206 && TREE_CODE (exp) != FUNCTION_DECL
6207 && mode != VOIDmode && mode != BLKmode
6208 && modifier != EXPAND_CONST_ADDRESS)
6209 {
6210 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6211 if (GET_CODE (temp) == MEM)
6212 temp = copy_to_reg (temp);
6213 return const0_rtx;
6214 }
6215
6216 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6217 || code == INDIRECT_REF || code == BUFFER_REF)
6218 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6219 VOIDmode, ro_modifier);
6220 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6221 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6222 {
6223 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6224 ro_modifier);
6225 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6226 ro_modifier);
6227 return const0_rtx;
6228 }
6229 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6230 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6231 /* If the second operand has no side effects, just evaluate
6232 the first. */
6233 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6234 VOIDmode, ro_modifier);
6235 else if (code == BIT_FIELD_REF)
6236 {
6237 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6238 ro_modifier);
6239 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6240 ro_modifier);
6241 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6242 ro_modifier);
6243 return const0_rtx;
6244 }
6245
6246 target = 0;
6247 }
6248
6249 #ifdef MAX_INTEGER_COMPUTATION_MODE
6250 /* Only check stuff here if the mode we want is different from the mode
6251 of the expression; if it's the same, check_max_integer_computation_mode
6252 will handle it. Do we really need to check this stuff at all? */
6253
6254 if (target
6255 && GET_MODE (target) != mode
6256 && TREE_CODE (exp) != INTEGER_CST
6257 && TREE_CODE (exp) != PARM_DECL
6258 && TREE_CODE (exp) != ARRAY_REF
6259 && TREE_CODE (exp) != ARRAY_RANGE_REF
6260 && TREE_CODE (exp) != COMPONENT_REF
6261 && TREE_CODE (exp) != BIT_FIELD_REF
6262 && TREE_CODE (exp) != INDIRECT_REF
6263 && TREE_CODE (exp) != CALL_EXPR
6264 && TREE_CODE (exp) != VAR_DECL
6265 && TREE_CODE (exp) != RTL_EXPR)
6266 {
6267 enum machine_mode mode = GET_MODE (target);
6268
6269 if (GET_MODE_CLASS (mode) == MODE_INT
6270 && mode > MAX_INTEGER_COMPUTATION_MODE)
6271 internal_error ("unsupported wide integer operation");
6272 }
6273
6274 if (tmode != mode
6275 && TREE_CODE (exp) != INTEGER_CST
6276 && TREE_CODE (exp) != PARM_DECL
6277 && TREE_CODE (exp) != ARRAY_REF
6278 && TREE_CODE (exp) != ARRAY_RANGE_REF
6279 && TREE_CODE (exp) != COMPONENT_REF
6280 && TREE_CODE (exp) != BIT_FIELD_REF
6281 && TREE_CODE (exp) != INDIRECT_REF
6282 && TREE_CODE (exp) != VAR_DECL
6283 && TREE_CODE (exp) != CALL_EXPR
6284 && TREE_CODE (exp) != RTL_EXPR
6285 && GET_MODE_CLASS (tmode) == MODE_INT
6286 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6287 internal_error ("unsupported wide integer operation");
6288
6289 check_max_integer_computation_mode (exp);
6290 #endif
6291
6292 /* If we will do cse, generate all results into pseudo registers
6293 since 1) that allows cse to find more things
6294 and 2) otherwise cse could produce an insn the machine
6295 cannot support. */
6296
6297 if (! cse_not_expected && mode != BLKmode && target
6298 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6299 target = subtarget;
6300
6301 switch (code)
6302 {
6303 case LABEL_DECL:
6304 {
6305 tree function = decl_function_context (exp);
6306 /* Handle using a label in a containing function. */
6307 if (function != current_function_decl
6308 && function != inline_function_decl && function != 0)
6309 {
6310 struct function *p = find_function_data (function);
6311 p->expr->x_forced_labels
6312 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6313 p->expr->x_forced_labels);
6314 }
6315 else
6316 {
6317 if (modifier == EXPAND_INITIALIZER)
6318 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6319 label_rtx (exp),
6320 forced_labels);
6321 }
6322
6323 temp = gen_rtx_MEM (FUNCTION_MODE,
6324 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6325 if (function != current_function_decl
6326 && function != inline_function_decl && function != 0)
6327 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6328 return temp;
6329 }
6330
6331 case PARM_DECL:
6332 if (DECL_RTL (exp) == 0)
6333 {
6334 error_with_decl (exp, "prior parameter's size depends on `%s'");
6335 return CONST0_RTX (mode);
6336 }
6337
6338 /* ... fall through ... */
6339
6340 case VAR_DECL:
6341 /* If a static var's type was incomplete when the decl was written,
6342 but the type is complete now, lay out the decl now. */
6343 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6344 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6345 {
6346 layout_decl (exp, 0);
6347 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6348 }
6349
6350 /* Although static-storage variables start off initialized, according to
6351 ANSI C, a memcpy could overwrite them with uninitialized values. So
6352 we check them too. This also lets us check for read-only variables
6353 accessed via a non-const declaration, in case it won't be detected
6354 any other way (e.g., in an embedded system or OS kernel without
6355 memory protection).
6356
6357 Aggregates are not checked here; they're handled elsewhere. */
6358 if (cfun && current_function_check_memory_usage
6359 && code == VAR_DECL
6360 && GET_CODE (DECL_RTL (exp)) == MEM
6361 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6362 {
6363 enum memory_use_mode memory_usage;
6364 memory_usage = get_memory_usage_from_modifier (modifier);
6365
6366 in_check_memory_usage = 1;
6367 if (memory_usage != MEMORY_USE_DONT)
6368 emit_library_call (chkr_check_addr_libfunc,
6369 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6370 XEXP (DECL_RTL (exp), 0), Pmode,
6371 GEN_INT (int_size_in_bytes (type)),
6372 TYPE_MODE (sizetype),
6373 GEN_INT (memory_usage),
6374 TYPE_MODE (integer_type_node));
6375 in_check_memory_usage = 0;
6376 }
6377
6378 /* ... fall through ... */
6379
6380 case FUNCTION_DECL:
6381 case RESULT_DECL:
6382 if (DECL_RTL (exp) == 0)
6383 abort ();
6384
6385 /* Ensure the variable is marked as used even if it doesn't go through
6386 a parser. If it hasn't been used yet, write out an external
6387 definition. */
6388 if (! TREE_USED (exp))
6389 {
6390 assemble_external (exp);
6391 TREE_USED (exp) = 1;
6392 }
6393
6394 /* Show we haven't gotten RTL for this yet. */
6395 temp = 0;
6396
6397 /* Handle variables inherited from containing functions. */
6398 context = decl_function_context (exp);
6399
6400 /* We treat inline_function_decl as an alias for the current function
6401 because that is the inline function whose vars, types, etc.
6402 are being merged into the current function.
6403 See expand_inline_function. */
6404
6405 if (context != 0 && context != current_function_decl
6406 && context != inline_function_decl
6407 /* If var is static, we don't need a static chain to access it. */
6408 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6409 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6410 {
6411 rtx addr;
6412
6413 /* Mark as non-local and addressable. */
6414 DECL_NONLOCAL (exp) = 1;
6415 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6416 abort ();
6417 mark_addressable (exp);
6418 if (GET_CODE (DECL_RTL (exp)) != MEM)
6419 abort ();
6420 addr = XEXP (DECL_RTL (exp), 0);
6421 if (GET_CODE (addr) == MEM)
6422 addr
6423 = replace_equiv_address (addr,
6424 fix_lexical_addr (XEXP (addr, 0), exp));
6425 else
6426 addr = fix_lexical_addr (addr, exp);
6427
6428 temp = replace_equiv_address (DECL_RTL (exp), addr);
6429 }
6430
6431 /* This is the case of an array whose size is to be determined
6432 from its initializer, while the initializer is still being parsed.
6433 See expand_decl. */
6434
6435 else if (GET_CODE (DECL_RTL (exp)) == MEM
6436 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6437 temp = validize_mem (DECL_RTL (exp));
6438
6439 /* If DECL_RTL is memory, we are in the normal case; if either
6440 the address is not valid, or it is not a register and -fforce-addr
6441 is specified, get the address into a register. */
6442
6443 else if (GET_CODE (DECL_RTL (exp)) == MEM
6444 && modifier != EXPAND_CONST_ADDRESS
6445 && modifier != EXPAND_SUM
6446 && modifier != EXPAND_INITIALIZER
6447 && (! memory_address_p (DECL_MODE (exp),
6448 XEXP (DECL_RTL (exp), 0))
6449 || (flag_force_addr
6450 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6451 temp = replace_equiv_address (DECL_RTL (exp),
6452 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6453
6454 /* If we got something, return it. But first, set the alignment
6455 if the address is a register. */
6456 if (temp != 0)
6457 {
6458 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6459 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6460
6461 return temp;
6462 }
6463
6464 /* If the mode of DECL_RTL does not match that of the decl, it
6465 must be a promoted value. We return a SUBREG of the wanted mode,
6466 but mark it so that we know that it was already extended. */
6467
6468 if (GET_CODE (DECL_RTL (exp)) == REG
6469 && GET_MODE (DECL_RTL (exp)) != mode)
6470 {
6471 /* Get the signedness used for this variable. Ensure we get the
6472 same mode we got when the variable was declared. */
6473 if (GET_MODE (DECL_RTL (exp))
6474 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6475 abort ();
6476
6477 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6478 SUBREG_PROMOTED_VAR_P (temp) = 1;
6479 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6480 return temp;
6481 }
6482
6483 return DECL_RTL (exp);
6484
6485 case INTEGER_CST:
6486 return immed_double_const (TREE_INT_CST_LOW (exp),
6487 TREE_INT_CST_HIGH (exp), mode);
6488
6489 case CONST_DECL:
6490 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6491 EXPAND_MEMORY_USE_BAD);
6492
6493 case REAL_CST:
6494 /* If optimized, generate immediate CONST_DOUBLE
6495 which will be turned into memory by reload if necessary.
6496
6497 We used to force a register so that loop.c could see it. But
6498 this does not allow gen_* patterns to perform optimizations with
6499 the constants. It also produces two insns in cases like "x = 1.0;".
6500 On most machines, floating-point constants are not permitted in
6501 many insns, so we'd end up copying it to a register in any case.
6502
6503 Now, we do the copying in expand_binop, if appropriate. */
6504 return immed_real_const (exp);
6505
6506 case COMPLEX_CST:
6507 case STRING_CST:
6508 if (! TREE_CST_RTL (exp))
6509 output_constant_def (exp, 1);
6510
6511 /* TREE_CST_RTL probably contains a constant address.
6512 On RISC machines where a constant address isn't valid,
6513 make some insns to get that address into a register. */
6514 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6515 && modifier != EXPAND_CONST_ADDRESS
6516 && modifier != EXPAND_INITIALIZER
6517 && modifier != EXPAND_SUM
6518 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6519 || (flag_force_addr
6520 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6521 return replace_equiv_address (TREE_CST_RTL (exp),
6522 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6523 return TREE_CST_RTL (exp);
6524
6525 case EXPR_WITH_FILE_LOCATION:
6526 {
6527 rtx to_return;
6528 const char *saved_input_filename = input_filename;
6529 int saved_lineno = lineno;
6530 input_filename = EXPR_WFL_FILENAME (exp);
6531 lineno = EXPR_WFL_LINENO (exp);
6532 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6533 emit_line_note (input_filename, lineno);
6534 /* Possibly avoid switching back and forth here. */
6535 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6536 input_filename = saved_input_filename;
6537 lineno = saved_lineno;
6538 return to_return;
6539 }
6540
6541 case SAVE_EXPR:
6542 context = decl_function_context (exp);
6543
6544 /* If this SAVE_EXPR was at global context, assume we are an
6545 initialization function and move it into our context. */
6546 if (context == 0)
6547 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6548
6549 /* We treat inline_function_decl as an alias for the current function
6550 because that is the inline function whose vars, types, etc.
6551 are being merged into the current function.
6552 See expand_inline_function. */
6553 if (context == current_function_decl || context == inline_function_decl)
6554 context = 0;
6555
6556 /* If this is non-local, handle it. */
6557 if (context)
6558 {
6559 /* The following call just exists to abort if the context is
6560 not of a containing function. */
6561 find_function_data (context);
6562
6563 temp = SAVE_EXPR_RTL (exp);
6564 if (temp && GET_CODE (temp) == REG)
6565 {
6566 put_var_into_stack (exp);
6567 temp = SAVE_EXPR_RTL (exp);
6568 }
6569 if (temp == 0 || GET_CODE (temp) != MEM)
6570 abort ();
6571 return
6572 replace_equiv_address (temp,
6573 fix_lexical_addr (XEXP (temp, 0), exp));
6574 }
6575 if (SAVE_EXPR_RTL (exp) == 0)
6576 {
6577 if (mode == VOIDmode)
6578 temp = const0_rtx;
6579 else
6580 temp = assign_temp (build_qualified_type (type,
6581 (TYPE_QUALS (type)
6582 | TYPE_QUAL_CONST)),
6583 3, 0, 0);
6584
6585 SAVE_EXPR_RTL (exp) = temp;
6586 if (!optimize && GET_CODE (temp) == REG)
6587 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6588 save_expr_regs);
6589
6590 /* If the mode of TEMP does not match that of the expression, it
6591 must be a promoted value. We pass store_expr a SUBREG of the
6592 wanted mode but mark it so that we know that it was already
6593 extended. Note that `unsignedp' was modified above in
6594 this case. */
6595
6596 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6597 {
6598 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6599 SUBREG_PROMOTED_VAR_P (temp) = 1;
6600 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6601 }
6602
6603 if (temp == const0_rtx)
6604 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6605 EXPAND_MEMORY_USE_BAD);
6606 else
6607 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6608
6609 TREE_USED (exp) = 1;
6610 }
6611
6612 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6613 must be a promoted value. We return a SUBREG of the wanted mode,
6614 but mark it so that we know that it was already extended. */
6615
6616 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6617 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6618 {
6619 /* Compute the signedness and make the proper SUBREG. */
6620 promote_mode (type, mode, &unsignedp, 0);
6621 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6622 SUBREG_PROMOTED_VAR_P (temp) = 1;
6623 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6624 return temp;
6625 }
6626
6627 return SAVE_EXPR_RTL (exp);
6628
6629 case UNSAVE_EXPR:
6630 {
6631 rtx temp;
6632 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6633 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6634 return temp;
6635 }
6636
6637 case PLACEHOLDER_EXPR:
6638 {
6639 tree old_list = placeholder_list;
6640 tree placeholder_expr = 0;
6641
6642 exp = find_placeholder (exp, &placeholder_expr);
6643 if (exp == 0)
6644 abort ();
6645
6646 placeholder_list = TREE_CHAIN (placeholder_expr);
6647 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6648 placeholder_list = old_list;
6649 return temp;
6650 }
6651
6652 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6653 abort ();
6654
6655 case WITH_RECORD_EXPR:
6656 /* Put the object on the placeholder list, expand our first operand,
6657 and pop the list. */
6658 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6659 placeholder_list);
6660 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6661 tmode, ro_modifier);
6662 placeholder_list = TREE_CHAIN (placeholder_list);
6663 return target;
6664
6665 case GOTO_EXPR:
6666 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6667 expand_goto (TREE_OPERAND (exp, 0));
6668 else
6669 expand_computed_goto (TREE_OPERAND (exp, 0));
6670 return const0_rtx;
6671
6672 case EXIT_EXPR:
6673 expand_exit_loop_if_false (NULL,
6674 invert_truthvalue (TREE_OPERAND (exp, 0)));
6675 return const0_rtx;
6676
6677 case LABELED_BLOCK_EXPR:
6678 if (LABELED_BLOCK_BODY (exp))
6679 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6680 /* Should perhaps use expand_label, but this is simpler and safer. */
6681 do_pending_stack_adjust ();
6682 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6683 return const0_rtx;
6684
6685 case EXIT_BLOCK_EXPR:
6686 if (EXIT_BLOCK_RETURN (exp))
6687 sorry ("returned value in block_exit_expr");
6688 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6689 return const0_rtx;
6690
6691 case LOOP_EXPR:
6692 push_temp_slots ();
6693 expand_start_loop (1);
6694 expand_expr_stmt (TREE_OPERAND (exp, 0));
6695 expand_end_loop ();
6696 pop_temp_slots ();
6697
6698 return const0_rtx;
6699
6700 case BIND_EXPR:
6701 {
6702 tree vars = TREE_OPERAND (exp, 0);
6703 int vars_need_expansion = 0;
6704
6705 /* Need to open a binding contour here because
6706 if there are any cleanups they must be contained here. */
6707 expand_start_bindings (2);
6708
6709 /* Mark the corresponding BLOCK for output in its proper place. */
6710 if (TREE_OPERAND (exp, 2) != 0
6711 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6712 insert_block (TREE_OPERAND (exp, 2));
6713
6714 /* If VARS have not yet been expanded, expand them now. */
6715 while (vars)
6716 {
6717 if (!DECL_RTL_SET_P (vars))
6718 {
6719 vars_need_expansion = 1;
6720 expand_decl (vars);
6721 }
6722 expand_decl_init (vars);
6723 vars = TREE_CHAIN (vars);
6724 }
6725
6726 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6727
6728 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6729
6730 return temp;
6731 }
6732
6733 case RTL_EXPR:
6734 if (RTL_EXPR_SEQUENCE (exp))
6735 {
6736 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6737 abort ();
6738 emit_insns (RTL_EXPR_SEQUENCE (exp));
6739 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6740 }
6741 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6742 free_temps_for_rtl_expr (exp);
6743 return RTL_EXPR_RTL (exp);
6744
6745 case CONSTRUCTOR:
6746 /* If we don't need the result, just ensure we evaluate any
6747 subexpressions. */
6748 if (ignore)
6749 {
6750 tree elt;
6751 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6752 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6753 EXPAND_MEMORY_USE_BAD);
6754 return const0_rtx;
6755 }
6756
6757 /* All elts simple constants => refer to a constant in memory. But
6758 if this is a non-BLKmode mode, let it store a field at a time
6759 since that should make a CONST_INT or CONST_DOUBLE when we
6760 fold. Likewise, if we have a target we can use, it is best to
6761 store directly into the target unless the type is large enough
6762 that memcpy will be used. If we are making an initializer and
6763 all operands are constant, put it in memory as well. */
6764 else if ((TREE_STATIC (exp)
6765 && ((mode == BLKmode
6766 && ! (target != 0 && safe_from_p (target, exp, 1)))
6767 || TREE_ADDRESSABLE (exp)
6768 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6769 && (! MOVE_BY_PIECES_P
6770 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6771 TYPE_ALIGN (type)))
6772 && ! mostly_zeros_p (exp))))
6773 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6774 {
6775 rtx constructor = output_constant_def (exp, 1);
6776
6777 if (modifier != EXPAND_CONST_ADDRESS
6778 && modifier != EXPAND_INITIALIZER
6779 && modifier != EXPAND_SUM)
6780 constructor = validize_mem (constructor);
6781
6782 return constructor;
6783 }
6784 else
6785 {
6786 /* Handle calls that pass values in multiple non-contiguous
6787 locations. The Irix 6 ABI has examples of this. */
6788 if (target == 0 || ! safe_from_p (target, exp, 1)
6789 || GET_CODE (target) == PARALLEL)
6790 target
6791 = assign_temp (build_qualified_type (type,
6792 (TYPE_QUALS (type)
6793 | (TREE_READONLY (exp)
6794 * TYPE_QUAL_CONST))),
6795 TREE_ADDRESSABLE (exp), 1, 1);
6796
6797 store_constructor (exp, target, 0,
6798 int_size_in_bytes (TREE_TYPE (exp)));
6799 return target;
6800 }
6801
6802 case INDIRECT_REF:
6803 {
6804 tree exp1 = TREE_OPERAND (exp, 0);
6805 tree index;
6806 tree string = string_constant (exp1, &index);
6807
6808 /* Try to optimize reads from const strings. */
6809 if (string
6810 && TREE_CODE (string) == STRING_CST
6811 && TREE_CODE (index) == INTEGER_CST
6812 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6813 && GET_MODE_CLASS (mode) == MODE_INT
6814 && GET_MODE_SIZE (mode) == 1
6815 && modifier != EXPAND_MEMORY_USE_WO)
6816 return
6817 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6818
6819 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6820 op0 = memory_address (mode, op0);
6821
6822 if (cfun && current_function_check_memory_usage
6823 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6824 {
6825 enum memory_use_mode memory_usage;
6826 memory_usage = get_memory_usage_from_modifier (modifier);
6827
6828 if (memory_usage != MEMORY_USE_DONT)
6829 {
6830 in_check_memory_usage = 1;
6831 emit_library_call (chkr_check_addr_libfunc,
6832 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6833 Pmode, GEN_INT (int_size_in_bytes (type)),
6834 TYPE_MODE (sizetype),
6835 GEN_INT (memory_usage),
6836 TYPE_MODE (integer_type_node));
6837 in_check_memory_usage = 0;
6838 }
6839 }
6840
6841 temp = gen_rtx_MEM (mode, op0);
6842 set_mem_attributes (temp, exp, 0);
6843
6844 /* If we are writing to this object and its type is a record with
6845 readonly fields, we must mark it as readonly so it will
6846 conflict with readonly references to those fields. */
6847 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6848 RTX_UNCHANGING_P (temp) = 1;
6849
6850 return temp;
6851 }
6852
6853 case ARRAY_REF:
6854 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6855 abort ();
6856
6857 {
6858 tree array = TREE_OPERAND (exp, 0);
6859 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6860 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6861 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6862 HOST_WIDE_INT i;
6863
6864 /* Optimize the special-case of a zero lower bound.
6865
6866 We convert the low_bound to sizetype to avoid some problems
6867 with constant folding. (E.g. suppose the lower bound is 1,
6868 and its mode is QI. Without the conversion, (ARRAY
6869 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6870 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6871
6872 if (! integer_zerop (low_bound))
6873 index = size_diffop (index, convert (sizetype, low_bound));
6874
6875 /* Fold an expression like: "foo"[2].
6876 This is not done in fold so it won't happen inside &.
6877 Don't fold if this is for wide characters since it's too
6878 difficult to do correctly and this is a very rare case. */
6879
6880 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6881 && TREE_CODE (array) == STRING_CST
6882 && TREE_CODE (index) == INTEGER_CST
6883 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6884 && GET_MODE_CLASS (mode) == MODE_INT
6885 && GET_MODE_SIZE (mode) == 1)
6886 return
6887 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6888
6889 /* If this is a constant index into a constant array,
6890 just get the value from the array. Handle both the cases when
6891 we have an explicit constructor and when our operand is a variable
6892 that was declared const. */
6893
6894 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6895 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6896 && TREE_CODE (index) == INTEGER_CST
6897 && 0 > compare_tree_int (index,
6898 list_length (CONSTRUCTOR_ELTS
6899 (TREE_OPERAND (exp, 0)))))
6900 {
6901 tree elem;
6902
6903 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6904 i = TREE_INT_CST_LOW (index);
6905 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6906 ;
6907
6908 if (elem)
6909 return expand_expr (fold (TREE_VALUE (elem)), target,
6910 tmode, ro_modifier);
6911 }
6912
6913 else if (optimize >= 1
6914 && modifier != EXPAND_CONST_ADDRESS
6915 && modifier != EXPAND_INITIALIZER
6916 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6917 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6918 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6919 {
6920 if (TREE_CODE (index) == INTEGER_CST)
6921 {
6922 tree init = DECL_INITIAL (array);
6923
6924 if (TREE_CODE (init) == CONSTRUCTOR)
6925 {
6926 tree elem;
6927
6928 for (elem = CONSTRUCTOR_ELTS (init);
6929 (elem
6930 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6931 elem = TREE_CHAIN (elem))
6932 ;
6933
6934 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6935 return expand_expr (fold (TREE_VALUE (elem)), target,
6936 tmode, ro_modifier);
6937 }
6938 else if (TREE_CODE (init) == STRING_CST
6939 && 0 > compare_tree_int (index,
6940 TREE_STRING_LENGTH (init)))
6941 {
6942 tree type = TREE_TYPE (TREE_TYPE (init));
6943 enum machine_mode mode = TYPE_MODE (type);
6944
6945 if (GET_MODE_CLASS (mode) == MODE_INT
6946 && GET_MODE_SIZE (mode) == 1)
6947 return (GEN_INT
6948 (TREE_STRING_POINTER
6949 (init)[TREE_INT_CST_LOW (index)]));
6950 }
6951 }
6952 }
6953 }
6954 /* Fall through. */
6955
6956 case COMPONENT_REF:
6957 case BIT_FIELD_REF:
6958 case ARRAY_RANGE_REF:
6959 /* If the operand is a CONSTRUCTOR, we can just extract the
6960 appropriate field if it is present. Don't do this if we have
6961 already written the data since we want to refer to that copy
6962 and varasm.c assumes that's what we'll do. */
6963 if (code == COMPONENT_REF
6964 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6965 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6966 {
6967 tree elt;
6968
6969 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6970 elt = TREE_CHAIN (elt))
6971 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6972 /* We can normally use the value of the field in the
6973 CONSTRUCTOR. However, if this is a bitfield in
6974 an integral mode that we can fit in a HOST_WIDE_INT,
6975 we must mask only the number of bits in the bitfield,
6976 since this is done implicitly by the constructor. If
6977 the bitfield does not meet either of those conditions,
6978 we can't do this optimization. */
6979 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6980 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6981 == MODE_INT)
6982 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6983 <= HOST_BITS_PER_WIDE_INT))))
6984 {
6985 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6986 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6987 {
6988 HOST_WIDE_INT bitsize
6989 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6990
6991 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6992 {
6993 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6994 op0 = expand_and (op0, op1, target);
6995 }
6996 else
6997 {
6998 enum machine_mode imode
6999 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7000 tree count
7001 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7002 0);
7003
7004 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7005 target, 0);
7006 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7007 target, 0);
7008 }
7009 }
7010
7011 return op0;
7012 }
7013 }
7014
7015 {
7016 enum machine_mode mode1;
7017 HOST_WIDE_INT bitsize, bitpos;
7018 tree offset;
7019 int volatilep = 0;
7020 unsigned int alignment;
7021 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7022 &mode1, &unsignedp, &volatilep,
7023 &alignment);
7024 rtx orig_op0;
7025
7026 /* If we got back the original object, something is wrong. Perhaps
7027 we are evaluating an expression too early. In any event, don't
7028 infinitely recurse. */
7029 if (tem == exp)
7030 abort ();
7031
7032 /* If TEM's type is a union of variable size, pass TARGET to the inner
7033 computation, since it will need a temporary and TARGET is known
7034 to have to do. This occurs in unchecked conversion in Ada. */
7035
7036 orig_op0 = op0
7037 = expand_expr (tem,
7038 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7039 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7040 != INTEGER_CST)
7041 ? target : NULL_RTX),
7042 VOIDmode,
7043 (modifier == EXPAND_INITIALIZER
7044 || modifier == EXPAND_CONST_ADDRESS)
7045 ? modifier : EXPAND_NORMAL);
7046
7047 /* If this is a constant, put it into a register if it is a
7048 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7049 if (CONSTANT_P (op0))
7050 {
7051 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7052 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7053 && offset == 0)
7054 op0 = force_reg (mode, op0);
7055 else
7056 op0 = validize_mem (force_const_mem (mode, op0));
7057 }
7058
7059 if (offset != 0)
7060 {
7061 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7062
7063 /* If this object is in a register, put it into memory.
7064 This case can't occur in C, but can in Ada if we have
7065 unchecked conversion of an expression from a scalar type to
7066 an array or record type. */
7067 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7068 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7069 {
7070 /* If the operand is a SAVE_EXPR, we can deal with this by
7071 forcing the SAVE_EXPR into memory. */
7072 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7073 {
7074 put_var_into_stack (TREE_OPERAND (exp, 0));
7075 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7076 }
7077 else
7078 {
7079 tree nt
7080 = build_qualified_type (TREE_TYPE (tem),
7081 (TYPE_QUALS (TREE_TYPE (tem))
7082 | TYPE_QUAL_CONST));
7083 rtx memloc = assign_temp (nt, 1, 1, 1);
7084
7085 emit_move_insn (memloc, op0);
7086 op0 = memloc;
7087 }
7088 }
7089
7090 if (GET_CODE (op0) != MEM)
7091 abort ();
7092
7093 if (GET_MODE (offset_rtx) != ptr_mode)
7094 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7095
7096 #ifdef POINTERS_EXTEND_UNSIGNED
7097 if (GET_MODE (offset_rtx) != Pmode)
7098 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7099 #endif
7100
7101 	  /* A constant address in OP0 can have VOIDmode; we must not try
7102 	     to call force_reg in that case, so avoid it here.  */
7103 if (GET_CODE (op0) == MEM
7104 && GET_MODE (op0) == BLKmode
7105 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7106 && bitsize != 0
7107 && (bitpos % bitsize) == 0
7108 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7109 && alignment == GET_MODE_ALIGNMENT (mode1))
7110 {
7111 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7112
7113 if (GET_CODE (XEXP (temp, 0)) == REG)
7114 op0 = temp;
7115 else
7116 op0 = (replace_equiv_address
7117 (op0,
7118 force_reg (GET_MODE (XEXP (temp, 0)),
7119 XEXP (temp, 0))));
7120 bitpos = 0;
7121 }
7122
7123 op0 = offset_address (op0, offset_rtx,
7124 highest_pow2_factor (offset));
7125 }
7126
7127 /* Don't forget about volatility even if this is a bitfield. */
7128 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7129 {
7130 if (op0 == orig_op0)
7131 op0 = copy_rtx (op0);
7132
7133 MEM_VOLATILE_P (op0) = 1;
7134 }
7135
7136 /* Check the access. */
7137 if (cfun != 0 && current_function_check_memory_usage
7138 && GET_CODE (op0) == MEM)
7139 {
7140 enum memory_use_mode memory_usage;
7141 memory_usage = get_memory_usage_from_modifier (modifier);
7142
7143 if (memory_usage != MEMORY_USE_DONT)
7144 {
7145 rtx to;
7146 int size;
7147
7148 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7149 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7150
7151 	      /* Check the access rights of the pointer.  */
7152 in_check_memory_usage = 1;
7153 if (size > BITS_PER_UNIT)
7154 emit_library_call (chkr_check_addr_libfunc,
7155 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7156 Pmode, GEN_INT (size / BITS_PER_UNIT),
7157 TYPE_MODE (sizetype),
7158 GEN_INT (memory_usage),
7159 TYPE_MODE (integer_type_node));
7160 in_check_memory_usage = 0;
7161 }
7162 }
7163
7164 /* In cases where an aligned union has an unaligned object
7165 as a field, we might be extracting a BLKmode value from
7166 an integer-mode (e.g., SImode) object. Handle this case
7167 by doing the extract into an object as wide as the field
7168 (which we know to be the width of a basic mode), then
7169 storing into memory, and changing the mode to BLKmode. */
7170 if (mode1 == VOIDmode
7171 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7172 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7173 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7174 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7175 && modifier != EXPAND_CONST_ADDRESS
7176 && modifier != EXPAND_INITIALIZER)
7177 /* If the field isn't aligned enough to fetch as a memref,
7178 fetch it as a bit field. */
7179 || (mode1 != BLKmode
7180 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7181 && ((TYPE_ALIGN (TREE_TYPE (tem))
7182 < GET_MODE_ALIGNMENT (mode))
7183 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7184 /* If the type and the field are a constant size and the
7185 size of the type isn't the same size as the bitfield,
7186 we must use bitfield operations. */
7187 || (bitsize >= 0
7188 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7189 == INTEGER_CST)
7190 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7191 bitsize))
7192 || (mode == BLKmode
7193 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7194 && (TYPE_ALIGN (type) > alignment
7195 || bitpos % TYPE_ALIGN (type) != 0)))
7196 {
7197 enum machine_mode ext_mode = mode;
7198
7199 if (ext_mode == BLKmode
7200 && ! (target != 0 && GET_CODE (op0) == MEM
7201 && GET_CODE (target) == MEM
7202 && bitpos % BITS_PER_UNIT == 0))
7203 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7204
7205 if (ext_mode == BLKmode)
7206 {
7207 /* In this case, BITPOS must start at a byte boundary and
7208 TARGET, if specified, must be a MEM. */
7209 if (GET_CODE (op0) != MEM
7210 || (target != 0 && GET_CODE (target) != MEM)
7211 || bitpos % BITS_PER_UNIT != 0)
7212 abort ();
7213
7214 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7215 if (target == 0)
7216 target = assign_temp (type, 0, 1, 1);
7217
7218 emit_block_move (target, op0,
7219 bitsize == -1 ? expr_size (exp)
7220 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7221 / BITS_PER_UNIT));
7222
7223 return target;
7224 }
7225
7226 op0 = validize_mem (op0);
7227
7228 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7229 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7230
7231 op0 = extract_bit_field (op0, bitsize, bitpos,
7232 unsignedp, target, ext_mode, ext_mode,
7233 int_size_in_bytes (TREE_TYPE (tem)));
7234
7235 /* If the result is a record type and BITSIZE is narrower than
7236 the mode of OP0, an integral mode, and this is a big endian
7237 machine, we must put the field into the high-order bits. */
7238 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7239 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7240 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7241 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7242 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7243 - bitsize),
7244 op0, 1);
7245
7246 if (mode == BLKmode)
7247 {
7248 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7249 TYPE_QUAL_CONST);
7250 rtx new = assign_temp (nt, 0, 1, 1);
7251
7252 emit_move_insn (new, op0);
7253 op0 = copy_rtx (new);
7254 PUT_MODE (op0, BLKmode);
7255 }
7256
7257 return op0;
7258 }
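	  /* An illustrative sketch, assuming a hypothetical packed layout
	     (not from this file):

		 struct p { char c; int i; } __attribute__ ((packed));

	     On a strict-alignment target a read of X.I cannot be fetched as
	     an ordinary SImode memref, so the branch above falls back to
	     extract_bit_field, pulling the 32 bits of I out of the
	     surrounding bytes as a bit-field extraction instead of a plain
	     aligned load.  */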
7259
7260 /* If the result is BLKmode, use that to access the object
7261 now as well. */
7262 if (mode == BLKmode)
7263 mode1 = BLKmode;
7264
7265 /* Get a reference to just this component. */
7266 if (modifier == EXPAND_CONST_ADDRESS
7267 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7268 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7269 else
7270 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7271
7272 if (op0 == orig_op0)
7273 op0 = copy_rtx (op0);
7274
7275 set_mem_attributes (op0, exp, 0);
7276 if (GET_CODE (XEXP (op0, 0)) == REG)
7277 mark_reg_pointer (XEXP (op0, 0), alignment);
7278
7279 MEM_VOLATILE_P (op0) |= volatilep;
7280 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7281 || modifier == EXPAND_CONST_ADDRESS
7282 || modifier == EXPAND_INITIALIZER)
7283 return op0;
7284 else if (target == 0)
7285 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7286
7287 convert_move (target, op0, unsignedp);
7288 return target;
7289 }
7290
7291 case VTABLE_REF:
7292 {
7293 rtx insn, before = get_last_insn (), vtbl_ref;
7294
7295 /* Evaluate the interior expression. */
7296 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7297 tmode, modifier);
7298
7299 /* Get or create an instruction off which to hang a note. */
7300 if (REG_P (subtarget))
7301 {
7302 target = subtarget;
7303 insn = get_last_insn ();
7304 if (insn == before)
7305 abort ();
7306 if (! INSN_P (insn))
7307 insn = prev_nonnote_insn (insn);
7308 }
7309 else
7310 {
7311 target = gen_reg_rtx (GET_MODE (subtarget));
7312 insn = emit_move_insn (target, subtarget);
7313 }
7314
7315 /* Collect the data for the note. */
7316 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7317 vtbl_ref = plus_constant (vtbl_ref,
7318 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7319 /* Discard the initial CONST that was added. */
7320 vtbl_ref = XEXP (vtbl_ref, 0);
7321
7322 REG_NOTES (insn)
7323 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7324
7325 return target;
7326 }
7327
7328 /* Intended for a reference to a buffer of a file-object in Pascal.
7329 But it's not certain that a special tree code will really be
7330 necessary for these. INDIRECT_REF might work for them. */
7331 case BUFFER_REF:
7332 abort ();
7333
7334 case IN_EXPR:
7335 {
7336 /* Pascal set IN expression.
7337
7338 Algorithm:
7339 rlo = set_low - (set_low%bits_per_word);
7340 the_word = set [ (index - rlo)/bits_per_word ];
7341 bit_index = index % bits_per_word;
7342 bitmask = 1 << bit_index;
7343 return !!(the_word & bitmask); */
7344
7345 tree set = TREE_OPERAND (exp, 0);
7346 tree index = TREE_OPERAND (exp, 1);
7347 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7348 tree set_type = TREE_TYPE (set);
7349 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7350 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7351 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7352 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7353 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7354 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7355 rtx setaddr = XEXP (setval, 0);
7356 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7357 rtx rlow;
7358 rtx diff, quo, rem, addr, bit, result;
7359
7360 /* If domain is empty, answer is no. Likewise if index is constant
7361 and out of bounds. */
7362 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7363 && TREE_CODE (set_low_bound) == INTEGER_CST
7364 && tree_int_cst_lt (set_high_bound, set_low_bound))
7365 || (TREE_CODE (index) == INTEGER_CST
7366 && TREE_CODE (set_low_bound) == INTEGER_CST
7367 && tree_int_cst_lt (index, set_low_bound))
7368 || (TREE_CODE (set_high_bound) == INTEGER_CST
7369 && TREE_CODE (index) == INTEGER_CST
7370 && tree_int_cst_lt (set_high_bound, index))))
7371 return const0_rtx;
7372
7373 if (target == 0)
7374 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7375
7376 /* If we get here, we have to generate the code for both cases
7377 (in range and out of range). */
7378
7379 op0 = gen_label_rtx ();
7380 op1 = gen_label_rtx ();
7381
7382 if (! (GET_CODE (index_val) == CONST_INT
7383 && GET_CODE (lo_r) == CONST_INT))
7384 {
7385 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7386 GET_MODE (index_val), iunsignedp, 0, op1);
7387 }
7388
7389 if (! (GET_CODE (index_val) == CONST_INT
7390 && GET_CODE (hi_r) == CONST_INT))
7391 {
7392 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7393 GET_MODE (index_val), iunsignedp, 0, op1);
7394 }
7395
7396 /* Calculate the element number of bit zero in the first word
7397 of the set. */
7398 if (GET_CODE (lo_r) == CONST_INT)
7399 rlow = GEN_INT (INTVAL (lo_r)
7400 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7401 else
7402 rlow = expand_binop (index_mode, and_optab, lo_r,
7403 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7404 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7405
7406 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7407 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7408
7409 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7410 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7411 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7412 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7413
7414 addr = memory_address (byte_mode,
7415 expand_binop (index_mode, add_optab, diff,
7416 setaddr, NULL_RTX, iunsignedp,
7417 OPTAB_LIB_WIDEN));
7418
7419 /* Extract the bit we want to examine. */
7420 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7421 gen_rtx_MEM (byte_mode, addr),
7422 make_tree (TREE_TYPE (index), rem),
7423 NULL_RTX, 1);
7424 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7425 GET_MODE (target) == byte_mode ? target : 0,
7426 1, OPTAB_LIB_WIDEN);
7427
7428 if (result != target)
7429 convert_move (target, result, 1);
7430
7431 /* Output the code to handle the out-of-range case. */
7432 emit_jump (op0);
7433 emit_label (op1);
7434 emit_move_insn (target, const0_rtx);
7435 emit_label (op0);
7436 return target;
7437 }
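      /* A rough worked example of the algorithm sketched at the top of this
	 case, purely illustrative: for a Pascal-style test `10 in s' with a
	 set whose domain starts at 0, rlo is 0, the_word is
	 set[10 / bits_per_word], bit_index is 10 % bits_per_word, and the
	 result is

	     !!(the_word & (1 << bit_index));

	 The two conditional jumps emitted first catch an index outside the
	 set's bounds, in which case TARGET is simply loaded with zero.  */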
7438
7439 case WITH_CLEANUP_EXPR:
7440 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7441 {
7442 WITH_CLEANUP_EXPR_RTL (exp)
7443 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7444 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7445
7446 /* That's it for this cleanup. */
7447 TREE_OPERAND (exp, 1) = 0;
7448 }
7449 return WITH_CLEANUP_EXPR_RTL (exp);
7450
7451 case CLEANUP_POINT_EXPR:
7452 {
7453 /* Start a new binding layer that will keep track of all cleanup
7454 actions to be performed. */
7455 expand_start_bindings (2);
7456
7457 target_temp_slot_level = temp_slot_level;
7458
7459 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7460 /* If we're going to use this value, load it up now. */
7461 if (! ignore)
7462 op0 = force_not_mem (op0);
7463 preserve_temp_slots (op0);
7464 expand_end_bindings (NULL_TREE, 0, 0);
7465 }
7466 return op0;
7467
7468 case CALL_EXPR:
7469 /* Check for a built-in function. */
7470 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7471 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7472 == FUNCTION_DECL)
7473 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7474 {
7475 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7476 == BUILT_IN_FRONTEND)
7477 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7478 else
7479 return expand_builtin (exp, target, subtarget, tmode, ignore);
7480 }
7481
7482 return expand_call (exp, target, ignore);
7483
7484 case NON_LVALUE_EXPR:
7485 case NOP_EXPR:
7486 case CONVERT_EXPR:
7487 case REFERENCE_EXPR:
7488 if (TREE_OPERAND (exp, 0) == error_mark_node)
7489 return const0_rtx;
7490
7491 if (TREE_CODE (type) == UNION_TYPE)
7492 {
7493 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7494
7495 /* If both input and output are BLKmode, this conversion
7496 isn't actually doing anything unless we need to make the
7497 alignment stricter. */
7498 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7499 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7500 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7501 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7502 modifier);
7503
7504 if (target == 0)
7505 target = assign_temp (type, 0, 1, 1);
7506
7507 if (GET_CODE (target) == MEM)
7508 /* Store data into beginning of memory target. */
7509 store_expr (TREE_OPERAND (exp, 0),
7510 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7511
7512 else if (GET_CODE (target) == REG)
7513 /* Store this field into a union of the proper type. */
7514 store_field (target,
7515 MIN ((int_size_in_bytes (TREE_TYPE
7516 (TREE_OPERAND (exp, 0)))
7517 * BITS_PER_UNIT),
7518 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7519 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7520 VOIDmode, 0, int_size_in_bytes (type), 0);
7521 else
7522 abort ();
7523
7524 /* Return the entire union. */
7525 return target;
7526 }
7527
7528 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7529 {
7530 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7531 ro_modifier);
7532
7533 /* If the signedness of the conversion differs and OP0 is
7534 a promoted SUBREG, clear that indication since we now
7535 have to do the proper extension. */
7536 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7537 && GET_CODE (op0) == SUBREG)
7538 SUBREG_PROMOTED_VAR_P (op0) = 0;
7539
7540 return op0;
7541 }
7542
7543 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7544 if (GET_MODE (op0) == mode)
7545 return op0;
7546
7547 /* If OP0 is a constant, just convert it into the proper mode. */
7548 if (CONSTANT_P (op0))
7549 return
7550 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7551 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7552
7553 if (modifier == EXPAND_INITIALIZER)
7554 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7555
7556 if (target == 0)
7557 return
7558 convert_to_mode (mode, op0,
7559 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7560 else
7561 convert_move (target, op0,
7562 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7563 return target;
7564
7565 case PLUS_EXPR:
7566 /* We come here from MINUS_EXPR when the second operand is a
7567 constant. */
7568 plus_expr:
7569 this_optab = ! unsignedp && flag_trapv
7570 && (GET_MODE_CLASS(mode) == MODE_INT)
7571 ? addv_optab : add_optab;
7572
7573 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7574 something else, make sure we add the register to the constant and
7575 then to the other thing. This case can occur during strength
7576 reduction and doing it this way will produce better code if the
7577 frame pointer or argument pointer is eliminated.
7578
7579 fold-const.c will ensure that the constant is always in the inner
7580 PLUS_EXPR, so the only case we need to do anything about is if
7581 sp, ap, or fp is our second argument, in which case we must swap
7582 the innermost first argument and our second argument. */
7583
7584 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7585 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7586 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7587 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7588 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7589 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7590 {
7591 tree t = TREE_OPERAND (exp, 1);
7592
7593 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7594 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7595 }
7596
7597 /* If the result is to be ptr_mode and we are adding an integer to
7598 something, we might be forming a constant. So try to use
7599 plus_constant. If it produces a sum and we can't accept it,
7600 use force_operand. This allows P = &ARR[const] to generate
7601 efficient code on machines where a SYMBOL_REF is not a valid
7602 address.
7603
7604 If this is an EXPAND_SUM call, always return the sum. */
7605 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7606 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7607 {
7608 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7609 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7610 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7611 {
7612 rtx constant_part;
7613
7614 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7615 EXPAND_SUM);
7616 /* Use immed_double_const to ensure that the constant is
7617 truncated according to the mode of OP1, then sign extended
7618 to a HOST_WIDE_INT. Using the constant directly can result
7619 in non-canonical RTL in a 64x32 cross compile. */
7620 constant_part
7621 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7622 (HOST_WIDE_INT) 0,
7623 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7624 op1 = plus_constant (op1, INTVAL (constant_part));
7625 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7626 op1 = force_operand (op1, target);
7627 return op1;
7628 }
7629
7630 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7631 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7632 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7633 {
7634 rtx constant_part;
7635
7636 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7637 EXPAND_SUM);
7638 if (! CONSTANT_P (op0))
7639 {
7640 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7641 VOIDmode, modifier);
7642 /* Don't go to both_summands if modifier
7643 says it's not right to return a PLUS. */
7644 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7645 goto binop2;
7646 goto both_summands;
7647 }
7648 /* Use immed_double_const to ensure that the constant is
7649 		 truncated according to the mode of OP0, then sign extended
7650 to a HOST_WIDE_INT. Using the constant directly can result
7651 in non-canonical RTL in a 64x32 cross compile. */
7652 constant_part
7653 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7654 (HOST_WIDE_INT) 0,
7655 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7656 op0 = plus_constant (op0, INTVAL (constant_part));
7657 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7658 op0 = force_operand (op0, target);
7659 return op0;
7660 }
7661 }
7662
7663 /* No sense saving up arithmetic to be done
7664 if it's all in the wrong mode to form part of an address.
7665 And force_operand won't know whether to sign-extend or
7666 zero-extend. */
7667 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7668 || mode != ptr_mode)
7669 goto binop;
7670
7671 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7672 subtarget = 0;
7673
7674 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7675 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7676
7677 both_summands:
7678 /* Make sure any term that's a sum with a constant comes last. */
7679 if (GET_CODE (op0) == PLUS
7680 && CONSTANT_P (XEXP (op0, 1)))
7681 {
7682 temp = op0;
7683 op0 = op1;
7684 op1 = temp;
7685 }
7686 /* If adding to a sum including a constant,
7687 associate it to put the constant outside. */
7688 if (GET_CODE (op1) == PLUS
7689 && CONSTANT_P (XEXP (op1, 1)))
7690 {
7691 rtx constant_term = const0_rtx;
7692
7693 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7694 if (temp != 0)
7695 op0 = temp;
7696 /* Ensure that MULT comes first if there is one. */
7697 else if (GET_CODE (op0) == MULT)
7698 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7699 else
7700 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7701
7702 /* Let's also eliminate constants from op0 if possible. */
7703 op0 = eliminate_constant_term (op0, &constant_term);
7704
7705 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7706 their sum should be a constant. Form it into OP1, since the
7707 result we want will then be OP0 + OP1. */
7708
7709 temp = simplify_binary_operation (PLUS, mode, constant_term,
7710 XEXP (op1, 1));
7711 if (temp != 0)
7712 op1 = temp;
7713 else
7714 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7715 }
7716
7717 /* Put a constant term last and put a multiplication first. */
7718 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7719 temp = op1, op1 = op0, op0 = temp;
7720
7721 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7722 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
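      /* An illustrative sketch of what the constant branches above buy us,
	 assuming a hypothetical declaration (not from this file):

	     static int arr[10];
	     int *p = &arr[3];

	 Expanding &arr[3] for an initializer can fold the byte offset with
	 plus_constant, yielding RTL along the lines of

	     (const (plus (symbol_ref "arr") (const_int 12)))

	 instead of materializing the symbol in a register and adding at run
	 time; force_operand is applied only when the caller cannot accept a
	 bare sum.  */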
7723
7724 case MINUS_EXPR:
7725 /* For initializers, we are allowed to return a MINUS of two
7726 symbolic constants. Here we handle all cases when both operands
7727 are constant. */
7728 /* Handle difference of two symbolic constants,
7729 for the sake of an initializer. */
7730 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7731 && really_constant_p (TREE_OPERAND (exp, 0))
7732 && really_constant_p (TREE_OPERAND (exp, 1)))
7733 {
7734 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7735 VOIDmode, ro_modifier);
7736 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7737 VOIDmode, ro_modifier);
7738
7739 /* If the last operand is a CONST_INT, use plus_constant of
7740 the negated constant. Else make the MINUS. */
7741 if (GET_CODE (op1) == CONST_INT)
7742 return plus_constant (op0, - INTVAL (op1));
7743 else
7744 return gen_rtx_MINUS (mode, op0, op1);
7745 }
7746 /* Convert A - const to A + (-const). */
7747 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7748 {
7749 tree negated = fold (build1 (NEGATE_EXPR, type,
7750 TREE_OPERAND (exp, 1)));
7751
7752 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7753 /* If we can't negate the constant in TYPE, leave it alone and
7754 expand_binop will negate it for us. We used to try to do it
7755 here in the signed version of TYPE, but that doesn't work
7756 on POINTER_TYPEs. */;
7757 else
7758 {
7759 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7760 goto plus_expr;
7761 }
7762 }
7763 this_optab = ! unsignedp && flag_trapv
7764 && (GET_MODE_CLASS(mode) == MODE_INT)
7765 ? subv_optab : sub_optab;
7766 goto binop;
7767
7768 case MULT_EXPR:
7769 /* If first operand is constant, swap them.
7770 Thus the following special case checks need only
7771 check the second operand. */
7772 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7773 {
7774 tree t1 = TREE_OPERAND (exp, 0);
7775 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7776 TREE_OPERAND (exp, 1) = t1;
7777 }
7778
7779 /* Attempt to return something suitable for generating an
7780 indexed address, for machines that support that. */
7781
7782 if (modifier == EXPAND_SUM && mode == ptr_mode
7783 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7784 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7785 {
7786 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7787 EXPAND_SUM);
7788
7789 /* Apply distributive law if OP0 is x+c. */
7790 if (GET_CODE (op0) == PLUS
7791 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7792 return
7793 gen_rtx_PLUS
7794 (mode,
7795 gen_rtx_MULT
7796 (mode, XEXP (op0, 0),
7797 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7798 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7799 * INTVAL (XEXP (op0, 1))));
7800
7801 if (GET_CODE (op0) != REG)
7802 op0 = force_operand (op0, NULL_RTX);
7803 if (GET_CODE (op0) != REG)
7804 op0 = copy_to_mode_reg (mode, op0);
7805
7806 return
7807 gen_rtx_MULT (mode, op0,
7808 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7809 }
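	  /* An illustrative sketch, assuming a hypothetical reference such
	     as a[i + 2] with 4-byte elements expanded for an address
	     (EXPAND_SUM): if OP0 comes back as (plus (reg i) (const_int 2)),
	     the distributive rewrite above produces

		 (plus (mult (reg i) (const_int 4)) (const_int 8))

	     which slots directly into a base + index * scale + displacement
	     addressing mode on targets that have one.  */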
7810
7811 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7812 subtarget = 0;
7813
7814 /* Check for multiplying things that have been extended
7815 from a narrower type. If this machine supports multiplying
7816 in that narrower type with a result in the desired type,
7817 do it that way, and avoid the explicit type-conversion. */
7818 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7819 && TREE_CODE (type) == INTEGER_TYPE
7820 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7821 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7822 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7823 && int_fits_type_p (TREE_OPERAND (exp, 1),
7824 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7825 /* Don't use a widening multiply if a shift will do. */
7826 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7827 > HOST_BITS_PER_WIDE_INT)
7828 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7829 ||
7830 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7831 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7832 ==
7833 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7834 /* If both operands are extended, they must either both
7835 be zero-extended or both be sign-extended. */
7836 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7837 ==
7838 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7839 {
7840 enum machine_mode innermode
7841 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7842 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7843 ? smul_widen_optab : umul_widen_optab);
7844 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7845 ? umul_widen_optab : smul_widen_optab);
7846 if (mode == GET_MODE_WIDER_MODE (innermode))
7847 {
7848 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7849 {
7850 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7851 NULL_RTX, VOIDmode, 0);
7852 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7853 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7854 VOIDmode, 0);
7855 else
7856 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7857 NULL_RTX, VOIDmode, 0);
7858 goto binop2;
7859 }
7860 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7861 && innermode == word_mode)
7862 {
7863 rtx htem;
7864 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7865 NULL_RTX, VOIDmode, 0);
7866 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7867 op1 = convert_modes (innermode, mode,
7868 expand_expr (TREE_OPERAND (exp, 1),
7869 NULL_RTX, VOIDmode, 0),
7870 unsignedp);
7871 else
7872 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7873 NULL_RTX, VOIDmode, 0);
7874 temp = expand_binop (mode, other_optab, op0, op1, target,
7875 unsignedp, OPTAB_LIB_WIDEN);
7876 htem = expand_mult_highpart_adjust (innermode,
7877 gen_highpart (innermode, temp),
7878 op0, op1,
7879 gen_highpart (innermode, temp),
7880 unsignedp);
7881 emit_move_insn (gen_highpart (innermode, temp), htem);
7882 return temp;
7883 }
7884 }
7885 }
7886 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7887 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7888 return expand_mult (mode, op0, op1, target, unsignedp);
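      /* An illustrative sketch of the widening-multiply detection above,
	 assuming a hypothetical source fragment:

	     short a, b;
	     int prod = (int) a * (int) b;

	 Both operands are NOP_EXPRs from a narrower signed type, so when the
	 target provides a signed widening multiply for the wider mode (e.g.
	 a mulhisi3-style pattern) the inner HImode values are fed to that
	 optab directly, rather than extending each operand to SImode and
	 doing a full-width multiply.  */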
7889
7890 case TRUNC_DIV_EXPR:
7891 case FLOOR_DIV_EXPR:
7892 case CEIL_DIV_EXPR:
7893 case ROUND_DIV_EXPR:
7894 case EXACT_DIV_EXPR:
7895 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7896 subtarget = 0;
7897 	  /* Possible optimization: compute the dividend with EXPAND_SUM;
7898 	     then, if the divisor is constant, we can optimize the case
7899 	     where some terms of the dividend have coefficients divisible by it.  */
7900 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7901 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7902 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7903
7904 case RDIV_EXPR:
7905 	  /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
7906 	     saving an expensive divide.  If not, combine will rebuild the
7907 	     original computation.  */
7908 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7909 && !real_onep (TREE_OPERAND (exp, 0)))
7910 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7911 build (RDIV_EXPR, type,
7912 build_real (type, dconst1),
7913 TREE_OPERAND (exp, 1))),
7914 target, tmode, unsignedp);
7915 this_optab = sdiv_optab;
7916 goto binop;
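      /* An illustrative sketch, assuming -funsafe-math-optimizations and a
	 hypothetical loop:

	     for (i = 0; i < n; i++)
	       x[i] = y[i] / scale;

	 Each division is rewritten above as y[i] * (1.0 / scale); later
	 passes may then CSE or hoist the single reciprocal so the loop does
	 one divide and N multiplies.  If that does not pay off, combine can
	 fold the reciprocal back into a plain division.  */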
7917
7918 case TRUNC_MOD_EXPR:
7919 case FLOOR_MOD_EXPR:
7920 case CEIL_MOD_EXPR:
7921 case ROUND_MOD_EXPR:
7922 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7923 subtarget = 0;
7924 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7925 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7926 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7927
7928 case FIX_ROUND_EXPR:
7929 case FIX_FLOOR_EXPR:
7930 case FIX_CEIL_EXPR:
7931 abort (); /* Not used for C. */
7932
7933 case FIX_TRUNC_EXPR:
7934 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7935 if (target == 0)
7936 target = gen_reg_rtx (mode);
7937 expand_fix (target, op0, unsignedp);
7938 return target;
7939
7940 case FLOAT_EXPR:
7941 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7942 if (target == 0)
7943 target = gen_reg_rtx (mode);
7944 /* expand_float can't figure out what to do if FROM has VOIDmode.
7945 So give it the correct mode. With -O, cse will optimize this. */
7946 if (GET_MODE (op0) == VOIDmode)
7947 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7948 op0);
7949 expand_float (target, op0,
7950 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7951 return target;
7952
7953 case NEGATE_EXPR:
7954 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7955 temp = expand_unop (mode,
7956 ! unsignedp && flag_trapv
7957 && (GET_MODE_CLASS(mode) == MODE_INT)
7958 ? negv_optab : neg_optab, op0, target, 0);
7959 if (temp == 0)
7960 abort ();
7961 return temp;
7962
7963 case ABS_EXPR:
7964 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7965
7966 /* Handle complex values specially. */
7967 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7968 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7969 return expand_complex_abs (mode, op0, target, unsignedp);
7970
7971 /* Unsigned abs is simply the operand. Testing here means we don't
7972 risk generating incorrect code below. */
7973 if (TREE_UNSIGNED (type))
7974 return op0;
7975
7976 return expand_abs (mode, op0, target, unsignedp,
7977 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7978
7979 case MAX_EXPR:
7980 case MIN_EXPR:
7981 target = original_target;
7982 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7983 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7984 || GET_MODE (target) != mode
7985 || (GET_CODE (target) == REG
7986 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7987 target = gen_reg_rtx (mode);
7988 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7989 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7990
7991 /* First try to do it with a special MIN or MAX instruction.
7992 If that does not win, use a conditional jump to select the proper
7993 value. */
7994 this_optab = (TREE_UNSIGNED (type)
7995 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7996 : (code == MIN_EXPR ? smin_optab : smax_optab));
7997
7998 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7999 OPTAB_WIDEN);
8000 if (temp != 0)
8001 return temp;
8002
8003 /* At this point, a MEM target is no longer useful; we will get better
8004 code without it. */
8005
8006 if (GET_CODE (target) == MEM)
8007 target = gen_reg_rtx (mode);
8008
8009 if (target != op0)
8010 emit_move_insn (target, op0);
8011
8012 op0 = gen_label_rtx ();
8013
8014 /* If this mode is an integer too wide to compare properly,
8015 compare word by word. Rely on cse to optimize constant cases. */
8016 if (GET_MODE_CLASS (mode) == MODE_INT
8017 && ! can_compare_p (GE, mode, ccp_jump))
8018 {
8019 if (code == MAX_EXPR)
8020 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8021 target, op1, NULL_RTX, op0);
8022 else
8023 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8024 op1, target, NULL_RTX, op0);
8025 }
8026 else
8027 {
8028 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8029 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8030 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
8031 op0);
8032 }
8033 emit_move_insn (target, op1);
8034 emit_label (op0);
8035 return target;
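      /* An illustrative sketch of the branching fallback above, for a
	 hypothetical `m = MAX (a, b)' when no max instruction exists:

	     target = a;
	     if (a >= b) goto done;
	     target = b;
	   done:

	 TARGET is first loaded with OP0, a conditional jump over the second
	 store is emitted on GE (LE for MIN_EXPR), and OP1 is copied in only
	 when the comparison fails.  */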
8036
8037 case BIT_NOT_EXPR:
8038 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8039 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8040 if (temp == 0)
8041 abort ();
8042 return temp;
8043
8044 case FFS_EXPR:
8045 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8046 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8047 if (temp == 0)
8048 abort ();
8049 return temp;
8050
8051 /* ??? Can optimize bitwise operations with one arg constant.
8052 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8053 and (a bitwise1 b) bitwise2 b (etc)
8054 but that is probably not worth while. */
8055
8056 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8057 boolean values when we want in all cases to compute both of them. In
8058 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8059 as actual zero-or-1 values and then bitwise anding. In cases where
8060 there cannot be any side effects, better code would be made by
8061 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8062 how to recognize those cases. */
8063
8064 case TRUTH_AND_EXPR:
8065 case BIT_AND_EXPR:
8066 this_optab = and_optab;
8067 goto binop;
8068
8069 case TRUTH_OR_EXPR:
8070 case BIT_IOR_EXPR:
8071 this_optab = ior_optab;
8072 goto binop;
8073
8074 case TRUTH_XOR_EXPR:
8075 case BIT_XOR_EXPR:
8076 this_optab = xor_optab;
8077 goto binop;
8078
8079 case LSHIFT_EXPR:
8080 case RSHIFT_EXPR:
8081 case LROTATE_EXPR:
8082 case RROTATE_EXPR:
8083 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8084 subtarget = 0;
8085 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8086 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8087 unsignedp);
8088
8089 /* Could determine the answer when only additive constants differ. Also,
8090 the addition of one can be handled by changing the condition. */
8091 case LT_EXPR:
8092 case LE_EXPR:
8093 case GT_EXPR:
8094 case GE_EXPR:
8095 case EQ_EXPR:
8096 case NE_EXPR:
8097 case UNORDERED_EXPR:
8098 case ORDERED_EXPR:
8099 case UNLT_EXPR:
8100 case UNLE_EXPR:
8101 case UNGT_EXPR:
8102 case UNGE_EXPR:
8103 case UNEQ_EXPR:
8104 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8105 if (temp != 0)
8106 return temp;
8107
8108 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8109 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8110 && original_target
8111 && GET_CODE (original_target) == REG
8112 && (GET_MODE (original_target)
8113 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8114 {
8115 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8116 VOIDmode, 0);
8117
8118 if (temp != original_target)
8119 temp = copy_to_reg (temp);
8120
8121 op1 = gen_label_rtx ();
8122 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8123 GET_MODE (temp), unsignedp, 0, op1);
8124 emit_move_insn (temp, const1_rtx);
8125 emit_label (op1);
8126 return temp;
8127 }
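      /* An illustrative sketch of the special case above, for a
	 hypothetical `flag = (x != 0)' whose result register already has
	 X's mode:

	     temp = x;
	     if (temp == 0) goto done;
	     temp = 1;
	   done:

	 so the value of X doubles as its own truth value and no separate
	 store-flag sequence is needed.  */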
8128
8129 /* If no set-flag instruction, must generate a conditional
8130 store into a temporary variable. Drop through
8131 and handle this like && and ||. */
8132
8133 case TRUTH_ANDIF_EXPR:
8134 case TRUTH_ORIF_EXPR:
8135 if (! ignore
8136 && (target == 0 || ! safe_from_p (target, exp, 1)
8137 /* Make sure we don't have a hard reg (such as function's return
8138 value) live across basic blocks, if not optimizing. */
8139 || (!optimize && GET_CODE (target) == REG
8140 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8141 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8142
8143 if (target)
8144 emit_clr_insn (target);
8145
8146 op1 = gen_label_rtx ();
8147 jumpifnot (exp, op1);
8148
8149 if (target)
8150 emit_0_to_1_insn (target);
8151
8152 emit_label (op1);
8153 return ignore ? const0_rtx : target;
8154
8155 case TRUTH_NOT_EXPR:
8156 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8157 /* The parser is careful to generate TRUTH_NOT_EXPR
8158 only with operands that are always zero or one. */
8159 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8160 target, 1, OPTAB_LIB_WIDEN);
8161 if (temp == 0)
8162 abort ();
8163 return temp;
8164
8165 case COMPOUND_EXPR:
8166 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8167 emit_queue ();
8168 return expand_expr (TREE_OPERAND (exp, 1),
8169 (ignore ? const0_rtx : target),
8170 VOIDmode, 0);
8171
8172 case COND_EXPR:
8173 /* If we would have a "singleton" (see below) were it not for a
8174 conversion in each arm, bring that conversion back out. */
8175 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8176 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8177 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8178 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8179 {
8180 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8181 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8182
8183 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8184 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8185 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8186 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8187 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8188 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8189 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8190 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8191 return expand_expr (build1 (NOP_EXPR, type,
8192 build (COND_EXPR, TREE_TYPE (iftrue),
8193 TREE_OPERAND (exp, 0),
8194 iftrue, iffalse)),
8195 target, tmode, modifier);
8196 }
8197
8198 {
8199 /* Note that COND_EXPRs whose type is a structure or union
8200 are required to be constructed to contain assignments of
8201 a temporary variable, so that we can evaluate them here
8202 for side effect only. If type is void, we must do likewise. */
8203
8204 /* If an arm of the branch requires a cleanup,
8205 only that cleanup is performed. */
8206
8207 tree singleton = 0;
8208 tree binary_op = 0, unary_op = 0;
8209
8210 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8211 convert it to our mode, if necessary. */
8212 if (integer_onep (TREE_OPERAND (exp, 1))
8213 && integer_zerop (TREE_OPERAND (exp, 2))
8214 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8215 {
8216 if (ignore)
8217 {
8218 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8219 ro_modifier);
8220 return const0_rtx;
8221 }
8222
8223 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8224 if (GET_MODE (op0) == mode)
8225 return op0;
8226
8227 if (target == 0)
8228 target = gen_reg_rtx (mode);
8229 convert_move (target, op0, unsignedp);
8230 return target;
8231 }
8232
8233 /* Check for X ? A + B : A. If we have this, we can copy A to the
8234 output and conditionally add B. Similarly for unary operations.
8235 Don't do this if X has side-effects because those side effects
8236 might affect A or B and the "?" operation is a sequence point in
8237 ANSI. (operand_equal_p tests for side effects.) */
8238
8239 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8240 && operand_equal_p (TREE_OPERAND (exp, 2),
8241 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8242 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8243 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8244 && operand_equal_p (TREE_OPERAND (exp, 1),
8245 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8246 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8247 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8248 && operand_equal_p (TREE_OPERAND (exp, 2),
8249 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8250 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8251 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8252 && operand_equal_p (TREE_OPERAND (exp, 1),
8253 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8254 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8255
8256 /* If we are not to produce a result, we have no target. Otherwise,
8257 if a target was specified use it; it will not be used as an
8258 intermediate target unless it is safe. If no target, use a
8259 temporary. */
8260
8261 if (ignore)
8262 temp = 0;
8263 else if (original_target
8264 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8265 || (singleton && GET_CODE (original_target) == REG
8266 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8267 && original_target == var_rtx (singleton)))
8268 && GET_MODE (original_target) == mode
8269 #ifdef HAVE_conditional_move
8270 && (! can_conditionally_move_p (mode)
8271 || GET_CODE (original_target) == REG
8272 || TREE_ADDRESSABLE (type))
8273 #endif
8274 && (GET_CODE (original_target) != MEM
8275 || TREE_ADDRESSABLE (type)))
8276 temp = original_target;
8277 else if (TREE_ADDRESSABLE (type))
8278 abort ();
8279 else
8280 temp = assign_temp (type, 0, 0, 1);
8281
8282 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8283 do the test of X as a store-flag operation, do this as
8284 A + ((X != 0) << log C). Similarly for other simple binary
8285 operators. Only do for C == 1 if BRANCH_COST is low. */
8286 if (temp && singleton && binary_op
8287 && (TREE_CODE (binary_op) == PLUS_EXPR
8288 || TREE_CODE (binary_op) == MINUS_EXPR
8289 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8290 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8291 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8292 : integer_onep (TREE_OPERAND (binary_op, 1)))
8293 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8294 {
8295 rtx result;
8296 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8297 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8298 ? addv_optab : add_optab)
8299 : TREE_CODE (binary_op) == MINUS_EXPR
8300 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8301 ? subv_optab : sub_optab)
8302 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8303 : xor_optab);
8304
8305 /* If we had X ? A : A + 1, do this as A + (X == 0).
8306
8307 We have to invert the truth value here and then put it
8308 back later if do_store_flag fails. We cannot simply copy
8309 TREE_OPERAND (exp, 0) to another variable and modify that
8310 because invert_truthvalue can modify the tree pointed to
8311 by its argument. */
8312 if (singleton == TREE_OPERAND (exp, 1))
8313 TREE_OPERAND (exp, 0)
8314 = invert_truthvalue (TREE_OPERAND (exp, 0));
8315
8316 result = do_store_flag (TREE_OPERAND (exp, 0),
8317 (safe_from_p (temp, singleton, 1)
8318 ? temp : NULL_RTX),
8319 mode, BRANCH_COST <= 1);
8320
8321 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8322 result = expand_shift (LSHIFT_EXPR, mode, result,
8323 build_int_2 (tree_log2
8324 (TREE_OPERAND
8325 (binary_op, 1)),
8326 0),
8327 (safe_from_p (temp, singleton, 1)
8328 ? temp : NULL_RTX), 0);
8329
8330 if (result)
8331 {
8332 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8333 return expand_binop (mode, boptab, op1, result, temp,
8334 unsignedp, OPTAB_LIB_WIDEN);
8335 }
8336 else if (singleton == TREE_OPERAND (exp, 1))
8337 TREE_OPERAND (exp, 0)
8338 = invert_truthvalue (TREE_OPERAND (exp, 0));
8339 }
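	/* An illustrative sketch of the transformation above, for a
	   hypothetical `r = x ? a + 4 : a': SINGLETON is A and BINARY_OP is
	   A + 4, so when X can be computed as a store-flag value the whole
	   conditional becomes branch-free,

	       r = a + ((x != 0) << 2);

	   the flag being shifted by log2 (4) before the add because 4 is a
	   power of two.  */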
8340
8341 do_pending_stack_adjust ();
8342 NO_DEFER_POP;
8343 op0 = gen_label_rtx ();
8344
8345 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8346 {
8347 if (temp != 0)
8348 {
8349 /* If the target conflicts with the other operand of the
8350 binary op, we can't use it. Also, we can't use the target
8351 if it is a hard register, because evaluating the condition
8352 might clobber it. */
8353 if ((binary_op
8354 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8355 || (GET_CODE (temp) == REG
8356 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8357 temp = gen_reg_rtx (mode);
8358 store_expr (singleton, temp, 0);
8359 }
8360 else
8361 expand_expr (singleton,
8362 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8363 if (singleton == TREE_OPERAND (exp, 1))
8364 jumpif (TREE_OPERAND (exp, 0), op0);
8365 else
8366 jumpifnot (TREE_OPERAND (exp, 0), op0);
8367
8368 start_cleanup_deferral ();
8369 if (binary_op && temp == 0)
8370 /* Just touch the other operand. */
8371 expand_expr (TREE_OPERAND (binary_op, 1),
8372 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8373 else if (binary_op)
8374 store_expr (build (TREE_CODE (binary_op), type,
8375 make_tree (type, temp),
8376 TREE_OPERAND (binary_op, 1)),
8377 temp, 0);
8378 else
8379 store_expr (build1 (TREE_CODE (unary_op), type,
8380 make_tree (type, temp)),
8381 temp, 0);
8382 op1 = op0;
8383 }
8384 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8385 comparison operator. If we have one of these cases, set the
8386 output to A, branch on A (cse will merge these two references),
8387 then set the output to FOO. */
8388 else if (temp
8389 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8390 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8391 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8392 TREE_OPERAND (exp, 1), 0)
8393 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8394 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8395 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8396 {
8397 if (GET_CODE (temp) == REG
8398 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8399 temp = gen_reg_rtx (mode);
8400 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8401 jumpif (TREE_OPERAND (exp, 0), op0);
8402
8403 start_cleanup_deferral ();
8404 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8405 op1 = op0;
8406 }
8407 else if (temp
8408 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8409 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8410 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8411 TREE_OPERAND (exp, 2), 0)
8412 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8413 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8414 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8415 {
8416 if (GET_CODE (temp) == REG
8417 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8418 temp = gen_reg_rtx (mode);
8419 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8420 jumpifnot (TREE_OPERAND (exp, 0), op0);
8421
8422 start_cleanup_deferral ();
8423 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8424 op1 = op0;
8425 }
8426 else
8427 {
8428 op1 = gen_label_rtx ();
8429 jumpifnot (TREE_OPERAND (exp, 0), op0);
8430
8431 start_cleanup_deferral ();
8432
8433 /* One branch of the cond can be void, if it never returns. For
8434 example A ? throw : E */
8435 if (temp != 0
8436 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8437 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8438 else
8439 expand_expr (TREE_OPERAND (exp, 1),
8440 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8441 end_cleanup_deferral ();
8442 emit_queue ();
8443 emit_jump_insn (gen_jump (op1));
8444 emit_barrier ();
8445 emit_label (op0);
8446 start_cleanup_deferral ();
8447 if (temp != 0
8448 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8449 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8450 else
8451 expand_expr (TREE_OPERAND (exp, 2),
8452 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8453 }
8454
8455 end_cleanup_deferral ();
8456
8457 emit_queue ();
8458 emit_label (op1);
8459 OK_DEFER_POP;
8460
8461 return temp;
8462 }
8463
8464 case TARGET_EXPR:
8465 {
8466 /* Something needs to be initialized, but we didn't know
8467 where that thing was when building the tree. For example,
8468 	   it could be the return value of a function, or a parameter
8469 	   to a function which is laid out on the stack, or a temporary
8470 	   variable which must be passed by reference.
8471
8472 We guarantee that the expression will either be constructed
8473 or copied into our original target. */
8474
8475 tree slot = TREE_OPERAND (exp, 0);
8476 tree cleanups = NULL_TREE;
8477 tree exp1;
8478
8479 if (TREE_CODE (slot) != VAR_DECL)
8480 abort ();
8481
8482 if (! ignore)
8483 target = original_target;
8484
8485 /* Set this here so that if we get a target that refers to a
8486 register variable that's already been used, put_reg_into_stack
8487 knows that it should fix up those uses. */
8488 TREE_USED (slot) = 1;
8489
8490 if (target == 0)
8491 {
8492 if (DECL_RTL_SET_P (slot))
8493 {
8494 target = DECL_RTL (slot);
8495 		/* If we have already expanded the slot, don't do
8496 		   it again.  (mrs)  */
8497 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8498 return target;
8499 }
8500 else
8501 {
8502 target = assign_temp (type, 2, 0, 1);
8503 /* All temp slots at this level must not conflict. */
8504 preserve_temp_slots (target);
8505 SET_DECL_RTL (slot, target);
8506 if (TREE_ADDRESSABLE (slot))
8507 put_var_into_stack (slot);
8508
8509 /* Since SLOT is not known to the called function
8510 to belong to its stack frame, we must build an explicit
8511 cleanup. This case occurs when we must build up a reference
8512 to pass the reference as an argument. In this case,
8513 it is very likely that such a reference need not be
8514 built here. */
8515
8516 if (TREE_OPERAND (exp, 2) == 0)
8517 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8518 cleanups = TREE_OPERAND (exp, 2);
8519 }
8520 }
8521 else
8522 {
8523 	    /* This case does occur when expanding a parameter which
8524 needs to be constructed on the stack. The target
8525 is the actual stack address that we want to initialize.
8526 The function we call will perform the cleanup in this case. */
8527
8528 /* If we have already assigned it space, use that space,
8529 not target that we were passed in, as our target
8530 parameter is only a hint. */
8531 if (DECL_RTL_SET_P (slot))
8532 {
8533 target = DECL_RTL (slot);
8534 		/* If we have already expanded the slot, don't do
8535 		   it again.  (mrs)  */
8536 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8537 return target;
8538 }
8539 else
8540 {
8541 SET_DECL_RTL (slot, target);
8542 /* If we must have an addressable slot, then make sure that
8543 the RTL that we just stored in slot is OK. */
8544 if (TREE_ADDRESSABLE (slot))
8545 put_var_into_stack (slot);
8546 }
8547 }
8548
8549 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8550 /* Mark it as expanded. */
8551 TREE_OPERAND (exp, 1) = NULL_TREE;
8552
8553 store_expr (exp1, target, 0);
8554
8555 expand_decl_cleanup (NULL_TREE, cleanups);
8556
8557 return target;
8558 }
8559
8560 case INIT_EXPR:
8561 {
8562 tree lhs = TREE_OPERAND (exp, 0);
8563 tree rhs = TREE_OPERAND (exp, 1);
8564
8565 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8566 return temp;
8567 }
8568
8569 case MODIFY_EXPR:
8570 {
8571 /* If lhs is complex, expand calls in rhs before computing it.
8572 That's so we don't compute a pointer and save it over a
8573 call. If lhs is simple, compute it first so we can give it
8574 as a target if the rhs is just a call. This avoids an
8575 	   extra temp and copy, and prevents a partial subsumption
8576 	   that makes bad code.  Actually we could treat
8577 component_ref's of vars like vars. */
8578
8579 tree lhs = TREE_OPERAND (exp, 0);
8580 tree rhs = TREE_OPERAND (exp, 1);
8581
8582 temp = 0;
8583
8584 /* Check for |= or &= of a bitfield of size one into another bitfield
8585 of size 1. In this case, (unless we need the result of the
8586 assignment) we can do this more efficiently with a
8587 test followed by an assignment, if necessary.
8588
8589 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8590 things change so we do, this code should be enhanced to
8591 support it. */
8592 if (ignore
8593 && TREE_CODE (lhs) == COMPONENT_REF
8594 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8595 || TREE_CODE (rhs) == BIT_AND_EXPR)
8596 && TREE_OPERAND (rhs, 0) == lhs
8597 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8598 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8599 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8600 {
8601 rtx label = gen_label_rtx ();
8602
8603 do_jump (TREE_OPERAND (rhs, 1),
8604 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8605 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8606 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8607 (TREE_CODE (rhs) == BIT_IOR_EXPR
8608 ? integer_one_node
8609 : integer_zero_node)),
8610 0, 0);
8611 do_pending_stack_adjust ();
8612 emit_label (label);
8613 return const0_rtx;
8614 }
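	/* An illustrative sketch of the special case above, assuming
	   hypothetical one-bit fields:

	       struct s { unsigned a : 1, b : 1; } x;
	       x.a |= x.b;

	   When the result is unused this is emitted as a test and a
	   conditional store rather than a read-modify-write of X.A:

	       if (!x.b) goto skip;
	       x.a = 1;
	     skip:

	   and `x.a &= x.b' likewise stores 0 only when X.B is clear.  */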
8615
8616 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8617
8618 return temp;
8619 }
8620
8621 case RETURN_EXPR:
8622 if (!TREE_OPERAND (exp, 0))
8623 expand_null_return ();
8624 else
8625 expand_return (TREE_OPERAND (exp, 0));
8626 return const0_rtx;
8627
8628 case PREINCREMENT_EXPR:
8629 case PREDECREMENT_EXPR:
8630 return expand_increment (exp, 0, ignore);
8631
8632 case POSTINCREMENT_EXPR:
8633 case POSTDECREMENT_EXPR:
8634 /* Faster to treat as pre-increment if result is not used. */
8635 return expand_increment (exp, ! ignore, ignore);
8636
8637 case ADDR_EXPR:
8638 /* Are we taking the address of a nested function? */
8639 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8640 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8641 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8642 && ! TREE_STATIC (exp))
8643 {
8644 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8645 op0 = force_operand (op0, target);
8646 }
8647 /* If we are taking the address of something erroneous, just
8648 return a zero. */
8649 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8650 return const0_rtx;
8651 /* If we are taking the address of a constant and are at the
8652 top level, we have to use output_constant_def since we can't
8653 call force_const_mem at top level. */
8654 else if (cfun == 0
8655 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8656 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8657 == 'c')))
8658 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8659 else
8660 {
8661 /* We make sure to pass const0_rtx down if we came in with
8662 ignore set, to avoid doing the cleanups twice for something. */
8663 op0 = expand_expr (TREE_OPERAND (exp, 0),
8664 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8665 (modifier == EXPAND_INITIALIZER
8666 ? modifier : EXPAND_CONST_ADDRESS));
8667
8668 /* If we are going to ignore the result, OP0 will have been set
8669 to const0_rtx, so just return it. Don't get confused and
8670 think we are taking the address of the constant. */
8671 if (ignore)
8672 return op0;
8673
8674 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8675 	     clever and return a REG when given a MEM.  */
8676 op0 = protect_from_queue (op0, 1);
8677
8678 /* We would like the object in memory. If it is a constant, we can
8679 have it be statically allocated into memory. For a non-constant,
8680 we need to allocate some memory and store the value into it. */
8681
8682 if (CONSTANT_P (op0))
8683 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8684 op0);
8685 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8686 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8687 || GET_CODE (op0) == PARALLEL)
8688 {
8689 	      /* If this object is in a register, it must not
8690 		 be BLKmode.  */
8691 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8692 tree nt = build_qualified_type (inner_type,
8693 (TYPE_QUALS (inner_type)
8694 | TYPE_QUAL_CONST));
8695 rtx memloc = assign_temp (nt, 1, 1, 1);
8696
8697 if (GET_CODE (op0) == PARALLEL)
8698 /* Handle calls that pass values in multiple non-contiguous
8699 locations. The Irix 6 ABI has examples of this. */
8700 emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
8701 else
8702 emit_move_insn (memloc, op0);
8703
8704 op0 = memloc;
8705 }
8706
8707 if (GET_CODE (op0) != MEM)
8708 abort ();
8709
8710 mark_temp_addr_taken (op0);
8711 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8712 {
8713 op0 = XEXP (op0, 0);
8714 #ifdef POINTERS_EXTEND_UNSIGNED
8715 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8716 && mode == ptr_mode)
8717 op0 = convert_memory_address (ptr_mode, op0);
8718 #endif
8719 return op0;
8720 }
8721
8722 op0 = force_operand (XEXP (op0, 0), target);
8723 }
8724
8725 if (flag_force_addr && GET_CODE (op0) != REG)
8726 op0 = force_reg (Pmode, op0);
8727
8728 if (GET_CODE (op0) == REG
8729 && ! REG_USERVAR_P (op0))
8730 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8731
8732 #ifdef POINTERS_EXTEND_UNSIGNED
8733 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8734 && mode == ptr_mode)
8735 op0 = convert_memory_address (ptr_mode, op0);
8736 #endif
8737
8738 return op0;
8739
8740 case ENTRY_VALUE_EXPR:
8741 abort ();
8742
8743 /* COMPLEX type for Extended Pascal & Fortran */
8744 case COMPLEX_EXPR:
8745 {
8746 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8747 rtx insns;
8748
8749 /* Get the rtx code of the operands. */
8750 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8751 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8752
8753 if (! target)
8754 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8755
8756 start_sequence ();
8757
8758 /* Move the real (op0) and imaginary (op1) parts to their location. */
8759 emit_move_insn (gen_realpart (mode, target), op0);
8760 emit_move_insn (gen_imagpart (mode, target), op1);
8761
8762 insns = get_insns ();
8763 end_sequence ();
8764
8765 /* Complex construction should appear as a single unit. */
8766 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8767 each with a separate pseudo as destination.
8768 It's not correct for flow to treat them as a unit. */
8769 if (GET_CODE (target) != CONCAT)
8770 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8771 else
8772 emit_insns (insns);
8773
8774 return target;
8775 }
8776
8777 case REALPART_EXPR:
8778 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8779 return gen_realpart (mode, op0);
8780
8781 case IMAGPART_EXPR:
8782 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8783 return gen_imagpart (mode, op0);
8784
8785 case CONJ_EXPR:
8786 {
8787 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8788 rtx imag_t;
8789 rtx insns;
8790
8791 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8792
8793 if (! target)
8794 target = gen_reg_rtx (mode);
8795
8796 start_sequence ();
8797
8798 /* Store the realpart and the negated imagpart to target. */
8799 emit_move_insn (gen_realpart (partmode, target),
8800 gen_realpart (partmode, op0));
8801
8802 imag_t = gen_imagpart (partmode, target);
8803 temp = expand_unop (partmode,
8804 ! unsignedp && flag_trapv
8805 && (GET_MODE_CLASS(partmode) == MODE_INT)
8806 ? negv_optab : neg_optab,
8807 gen_imagpart (partmode, op0), imag_t, 0);
8808 if (temp != imag_t)
8809 emit_move_insn (imag_t, temp);
8810
8811 insns = get_insns ();
8812 end_sequence ();
8813
8814 	/* Conjugate should appear as a single unit.
8815 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8816 each with a separate pseudo as destination.
8817 It's not correct for flow to treat them as a unit. */
8818 if (GET_CODE (target) != CONCAT)
8819 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8820 else
8821 emit_insns (insns);
8822
8823 return target;
8824 }
8825
8826 case TRY_CATCH_EXPR:
8827 {
8828 tree handler = TREE_OPERAND (exp, 1);
8829
8830 expand_eh_region_start ();
8831
8832 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8833
8834 expand_eh_region_end_cleanup (handler);
8835
8836 return op0;
8837 }
8838
8839 case TRY_FINALLY_EXPR:
8840 {
8841 tree try_block = TREE_OPERAND (exp, 0);
8842 tree finally_block = TREE_OPERAND (exp, 1);
8843 rtx finally_label = gen_label_rtx ();
8844 rtx done_label = gen_label_rtx ();
8845 rtx return_link = gen_reg_rtx (Pmode);
8846 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8847 (tree) finally_label, (tree) return_link);
8848 TREE_SIDE_EFFECTS (cleanup) = 1;
8849
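	/* A rough sketch of the code emitted below; the cleanup registered
	   here is a GOTO_SUBROUTINE_EXPR, so the normal exit path and any
	   cleanup path both reach FINALLY_LABEL and then return through
	   RETURN_LINK:

	       <try_block>
	       ... cleanups: return_link = &&resume; goto finally_label; resume:
	       goto done_label;
	     finally_label:
	       <finally_block>
	       goto *return_link;
	     done_label:  */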
8850 /* Start a new binding layer that will keep track of all cleanup
8851 actions to be performed. */
8852 expand_start_bindings (2);
8853
8854 target_temp_slot_level = temp_slot_level;
8855
8856 expand_decl_cleanup (NULL_TREE, cleanup);
8857 op0 = expand_expr (try_block, target, tmode, modifier);
8858
8859 preserve_temp_slots (op0);
8860 expand_end_bindings (NULL_TREE, 0, 0);
8861 emit_jump (done_label);
8862 emit_label (finally_label);
8863 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8864 emit_indirect_jump (return_link);
8865 emit_label (done_label);
8866 return op0;
8867 }
8868
8869 case GOTO_SUBROUTINE_EXPR:
8870 {
8871 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8872 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8873 rtx return_address = gen_label_rtx ();
8874 emit_move_insn (return_link,
8875 gen_rtx_LABEL_REF (Pmode, return_address));
8876 emit_jump (subr);
8877 emit_label (return_address);
8878 return const0_rtx;
8879 }
8880
8881 case VA_ARG_EXPR:
8882 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8883
8884 case EXC_PTR_EXPR:
8885 return get_exception_pointer (cfun);
8886
8887 case FDESC_EXPR:
8888       /* Function descriptors are not valid except as
8889 	 initialization constants, and should not be expanded.  */
8890 abort ();
8891
8892 default:
8893 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8894 }
8895
8896 /* Here to do an ordinary binary operator, generating an instruction
8897 from the optab already placed in `this_optab'. */
8898 binop:
8899 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8900 subtarget = 0;
8901 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8902 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8903 binop2:
8904 temp = expand_binop (mode, this_optab, op0, op1, target,
8905 unsignedp, OPTAB_LIB_WIDEN);
8906 if (temp == 0)
8907 abort ();
8908 return temp;
8909 }
8910 \f
8911 /* Similar to expand_expr, except that we don't specify a target, target
8912 mode, or modifier and we return the alignment of the inner type. This is
8913 used in cases where it is not necessary to align the result to the
8914 alignment of its type as long as we know the alignment of the result, for
8915 example for comparisons of BLKmode values. */
8916
8917 static rtx
8918 expand_expr_unaligned (exp, palign)
8919 tree exp;
8920 unsigned int *palign;
8921 {
8922 rtx op0;
8923 tree type = TREE_TYPE (exp);
8924 enum machine_mode mode = TYPE_MODE (type);
8925
8926 /* Default the alignment we return to that of the type. */
8927 *palign = TYPE_ALIGN (type);
8928
8929   /* The only case in which we do anything special is if the resulting mode
8930 is BLKmode. */
8931 if (mode != BLKmode)
8932 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8933
8934 switch (TREE_CODE (exp))
8935 {
8936 case CONVERT_EXPR:
8937 case NOP_EXPR:
8938 case NON_LVALUE_EXPR:
8939 /* Conversions between BLKmode values don't change the underlying
8940 alignment or value. */
8941 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8942 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8943 break;
8944
8945 case ARRAY_REF:
8946 /* Much of the code for this case is copied directly from expand_expr.
8947 We need to duplicate it here because we will do something different
8948 in the fall-through case, so we need to handle the same exceptions
8949 it does. */
8950 {
8951 tree array = TREE_OPERAND (exp, 0);
8952 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8953 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8954 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8955 HOST_WIDE_INT i;
8956
8957 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8958 abort ();
8959
8960 /* Optimize the special-case of a zero lower bound.
8961
8962 We convert the low_bound to sizetype to avoid some problems
8963 with constant folding. (E.g. suppose the lower bound is 1,
8964 and its mode is QI. Without the conversion, (ARRAY
8965 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8966 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8967
8968 if (! integer_zerop (low_bound))
8969 index = size_diffop (index, convert (sizetype, low_bound));
8970
8971 /* If this is a constant index into a constant array,
8972 just get the value from the array. Handle both the cases when
8973 we have an explicit constructor and when our operand is a variable
8974 that was declared const. */
8975
8976 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8977 && host_integerp (index, 0)
8978 && 0 > compare_tree_int (index,
8979 list_length (CONSTRUCTOR_ELTS
8980 (TREE_OPERAND (exp, 0)))))
8981 {
8982 tree elem;
8983
8984 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8985 i = tree_low_cst (index, 0);
8986 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8987 ;
8988
8989 if (elem)
8990 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8991 }
8992
8993 else if (optimize >= 1
8994 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8995 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8996 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8997 {
8998 if (TREE_CODE (index) == INTEGER_CST)
8999 {
9000 tree init = DECL_INITIAL (array);
9001
9002 if (TREE_CODE (init) == CONSTRUCTOR)
9003 {
9004 tree elem;
9005
9006 for (elem = CONSTRUCTOR_ELTS (init);
9007 		       elem != 0 && ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
9008 elem = TREE_CHAIN (elem))
9009 ;
9010
9011 if (elem)
9012 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
9013 palign);
9014 }
9015 }
9016 }
9017 }
9018 /* Fall through. */
9019
9020 case COMPONENT_REF:
9021 case BIT_FIELD_REF:
9022 case ARRAY_RANGE_REF:
9023 /* If the operand is a CONSTRUCTOR, we can just extract the
9024 appropriate field if it is present. Don't do this if we have
9025 already written the data since we want to refer to that copy
9026 and varasm.c assumes that's what we'll do. */
9027 if (TREE_CODE (exp) == COMPONENT_REF
9028 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9029 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9030 {
9031 tree elt;
9032
9033 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9034 elt = TREE_CHAIN (elt))
9035 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9036 /* Note that unlike the case in expand_expr, we know this is
9037 BLKmode and hence not an integer. */
9038 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9039 }
9040
9041 {
9042 enum machine_mode mode1;
9043 HOST_WIDE_INT bitsize, bitpos;
9044 tree offset;
9045 int volatilep = 0;
9046 unsigned int alignment;
9047 int unsignedp;
9048 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9049 &mode1, &unsignedp, &volatilep,
9050 &alignment);
9051
9052 /* If we got back the original object, something is wrong. Perhaps
9053 we are evaluating an expression too early. In any event, don't
9054 infinitely recurse. */
9055 if (tem == exp)
9056 abort ();
9057
9058 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9059
9060 /* If this is a constant, put it into a register if it is a
9061 	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
9062 if (CONSTANT_P (op0))
9063 {
9064 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9065
9066 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9067 && offset == 0)
9068 op0 = force_reg (inner_mode, op0);
9069 else
9070 op0 = validize_mem (force_const_mem (inner_mode, op0));
9071 }
9072
9073 if (offset != 0)
9074 {
9075 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9076
9077 /* If this object is in a register, put it into memory.
9078 This case can't occur in C, but can in Ada if we have
9079 unchecked conversion of an expression from a scalar type to
9080 an array or record type. */
9081 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9082 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9083 {
9084 tree nt = build_qualified_type (TREE_TYPE (tem),
9085 (TYPE_QUALS (TREE_TYPE (tem))
9086 | TYPE_QUAL_CONST));
9087 rtx memloc = assign_temp (nt, 1, 1, 1);
9088
9089 emit_move_insn (memloc, op0);
9090 op0 = memloc;
9091 }
9092
9093 if (GET_CODE (op0) != MEM)
9094 abort ();
9095
9096 if (GET_MODE (offset_rtx) != ptr_mode)
9097 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9098
9099 #ifdef POINTERS_EXTEND_UNSIGNED
9100 if (GET_MODE (offset_rtx) != Pmode)
9101 offset_rtx = convert_memory_address (Pmode, offset_rtx);
9102 #endif
9103
9104 op0 = offset_address (op0, offset_rtx,
9105 highest_pow2_factor (offset));
9106 }
9107
9108 /* Don't forget about volatility even if this is a bitfield. */
9109 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9110 {
9111 op0 = copy_rtx (op0);
9112 MEM_VOLATILE_P (op0) = 1;
9113 }
9114
9115 /* Check the access. */
9116 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9117 {
9118 rtx to;
9119 int size;
9120
9121 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9122 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9123
9124 /* Check the access right of the pointer. */
9125 in_check_memory_usage = 1;
9126 if (size > BITS_PER_UNIT)
9127 emit_library_call (chkr_check_addr_libfunc,
9128 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9129 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9130 TYPE_MODE (sizetype),
9131 GEN_INT (MEMORY_USE_RO),
9132 TYPE_MODE (integer_type_node));
9133 in_check_memory_usage = 0;
9134 }
9135
9136 /* In cases where an aligned union has an unaligned object
9137 as a field, we might be extracting a BLKmode value from
9138 an integer-mode (e.g., SImode) object. Handle this case
9139 by doing the extract into an object as wide as the field
9140 (which we know to be the width of a basic mode), then
9141 storing into memory, and changing the mode to BLKmode.
9142 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9143 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9144 if (mode1 == VOIDmode
9145 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9146 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9147 && (TYPE_ALIGN (type) > alignment
9148 || bitpos % TYPE_ALIGN (type) != 0)))
9149 {
9150 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9151
9152 if (ext_mode == BLKmode)
9153 {
9154 /* In this case, BITPOS must start at a byte boundary. */
9155 if (GET_CODE (op0) != MEM
9156 || bitpos % BITS_PER_UNIT != 0)
9157 abort ();
9158
9159 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9160 }
9161 else
9162 {
9163 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9164 TYPE_QUAL_CONST);
9165 rtx new = assign_temp (nt, 0, 1, 1);
9166
9167 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9168 unsignedp, NULL_RTX, ext_mode,
9169 ext_mode,
9170 int_size_in_bytes (TREE_TYPE (tem)));
9171
9172 /* If the result is a record type and BITSIZE is narrower than
9173 the mode of OP0, an integral mode, and this is a big endian
9174 machine, we must put the field into the high-order bits. */
9175 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9176 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9177 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9178 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9179 size_int (GET_MODE_BITSIZE
9180 (GET_MODE (op0))
9181 - bitsize),
9182 op0, 1);
9183
9184 emit_move_insn (new, op0);
9185 op0 = copy_rtx (new);
9186 PUT_MODE (op0, BLKmode);
9187 }
9188 }
9189 else
9190 /* Get a reference to just this component. */
9191 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9192
9193 set_mem_attributes (op0, exp, 0);
9194
9195 /* Adjust the alignment in case the bit position is not
9196 a multiple of the alignment of the inner object. */
9197 while (bitpos % alignment != 0)
9198 alignment >>= 1;
9199
9200 if (GET_CODE (XEXP (op0, 0)) == REG)
9201 mark_reg_pointer (XEXP (op0, 0), alignment);
9202
9203 MEM_IN_STRUCT_P (op0) = 1;
9204 MEM_VOLATILE_P (op0) |= volatilep;
9205
9206 *palign = alignment;
9207 return op0;
9208 }
9209
9210 default:
9211 break;
9212
9213 }
9214
9215 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9216 }
9217 \f
9218 /* Return the tree node if ARG corresponds to a string constant, or zero
9219 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9220 in bytes within the string that ARG is accessing. The type of the
9221 offset will be `sizetype'. */
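/* For example (an illustrative sketch): for ARG representing "hello" + 2,
   i.e. a PLUS_EXPR of an ADDR_EXPR of the STRING_CST and the constant 2,
   we return the STRING_CST "hello" and set *PTR_OFFSET to 2.  */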
9222
9223 tree
9224 string_constant (arg, ptr_offset)
9225 tree arg;
9226 tree *ptr_offset;
9227 {
9228 STRIP_NOPS (arg);
9229
9230 if (TREE_CODE (arg) == ADDR_EXPR
9231 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9232 {
9233 *ptr_offset = size_zero_node;
9234 return TREE_OPERAND (arg, 0);
9235 }
9236 else if (TREE_CODE (arg) == PLUS_EXPR)
9237 {
9238 tree arg0 = TREE_OPERAND (arg, 0);
9239 tree arg1 = TREE_OPERAND (arg, 1);
9240
9241 STRIP_NOPS (arg0);
9242 STRIP_NOPS (arg1);
9243
9244 if (TREE_CODE (arg0) == ADDR_EXPR
9245 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9246 {
9247 *ptr_offset = convert (sizetype, arg1);
9248 return TREE_OPERAND (arg0, 0);
9249 }
9250 else if (TREE_CODE (arg1) == ADDR_EXPR
9251 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9252 {
9253 *ptr_offset = convert (sizetype, arg0);
9254 return TREE_OPERAND (arg1, 0);
9255 }
9256 }
9257
9258 return 0;
9259 }
9260 \f
9261 /* Expand code for a post- or pre- increment or decrement
9262 and return the RTX for the result.
9263 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
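/* For instance (a sketch): for `i++' whose value is used, POST is 1 and the
   rtx returned holds the old value of `i'; for `++i', or for `i++' whose
   value is ignored (see the POSTINCREMENT_EXPR case above), POST is 0 and
   the incremented value is returned.  */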
9264
9265 static rtx
9266 expand_increment (exp, post, ignore)
9267 tree exp;
9268 int post, ignore;
9269 {
9270 rtx op0, op1;
9271 rtx temp, value;
9272 tree incremented = TREE_OPERAND (exp, 0);
9273 optab this_optab = add_optab;
9274 int icode;
9275 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9276 int op0_is_copy = 0;
9277 int single_insn = 0;
9278 /* 1 means we can't store into OP0 directly,
9279 because it is a subreg narrower than a word,
9280 and we don't dare clobber the rest of the word. */
9281 int bad_subreg = 0;
9282
9283 /* Stabilize any component ref that might need to be
9284 evaluated more than once below. */
9285 if (!post
9286 || TREE_CODE (incremented) == BIT_FIELD_REF
9287 || (TREE_CODE (incremented) == COMPONENT_REF
9288 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9289 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9290 incremented = stabilize_reference (incremented);
9291 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9292 ones into save exprs so that they don't accidentally get evaluated
9293 more than once by the code below. */
9294 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9295 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9296 incremented = save_expr (incremented);
9297
9298 /* Compute the operands as RTX.
9299 Note whether OP0 is the actual lvalue or a copy of it:
9300 I believe it is a copy iff it is a register or subreg
9301 and insns were generated in computing it. */
9302
9303 temp = get_last_insn ();
9304 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9305
9306 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9307 in place but instead must do sign- or zero-extension during assignment,
9308 so we copy it into a new register and let the code below use it as
9309 a copy.
9310
9311      Note that we can safely modify this SUBREG since it is known not to be
9312 shared (it was made by the expand_expr call above). */
9313
9314 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9315 {
9316 if (post)
9317 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9318 else
9319 bad_subreg = 1;
9320 }
9321 else if (GET_CODE (op0) == SUBREG
9322 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9323 {
9324 /* We cannot increment this SUBREG in place. If we are
9325 post-incrementing, get a copy of the old value. Otherwise,
9326 just mark that we cannot increment in place. */
9327 if (post)
9328 op0 = copy_to_reg (op0);
9329 else
9330 bad_subreg = 1;
9331 }
9332
9333 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9334 && temp != get_last_insn ());
9335 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9336 EXPAND_MEMORY_USE_BAD);
9337
9338 /* Decide whether incrementing or decrementing. */
9339 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9340 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9341 this_optab = sub_optab;
9342
9343 /* Convert decrement by a constant into a negative increment. */
9344 if (this_optab == sub_optab
9345 && GET_CODE (op1) == CONST_INT)
9346 {
9347 op1 = GEN_INT (-INTVAL (op1));
9348 this_optab = add_optab;
9349 }
9350
9351 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9352 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9353
9354 /* For a preincrement, see if we can do this with a single instruction. */
9355 if (!post)
9356 {
9357 icode = (int) this_optab->handlers[(int) mode].insn_code;
9358 if (icode != (int) CODE_FOR_nothing
9359 /* Make sure that OP0 is valid for operands 0 and 1
9360 of the insn we want to queue. */
9361 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9362 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9363 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9364 single_insn = 1;
9365 }
9366
9367 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9368 then we cannot just increment OP0. We must therefore contrive to
9369 increment the original value. Then, for postincrement, we can return
9370 OP0 since it is a copy of the old value. For preincrement, expand here
9371 unless we can do it with a single insn.
9372
9373 Likewise if storing directly into OP0 would clobber high bits
9374 we need to preserve (bad_subreg). */
9375 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9376 {
9377 /* This is the easiest way to increment the value wherever it is.
9378 Problems with multiple evaluation of INCREMENTED are prevented
9379 because either (1) it is a component_ref or preincrement,
9380 in which case it was stabilized above, or (2) it is an array_ref
9381 with constant index in an array in a register, which is
9382 safe to reevaluate. */
9383 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9384 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9385 ? MINUS_EXPR : PLUS_EXPR),
9386 TREE_TYPE (exp),
9387 incremented,
9388 TREE_OPERAND (exp, 1));
9389
9390 while (TREE_CODE (incremented) == NOP_EXPR
9391 || TREE_CODE (incremented) == CONVERT_EXPR)
9392 {
9393 newexp = convert (TREE_TYPE (incremented), newexp);
9394 incremented = TREE_OPERAND (incremented, 0);
9395 }
9396
9397       temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9398 return post ? op0 : temp;
9399 }
9400
9401 if (post)
9402 {
9403 /* We have a true reference to the value in OP0.
9404 If there is an insn to add or subtract in this mode, queue it.
9405 Queueing the increment insn avoids the register shuffling
9406 that often results if we must increment now and first save
9407 the old value for subsequent use. */
9408
9409 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9410 op0 = stabilize (op0);
9411 #endif
9412
9413 icode = (int) this_optab->handlers[(int) mode].insn_code;
9414 if (icode != (int) CODE_FOR_nothing
9415 /* Make sure that OP0 is valid for operands 0 and 1
9416 of the insn we want to queue. */
9417 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9418 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9419 {
9420 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9421 op1 = force_reg (mode, op1);
9422
9423 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9424 }
9425 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9426 {
9427 rtx addr = (general_operand (XEXP (op0, 0), mode)
9428 ? force_reg (Pmode, XEXP (op0, 0))
9429 : copy_to_reg (XEXP (op0, 0)));
9430 rtx temp, result;
9431
9432 op0 = replace_equiv_address (op0, addr);
9433 temp = force_reg (GET_MODE (op0), op0);
9434 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9435 op1 = force_reg (mode, op1);
9436
9437 /* The increment queue is LIFO, thus we have to `queue'
9438 the instructions in reverse order. */
9439 enqueue_insn (op0, gen_move_insn (op0, temp));
9440 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9441 return result;
9442 }
9443 }
9444
9445 /* Preincrement, or we can't increment with one simple insn. */
9446 if (post)
9447 /* Save a copy of the value before inc or dec, to return it later. */
9448 temp = value = copy_to_reg (op0);
9449 else
9450 /* Arrange to return the incremented value. */
9451 /* Copy the rtx because expand_binop will protect from the queue,
9452 and the results of that would be invalid for us to return
9453 if our caller does emit_queue before using our result. */
9454 temp = copy_rtx (value = op0);
9455
9456 /* Increment however we can. */
9457 op1 = expand_binop (mode, this_optab, value, op1,
9458 current_function_check_memory_usage ? NULL_RTX : op0,
9459 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9460 /* Make sure the value is stored into OP0. */
9461 if (op1 != op0)
9462 emit_move_insn (op0, op1);
9463
9464 return temp;
9465 }
9466 \f
9467 /* At the start of a function, record that we have no previously-pushed
9468 arguments waiting to be popped. */
9469
9470 void
9471 init_pending_stack_adjust ()
9472 {
9473 pending_stack_adjust = 0;
9474 }
9475
9476 /* When exiting from function, if safe, clear out any pending stack adjust
9477 so the adjustment won't get done.
9478
9479 Note, if the current function calls alloca, then it must have a
9480 frame pointer regardless of the value of flag_omit_frame_pointer. */
9481
9482 void
9483 clear_pending_stack_adjust ()
9484 {
9485 #ifdef EXIT_IGNORE_STACK
9486 if (optimize > 0
9487 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9488 && EXIT_IGNORE_STACK
9489 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9490 && ! flag_inline_functions)
9491 {
9492 stack_pointer_delta -= pending_stack_adjust,
9493 pending_stack_adjust = 0;
9494 }
9495 #endif
9496 }
9497
9498 /* Pop any previously-pushed arguments that have not been popped yet. */
9499
9500 void
9501 do_pending_stack_adjust ()
9502 {
9503 if (inhibit_defer_pop == 0)
9504 {
9505 if (pending_stack_adjust != 0)
9506 adjust_stack (GEN_INT (pending_stack_adjust));
9507 pending_stack_adjust = 0;
9508 }
9509 }
9510 \f
9511 /* Expand conditional expressions. */
9512
9513 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9514 LABEL is an rtx of code CODE_LABEL, in this function and all the
9515 functions here. */
9516
9517 void
9518 jumpifnot (exp, label)
9519 tree exp;
9520 rtx label;
9521 {
9522 do_jump (exp, label, NULL_RTX);
9523 }
9524
9525 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9526
9527 void
9528 jumpif (exp, label)
9529 tree exp;
9530 rtx label;
9531 {
9532 do_jump (exp, NULL_RTX, label);
9533 }
9534
9535 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9536 the result is zero, or IF_TRUE_LABEL if the result is one.
9537 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9538 meaning fall through in that case.
9539
9540 do_jump always does any pending stack adjust except when it does not
9541 actually perform a jump. An example where there is no jump
9542 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9543
9544 This function is responsible for optimizing cases such as
9545 &&, || and comparison operators in EXP. */
9546
9547 void
9548 do_jump (exp, if_false_label, if_true_label)
9549 tree exp;
9550 rtx if_false_label, if_true_label;
9551 {
9552 enum tree_code code = TREE_CODE (exp);
9553 /* Some cases need to create a label to jump to
9554 in order to properly fall through.
9555 These cases set DROP_THROUGH_LABEL nonzero. */
9556 rtx drop_through_label = 0;
9557 rtx temp;
9558 int i;
9559 tree type;
9560 enum machine_mode mode;
9561
9562 #ifdef MAX_INTEGER_COMPUTATION_MODE
9563 check_max_integer_computation_mode (exp);
9564 #endif
9565
9566 emit_queue ();
9567
9568 switch (code)
9569 {
9570 case ERROR_MARK:
9571 break;
9572
9573 case INTEGER_CST:
9574 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9575 if (temp)
9576 emit_jump (temp);
9577 break;
9578
9579 #if 0
9580 /* This is not true with #pragma weak */
9581 case ADDR_EXPR:
9582 /* The address of something can never be zero. */
9583 if (if_true_label)
9584 emit_jump (if_true_label);
9585 break;
9586 #endif
9587
9588 case NOP_EXPR:
9589 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9590 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9591 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9592 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9593 goto normal;
9594 case CONVERT_EXPR:
9595 /* If we are narrowing the operand, we have to do the compare in the
9596 narrower mode. */
9597 if ((TYPE_PRECISION (TREE_TYPE (exp))
9598 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9599 goto normal;
9600 case NON_LVALUE_EXPR:
9601 case REFERENCE_EXPR:
9602 case ABS_EXPR:
9603 case NEGATE_EXPR:
9604 case LROTATE_EXPR:
9605 case RROTATE_EXPR:
9606 /* These cannot change zero->non-zero or vice versa. */
9607 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9608 break;
9609
9610 case WITH_RECORD_EXPR:
9611 /* Put the object on the placeholder list, recurse through our first
9612 operand, and pop the list. */
9613 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9614 placeholder_list);
9615 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9616 placeholder_list = TREE_CHAIN (placeholder_list);
9617 break;
9618
9619 #if 0
9620 /* This is never less insns than evaluating the PLUS_EXPR followed by
9621 a test and can be longer if the test is eliminated. */
9622 case PLUS_EXPR:
9623 /* Reduce to minus. */
9624 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9625 TREE_OPERAND (exp, 0),
9626 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9627 TREE_OPERAND (exp, 1))));
9628 /* Process as MINUS. */
9629 #endif
9630
9631 case MINUS_EXPR:
9632 /* Non-zero iff operands of minus differ. */
9633 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9634 TREE_OPERAND (exp, 0),
9635 TREE_OPERAND (exp, 1)),
9636 NE, NE, if_false_label, if_true_label);
9637 break;
9638
9639 case BIT_AND_EXPR:
9640 /* If we are AND'ing with a small constant, do this comparison in the
9641 smallest type that fits. If the machine doesn't have comparisons
9642 that small, it will be converted back to the wider comparison.
9643 This helps if we are testing the sign bit of a narrower object.
9644 combine can't do this for us because it can't know whether a
9645 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9646
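      /* For instance (a sketch): a test of `x & 0x80', where `x' was
	 promoted from a byte-sized object, can be done as a QImode
	 comparison when the target supports one, instead of widening to
	 a full-word test.  */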
9647 if (! SLOW_BYTE_ACCESS
9648 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9649 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9650 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9651 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9652 && (type = type_for_mode (mode, 1)) != 0
9653 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9654 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9655 != CODE_FOR_nothing))
9656 {
9657 do_jump (convert (type, exp), if_false_label, if_true_label);
9658 break;
9659 }
9660 goto normal;
9661
9662 case TRUTH_NOT_EXPR:
9663 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9664 break;
9665
9666 case TRUTH_ANDIF_EXPR:
9667 if (if_false_label == 0)
9668 if_false_label = drop_through_label = gen_label_rtx ();
9669 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9670 start_cleanup_deferral ();
9671 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9672 end_cleanup_deferral ();
9673 break;
9674
9675 case TRUTH_ORIF_EXPR:
9676 if (if_true_label == 0)
9677 if_true_label = drop_through_label = gen_label_rtx ();
9678 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9679 start_cleanup_deferral ();
9680 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9681 end_cleanup_deferral ();
9682 break;
9683
9684 case COMPOUND_EXPR:
9685 push_temp_slots ();
9686 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9687 preserve_temp_slots (NULL_RTX);
9688 free_temp_slots ();
9689 pop_temp_slots ();
9690 emit_queue ();
9691 do_pending_stack_adjust ();
9692 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9693 break;
9694
9695 case COMPONENT_REF:
9696 case BIT_FIELD_REF:
9697 case ARRAY_REF:
9698 case ARRAY_RANGE_REF:
9699 {
9700 HOST_WIDE_INT bitsize, bitpos;
9701 int unsignedp;
9702 enum machine_mode mode;
9703 tree type;
9704 tree offset;
9705 int volatilep = 0;
9706 unsigned int alignment;
9707
9708 /* Get description of this reference. We don't actually care
9709 about the underlying object here. */
9710 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9711 &unsignedp, &volatilep, &alignment);
9712
9713 type = type_for_size (bitsize, unsignedp);
9714 if (! SLOW_BYTE_ACCESS
9715 && type != 0 && bitsize >= 0
9716 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9717 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9718 != CODE_FOR_nothing))
9719 {
9720 do_jump (convert (type, exp), if_false_label, if_true_label);
9721 break;
9722 }
9723 goto normal;
9724 }
9725
9726 case COND_EXPR:
9727 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9728 if (integer_onep (TREE_OPERAND (exp, 1))
9729 && integer_zerop (TREE_OPERAND (exp, 2)))
9730 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9731
9732 else if (integer_zerop (TREE_OPERAND (exp, 1))
9733 && integer_onep (TREE_OPERAND (exp, 2)))
9734 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9735
9736 else
9737 {
9738 rtx label1 = gen_label_rtx ();
9739 drop_through_label = gen_label_rtx ();
9740
9741 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9742
9743 start_cleanup_deferral ();
9744 /* Now the THEN-expression. */
9745 do_jump (TREE_OPERAND (exp, 1),
9746 if_false_label ? if_false_label : drop_through_label,
9747 if_true_label ? if_true_label : drop_through_label);
9748 /* In case the do_jump just above never jumps. */
9749 do_pending_stack_adjust ();
9750 emit_label (label1);
9751
9752 /* Now the ELSE-expression. */
9753 do_jump (TREE_OPERAND (exp, 2),
9754 if_false_label ? if_false_label : drop_through_label,
9755 if_true_label ? if_true_label : drop_through_label);
9756 end_cleanup_deferral ();
9757 }
9758 break;
9759
9760 case EQ_EXPR:
9761 {
9762 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9763
9764 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9765 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9766 {
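	    /* Roughly, `a == b' on complex values is rewritten below as
	       REALPART (a) == REALPART (b) && IMAGPART (a) == IMAGPART (b).  */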
9767 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9768 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9769 do_jump
9770 (fold
9771 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9772 fold (build (EQ_EXPR, TREE_TYPE (exp),
9773 fold (build1 (REALPART_EXPR,
9774 TREE_TYPE (inner_type),
9775 exp0)),
9776 fold (build1 (REALPART_EXPR,
9777 TREE_TYPE (inner_type),
9778 exp1)))),
9779 fold (build (EQ_EXPR, TREE_TYPE (exp),
9780 fold (build1 (IMAGPART_EXPR,
9781 TREE_TYPE (inner_type),
9782 exp0)),
9783 fold (build1 (IMAGPART_EXPR,
9784 TREE_TYPE (inner_type),
9785 exp1)))))),
9786 if_false_label, if_true_label);
9787 }
9788
9789 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9790 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9791
9792 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9793 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9794 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9795 else
9796 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9797 break;
9798 }
9799
9800 case NE_EXPR:
9801 {
9802 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9803
9804 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9805 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9806 {
9807 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9808 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9809 do_jump
9810 (fold
9811 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9812 fold (build (NE_EXPR, TREE_TYPE (exp),
9813 fold (build1 (REALPART_EXPR,
9814 TREE_TYPE (inner_type),
9815 exp0)),
9816 fold (build1 (REALPART_EXPR,
9817 TREE_TYPE (inner_type),
9818 exp1)))),
9819 fold (build (NE_EXPR, TREE_TYPE (exp),
9820 fold (build1 (IMAGPART_EXPR,
9821 TREE_TYPE (inner_type),
9822 exp0)),
9823 fold (build1 (IMAGPART_EXPR,
9824 TREE_TYPE (inner_type),
9825 exp1)))))),
9826 if_false_label, if_true_label);
9827 }
9828
9829 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9830 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9831
9832 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9833 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9834 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9835 else
9836 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9837 break;
9838 }
9839
9840 case LT_EXPR:
9841 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9842 if (GET_MODE_CLASS (mode) == MODE_INT
9843 && ! can_compare_p (LT, mode, ccp_jump))
9844 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9845 else
9846 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9847 break;
9848
9849 case LE_EXPR:
9850 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9851 if (GET_MODE_CLASS (mode) == MODE_INT
9852 && ! can_compare_p (LE, mode, ccp_jump))
9853 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9854 else
9855 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9856 break;
9857
9858 case GT_EXPR:
9859 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9860 if (GET_MODE_CLASS (mode) == MODE_INT
9861 && ! can_compare_p (GT, mode, ccp_jump))
9862 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9863 else
9864 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9865 break;
9866
9867 case GE_EXPR:
9868 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9869 if (GET_MODE_CLASS (mode) == MODE_INT
9870 && ! can_compare_p (GE, mode, ccp_jump))
9871 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9872 else
9873 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9874 break;
9875
9876 case UNORDERED_EXPR:
9877 case ORDERED_EXPR:
9878 {
9879 enum rtx_code cmp, rcmp;
9880 int do_rev;
9881
9882 if (code == UNORDERED_EXPR)
9883 cmp = UNORDERED, rcmp = ORDERED;
9884 else
9885 cmp = ORDERED, rcmp = UNORDERED;
9886 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9887
9888 do_rev = 0;
9889 if (! can_compare_p (cmp, mode, ccp_jump)
9890 && (can_compare_p (rcmp, mode, ccp_jump)
9891 /* If the target doesn't provide either UNORDERED or ORDERED
9892 comparisons, canonicalize on UNORDERED for the library. */
9893 || rcmp == UNORDERED))
9894 do_rev = 1;
9895
9896 if (! do_rev)
9897 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9898 else
9899 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9900 }
9901 break;
9902
9903 {
9904 enum rtx_code rcode1;
9905 enum tree_code tcode2;
9906
9907 case UNLT_EXPR:
9908 rcode1 = UNLT;
9909 tcode2 = LT_EXPR;
9910 goto unordered_bcc;
9911 case UNLE_EXPR:
9912 rcode1 = UNLE;
9913 tcode2 = LE_EXPR;
9914 goto unordered_bcc;
9915 case UNGT_EXPR:
9916 rcode1 = UNGT;
9917 tcode2 = GT_EXPR;
9918 goto unordered_bcc;
9919 case UNGE_EXPR:
9920 rcode1 = UNGE;
9921 tcode2 = GE_EXPR;
9922 goto unordered_bcc;
9923 case UNEQ_EXPR:
9924 rcode1 = UNEQ;
9925 tcode2 = EQ_EXPR;
9926 goto unordered_bcc;
9927
9928 unordered_bcc:
9929 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9930 if (can_compare_p (rcode1, mode, ccp_jump))
9931 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9932 if_true_label);
9933 else
9934 {
9935 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9936 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9937 tree cmp0, cmp1;
9938
9939 /* If the target doesn't support combined unordered
9940 compares, decompose into UNORDERED + comparison. */
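	      /* E.g. UNLT_EXPR (a, b) becomes, roughly,
		 UNORDERED_EXPR (a, b) || LT_EXPR (a, b).  */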
9941 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9942 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9943 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9944 do_jump (exp, if_false_label, if_true_label);
9945 }
9946 }
9947 break;
9948
9949 /* Special case:
9950 __builtin_expect (<test>, 0) and
9951 __builtin_expect (<test>, 1)
9952
9953 We need to do this here, so that <test> is not converted to a SCC
9954 operation on machines that use condition code registers and COMPARE
9955 like the PowerPC, and then the jump is done based on whether the SCC
9956 operation produced a 1 or 0. */
9957 case CALL_EXPR:
9958 /* Check for a built-in function. */
9959 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9960 {
9961 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9962 tree arglist = TREE_OPERAND (exp, 1);
9963
9964 if (TREE_CODE (fndecl) == FUNCTION_DECL
9965 && DECL_BUILT_IN (fndecl)
9966 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9967 && arglist != NULL_TREE
9968 && TREE_CHAIN (arglist) != NULL_TREE)
9969 {
9970 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9971 if_true_label);
9972
9973 if (seq != NULL_RTX)
9974 {
9975 emit_insn (seq);
9976 return;
9977 }
9978 }
9979 }
9980 /* fall through and generate the normal code. */
9981
9982 default:
9983 normal:
9984 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9985 #if 0
9986 /* This is not needed any more and causes poor code since it causes
9987 comparisons and tests from non-SI objects to have different code
9988 sequences. */
9989 /* Copy to register to avoid generating bad insns by cse
9990 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9991 if (!cse_not_expected && GET_CODE (temp) == MEM)
9992 temp = copy_to_reg (temp);
9993 #endif
9994 do_pending_stack_adjust ();
9995 /* Do any postincrements in the expression that was tested. */
9996 emit_queue ();
9997
9998 if (GET_CODE (temp) == CONST_INT
9999 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10000 || GET_CODE (temp) == LABEL_REF)
10001 {
10002 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10003 if (target)
10004 emit_jump (target);
10005 }
10006 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10007 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10008 /* Note swapping the labels gives us not-equal. */
10009 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10010 else if (GET_MODE (temp) != VOIDmode)
10011 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10012 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10013 GET_MODE (temp), NULL_RTX, 0,
10014 if_false_label, if_true_label);
10015 else
10016 abort ();
10017 }
10018
10019 if (drop_through_label)
10020 {
10021 /* If do_jump produces code that might be jumped around,
10022 do any stack adjusts from that code, before the place
10023 where control merges in. */
10024 do_pending_stack_adjust ();
10025 emit_label (drop_through_label);
10026 }
10027 }
10028 \f
10029 /* Given a comparison expression EXP for values too wide to be compared
10030 with one insn, test the comparison and jump to the appropriate label.
10031 The code of EXP is ignored; we always test GT if SWAP is 0,
10032 and LT if SWAP is 1. */
10033
10034 static void
10035 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10036 tree exp;
10037 int swap;
10038 rtx if_false_label, if_true_label;
10039 {
10040 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10041 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10042 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10043 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10044
10045 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10046 }
10047
10048 /* Compare OP0 with OP1, word at a time, in mode MODE.
10049 UNSIGNEDP says to do unsigned comparison.
10050 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
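/* For instance (a sketch), for a two-word comparison A > B the loop below
   emits, starting with the most significant words:

       if (A.high > B.high) goto if_true_label;
       if (A.high != B.high) goto if_false_label;
       if (A.low > B.low) goto if_true_label;    (always unsigned)
       if (A.low != B.low) goto if_false_label;
       goto if_false_label;  */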
10051
10052 void
10053 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10054 enum machine_mode mode;
10055 int unsignedp;
10056 rtx op0, op1;
10057 rtx if_false_label, if_true_label;
10058 {
10059 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10060 rtx drop_through_label = 0;
10061 int i;
10062
10063 if (! if_true_label || ! if_false_label)
10064 drop_through_label = gen_label_rtx ();
10065 if (! if_true_label)
10066 if_true_label = drop_through_label;
10067 if (! if_false_label)
10068 if_false_label = drop_through_label;
10069
10070 /* Compare a word at a time, high order first. */
10071 for (i = 0; i < nwords; i++)
10072 {
10073 rtx op0_word, op1_word;
10074
10075 if (WORDS_BIG_ENDIAN)
10076 {
10077 op0_word = operand_subword_force (op0, i, mode);
10078 op1_word = operand_subword_force (op1, i, mode);
10079 }
10080 else
10081 {
10082 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10083 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10084 }
10085
10086       /* All but the high-order word must be compared as unsigned.  */
10087 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10088 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10089 NULL_RTX, if_true_label);
10090
10091 /* Consider lower words only if these are equal. */
10092 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10093 NULL_RTX, 0, NULL_RTX, if_false_label);
10094 }
10095
10096 if (if_false_label)
10097 emit_jump (if_false_label);
10098 if (drop_through_label)
10099 emit_label (drop_through_label);
10100 }
10101
10102 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10103 with one insn, test the comparison and jump to the appropriate label. */
10104
10105 static void
10106 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10107 tree exp;
10108 rtx if_false_label, if_true_label;
10109 {
10110 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10111 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10112 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10113 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10114 int i;
10115 rtx drop_through_label = 0;
10116
10117 if (! if_false_label)
10118 drop_through_label = if_false_label = gen_label_rtx ();
10119
10120 for (i = 0; i < nwords; i++)
10121 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10122 operand_subword_force (op1, i, mode),
10123 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10124 word_mode, NULL_RTX, 0, if_false_label,
10125 NULL_RTX);
10126
10127 if (if_true_label)
10128 emit_jump (if_true_label);
10129 if (drop_through_label)
10130 emit_label (drop_through_label);
10131 }
10132 \f
10133 /* Jump according to whether OP0 is 0.
10134 We assume that OP0 has an integer mode that is too wide
10135 for the available compare insns. */
10136
10137 void
10138 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10139 rtx op0;
10140 rtx if_false_label, if_true_label;
10141 {
10142 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10143 rtx part;
10144 int i;
10145 rtx drop_through_label = 0;
10146
10147 /* The fastest way of doing this comparison on almost any machine is to
10148 "or" all the words and compare the result. If all have to be loaded
10149 from memory and this is a very wide item, it's possible this may
10150 be slower, but that's highly unlikely. */
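  /* For instance (a sketch): for a double-word OP0 on a single-word target,
     the code below computes  part = low_word | high_word  and then branches
     on whether PART is zero.  */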
10151
10152 part = gen_reg_rtx (word_mode);
10153 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10154 for (i = 1; i < nwords && part != 0; i++)
10155 part = expand_binop (word_mode, ior_optab, part,
10156 operand_subword_force (op0, i, GET_MODE (op0)),
10157 part, 1, OPTAB_WIDEN);
10158
10159 if (part != 0)
10160 {
10161 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10162 NULL_RTX, 0, if_false_label, if_true_label);
10163
10164 return;
10165 }
10166
10167 /* If we couldn't do the "or" simply, do this with a series of compares. */
10168 if (! if_false_label)
10169 drop_through_label = if_false_label = gen_label_rtx ();
10170
10171 for (i = 0; i < nwords; i++)
10172 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10173 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10174 if_false_label, NULL_RTX);
10175
10176 if (if_true_label)
10177 emit_jump (if_true_label);
10178
10179 if (drop_through_label)
10180 emit_label (drop_through_label);
10181 }
10182 \f
10183 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10184 (including code to compute the values to be compared)
10185 and set (CC0) according to the result.
10186 The decision as to signed or unsigned comparison must be made by the caller.
10187
10188 We force a stack adjustment unless there are currently
10189 things pushed on the stack that aren't yet used.
10190
10191 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10192 compared.
10193
10194 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10195 size of MODE should be used. */
10196
10197 rtx
10198 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10199 rtx op0, op1;
10200 enum rtx_code code;
10201 int unsignedp;
10202 enum machine_mode mode;
10203 rtx size;
10204 unsigned int align;
10205 {
10206 rtx tem;
10207
10208 /* If one operand is constant, make it the second one. Only do this
10209 if the other operand is not constant as well. */
10210
10211 if (swap_commutative_operands_p (op0, op1))
10212 {
10213 tem = op0;
10214 op0 = op1;
10215 op1 = tem;
10216 code = swap_condition (code);
10217 }
10218
10219 if (flag_force_mem)
10220 {
10221 op0 = force_not_mem (op0);
10222 op1 = force_not_mem (op1);
10223 }
10224
10225 do_pending_stack_adjust ();
10226
10227 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10228 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10229 return tem;
10230
10231 #if 0
10232 /* There's no need to do this now that combine.c can eliminate lots of
10233 sign extensions. This can be less efficient in certain cases on other
10234 machines. */
10235
10236 /* If this is a signed equality comparison, we can do it as an
10237 unsigned comparison since zero-extension is cheaper than sign
10238 extension and comparisons with zero are done as unsigned. This is
10239 the case even on machines that can do fast sign extension, since
10240 zero-extension is easier to combine with other operations than
10241 sign-extension is. If we are comparing against a constant, we must
10242 convert it to what it would look like unsigned. */
10243 if ((code == EQ || code == NE) && ! unsignedp
10244 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10245 {
10246 if (GET_CODE (op1) == CONST_INT
10247 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10248 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10249 unsignedp = 1;
10250 }
10251 #endif
10252
10253 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10254
10255 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10256 }
10257
10258 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10259 The decision as to signed or unsigned comparison must be made by the caller.
10260
10261 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10262 compared.
10263
10264 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10265 size of MODE should be used. */
10266
10267 void
10268 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10269 if_false_label, if_true_label)
10270 rtx op0, op1;
10271 enum rtx_code code;
10272 int unsignedp;
10273 enum machine_mode mode;
10274 rtx size;
10275 unsigned int align;
10276 rtx if_false_label, if_true_label;
10277 {
10278 rtx tem;
10279 int dummy_true_label = 0;
10280
10281 /* Reverse the comparison if that is safe and we want to jump if it is
10282 false. */
10283 if (! if_true_label && ! FLOAT_MODE_P (mode))
10284 {
10285 if_true_label = if_false_label;
10286 if_false_label = 0;
10287 code = reverse_condition (code);
10288 }
10289
10290 /* If one operand is constant, make it the second one. Only do this
10291 if the other operand is not constant as well. */
10292
10293 if (swap_commutative_operands_p (op0, op1))
10294 {
10295 tem = op0;
10296 op0 = op1;
10297 op1 = tem;
10298 code = swap_condition (code);
10299 }
10300
10301 if (flag_force_mem)
10302 {
10303 op0 = force_not_mem (op0);
10304 op1 = force_not_mem (op1);
10305 }
10306
10307 do_pending_stack_adjust ();
10308
10309 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10310 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10311 {
10312 if (tem == const_true_rtx)
10313 {
10314 if (if_true_label)
10315 emit_jump (if_true_label);
10316 }
10317 else
10318 {
10319 if (if_false_label)
10320 emit_jump (if_false_label);
10321 }
10322 return;
10323 }
10324
10325 #if 0
10326 /* There's no need to do this now that combine.c can eliminate lots of
10327 sign extensions. This can be less efficient in certain cases on other
10328 machines. */
10329
10330 /* If this is a signed equality comparison, we can do it as an
10331 unsigned comparison since zero-extension is cheaper than sign
10332 extension and comparisons with zero are done as unsigned. This is
10333 the case even on machines that can do fast sign extension, since
10334 zero-extension is easier to combine with other operations than
10335 sign-extension is. If we are comparing against a constant, we must
10336 convert it to what it would look like unsigned. */
10337 if ((code == EQ || code == NE) && ! unsignedp
10338 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10339 {
10340 if (GET_CODE (op1) == CONST_INT
10341 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10342 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10343 unsignedp = 1;
10344 }
10345 #endif
10346
10347 if (! if_true_label)
10348 {
10349 dummy_true_label = 1;
10350 if_true_label = gen_label_rtx ();
10351 }
10352
10353 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10354 if_true_label);
10355
10356 if (if_false_label)
10357 emit_jump (if_false_label);
10358 if (dummy_true_label)
10359 emit_label (if_true_label);
10360 }
10361
10362 /* Generate code for a comparison expression EXP (including code to compute
10363 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10364 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10365 generated code will drop through.
10366 SIGNED_CODE should be the rtx operation for this comparison for
10367 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10368
10369 We force a stack adjustment unless there are currently
10370 things pushed on the stack that aren't yet used. */
10371
10372 static void
10373 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10374 if_true_label)
10375 tree exp;
10376 enum rtx_code signed_code, unsigned_code;
10377 rtx if_false_label, if_true_label;
10378 {
10379 unsigned int align0, align1;
10380 rtx op0, op1;
10381 tree type;
10382 enum machine_mode mode;
10383 int unsignedp;
10384 enum rtx_code code;
10385
10386 /* Don't crash if the comparison was erroneous. */
10387 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10388 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10389 return;
10390
10391 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10392 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10393 return;
10394
10395 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10396 mode = TYPE_MODE (type);
10397 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10398 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10399 || (GET_MODE_BITSIZE (mode)
10400 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10401 1)))))))
10402 {
10403 /* op0 might have been replaced by a promoted constant, in which
10404 case the type of the second argument should be used. */
10405 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10406 mode = TYPE_MODE (type);
10407 }
10408 unsignedp = TREE_UNSIGNED (type);
10409 code = unsignedp ? unsigned_code : signed_code;
10410
10411 #ifdef HAVE_canonicalize_funcptr_for_compare
10412 /* If function pointers need to be "canonicalized" before they can
10413 be reliably compared, then canonicalize them. */
10414 if (HAVE_canonicalize_funcptr_for_compare
10415 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10416 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10417 == FUNCTION_TYPE))
10418 {
10419 rtx new_op0 = gen_reg_rtx (mode);
10420
10421 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10422 op0 = new_op0;
10423 }
10424
10425 if (HAVE_canonicalize_funcptr_for_compare
10426 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10427 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10428 == FUNCTION_TYPE))
10429 {
10430 rtx new_op1 = gen_reg_rtx (mode);
10431
10432 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10433 op1 = new_op1;
10434 }
10435 #endif
10436
10437 /* Do any postincrements in the expression that was tested. */
10438 emit_queue ();
10439
10440 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10441 ((mode == BLKmode)
10442 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10443 MIN (align0, align1),
10444 if_false_label, if_true_label);
10445 }
10446 \f
10447 /* Generate code to calculate EXP using a store-flag instruction
10448 and return an rtx for the result. EXP is either a comparison
10449 or a TRUTH_NOT_EXPR whose operand is a comparison.
10450
10451 If TARGET is nonzero, store the result there if convenient.
10452
10453 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10454 cheap.
10455
10456 Return zero if there is no suitable set-flag instruction
10457 available on this machine.
10458
10459 Once expand_expr has been called on the arguments of the comparison,
10460 we are committed to doing the store flag, since it is not safe to
10461 re-evaluate the expression. We emit the store-flag insn by calling
10462 emit_store_flag, but only expand the arguments if we have a reason
10463 to believe that emit_store_flag will be successful. If we think that
10464 it will, but it isn't, we have to simulate the store-flag with a
10465 set/jump/set sequence. */
10466
10467 static rtx
10468 do_store_flag (exp, target, mode, only_cheap)
10469 tree exp;
10470 rtx target;
10471 enum machine_mode mode;
10472 int only_cheap;
10473 {
10474 enum rtx_code code;
10475 tree arg0, arg1, type;
10476 tree tem;
10477 enum machine_mode operand_mode;
10478 int invert = 0;
10479 int unsignedp;
10480 rtx op0, op1;
10481 enum insn_code icode;
10482 rtx subtarget = target;
10483 rtx result, label;
10484
10485 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10486 result at the end. We can't simply invert the test since it would
10487 have already been inverted if it were valid. This case occurs for
10488 some floating-point comparisons. */
10489
10490 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10491 invert = 1, exp = TREE_OPERAND (exp, 0);
10492
10493 arg0 = TREE_OPERAND (exp, 0);
10494 arg1 = TREE_OPERAND (exp, 1);
10495
10496 /* Don't crash if the comparison was erroneous. */
10497 if (arg0 == error_mark_node || arg1 == error_mark_node)
10498 return const0_rtx;
10499
10500 type = TREE_TYPE (arg0);
10501 operand_mode = TYPE_MODE (type);
10502 unsignedp = TREE_UNSIGNED (type);
10503
10504 /* We won't bother with BLKmode store-flag operations because it would mean
10505 passing a lot of information to emit_store_flag. */
10506 if (operand_mode == BLKmode)
10507 return 0;
10508
10509 /* We won't bother with store-flag operations involving function pointers
10510 when function pointers must be canonicalized before comparisons. */
10511 #ifdef HAVE_canonicalize_funcptr_for_compare
10512 if (HAVE_canonicalize_funcptr_for_compare
10513 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10514 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10515 == FUNCTION_TYPE))
10516 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10517 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10518 == FUNCTION_TYPE))))
10519 return 0;
10520 #endif
10521
10522 STRIP_NOPS (arg0);
10523 STRIP_NOPS (arg1);
10524
10525 /* Get the rtx comparison code to use. We know that EXP is a comparison
10526 operation of some type. Some comparisons against 1 and -1 can be
10527 converted to comparisons with zero. Do so here so that the tests
10528 below will be aware that we have a comparison with zero. These
10529 tests will not catch constants in the first operand, but constants
10530 are rarely passed as the first operand. */
10531
10532 switch (TREE_CODE (exp))
10533 {
10534 case EQ_EXPR:
10535 code = EQ;
10536 break;
10537 case NE_EXPR:
10538 code = NE;
10539 break;
10540 case LT_EXPR:
10541 if (integer_onep (arg1))
10542 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10543 else
10544 code = unsignedp ? LTU : LT;
10545 break;
10546 case LE_EXPR:
10547 if (! unsignedp && integer_all_onesp (arg1))
10548 arg1 = integer_zero_node, code = LT;
10549 else
10550 code = unsignedp ? LEU : LE;
10551 break;
10552 case GT_EXPR:
10553 if (! unsignedp && integer_all_onesp (arg1))
10554 arg1 = integer_zero_node, code = GE;
10555 else
10556 code = unsignedp ? GTU : GT;
10557 break;
10558 case GE_EXPR:
10559 if (integer_onep (arg1))
10560 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10561 else
10562 code = unsignedp ? GEU : GE;
10563 break;
10564
10565 case UNORDERED_EXPR:
10566 code = UNORDERED;
10567 break;
10568 case ORDERED_EXPR:
10569 code = ORDERED;
10570 break;
10571 case UNLT_EXPR:
10572 code = UNLT;
10573 break;
10574 case UNLE_EXPR:
10575 code = UNLE;
10576 break;
10577 case UNGT_EXPR:
10578 code = UNGT;
10579 break;
10580 case UNGE_EXPR:
10581 code = UNGE;
10582 break;
10583 case UNEQ_EXPR:
10584 code = UNEQ;
10585 break;
10586
10587 default:
10588 abort ();
10589 }
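
 /* Worked example (added for illustration; not part of the original
    source): for a signed ARG0 the rewrites above give

	arg0 <   1   ==>   arg0 <=  0
	arg0 >=  1   ==>   arg0 >   0
	arg0 <= -1   ==>   arg0 <   0
	arg0 >  -1   ==>   arg0 >=  0

    so the tests below only need to recognize comparisons with zero.  */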
10590
10591 /* Put a constant second. */
10592 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10593 {
10594 tem = arg0; arg0 = arg1; arg1 = tem;
10595 code = swap_condition (code);
10596 }
10597
10598 /* If this is an equality or inequality test of a single bit, we can
10599 do this by shifting the bit being tested to the low-order bit and
10600 masking the result with the constant 1. If the condition was EQ,
10601 we xor it with 1. This does not require an scc insn and is faster
10602 than an scc insn even if we have it. */
10603
10604 if ((code == NE || code == EQ)
10605 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10606 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10607 {
10608 tree inner = TREE_OPERAND (arg0, 0);
10609 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10610 int ops_unsignedp;
10611
10612 /* If INNER is a right shift by a constant and that shift count plus
10613 BITNUM does not overflow, adjust BITNUM and INNER. */
10614
10615 if (TREE_CODE (inner) == RSHIFT_EXPR
10616 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10617 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10618 && bitnum < TYPE_PRECISION (type)
10619 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10620 bitnum - TYPE_PRECISION (type)))
10621 {
10622 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10623 inner = TREE_OPERAND (inner, 0);
10624 }
10625
10626 /* If we are going to be able to omit the AND below, we must do our
10627 operations as unsigned. If we must use the AND, we have a choice.
10628 Normally unsigned is faster, but on some machines signed is faster. */
10629 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10630 #ifdef LOAD_EXTEND_OP
10631 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10632 #else
10633 : 1
10634 #endif
10635 );
10636
10637 if (! get_subtarget (subtarget)
10638 || GET_MODE (subtarget) != operand_mode
10639 || ! safe_from_p (subtarget, inner, 1))
10640 subtarget = 0;
10641
10642 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10643
10644 if (bitnum != 0)
10645 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10646 size_int (bitnum), subtarget, ops_unsignedp);
10647
10648 if (GET_MODE (op0) != mode)
10649 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10650
10651 if ((code == EQ && ! invert) || (code == NE && invert))
10652 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10653 ops_unsignedp, OPTAB_LIB_WIDEN);
10654
10655 /* Put the AND last so it can combine with more things. */
10656 if (bitnum != TYPE_PRECISION (type) - 1)
10657 op0 = expand_and (op0, const1_rtx, subtarget);
10658
10659 return op0;
10660 }
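
 /* Worked example (added for illustration; not part of the original
    source): for a 32-bit ARG0 the block above turns

	(arg0 & 0x10) != 0   into   (arg0 >> 4) & 1
	(arg0 & 0x10) == 0   into   ((arg0 >> 4) ^ 1) & 1

    and when the tested bit is the sign bit (bitnum == precision - 1) the
    logical (unsigned) shift already leaves 0 or 1, so the final AND is
    omitted.  */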
10661
10662 /* Now see if we are likely to be able to do this. Return if not. */
10663 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10664 return 0;
10665
10666 icode = setcc_gen_code[(int) code];
10667 if (icode == CODE_FOR_nothing
10668 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10669 {
10670 /* We can only do this if it is one of the special cases that
10671 can be handled without an scc insn. */
10672 if ((code == LT && integer_zerop (arg1))
10673 || (! only_cheap && code == GE && integer_zerop (arg1)))
10674 ;
10675 else if (BRANCH_COST >= 0
10676 && ! only_cheap && (code == NE || code == EQ)
10677 && TREE_CODE (type) != REAL_TYPE
10678 && ((abs_optab->handlers[(int) operand_mode].insn_code
10679 != CODE_FOR_nothing)
10680 || (ffs_optab->handlers[(int) operand_mode].insn_code
10681 != CODE_FOR_nothing)))
10682 ;
10683 else
10684 return 0;
10685 }
10686
10687 if (! get_subtarget (target)
10688 || GET_MODE (subtarget) != operand_mode
10689 || ! safe_from_p (subtarget, arg1, 1))
10690 subtarget = 0;
10691
10692 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10693 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10694
10695 if (target == 0)
10696 target = gen_reg_rtx (mode);
10697
10698 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10699 because, if emit_store_flag does anything at all, it will succeed and
10700 OP0 and OP1 will not be used subsequently. */
10701
10702 result = emit_store_flag (target, code,
10703 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10704 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10705 operand_mode, unsignedp, 1);
10706
10707 if (result)
10708 {
10709 if (invert)
10710 result = expand_binop (mode, xor_optab, result, const1_rtx,
10711 result, 0, OPTAB_LIB_WIDEN);
10712 return result;
10713 }
10714
10715 /* If this failed, we have to do this with set/compare/jump/set code. */
10716 if (GET_CODE (target) != REG
10717 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10718 target = gen_reg_rtx (GET_MODE (target));
10719
10720 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10721 result = compare_from_rtx (op0, op1, code, unsignedp,
10722 operand_mode, NULL_RTX, 0);
10723 if (GET_CODE (result) == CONST_INT)
10724 return (((result == const0_rtx && ! invert)
10725 || (result != const0_rtx && invert))
10726 ? const0_rtx : const1_rtx);
10727
10728 label = gen_label_rtx ();
10729 if (bcc_gen_fctn[(int) code] == 0)
10730 abort ();
10731
10732 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10733 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10734 emit_label (label);
10735
10736 return target;
10737 }
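
/* Illustrative sketch (not part of the original sources): shape of the
   set/jump/set fallback that do_store_flag emits above when
   emit_store_flag cannot handle the comparison directly.  The function
   and its arguments are hypothetical; with INVERT clear, computing
   "target = (a < b)" behaves like:  */
#if 0
static int
store_flag_fallback (int a, int b)
{
  int target = 1;		/* emit_move_insn (target, const1_rtx) */
  if (a < b)			/* branch emitted via bcc_gen_fctn[code] */
    goto out;
  target = 0;			/* emit_move_insn (target, const0_rtx) */
 out:				/* emit_label (label) */
  return target;
}
#endif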
10738 \f
10739
10740 /* Stubs in case we haven't got a casesi insn. */
10741 #ifndef HAVE_casesi
10742 # define HAVE_casesi 0
10743 # define gen_casesi(a, b, c, d, e) (0)
10744 # define CODE_FOR_casesi CODE_FOR_nothing
10745 #endif
10746
10747 /* If the machine does not have a case insn that compares the bounds,
10748 this means extra overhead for dispatch tables, which raises the
10749 threshold for using them. */
10750 #ifndef CASE_VALUES_THRESHOLD
10751 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10752 #endif /* CASE_VALUES_THRESHOLD */
10753
10754 unsigned int
10755 case_values_threshold ()
10756 {
10757 return CASE_VALUES_THRESHOLD;
10758 }
10759
10760 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10761 0 otherwise (i.e. if there is no casesi instruction). */
10762 int
10763 try_casesi (index_type, index_expr, minval, range,
10764 table_label, default_label)
10765 tree index_type, index_expr, minval, range;
10766 rtx table_label ATTRIBUTE_UNUSED;
10767 rtx default_label;
10768 {
10769 enum machine_mode index_mode = SImode;
10770 int index_bits = GET_MODE_BITSIZE (index_mode);
10771 rtx op1, op2, index;
10772 enum machine_mode op_mode;
10773
10774 if (! HAVE_casesi)
10775 return 0;
10776
10777 /* Convert the index to SImode. */
10778 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10779 {
10780 enum machine_mode omode = TYPE_MODE (index_type);
10781 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10782
10783 /* We must handle the endpoints in the original mode. */
10784 index_expr = build (MINUS_EXPR, index_type,
10785 index_expr, minval);
10786 minval = integer_zero_node;
10787 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10788 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10789 omode, 1, 0, default_label);
10790 /* Now we can safely truncate. */
10791 index = convert_to_mode (index_mode, index, 0);
10792 }
10793 else
10794 {
10795 if (TYPE_MODE (index_type) != index_mode)
10796 {
10797 index_expr = convert (type_for_size (index_bits, 0),
10798 index_expr);
10799 index_type = TREE_TYPE (index_expr);
10800 }
10801
10802 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10803 }
10804 emit_queue ();
10805 index = protect_from_queue (index, 0);
10806 do_pending_stack_adjust ();
10807
10808 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10809 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10810 (index, op_mode))
10811 index = copy_to_mode_reg (op_mode, index);
10812
10813 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10814
10815 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10816 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10817 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10818 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10819 (op1, op_mode))
10820 op1 = copy_to_mode_reg (op_mode, op1);
10821
10822 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10823
10824 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10825 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10826 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10827 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10828 (op2, op_mode))
10829 op2 = copy_to_mode_reg (op_mode, op2);
10830
10831 emit_jump_insn (gen_casesi (index, op1, op2,
10832 table_label, default_label));
10833 return 1;
10834 }
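
/* Illustrative sketch (not in the original file): what the wide-index
   path of try_casesi computes before truncating to SImode.  Names and
   types are hypothetical; assume a 64-bit index type and 32-bit SImode.  */
#if 0
static int
narrow_switch_index (long long index, long long minval,
		     unsigned long long range, int *out_of_range)
{
  /* Subtract the lower bound in the wide mode ...  */
  unsigned long long rel = (unsigned long long) (index - minval);

  /* ... and do the bounds check there too (range < rel), mirroring the
     emit_cmp_and_jump_insns (..., LTU, ...) call above.  */
  if (range < rel)
    {
      *out_of_range = 1;	/* would jump to default_label */
      return 0;
    }

  *out_of_range = 0;
  return (int) rel;		/* now the truncation to SImode is safe */
}
#endif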
10835
10836 /* Attempt to generate a tablejump instruction; same concept. */
10837 #ifndef HAVE_tablejump
10838 #define HAVE_tablejump 0
10839 #define gen_tablejump(x, y) (0)
10840 #endif
10841
10842 /* Subroutine of the next function.
10843
10844 INDEX is the value being switched on, with the lowest value
10845 in the table already subtracted.
10846 MODE is its expected mode (needed if INDEX is constant).
10847 RANGE is the length of the jump table.
10848 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10849
10850 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10851 index value is out of range. */
10852
10853 static void
10854 do_tablejump (index, mode, range, table_label, default_label)
10855 rtx index, range, table_label, default_label;
10856 enum machine_mode mode;
10857 {
10858 rtx temp, vector;
10859
10860 /* Do an unsigned comparison (in the proper mode) between the index
10861 expression and the value which represents the length of the range.
10862 Since we just finished subtracting the lower bound of the range
10863 from the index expression, this comparison allows us to simultaneously
10864 check that the original index expression value is both greater than
10865 or equal to the minimum value of the range and less than or equal to
10866 the maximum value of the range. */
10867
10868 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10869 0, default_label);
10870
10871 /* If index is in range, it must fit in Pmode.
10872 Convert to Pmode so we can index with it. */
10873 if (mode != Pmode)
10874 index = convert_to_mode (Pmode, index, 1);
10875
10876 /* Don't let a MEM slip through, because then the INDEX that comes
10877 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10878 and break_out_memory_refs will go to work on it and mess it up. */
10879 #ifdef PIC_CASE_VECTOR_ADDRESS
10880 if (flag_pic && GET_CODE (index) != REG)
10881 index = copy_to_mode_reg (Pmode, index);
10882 #endif
10883
10884 /* If flag_force_addr were to affect this address
10885 it could interfere with the tricky assumptions made
10886 about addresses that contain label-refs,
10887 which may be valid only very near the tablejump itself. */
10888 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10889 GET_MODE_SIZE, because this indicates how large insns are. The other
10890 uses should all be Pmode, because they are addresses. This code
10891 could fail if addresses and insns are not the same size. */
10892 index = gen_rtx_PLUS (Pmode,
10893 gen_rtx_MULT (Pmode, index,
10894 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10895 gen_rtx_LABEL_REF (Pmode, table_label));
10896 #ifdef PIC_CASE_VECTOR_ADDRESS
10897 if (flag_pic)
10898 index = PIC_CASE_VECTOR_ADDRESS (index);
10899 else
10900 #endif
10901 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10902 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10903 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10904 RTX_UNCHANGING_P (vector) = 1;
10905 convert_move (temp, vector, 0);
10906
10907 emit_jump_insn (gen_tablejump (temp, table_label));
10908
10909 /* If we are generating PIC code or if the table is PC-relative, the
10910 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10911 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10912 emit_barrier ();
10913 }
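
/* Illustrative sketch (not part of the original sources): a C-level
   analogue of the dispatch sequence do_tablejump builds for a simple
   absolute (non-PIC) case vector.  The table and handler names are
   hypothetical.  */
#if 0
static void
dispatch_through_table (unsigned long index, unsigned long range,
			void (*const table[]) (void),
			void (*default_handler) (void))
{
  /* INDEX already has the lowest case value subtracted, so one unsigned
     comparison checks both ends of the range at once.  */
  if (index > range)
    {
      default_handler ();
      return;
    }

  /* Scale by the vector element size, load the entry, and jump.  */
  table[index] ();
}
#endif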
10914
10915 int
10916 try_tablejump (index_type, index_expr, minval, range,
10917 table_label, default_label)
10918 tree index_type, index_expr, minval, range;
10919 rtx table_label, default_label;
10920 {
10921 rtx index;
10922
10923 if (! HAVE_tablejump)
10924 return 0;
10925
10926 index_expr = fold (build (MINUS_EXPR, index_type,
10927 convert (index_type, index_expr),
10928 convert (index_type, minval)));
10929 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10930 emit_queue ();
10931 index = protect_from_queue (index, 0);
10932 do_pending_stack_adjust ();
10933
10934 do_tablejump (index, TYPE_MODE (index_type),
10935 convert_modes (TYPE_MODE (index_type),
10936 TYPE_MODE (TREE_TYPE (range)),
10937 expand_expr (range, NULL_RTX,
10938 VOIDmode, 0),
10939 TREE_UNSIGNED (TREE_TYPE (range))),
10940 table_label, default_label);
10941 return 1;
10942 }