gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "intl.h"
45 #include "tm_p.h"
46
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
49
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
52
53 #ifdef PUSH_ROUNDING
54
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first. */
57 #endif
58
59 #endif
60
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
68
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
73
74 /* Hook called by safe_from_p for language-specific tree codes. It is
75 up to the language front-end to install a hook if it has any such
76 codes that safe_from_p needs to know about. Since safe_from_p will
77 recursively explore the TREE_OPERANDs of an expression, this hook
78 should not reexamine those pieces. This routine may recursively
79 call safe_from_p; it should always pass `0' as the TOP_P
80 parameter. */
81 int (*lang_safe_from_p) PARAMS ((rtx, tree));
82
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
90
91 /* Don't check memory usage, since the code being emitted is itself checking
92 memory usage. Used when current_function_check_memory_usage is true, to avoid
93 infinite recursion. */
94 static int in_check_memory_usage;
95
96 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
97 static tree placeholder_list = 0;
98
99 /* This structure is used by move_by_pieces to describe the move to
100 be performed. */
101 struct move_by_pieces
102 {
103 rtx to;
104 rtx to_addr;
105 int autinc_to;
106 int explicit_inc_to;
107 rtx from;
108 rtx from_addr;
109 int autinc_from;
110 int explicit_inc_from;
111 unsigned HOST_WIDE_INT len;
112 HOST_WIDE_INT offset;
113 int reverse;
114 };
115
116 /* This structure is used by store_by_pieces to describe the store or
117 clear to be performed. */
118
119 struct store_by_pieces
120 {
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
128 PTR constfundata;
129 int reverse;
130 };
131
132 extern struct obstack permanent_obstack;
133
134 static rtx get_push_address PARAMS ((int));
135
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
137 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
138 PARAMS ((unsigned HOST_WIDE_INT,
139 unsigned int));
140 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
141 struct move_by_pieces *));
142 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
143 enum machine_mode));
144 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
145 unsigned int));
146 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
147 unsigned int));
148 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
149 enum machine_mode,
150 struct store_by_pieces *));
151 static rtx get_subtarget PARAMS ((rtx));
152 static int is_zeros_p PARAMS ((tree));
153 static int mostly_zeros_p PARAMS ((tree));
154 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, unsigned int, int,
157 int));
158 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
159 HOST_WIDE_INT));
160 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, enum machine_mode, int,
163 unsigned int, HOST_WIDE_INT, int));
164 static enum memory_use_mode
165 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
166 static rtx var_rtx PARAMS ((tree));
167 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
168 static rtx expand_increment PARAMS ((tree, int, int));
169 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
170 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
171 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
172 rtx, rtx));
173 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
174 #ifdef PUSH_ROUNDING
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
176 #endif
177 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
178
179 /* Record for each mode whether we can move a register directly to or
180 from an object of that mode in memory. If we can't, we won't try
181 to use that mode directly when accessing a field of that mode. */
182
183 static char direct_load[NUM_MACHINE_MODES];
184 static char direct_store[NUM_MACHINE_MODES];
185
186 /* If a memory-to-memory move would take MOVE_RATIO or more simple
187 move-instruction sequences, we will do a movstr or libcall instead. */
188
189 #ifndef MOVE_RATIO
190 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
191 #define MOVE_RATIO 2
192 #else
193 /* If we are optimizing for space (-Os), cut down the default move ratio. */
194 #define MOVE_RATIO (optimize_size ? 3 : 15)
195 #endif
196 #endif
197
198 /* This macro is used to determine whether move_by_pieces should be called
199 to perform a structure copy. */
200 #ifndef MOVE_BY_PIECES_P
201 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
202 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
203 #endif
204
205 /* This array records the insn_code of insns to perform block moves. */
206 enum insn_code movstr_optab[NUM_MACHINE_MODES];
207
208 /* This array records the insn_code of insns to perform block clears. */
209 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
210
211 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
212
213 #ifndef SLOW_UNALIGNED_ACCESS
214 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
215 #endif
216 \f
217 /* This is run once per compilation to set up which modes can be used
218 directly in memory and to initialize the block move optab. */
219
220 void
221 init_expr_once ()
222 {
223 rtx insn, pat;
224 enum machine_mode mode;
225 int num_clobbers;
226 rtx mem, mem1;
227
228 start_sequence ();
229
230 /* Try indexing by frame ptr and try by stack ptr.
231 It is known that on the Convex the stack ptr isn't a valid index.
232 With luck, one or the other is valid on any machine. */
233 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
234 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
235
236 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
237 pat = PATTERN (insn);
238
239 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
240 mode = (enum machine_mode) ((int) mode + 1))
241 {
242 int regno;
243 rtx reg;
244
245 direct_load[(int) mode] = direct_store[(int) mode] = 0;
246 PUT_MODE (mem, mode);
247 PUT_MODE (mem1, mode);
248
249 /* See if there is some register that can be used in this mode and
250 directly loaded or stored from memory. */
251
252 if (mode != VOIDmode && mode != BLKmode)
253 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
254 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
255 regno++)
256 {
257 if (! HARD_REGNO_MODE_OK (regno, mode))
258 continue;
259
260 reg = gen_rtx_REG (mode, regno);
261
262 SET_SRC (pat) = mem;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
266
267 SET_SRC (pat) = mem1;
268 SET_DEST (pat) = reg;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_load[(int) mode] = 1;
271
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
276
277 SET_SRC (pat) = reg;
278 SET_DEST (pat) = mem1;
279 if (recog (pat, insn, &num_clobbers) >= 0)
280 direct_store[(int) mode] = 1;
281 }
282 }
283
284 end_sequence ();
285 }
286
287 /* This is run at the start of compiling a function. */
288
289 void
290 init_expr ()
291 {
292 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
293
294 pending_chain = 0;
295 pending_stack_adjust = 0;
296 stack_pointer_delta = 0;
297 inhibit_defer_pop = 0;
298 saveregs_value = 0;
299 apply_args_value = 0;
300 forced_labels = 0;
301 }
302
303 void
304 mark_expr_status (p)
305 struct expr_status *p;
306 {
307 if (p == NULL)
308 return;
309
310 ggc_mark_rtx (p->x_saveregs_value);
311 ggc_mark_rtx (p->x_apply_args_value);
312 ggc_mark_rtx (p->x_forced_labels);
313 }
314
315 void
316 free_expr_status (f)
317 struct function *f;
318 {
319 free (f->expr);
320 f->expr = NULL;
321 }
322
323 /* Small sanity check that the queue is empty at the end of a function. */
324
325 void
326 finish_expr_for_function ()
327 {
328 if (pending_chain)
329 abort ();
330 }
331 \f
332 /* Manage the queue of increment instructions to be output
333 for POSTINCREMENT_EXPR expressions, etc. */
334
335 /* Queue up to increment (or change) VAR later. BODY says how:
336 BODY should be the same thing you would pass to emit_insn
337 to increment right away. It will go to emit_insn later on.
338
339 The value is a QUEUED expression to be used in place of VAR
340 where you want to guarantee the pre-incrementation value of VAR. */
341
342 static rtx
343 enqueue_insn (var, body)
344 rtx var, body;
345 {
346 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
347 body, pending_chain);
348 return pending_chain;
349 }
350
351 /* Use protect_from_queue to convert a QUEUED expression
352 into something that you can put immediately into an instruction.
353 If the queued incrementation has not happened yet,
354 protect_from_queue returns the variable itself.
355 If the incrementation has happened, protect_from_queue returns a temp
356 that contains a copy of the old value of the variable.
357
358 Any time an rtx which might possibly be a QUEUED is to be put
359 into an instruction, it must be passed through protect_from_queue first.
360 QUEUED expressions are not meaningful in instructions.
361
362 Do not pass a value through protect_from_queue and then hold
363 on to it for a while before putting it in an instruction!
364 If the queue is flushed in between, incorrect code will result. */
365
366 rtx
367 protect_from_queue (x, modify)
368 register rtx x;
369 int modify;
370 {
371 register RTX_CODE code = GET_CODE (x);
372
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain == 0)
376 return x;
377 #endif
378
379 if (code != QUEUED)
380 {
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode. Don't modify X in place since it might be
385 shared. */
386 if (code == MEM && GET_MODE (x) != BLKmode
387 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
388 {
389 rtx y = XEXP (x, 0);
390 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
391
392 if (QUEUED_INSN (y))
393 {
394 rtx temp = gen_reg_rtx (GET_MODE (x));
395
396 emit_insn_before (gen_move_insn (temp, new),
397 QUEUED_INSN (y));
398 return temp;
399 }
400
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
404 }
405
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
408 if (code == MEM)
409 {
410 rtx tem = protect_from_queue (XEXP (x, 0), 0);
411 if (tem != XEXP (x, 0))
412 {
413 x = copy_rtx (x);
414 XEXP (x, 0) = tem;
415 }
416 }
417 else if (code == PLUS || code == MULT)
418 {
419 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
420 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
421 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
422 {
423 x = copy_rtx (x);
424 XEXP (x, 0) = new0;
425 XEXP (x, 1) = new1;
426 }
427 }
428 return x;
429 }
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to
432 emit_queue. */
433 if (QUEUED_INSN (x) == 0)
434 return copy_to_reg (QUEUED_VAR (x));
435 /* If the increment has happened and a pre-increment copy exists,
436 use that copy. */
437 if (QUEUED_COPY (x) != 0)
438 return QUEUED_COPY (x);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
443 QUEUED_INSN (x));
444 return QUEUED_COPY (x);
445 }
446
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
451
452 int
453 queued_subexp_p (x)
454 rtx x;
455 {
456 register enum rtx_code code = GET_CODE (x);
457 switch (code)
458 {
459 case QUEUED:
460 return 1;
461 case MEM:
462 return queued_subexp_p (XEXP (x, 0));
463 case MULT:
464 case PLUS:
465 case MINUS:
466 return (queued_subexp_p (XEXP (x, 0))
467 || queued_subexp_p (XEXP (x, 1)));
468 default:
469 return 0;
470 }
471 }
472
473 /* Perform all the pending incrementations. */
474
475 void
476 emit_queue ()
477 {
478 register rtx p;
479 while ((p = pending_chain))
480 {
481 rtx body = QUEUED_BODY (p);
482
483 if (GET_CODE (body) == SEQUENCE)
484 {
485 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
486 emit_insn (QUEUED_BODY (p));
487 }
488 else
489 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
490 pending_chain = QUEUED_NEXT (p);
491 }
492 }
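/* Editorial usage sketch -- not part of expr.c.  It illustrates the
   calling discipline described above: any rtx that might contain a
   QUEUED must pass through protect_from_queue before being placed in
   an insn, and emit_queue flushes the pending increments afterwards.
   EXAMPLE_OP and the surrounding function are hypothetical.  */

static void
example_queue_discipline (example_op)
     rtx example_op;
{
  /* Get a QUEUED-free rtx; the 0 means we only read the operand.  */
  if (queued_subexp_p (example_op))
    example_op = protect_from_queue (example_op, 0);

  /* ... emit the insns that use EXAMPLE_OP right away here ...  */

  /* Perform the queued post-increments once those uses are emitted.  */
  emit_queue ();
}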
493 \f
494 /* Copy data from FROM to TO, where the machine modes are not the same.
495 Both modes may be integer, or both may be floating.
496 UNSIGNEDP should be nonzero if FROM is an unsigned type.
497 This causes zero-extension instead of sign-extension. */
498
499 void
500 convert_move (to, from, unsignedp)
501 register rtx to, from;
502 int unsignedp;
503 {
504 enum machine_mode to_mode = GET_MODE (to);
505 enum machine_mode from_mode = GET_MODE (from);
506 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
507 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
508 enum insn_code code;
509 rtx libcall;
510
511 /* rtx code for making an equivalent value. */
512 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
513
514 to = protect_from_queue (to, 1);
515 from = protect_from_queue (from, 0);
516
517 if (to_real != from_real)
518 abort ();
519
520 /* If FROM is a SUBREG that indicates that we have already done at least
521 the required extension, strip it. We don't handle such SUBREGs as
522 TO here. */
523
524 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
525 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
526 >= GET_MODE_SIZE (to_mode))
527 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
528 from = gen_lowpart (to_mode, from), from_mode = to_mode;
529
530 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
531 abort ();
532
533 if (to_mode == from_mode
534 || (from_mode == VOIDmode && CONSTANT_P (from)))
535 {
536 emit_move_insn (to, from);
537 return;
538 }
539
540 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
541 {
542 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
543 abort ();
544
545 if (VECTOR_MODE_P (to_mode))
546 from = gen_rtx_SUBREG (to_mode, from, 0);
547 else
548 to = gen_rtx_SUBREG (from_mode, to, 0);
549
550 emit_move_insn (to, from);
551 return;
552 }
553
554 if (to_real != from_real)
555 abort ();
556
557 if (to_real)
558 {
559 rtx value, insns;
560
561 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
562 {
563 /* Try converting directly if the insn is supported. */
564 if ((code = can_extend_p (to_mode, from_mode, 0))
565 != CODE_FOR_nothing)
566 {
567 emit_unop_insn (code, to, from, UNKNOWN);
568 return;
569 }
570 }
571
572 #ifdef HAVE_trunchfqf2
573 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
574 {
575 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
576 return;
577 }
578 #endif
579 #ifdef HAVE_trunctqfqf2
580 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_truncsfqf2
587 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593 #ifdef HAVE_truncdfqf2
594 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
595 {
596 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
597 return;
598 }
599 #endif
600 #ifdef HAVE_truncxfqf2
601 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
602 {
603 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
604 return;
605 }
606 #endif
607 #ifdef HAVE_trunctfqf2
608 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
609 {
610 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
611 return;
612 }
613 #endif
614
615 #ifdef HAVE_trunctqfhf2
616 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
617 {
618 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622 #ifdef HAVE_truncsfhf2
623 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629 #ifdef HAVE_truncdfhf2
630 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
631 {
632 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
633 return;
634 }
635 #endif
636 #ifdef HAVE_truncxfhf2
637 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
638 {
639 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
640 return;
641 }
642 #endif
643 #ifdef HAVE_trunctfhf2
644 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
645 {
646 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
647 return;
648 }
649 #endif
650
651 #ifdef HAVE_truncsftqf2
652 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
653 {
654 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658 #ifdef HAVE_truncdftqf2
659 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
660 {
661 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665 #ifdef HAVE_truncxftqf2
666 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
667 {
668 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
669 return;
670 }
671 #endif
672 #ifdef HAVE_trunctftqf2
673 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
674 {
675 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
676 return;
677 }
678 #endif
679
680 #ifdef HAVE_truncdfsf2
681 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_truncxfsf2
688 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
689 {
690 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_trunctfsf2
695 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
696 {
697 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701 #ifdef HAVE_truncxfdf2
702 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
703 {
704 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
705 return;
706 }
707 #endif
708 #ifdef HAVE_trunctfdf2
709 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
710 {
711 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
712 return;
713 }
714 #endif
715
716 libcall = (rtx) 0;
717 switch (from_mode)
718 {
719 case SFmode:
720 switch (to_mode)
721 {
722 case DFmode:
723 libcall = extendsfdf2_libfunc;
724 break;
725
726 case XFmode:
727 libcall = extendsfxf2_libfunc;
728 break;
729
730 case TFmode:
731 libcall = extendsftf2_libfunc;
732 break;
733
734 default:
735 break;
736 }
737 break;
738
739 case DFmode:
740 switch (to_mode)
741 {
742 case SFmode:
743 libcall = truncdfsf2_libfunc;
744 break;
745
746 case XFmode:
747 libcall = extenddfxf2_libfunc;
748 break;
749
750 case TFmode:
751 libcall = extenddftf2_libfunc;
752 break;
753
754 default:
755 break;
756 }
757 break;
758
759 case XFmode:
760 switch (to_mode)
761 {
762 case SFmode:
763 libcall = truncxfsf2_libfunc;
764 break;
765
766 case DFmode:
767 libcall = truncxfdf2_libfunc;
768 break;
769
770 default:
771 break;
772 }
773 break;
774
775 case TFmode:
776 switch (to_mode)
777 {
778 case SFmode:
779 libcall = trunctfsf2_libfunc;
780 break;
781
782 case DFmode:
783 libcall = trunctfdf2_libfunc;
784 break;
785
786 default:
787 break;
788 }
789 break;
790
791 default:
792 break;
793 }
794
795 if (libcall == (rtx) 0)
796 /* This conversion is not implemented yet. */
797 abort ();
798
799 start_sequence ();
800 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
801 1, from, from_mode);
802 insns = get_insns ();
803 end_sequence ();
804 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
805 from));
806 return;
807 }
808
809 /* Now both modes are integers. */
810
811 /* Handle expanding beyond a word. */
812 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
813 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
814 {
815 rtx insns;
816 rtx lowpart;
817 rtx fill_value;
818 rtx lowfrom;
819 int i;
820 enum machine_mode lowpart_mode;
821 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
822
823 /* Try converting directly if the insn is supported. */
824 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
825 != CODE_FOR_nothing)
826 {
827 /* If FROM is a SUBREG, put it into a register. Do this
828 so that we always generate the same set of insns for
829 better cse'ing; if an intermediate assignment occurred,
830 we won't be doing the operation directly on the SUBREG. */
831 if (optimize > 0 && GET_CODE (from) == SUBREG)
832 from = force_reg (from_mode, from);
833 emit_unop_insn (code, to, from, equiv_code);
834 return;
835 }
836 /* Next, try converting via full word. */
837 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
838 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
839 != CODE_FOR_nothing))
840 {
841 if (GET_CODE (to) == REG)
842 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
843 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
844 emit_unop_insn (code, to,
845 gen_lowpart (word_mode, to), equiv_code);
846 return;
847 }
848
849 /* No special multiword conversion insn; do it by hand. */
850 start_sequence ();
851
852 /* Since we will turn this into a no conflict block, we must ensure
853 that the source does not overlap the target. */
854
855 if (reg_overlap_mentioned_p (to, from))
856 from = force_reg (from_mode, from);
857
858 /* Get a copy of FROM widened to a word, if necessary. */
859 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
860 lowpart_mode = word_mode;
861 else
862 lowpart_mode = from_mode;
863
864 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
865
866 lowpart = gen_lowpart (lowpart_mode, to);
867 emit_move_insn (lowpart, lowfrom);
868
869 /* Compute the value to put in each remaining word. */
870 if (unsignedp)
871 fill_value = const0_rtx;
872 else
873 {
874 #ifdef HAVE_slt
875 if (HAVE_slt
876 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
877 && STORE_FLAG_VALUE == -1)
878 {
879 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
880 lowpart_mode, 0, 0);
881 fill_value = gen_reg_rtx (word_mode);
882 emit_insn (gen_slt (fill_value));
883 }
884 else
885 #endif
886 {
887 fill_value
888 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
889 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
890 NULL_RTX, 0);
891 fill_value = convert_to_mode (word_mode, fill_value, 1);
892 }
893 }
894
895 /* Fill the remaining words. */
896 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
897 {
898 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
899 rtx subword = operand_subword (to, index, 1, to_mode);
900
901 if (subword == 0)
902 abort ();
903
904 if (fill_value != subword)
905 emit_move_insn (subword, fill_value);
906 }
907
908 insns = get_insns ();
909 end_sequence ();
910
911 emit_no_conflict_block (insns, to, from, NULL_RTX,
912 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
913 return;
914 }
915
916 /* Truncating multi-word to a word or less. */
917 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
918 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
919 {
920 if (!((GET_CODE (from) == MEM
921 && ! MEM_VOLATILE_P (from)
922 && direct_load[(int) to_mode]
923 && ! mode_dependent_address_p (XEXP (from, 0)))
924 || GET_CODE (from) == REG
925 || GET_CODE (from) == SUBREG))
926 from = force_reg (from_mode, from);
927 convert_move (to, gen_lowpart (word_mode, from), 0);
928 return;
929 }
930
931 /* Handle pointer conversion. */ /* SPEE 900220. */
932 if (to_mode == PQImode)
933 {
934 if (from_mode != QImode)
935 from = convert_to_mode (QImode, from, unsignedp);
936
937 #ifdef HAVE_truncqipqi2
938 if (HAVE_truncqipqi2)
939 {
940 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
941 return;
942 }
943 #endif /* HAVE_truncqipqi2 */
944 abort ();
945 }
946
947 if (from_mode == PQImode)
948 {
949 if (to_mode != QImode)
950 {
951 from = convert_to_mode (QImode, from, unsignedp);
952 from_mode = QImode;
953 }
954 else
955 {
956 #ifdef HAVE_extendpqiqi2
957 if (HAVE_extendpqiqi2)
958 {
959 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
960 return;
961 }
962 #endif /* HAVE_extendpqiqi2 */
963 abort ();
964 }
965 }
966
967 if (to_mode == PSImode)
968 {
969 if (from_mode != SImode)
970 from = convert_to_mode (SImode, from, unsignedp);
971
972 #ifdef HAVE_truncsipsi2
973 if (HAVE_truncsipsi2)
974 {
975 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
976 return;
977 }
978 #endif /* HAVE_truncsipsi2 */
979 abort ();
980 }
981
982 if (from_mode == PSImode)
983 {
984 if (to_mode != SImode)
985 {
986 from = convert_to_mode (SImode, from, unsignedp);
987 from_mode = SImode;
988 }
989 else
990 {
991 #ifdef HAVE_extendpsisi2
992 if (! unsignedp && HAVE_extendpsisi2)
993 {
994 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
995 return;
996 }
997 #endif /* HAVE_extendpsisi2 */
998 #ifdef HAVE_zero_extendpsisi2
999 if (unsignedp && HAVE_zero_extendpsisi2)
1000 {
1001 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1002 return;
1003 }
1004 #endif /* HAVE_zero_extendpsisi2 */
1005 abort ();
1006 }
1007 }
1008
1009 if (to_mode == PDImode)
1010 {
1011 if (from_mode != DImode)
1012 from = convert_to_mode (DImode, from, unsignedp);
1013
1014 #ifdef HAVE_truncdipdi2
1015 if (HAVE_truncdipdi2)
1016 {
1017 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1018 return;
1019 }
1020 #endif /* HAVE_truncdipdi2 */
1021 abort ();
1022 }
1023
1024 if (from_mode == PDImode)
1025 {
1026 if (to_mode != DImode)
1027 {
1028 from = convert_to_mode (DImode, from, unsignedp);
1029 from_mode = DImode;
1030 }
1031 else
1032 {
1033 #ifdef HAVE_extendpdidi2
1034 if (HAVE_extendpdidi2)
1035 {
1036 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1037 return;
1038 }
1039 #endif /* HAVE_extendpdidi2 */
1040 abort ();
1041 }
1042 }
1043
1044 /* Now follow all the conversions between integers
1045 no more than a word long. */
1046
1047 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1048 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1049 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1050 GET_MODE_BITSIZE (from_mode)))
1051 {
1052 if (!((GET_CODE (from) == MEM
1053 && ! MEM_VOLATILE_P (from)
1054 && direct_load[(int) to_mode]
1055 && ! mode_dependent_address_p (XEXP (from, 0)))
1056 || GET_CODE (from) == REG
1057 || GET_CODE (from) == SUBREG))
1058 from = force_reg (from_mode, from);
1059 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1060 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1061 from = copy_to_reg (from);
1062 emit_move_insn (to, gen_lowpart (to_mode, from));
1063 return;
1064 }
1065
1066 /* Handle extension. */
1067 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1068 {
1069 /* Convert directly if that works. */
1070 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1071 != CODE_FOR_nothing)
1072 {
1073 emit_unop_insn (code, to, from, equiv_code);
1074 return;
1075 }
1076 else
1077 {
1078 enum machine_mode intermediate;
1079 rtx tmp;
1080 tree shift_amount;
1081
1082 /* Search for a mode to convert via. */
1083 for (intermediate = from_mode; intermediate != VOIDmode;
1084 intermediate = GET_MODE_WIDER_MODE (intermediate))
1085 if (((can_extend_p (to_mode, intermediate, unsignedp)
1086 != CODE_FOR_nothing)
1087 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1088 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1089 GET_MODE_BITSIZE (intermediate))))
1090 && (can_extend_p (intermediate, from_mode, unsignedp)
1091 != CODE_FOR_nothing))
1092 {
1093 convert_move (to, convert_to_mode (intermediate, from,
1094 unsignedp), unsignedp);
1095 return;
1096 }
1097
1098 /* No suitable intermediate mode.
1099 Generate what we need with shifts. */
1100 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1101 - GET_MODE_BITSIZE (from_mode), 0);
1102 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1103 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1104 to, unsignedp);
1105 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1106 to, unsignedp);
1107 if (tmp != to)
1108 emit_move_insn (to, tmp);
1109 return;
1110 }
1111 }
1112
1113 /* Support special truncate insns for certain modes. */
1114
1115 if (from_mode == DImode && to_mode == SImode)
1116 {
1117 #ifdef HAVE_truncdisi2
1118 if (HAVE_truncdisi2)
1119 {
1120 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1121 return;
1122 }
1123 #endif
1124 convert_move (to, force_reg (from_mode, from), unsignedp);
1125 return;
1126 }
1127
1128 if (from_mode == DImode && to_mode == HImode)
1129 {
1130 #ifdef HAVE_truncdihi2
1131 if (HAVE_truncdihi2)
1132 {
1133 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1134 return;
1135 }
1136 #endif
1137 convert_move (to, force_reg (from_mode, from), unsignedp);
1138 return;
1139 }
1140
1141 if (from_mode == DImode && to_mode == QImode)
1142 {
1143 #ifdef HAVE_truncdiqi2
1144 if (HAVE_truncdiqi2)
1145 {
1146 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1147 return;
1148 }
1149 #endif
1150 convert_move (to, force_reg (from_mode, from), unsignedp);
1151 return;
1152 }
1153
1154 if (from_mode == SImode && to_mode == HImode)
1155 {
1156 #ifdef HAVE_truncsihi2
1157 if (HAVE_truncsihi2)
1158 {
1159 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1160 return;
1161 }
1162 #endif
1163 convert_move (to, force_reg (from_mode, from), unsignedp);
1164 return;
1165 }
1166
1167 if (from_mode == SImode && to_mode == QImode)
1168 {
1169 #ifdef HAVE_truncsiqi2
1170 if (HAVE_truncsiqi2)
1171 {
1172 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1173 return;
1174 }
1175 #endif
1176 convert_move (to, force_reg (from_mode, from), unsignedp);
1177 return;
1178 }
1179
1180 if (from_mode == HImode && to_mode == QImode)
1181 {
1182 #ifdef HAVE_trunchiqi2
1183 if (HAVE_trunchiqi2)
1184 {
1185 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1186 return;
1187 }
1188 #endif
1189 convert_move (to, force_reg (from_mode, from), unsignedp);
1190 return;
1191 }
1192
1193 if (from_mode == TImode && to_mode == DImode)
1194 {
1195 #ifdef HAVE_trunctidi2
1196 if (HAVE_trunctidi2)
1197 {
1198 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1199 return;
1200 }
1201 #endif
1202 convert_move (to, force_reg (from_mode, from), unsignedp);
1203 return;
1204 }
1205
1206 if (from_mode == TImode && to_mode == SImode)
1207 {
1208 #ifdef HAVE_trunctisi2
1209 if (HAVE_trunctisi2)
1210 {
1211 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1212 return;
1213 }
1214 #endif
1215 convert_move (to, force_reg (from_mode, from), unsignedp);
1216 return;
1217 }
1218
1219 if (from_mode == TImode && to_mode == HImode)
1220 {
1221 #ifdef HAVE_trunctihi2
1222 if (HAVE_trunctihi2)
1223 {
1224 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1225 return;
1226 }
1227 #endif
1228 convert_move (to, force_reg (from_mode, from), unsignedp);
1229 return;
1230 }
1231
1232 if (from_mode == TImode && to_mode == QImode)
1233 {
1234 #ifdef HAVE_trunctiqi2
1235 if (HAVE_trunctiqi2)
1236 {
1237 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1238 return;
1239 }
1240 #endif
1241 convert_move (to, force_reg (from_mode, from), unsignedp);
1242 return;
1243 }
1244
1245 /* Handle truncation of volatile memrefs, and so on;
1246 the things that couldn't be truncated directly,
1247 and for which there was no special instruction. */
1248 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1249 {
1250 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1251 emit_move_insn (to, temp);
1252 return;
1253 }
1254
1255 /* Mode combination is not recognized. */
1256 abort ();
1257 }
1258
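/* Editorial sketch -- not part of expr.c.  In plain host arithmetic,
   the fill word computed above for a signed multiword widening is just
   the low word shifted arithmetically right by (word size - 1) bits:
   all ones when the value is negative, zero otherwise.  The function,
   its name and the 32-bit word size are hypothetical, and an
   arithmetic right shift of negative values is assumed.  */

static long long
widened_fill_example (low)
     int low;                                    /* low-order word, e.g. -5 */
{
  int fill = low >> 31;                          /* 0 if LOW >= 0, -1 if LOW < 0 */
  unsigned long long high = (unsigned int) fill; /* 0 or 0xffffffff */

  /* Reassemble the two words; for -5 this yields -5 as a 64-bit value,
     i.e. the sign has been propagated into the high word.  */
  return (long long) ((high << 32) | (unsigned int) low);
}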
1259 /* Return an rtx for a value that would result
1260 from converting X to mode MODE.
1261 Both X and MODE may be floating, or both integer.
1262 UNSIGNEDP is nonzero if X is an unsigned value.
1263 This can be done by referring to a part of X in place
1264 or by copying to a new temporary with conversion.
1265
1266 This function *must not* call protect_from_queue
1267 except when putting X into an insn (in which case convert_move does it). */
1268
1269 rtx
1270 convert_to_mode (mode, x, unsignedp)
1271 enum machine_mode mode;
1272 rtx x;
1273 int unsignedp;
1274 {
1275 return convert_modes (mode, VOIDmode, x, unsignedp);
1276 }
1277
1278 /* Return an rtx for a value that would result
1279 from converting X from mode OLDMODE to mode MODE.
1280 Both modes may be floating, or both integer.
1281 UNSIGNEDP is nonzero if X is an unsigned value.
1282
1283 This can be done by referring to a part of X in place
1284 or by copying to a new temporary with conversion.
1285
1286 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1287
1288 This function *must not* call protect_from_queue
1289 except when putting X into an insn (in which case convert_move does it). */
1290
1291 rtx
1292 convert_modes (mode, oldmode, x, unsignedp)
1293 enum machine_mode mode, oldmode;
1294 rtx x;
1295 int unsignedp;
1296 {
1297 register rtx temp;
1298
1299 /* If FROM is a SUBREG that indicates that we have already done at least
1300 the required extension, strip it. */
1301
1302 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1303 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1304 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1305 x = gen_lowpart (mode, x);
1306
1307 if (GET_MODE (x) != VOIDmode)
1308 oldmode = GET_MODE (x);
1309
1310 if (mode == oldmode)
1311 return x;
1312
1313 /* There is one case that we must handle specially: If we are converting
1314 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1315 we are to interpret the constant as unsigned, gen_lowpart will do
1316 the wrong thing if the constant appears negative. What we want to do is
1317 make the high-order word of the constant zero, not all ones. */
1318
1319 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1320 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1321 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1322 {
1323 HOST_WIDE_INT val = INTVAL (x);
1324
1325 if (oldmode != VOIDmode
1326 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1327 {
1328 int width = GET_MODE_BITSIZE (oldmode);
1329
1330 /* We need to zero extend VAL. */
1331 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1332 }
1333
1334 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1335 }
1336
1337 /* We can do this with a gen_lowpart if both desired and current modes
1338 are integer, and this is either a constant integer, a register, or a
1339 non-volatile MEM. Except for the constant case where MODE is no
1340 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1341
1342 if ((GET_CODE (x) == CONST_INT
1343 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1344 || (GET_MODE_CLASS (mode) == MODE_INT
1345 && GET_MODE_CLASS (oldmode) == MODE_INT
1346 && (GET_CODE (x) == CONST_DOUBLE
1347 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1348 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1349 && direct_load[(int) mode])
1350 || (GET_CODE (x) == REG
1351 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1352 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1353 {
1354 /* ?? If we don't know OLDMODE, we have to assume here that
1355 X does not need sign- or zero-extension. This may not be
1356 the case, but it's the best we can do. */
1357 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1358 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1359 {
1360 HOST_WIDE_INT val = INTVAL (x);
1361 int width = GET_MODE_BITSIZE (oldmode);
1362
1363 /* We must sign or zero-extend in this case. Start by
1364 zero-extending, then sign extend if we need to. */
1365 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1366 if (! unsignedp
1367 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1368 val |= (HOST_WIDE_INT) (-1) << width;
1369
1370 return GEN_INT (trunc_int_for_mode (val, mode));
1371 }
1372
1373 return gen_lowpart (mode, x);
1374 }
1375
1376 temp = gen_reg_rtx (mode);
1377 convert_move (temp, x, unsignedp);
1378 return temp;
1379 }
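/* Editorial worked example -- not part of expr.c.  It mirrors the
   masking done above when a CONST_INT known to be in a narrow OLDMODE
   is widened: first drop the bits above WIDTH, then replicate the sign
   bit when a signed interpretation is wanted.  The function and the
   choice of WIDTH == 8 (QImode) are hypothetical.  */

static long
narrow_then_extend_example (val, unsignedp)
     long val;                                /* e.g. 0x2a3 */
     int unsignedp;
{
  int width = 8;

  val &= ((long) 1 << width) - 1;             /* 0x2a3 -> 0xa3 */
  if (! unsignedp
      && (val & ((long) 1 << (width - 1))))   /* bit 7 set: value is negative */
    val |= (long) (-1) << width;              /* 0xa3 -> -93 */
  return val;
}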
1380 \f
1381 /* This macro determines the largest unit size that move_by_pieces
1382 can use. */
1383
1384 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1385 move efficiently, as opposed to MOVE_MAX which is the maximum
1386 number of bytes we can move with a single instruction. */
1387
1388 #ifndef MOVE_MAX_PIECES
1389 #define MOVE_MAX_PIECES MOVE_MAX
1390 #endif
1391
1392 /* Generate several move instructions to copy LEN bytes from block FROM to
1393 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1394 and TO through protect_from_queue before calling.
1395
1396 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1397 used to push FROM to the stack.
1398
1399 ALIGN is maximum alignment we can assume. */
1400
1401 void
1402 move_by_pieces (to, from, len, align)
1403 rtx to, from;
1404 unsigned HOST_WIDE_INT len;
1405 unsigned int align;
1406 {
1407 struct move_by_pieces data;
1408 rtx to_addr, from_addr = XEXP (from, 0);
1409 unsigned int max_size = MOVE_MAX_PIECES + 1;
1410 enum machine_mode mode = VOIDmode, tmode;
1411 enum insn_code icode;
1412
1413 data.offset = 0;
1414 data.from_addr = from_addr;
1415 if (to)
1416 {
1417 to_addr = XEXP (to, 0);
1418 data.to = to;
1419 data.autinc_to
1420 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1421 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1422 data.reverse
1423 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1424 }
1425 else
1426 {
1427 to_addr = NULL_RTX;
1428 data.to = NULL_RTX;
1429 data.autinc_to = 1;
1430 #ifdef STACK_GROWS_DOWNWARD
1431 data.reverse = 1;
1432 #else
1433 data.reverse = 0;
1434 #endif
1435 }
1436 data.to_addr = to_addr;
1437 data.from = from;
1438 data.autinc_from
1439 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1440 || GET_CODE (from_addr) == POST_INC
1441 || GET_CODE (from_addr) == POST_DEC);
1442
1443 data.explicit_inc_from = 0;
1444 data.explicit_inc_to = 0;
1445 if (data.reverse) data.offset = len;
1446 data.len = len;
1447
1448 /* If copying requires more than two move insns,
1449 copy addresses to registers (to make displacements shorter)
1450 and use post-increment if available. */
1451 if (!(data.autinc_from && data.autinc_to)
1452 && move_by_pieces_ninsns (len, align) > 2)
1453 {
1454 /* Find the mode of the largest move... */
1455 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1456 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1457 if (GET_MODE_SIZE (tmode) < max_size)
1458 mode = tmode;
1459
1460 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1461 {
1462 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1463 data.autinc_from = 1;
1464 data.explicit_inc_from = -1;
1465 }
1466 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1467 {
1468 data.from_addr = copy_addr_to_reg (from_addr);
1469 data.autinc_from = 1;
1470 data.explicit_inc_from = 1;
1471 }
1472 if (!data.autinc_from && CONSTANT_P (from_addr))
1473 data.from_addr = copy_addr_to_reg (from_addr);
1474 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1475 {
1476 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1477 data.autinc_to = 1;
1478 data.explicit_inc_to = -1;
1479 }
1480 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1481 {
1482 data.to_addr = copy_addr_to_reg (to_addr);
1483 data.autinc_to = 1;
1484 data.explicit_inc_to = 1;
1485 }
1486 if (!data.autinc_to && CONSTANT_P (to_addr))
1487 data.to_addr = copy_addr_to_reg (to_addr);
1488 }
1489
1490 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1491 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1492 align = MOVE_MAX * BITS_PER_UNIT;
1493
1494 /* First move what we can in the largest integer mode, then go to
1495 successively smaller modes. */
1496
1497 while (max_size > 1)
1498 {
1499 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1500 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1501 if (GET_MODE_SIZE (tmode) < max_size)
1502 mode = tmode;
1503
1504 if (mode == VOIDmode)
1505 break;
1506
1507 icode = mov_optab->handlers[(int) mode].insn_code;
1508 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1509 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1510
1511 max_size = GET_MODE_SIZE (mode);
1512 }
1513
1514 /* The code above should have handled everything. */
1515 if (data.len > 0)
1516 abort ();
1517 }
1518
1519 /* Return number of insns required to move L bytes by pieces.
1520 ALIGN (in bits) is maximum alignment we can assume. */
1521
1522 static unsigned HOST_WIDE_INT
1523 move_by_pieces_ninsns (l, align)
1524 unsigned HOST_WIDE_INT l;
1525 unsigned int align;
1526 {
1527 unsigned HOST_WIDE_INT n_insns = 0;
1528 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1529
1530 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1531 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1532 align = MOVE_MAX * BITS_PER_UNIT;
1533
1534 while (max_size > 1)
1535 {
1536 enum machine_mode mode = VOIDmode, tmode;
1537 enum insn_code icode;
1538
1539 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1540 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1541 if (GET_MODE_SIZE (tmode) < max_size)
1542 mode = tmode;
1543
1544 if (mode == VOIDmode)
1545 break;
1546
1547 icode = mov_optab->handlers[(int) mode].insn_code;
1548 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1549 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1550
1551 max_size = GET_MODE_SIZE (mode);
1552 }
1553
1554 if (l)
1555 abort ();
1556 return n_insns;
1557 }
1558
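/* Editorial sketch -- not part of expr.c.  It restates the counting
   loop above in plain C for a hypothetical target whose widest usable
   piece is 8 bytes: take as many of the largest pieces as fit, then
   retry with successively smaller powers of two.  MOVE_BY_PIECES_P
   compares a count like this one against MOVE_RATIO to decide whether
   a piecewise copy is worthwhile.  */

static unsigned int
ninsns_example (len)
     unsigned int len;          /* e.g. 11 bytes */
{
  unsigned int n_insns = 0;
  unsigned int size;

  for (size = 8; size >= 1; size /= 2)
    {
      n_insns += len / size;    /* 11 -> one 8-byte, one 2-byte, one 1-byte */
      len %= size;
    }
  return n_insns;               /* 3 for LEN == 11 */
}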
1559 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1560 with move instructions for mode MODE. GENFUN is the gen_... function
1561 to make a move insn for that mode. DATA has all the other info. */
1562
1563 static void
1564 move_by_pieces_1 (genfun, mode, data)
1565 rtx (*genfun) PARAMS ((rtx, ...));
1566 enum machine_mode mode;
1567 struct move_by_pieces *data;
1568 {
1569 unsigned int size = GET_MODE_SIZE (mode);
1570 rtx to1 = NULL_RTX, from1;
1571
1572 while (data->len >= size)
1573 {
1574 if (data->reverse)
1575 data->offset -= size;
1576
1577 if (data->to)
1578 {
1579 if (data->autinc_to)
1580 {
1581 to1 = replace_equiv_address (data->to, data->to_addr);
1582 to1 = adjust_address (to1, mode, 0);
1583 }
1584 else
1585 to1 = adjust_address (data->to, mode, data->offset);
1586 }
1587
1588 if (data->autinc_from)
1589 {
1590 from1 = replace_equiv_address (data->from, data->from_addr);
1591 from1 = adjust_address (from1, mode, 0);
1592 }
1593 else
1594 from1 = adjust_address (data->from, mode, data->offset);
1595
1596 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1597 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1598 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1599 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1600
1601 if (data->to)
1602 emit_insn ((*genfun) (to1, from1));
1603 else
1604 {
1605 #ifdef PUSH_ROUNDING
1606 emit_single_push_insn (mode, from1, NULL);
1607 #else
1608 abort ();
1609 #endif
1610 }
1611
1612 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1613 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1614 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1615 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1616
1617 if (! data->reverse)
1618 data->offset += size;
1619
1620 data->len -= size;
1621 }
1622 }
1623 \f
1624 /* Emit code to move a block Y to a block X.
1625 This may be done with string-move instructions,
1626 with multiple scalar move instructions, or with a library call.
1627
1628 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1629 with mode BLKmode.
1630 SIZE is an rtx that says how long they are.
1631 ALIGN is the maximum alignment we can assume they have.
1632
1633 Return the address of the new block, if memcpy is called and returns it,
1634 0 otherwise. */
1635
1636 rtx
1637 emit_block_move (x, y, size, align)
1638 rtx x, y;
1639 rtx size;
1640 unsigned int align;
1641 {
1642 rtx retval = 0;
1643 #ifdef TARGET_MEM_FUNCTIONS
1644 static tree fn;
1645 tree call_expr, arg_list;
1646 #endif
1647
1648 if (GET_MODE (x) != BLKmode)
1649 abort ();
1650
1651 if (GET_MODE (y) != BLKmode)
1652 abort ();
1653
1654 x = protect_from_queue (x, 1);
1655 y = protect_from_queue (y, 0);
1656 size = protect_from_queue (size, 0);
1657
1658 if (GET_CODE (x) != MEM)
1659 abort ();
1660 if (GET_CODE (y) != MEM)
1661 abort ();
1662 if (size == 0)
1663 abort ();
1664
1665 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1666 move_by_pieces (x, y, INTVAL (size), align);
1667 else
1668 {
1669 /* Try the most limited insn first, because there's no point
1670 including more than one in the machine description unless
1671 the more limited one has some advantage. */
1672
1673 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1674 enum machine_mode mode;
1675
1676 /* Since this is a move insn, we don't care about volatility. */
1677 volatile_ok = 1;
1678
1679 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1680 mode = GET_MODE_WIDER_MODE (mode))
1681 {
1682 enum insn_code code = movstr_optab[(int) mode];
1683 insn_operand_predicate_fn pred;
1684
1685 if (code != CODE_FOR_nothing
1686 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1687 here because if SIZE is less than the mode mask, as it is
1688 returned by the macro, it will definitely be less than the
1689 actual mode mask. */
1690 && ((GET_CODE (size) == CONST_INT
1691 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1692 <= (GET_MODE_MASK (mode) >> 1)))
1693 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1694 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1695 || (*pred) (x, BLKmode))
1696 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1697 || (*pred) (y, BLKmode))
1698 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1699 || (*pred) (opalign, VOIDmode)))
1700 {
1701 rtx op2;
1702 rtx last = get_last_insn ();
1703 rtx pat;
1704
1705 op2 = convert_to_mode (mode, size, 1);
1706 pred = insn_data[(int) code].operand[2].predicate;
1707 if (pred != 0 && ! (*pred) (op2, mode))
1708 op2 = copy_to_mode_reg (mode, op2);
1709
1710 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1711 if (pat)
1712 {
1713 emit_insn (pat);
1714 volatile_ok = 0;
1715 return 0;
1716 }
1717 else
1718 delete_insns_since (last);
1719 }
1720 }
1721
1722 volatile_ok = 0;
1723
1724 /* X, Y, or SIZE may have been passed through protect_from_queue.
1725
1726 It is unsafe to save the value generated by protect_from_queue
1727 and reuse it later. Consider what happens if emit_queue is
1728 called before the return value from protect_from_queue is used.
1729
1730 Expansion of the CALL_EXPR below will call emit_queue before
1731 we are finished emitting RTL for argument setup. So if we are
1732 not careful we could get the wrong value for an argument.
1733
1734 To avoid this problem we go ahead and emit code to copy X, Y &
1735 SIZE into new pseudos. We can then place those new pseudos
1736 into an RTL_EXPR and use them later, even after a call to
1737 emit_queue.
1738
1739 Note this is not strictly needed for library calls since they
1740 do not call emit_queue before loading their arguments. However,
1741 we may need to have library calls call emit_queue in the future
1742 since failing to do so could cause problems for targets which
1743 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1744 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1745 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1746
1747 #ifdef TARGET_MEM_FUNCTIONS
1748 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1749 #else
1750 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1751 TREE_UNSIGNED (integer_type_node));
1752 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1753 #endif
1754
1755 #ifdef TARGET_MEM_FUNCTIONS
1756 /* It is incorrect to use the libcall calling conventions to call
1757 memcpy in this context.
1758
1759 This could be a user call to memcpy and the user may wish to
1760 examine the return value from memcpy.
1761
1762 For targets where libcalls and normal calls have different conventions
1763 for returning pointers, we could end up generating incorrect code.
1764
1765 So instead of using a libcall sequence we build up a suitable
1766 CALL_EXPR and expand the call in the normal fashion. */
1767 if (fn == NULL_TREE)
1768 {
1769 tree fntype;
1770
1771 /* This was copied from except.c, I don't know if all this is
1772 necessary in this context or not. */
1773 fn = get_identifier ("memcpy");
1774 fntype = build_pointer_type (void_type_node);
1775 fntype = build_function_type (fntype, NULL_TREE);
1776 fn = build_decl (FUNCTION_DECL, fn, fntype);
1777 ggc_add_tree_root (&fn, 1);
1778 DECL_EXTERNAL (fn) = 1;
1779 TREE_PUBLIC (fn) = 1;
1780 DECL_ARTIFICIAL (fn) = 1;
1781 TREE_NOTHROW (fn) = 1;
1782 make_decl_rtl (fn, NULL);
1783 assemble_external (fn);
1784 }
1785
1786 /* We need to make an argument list for the function call.
1787
1788 memcpy has three arguments, the first two are void * addresses and
1789 the last is a size_t byte count for the copy. */
1790 arg_list
1791 = build_tree_list (NULL_TREE,
1792 make_tree (build_pointer_type (void_type_node), x));
1793 TREE_CHAIN (arg_list)
1794 = build_tree_list (NULL_TREE,
1795 make_tree (build_pointer_type (void_type_node), y));
1796 TREE_CHAIN (TREE_CHAIN (arg_list))
1797 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1798 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1799
1800 /* Now we have to build up the CALL_EXPR itself. */
1801 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1802 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1803 call_expr, arg_list, NULL_TREE);
1804 TREE_SIDE_EFFECTS (call_expr) = 1;
1805
1806 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1807 #else
1808 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1809 VOIDmode, 3, y, Pmode, x, Pmode,
1810 convert_to_mode (TYPE_MODE (integer_type_node), size,
1811 TREE_UNSIGNED (integer_type_node)),
1812 TYPE_MODE (integer_type_node));
1813 #endif
1814 }
1815
1816 return retval;
1817 }
1818 \f
1819 /* Copy all or part of a value X into registers starting at REGNO.
1820 The number of registers to be filled is NREGS. */
1821
1822 void
1823 move_block_to_reg (regno, x, nregs, mode)
1824 int regno;
1825 rtx x;
1826 int nregs;
1827 enum machine_mode mode;
1828 {
1829 int i;
1830 #ifdef HAVE_load_multiple
1831 rtx pat;
1832 rtx last;
1833 #endif
1834
1835 if (nregs == 0)
1836 return;
1837
1838 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1839 x = validize_mem (force_const_mem (mode, x));
1840
1841 /* See if the machine can do this with a load multiple insn. */
1842 #ifdef HAVE_load_multiple
1843 if (HAVE_load_multiple)
1844 {
1845 last = get_last_insn ();
1846 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1847 GEN_INT (nregs));
1848 if (pat)
1849 {
1850 emit_insn (pat);
1851 return;
1852 }
1853 else
1854 delete_insns_since (last);
1855 }
1856 #endif
1857
1858 for (i = 0; i < nregs; i++)
1859 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1860 operand_subword_force (x, i, mode));
1861 }
1862
1863 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1864 The number of registers to be filled is NREGS. SIZE indicates the number
1865 of bytes in the object X. */
1866
1867 void
1868 move_block_from_reg (regno, x, nregs, size)
1869 int regno;
1870 rtx x;
1871 int nregs;
1872 int size;
1873 {
1874 int i;
1875 #ifdef HAVE_store_multiple
1876 rtx pat;
1877 rtx last;
1878 #endif
1879 enum machine_mode mode;
1880
1881 if (nregs == 0)
1882 return;
1883
1884 /* If SIZE is that of a mode no bigger than a word, just use that
1885 mode's store operation. */
1886 if (size <= UNITS_PER_WORD
1887 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1888 {
1889 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1890 return;
1891 }
1892
1893 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1894 to the left before storing to memory. Note that the previous test
1895 doesn't handle all cases (e.g. SIZE == 3). */
1896 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1897 {
1898 rtx tem = operand_subword (x, 0, 1, BLKmode);
1899 rtx shift;
1900
1901 if (tem == 0)
1902 abort ();
1903
1904 shift = expand_shift (LSHIFT_EXPR, word_mode,
1905 gen_rtx_REG (word_mode, regno),
1906 build_int_2 ((UNITS_PER_WORD - size)
1907 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1908 emit_move_insn (tem, shift);
1909 return;
1910 }
1911
1912 /* See if the machine can do this with a store multiple insn. */
1913 #ifdef HAVE_store_multiple
1914 if (HAVE_store_multiple)
1915 {
1916 last = get_last_insn ();
1917 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1918 GEN_INT (nregs));
1919 if (pat)
1920 {
1921 emit_insn (pat);
1922 return;
1923 }
1924 else
1925 delete_insns_since (last);
1926 }
1927 #endif
1928
1929 for (i = 0; i < nregs; i++)
1930 {
1931 rtx tem = operand_subword (x, i, 1, BLKmode);
1932
1933 if (tem == 0)
1934 abort ();
1935
1936 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1937 }
1938 }
1939
1940 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1941 registers represented by a PARALLEL. SSIZE represents the total size of
1942 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1943 SRC in bits. */
1944 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1945 the balance will be in what would be the low-order memory addresses, i.e.
1946 left justified for big endian, right justified for little endian. This
1947 happens to be true for the targets currently using this support. If this
1948 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1949 would be needed. */
1950
1951 void
1952 emit_group_load (dst, orig_src, ssize, align)
1953 rtx dst, orig_src;
1954 unsigned int align;
1955 int ssize;
1956 {
1957 rtx *tmps, src;
1958 int start, i;
1959
1960 if (GET_CODE (dst) != PARALLEL)
1961 abort ();
1962
1963 /* Check for a NULL entry, used to indicate that the parameter goes
1964 both on the stack and in registers. */
1965 if (XEXP (XVECEXP (dst, 0, 0), 0))
1966 start = 0;
1967 else
1968 start = 1;
1969
1970 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1971
1972 /* Process the pieces. */
1973 for (i = start; i < XVECLEN (dst, 0); i++)
1974 {
1975 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1976 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1977 unsigned int bytelen = GET_MODE_SIZE (mode);
1978 int shift = 0;
1979
1980 /* Handle trailing fragments that run over the size of the struct. */
1981 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1982 {
1983 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1984 bytelen = ssize - bytepos;
1985 if (bytelen <= 0)
1986 abort ();
1987 }
1988
1989 /* If we won't be loading directly from memory, protect the real source
1990 from strange tricks we might play; but make sure that the source can
1991 be loaded directly into the destination. */
1992 src = orig_src;
1993 if (GET_CODE (orig_src) != MEM
1994 && (!CONSTANT_P (orig_src)
1995 || (GET_MODE (orig_src) != mode
1996 && GET_MODE (orig_src) != VOIDmode)))
1997 {
1998 if (GET_MODE (orig_src) == VOIDmode)
1999 src = gen_reg_rtx (mode);
2000 else
2001 src = gen_reg_rtx (GET_MODE (orig_src));
2002 emit_move_insn (src, orig_src);
2003 }
2004
2005 /* Optimize the access just a bit. */
2006 if (GET_CODE (src) == MEM
2007 && align >= GET_MODE_ALIGNMENT (mode)
2008 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2009 && bytelen == GET_MODE_SIZE (mode))
2010 {
2011 tmps[i] = gen_reg_rtx (mode);
2012 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2013 }
2014 else if (GET_CODE (src) == CONCAT)
2015 {
2016 if (bytepos == 0
2017 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2018 tmps[i] = XEXP (src, 0);
2019 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2020 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2021 tmps[i] = XEXP (src, 1);
2022 else
2023 abort ();
2024 }
2025 else if (CONSTANT_P (src)
2026 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2027 tmps[i] = src;
2028 else
2029 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2030 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2031 mode, mode, align, ssize);
2032
2033 if (BYTES_BIG_ENDIAN && shift)
2034 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2035 tmps[i], 0, OPTAB_WIDEN);
2036 }
2037
2038 emit_queue ();
2039
2040 /* Copy the extracted pieces into the proper (probable) hard regs. */
2041 for (i = start; i < XVECLEN (dst, 0); i++)
2042 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2043 }
2044
2045 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2046 registers represented by a PARALLEL. SSIZE represents the total size of
2047 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2048
2049 void
2050 emit_group_store (orig_dst, src, ssize, align)
2051 rtx orig_dst, src;
2052 int ssize;
2053 unsigned int align;
2054 {
2055 rtx *tmps, dst;
2056 int start, i;
2057
2058 if (GET_CODE (src) != PARALLEL)
2059 abort ();
2060
2061 /* Check for a NULL entry, used to indicate that the parameter goes
2062 both on the stack and in registers. */
2063 if (XEXP (XVECEXP (src, 0, 0), 0))
2064 start = 0;
2065 else
2066 start = 1;
2067
2068 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2069
2070 /* Copy the (probable) hard regs into pseudos. */
2071 for (i = start; i < XVECLEN (src, 0); i++)
2072 {
2073 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2074 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2075 emit_move_insn (tmps[i], reg);
2076 }
2077 emit_queue ();
2078
2079 /* If we won't be storing directly into memory, protect the real destination
2080 from strange tricks we might play. */
2081 dst = orig_dst;
2082 if (GET_CODE (dst) == PARALLEL)
2083 {
2084 rtx temp;
2085
2086 /* We can get a PARALLEL dst if there is a conditional expression in
2087 a return statement. In that case, the dst and src are the same,
2088 so no action is necessary. */
2089 if (rtx_equal_p (dst, src))
2090 return;
2091
2092 /* It is unclear if we can ever reach here, but we may as well handle
2093 it. Allocate a temporary, and split this into a store/load to/from
2094 the temporary. */
2095
2096 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2097 emit_group_store (temp, src, ssize, align);
2098 emit_group_load (dst, temp, ssize, align);
2099 return;
2100 }
2101 else if (GET_CODE (dst) != MEM)
2102 {
2103 dst = gen_reg_rtx (GET_MODE (orig_dst));
2104 /* Make life a bit easier for combine. */
2105 emit_move_insn (dst, const0_rtx);
2106 }
2107
2108 /* Process the pieces. */
2109 for (i = start; i < XVECLEN (src, 0); i++)
2110 {
2111 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2112 enum machine_mode mode = GET_MODE (tmps[i]);
2113 unsigned int bytelen = GET_MODE_SIZE (mode);
2114
2115 /* Handle trailing fragments that run over the size of the struct. */
2116 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2117 {
2118 if (BYTES_BIG_ENDIAN)
2119 {
2120 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2121 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2122 tmps[i], 0, OPTAB_WIDEN);
2123 }
2124 bytelen = ssize - bytepos;
2125 }
2126
2127 /* Optimize the access just a bit. */
2128 if (GET_CODE (dst) == MEM
2129 && align >= GET_MODE_ALIGNMENT (mode)
2130 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2131 && bytelen == GET_MODE_SIZE (mode))
2132 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2133 else
2134 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2135 mode, tmps[i], align, ssize);
2136 }
2137
2138 emit_queue ();
2139
2140 /* Copy from the pseudo into the (probable) hard reg. */
2141 if (GET_CODE (dst) == REG)
2142 emit_move_insn (orig_dst, dst);
2143 }
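/* Illustrative sketch: when target code describes a value that lives in
   several non-adjacent registers with a PARALLEL, callers typically move
   it to and from an ordinary block with the pair of routines above, e.g.

       emit_group_store (dest_mem, parallel_value, ssize, align);
       emit_group_load (parallel_value, src_mem, ssize, align);

   dest_mem, src_mem, parallel_value, ssize and align are placeholders;
   the PARALLEL itself always comes from target-specific code.  */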
2144
2145 /* Generate code to copy a BLKmode object of TYPE out of a
2146 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2147 is null, a stack temporary is created. TGTBLK is returned.
2148
2149 The primary purpose of this routine is to handle functions
2150 that return BLKmode structures in registers. Some machines
2151 (the PA for example) want to return all small structures
2152 in registers regardless of the structure's alignment. */
2153
2154 rtx
2155 copy_blkmode_from_reg (tgtblk, srcreg, type)
2156 rtx tgtblk;
2157 rtx srcreg;
2158 tree type;
2159 {
2160 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2161 rtx src = NULL, dst = NULL;
2162 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2163 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2164
2165 if (tgtblk == 0)
2166 {
2167 tgtblk = assign_temp (build_qualified_type (type,
2168 (TYPE_QUALS (type)
2169 | TYPE_QUAL_CONST)),
2170 0, 1, 1);
2171 preserve_temp_slots (tgtblk);
2172 }
2173
2174 /* This code assumes srcreg is at least a full word. If it isn't,
2175 copy it into a new pseudo which is a full word. */
2176 if (GET_MODE (srcreg) != BLKmode
2177 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2178 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2179
2180 /* Structures whose size is not a multiple of a word are aligned
2181 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2182 machine, this means we must skip the empty high order bytes when
2183 calculating the bit offset. */
2184 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2185 big_endian_correction
2186 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2187
2188 /* Copy the structure BITSIZE bits at a time.
2189
2190 We could probably emit more efficient code for machines which do not use
2191 strict alignment, but it doesn't seem worth the effort at the current
2192 time. */
2193 for (bitpos = 0, xbitpos = big_endian_correction;
2194 bitpos < bytes * BITS_PER_UNIT;
2195 bitpos += bitsize, xbitpos += bitsize)
2196 {
2197 /* We need a new source operand each time xbitpos is on a
2198 word boundary and when xbitpos == big_endian_correction
2199 (the first time through). */
2200 if (xbitpos % BITS_PER_WORD == 0
2201 || xbitpos == big_endian_correction)
2202 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2203 GET_MODE (srcreg));
2204
2205 /* We need a new destination operand each time bitpos is on
2206 a word boundary. */
2207 if (bitpos % BITS_PER_WORD == 0)
2208 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2209
2210 /* Use xbitpos for the source extraction (right justified) and
2211 bitpos for the destination store (left justified).  */
2212 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2213 extract_bit_field (src, bitsize,
2214 xbitpos % BITS_PER_WORD, 1,
2215 NULL_RTX, word_mode, word_mode,
2216 bitsize, BITS_PER_WORD),
2217 bitsize, BITS_PER_WORD);
2218 }
2219
2220 return tgtblk;
2221 }
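/* Illustrative sketch (assumptions noted): code expanding a call whose
   BLKmode result comes back in registers might hand the value register
   and the result type to the routine above, roughly

       target = copy_blkmode_from_reg (target, value_reg, type);

   where value_reg is whatever rtx the target says holds the return
   value; passing a null TGTBLK makes the routine allocate a stack
   temporary itself.  */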
2222
2223 /* Add a USE expression for REG to the (possibly empty) list pointed
2224 to by CALL_FUSAGE. REG must denote a hard register. */
2225
2226 void
2227 use_reg (call_fusage, reg)
2228 rtx *call_fusage, reg;
2229 {
2230 if (GET_CODE (reg) != REG
2231 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2232 abort ();
2233
2234 *call_fusage
2235 = gen_rtx_EXPR_LIST (VOIDmode,
2236 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2237 }
2238
2239 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2240 starting at REGNO. All of these registers must be hard registers. */
2241
2242 void
2243 use_regs (call_fusage, regno, nregs)
2244 rtx *call_fusage;
2245 int regno;
2246 int nregs;
2247 {
2248 int i;
2249
2250 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2251 abort ();
2252
2253 for (i = 0; i < nregs; i++)
2254 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2255 }
2256
2257 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2258 PARALLEL REGS. This is for calls that pass values in multiple
2259 non-contiguous locations. The Irix 6 ABI has examples of this. */
2260
2261 void
2262 use_group_regs (call_fusage, regs)
2263 rtx *call_fusage;
2264 rtx regs;
2265 {
2266 int i;
2267
2268 for (i = 0; i < XVECLEN (regs, 0); i++)
2269 {
2270 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2271
2272 /* A NULL entry means the parameter goes both on the stack and in
2273 registers. This can also be a MEM for targets that pass values
2274 partially on the stack and partially in registers. */
2275 if (reg != 0 && GET_CODE (reg) == REG)
2276 use_reg (call_fusage, reg);
2277 }
2278 }
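/* Example (a sketch only): a call expander collects the registers that
   carry arguments so they can be attached to CALL_INSN_FUNCTION_USAGE:

       rtx call_fusage = 0;
       use_reg (&call_fusage, gen_rtx_REG (Pmode, 3));
       use_regs (&call_fusage, 4, 2);

   The hard register numbers 3 and 4 are arbitrary and chosen only for
   illustration; real callers use whatever the target's calling
   conventions dictate.  */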
2279 \f
2280
2281 int
2282 can_store_by_pieces (len, constfun, constfundata, align)
2283 unsigned HOST_WIDE_INT len;
2284 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2285 PTR constfundata;
2286 unsigned int align;
2287 {
2288 unsigned HOST_WIDE_INT max_size, l;
2289 HOST_WIDE_INT offset = 0;
2290 enum machine_mode mode, tmode;
2291 enum insn_code icode;
2292 int reverse;
2293 rtx cst;
2294
2295 if (! MOVE_BY_PIECES_P (len, align))
2296 return 0;
2297
2298 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2299 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2300 align = MOVE_MAX * BITS_PER_UNIT;
2301
2302 /* We would first store what we can in the largest integer mode, then go to
2303 successively smaller modes. */
2304
2305 for (reverse = 0;
2306 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2307 reverse++)
2308 {
2309 l = len;
2310 mode = VOIDmode;
2311 max_size = MOVE_MAX_PIECES + 1;
2312 while (max_size > 1)
2313 {
2314 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2315 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2316 if (GET_MODE_SIZE (tmode) < max_size)
2317 mode = tmode;
2318
2319 if (mode == VOIDmode)
2320 break;
2321
2322 icode = mov_optab->handlers[(int) mode].insn_code;
2323 if (icode != CODE_FOR_nothing
2324 && align >= GET_MODE_ALIGNMENT (mode))
2325 {
2326 unsigned int size = GET_MODE_SIZE (mode);
2327
2328 while (l >= size)
2329 {
2330 if (reverse)
2331 offset -= size;
2332
2333 cst = (*constfun) (constfundata, offset, mode);
2334 if (!LEGITIMATE_CONSTANT_P (cst))
2335 return 0;
2336
2337 if (!reverse)
2338 offset += size;
2339
2340 l -= size;
2341 }
2342 }
2343
2344 max_size = GET_MODE_SIZE (mode);
2345 }
2346
2347 /* The code above should have handled everything. */
2348 if (l != 0)
2349 abort ();
2350 }
2351
2352 return 1;
2353 }
2354
2355 /* Generate several move instructions to store LEN bytes generated by
2356 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2357 pointer which will be passed as argument in every CONSTFUN call.
2358 ALIGN is maximum alignment we can assume. */
2359
2360 void
2361 store_by_pieces (to, len, constfun, constfundata, align)
2362 rtx to;
2363 unsigned HOST_WIDE_INT len;
2364 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2365 PTR constfundata;
2366 unsigned int align;
2367 {
2368 struct store_by_pieces data;
2369
2370 if (! MOVE_BY_PIECES_P (len, align))
2371 abort ();
2372 to = protect_from_queue (to, 1);
2373 data.constfun = constfun;
2374 data.constfundata = constfundata;
2375 data.len = len;
2376 data.to = to;
2377 store_by_pieces_1 (&data, align);
2378 }
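/* Illustrative pairing (hypothetical callback and data): a caller first
   checks that the constant data can be emitted inline and then emits it:

       if (can_store_by_pieces (len, read_byte_fn, &cb_data, align))
         store_by_pieces (dest_mem, len, read_byte_fn, &cb_data, align);

   read_byte_fn must return, as an rtx constant of the requested mode,
   the bytes of the source starting at the given offset; cb_data is
   whatever state that callback needs.  */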
2379
2380 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2381 rtx with BLKmode). The caller must pass TO through protect_from_queue
2382 before calling. ALIGN is maximum alignment we can assume. */
2383
2384 static void
2385 clear_by_pieces (to, len, align)
2386 rtx to;
2387 unsigned HOST_WIDE_INT len;
2388 unsigned int align;
2389 {
2390 struct store_by_pieces data;
2391
2392 data.constfun = clear_by_pieces_1;
2393 data.constfundata = NULL;
2394 data.len = len;
2395 data.to = to;
2396 store_by_pieces_1 (&data, align);
2397 }
2398
2399 /* Callback routine for clear_by_pieces.
2400 Return const0_rtx unconditionally. */
2401
2402 static rtx
2403 clear_by_pieces_1 (data, offset, mode)
2404 PTR data ATTRIBUTE_UNUSED;
2405 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2406 enum machine_mode mode ATTRIBUTE_UNUSED;
2407 {
2408 return const0_rtx;
2409 }
2410
2411 /* Subroutine of clear_by_pieces and store_by_pieces.
2412 Generate several move instructions to store LEN bytes of block TO. (A MEM
2413 rtx with BLKmode). The caller must pass TO through protect_from_queue
2414 before calling. ALIGN is maximum alignment we can assume. */
2415
2416 static void
2417 store_by_pieces_1 (data, align)
2418 struct store_by_pieces *data;
2419 unsigned int align;
2420 {
2421 rtx to_addr = XEXP (data->to, 0);
2422 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2423 enum machine_mode mode = VOIDmode, tmode;
2424 enum insn_code icode;
2425
2426 data->offset = 0;
2427 data->to_addr = to_addr;
2428 data->autinc_to
2429 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2430 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2431
2432 data->explicit_inc_to = 0;
2433 data->reverse
2434 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2435 if (data->reverse)
2436 data->offset = data->len;
2437
2438 /* If storing requires more than two move insns,
2439 copy addresses to registers (to make displacements shorter)
2440 and use post-increment if available. */
2441 if (!data->autinc_to
2442 && move_by_pieces_ninsns (data->len, align) > 2)
2443 {
2444 /* Determine the main mode we'll be using. */
2445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2447 if (GET_MODE_SIZE (tmode) < max_size)
2448 mode = tmode;
2449
2450 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2451 {
2452 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2453 data->autinc_to = 1;
2454 data->explicit_inc_to = -1;
2455 }
2456
2457 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2458 && ! data->autinc_to)
2459 {
2460 data->to_addr = copy_addr_to_reg (to_addr);
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = 1;
2463 }
2464
2465 if (! data->autinc_to && CONSTANT_P (to_addr))
2466 data->to_addr = copy_addr_to_reg (to_addr);
2467 }
2468
2469 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2470 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2471 align = MOVE_MAX * BITS_PER_UNIT;
2472
2473 /* First store what we can in the largest integer mode, then go to
2474 successively smaller modes. */
2475
2476 while (max_size > 1)
2477 {
2478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2480 if (GET_MODE_SIZE (tmode) < max_size)
2481 mode = tmode;
2482
2483 if (mode == VOIDmode)
2484 break;
2485
2486 icode = mov_optab->handlers[(int) mode].insn_code;
2487 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2488 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2489
2490 max_size = GET_MODE_SIZE (mode);
2491 }
2492
2493 /* The code above should have handled everything. */
2494 if (data->len != 0)
2495 abort ();
2496 }
2497
2498 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2499 with move instructions for mode MODE. GENFUN is the gen_... function
2500 to make a move insn for that mode. DATA has all the other info. */
2501
2502 static void
2503 store_by_pieces_2 (genfun, mode, data)
2504 rtx (*genfun) PARAMS ((rtx, ...));
2505 enum machine_mode mode;
2506 struct store_by_pieces *data;
2507 {
2508 unsigned int size = GET_MODE_SIZE (mode);
2509 rtx to1, cst;
2510
2511 while (data->len >= size)
2512 {
2513 if (data->reverse)
2514 data->offset -= size;
2515
2516 if (data->autinc_to)
2517 {
2518 to1 = replace_equiv_address (data->to, data->to_addr);
2519 to1 = adjust_address (to1, mode, 0);
2520 }
2521 else
2522 to1 = adjust_address (data->to, mode, data->offset);
2523
2524 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2525 emit_insn (gen_add2_insn (data->to_addr,
2526 GEN_INT (-(HOST_WIDE_INT) size)));
2527
2528 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2529 emit_insn ((*genfun) (to1, cst));
2530
2531 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2532 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2533
2534 if (! data->reverse)
2535 data->offset += size;
2536
2537 data->len -= size;
2538 }
2539 }
2540 \f
2541 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2542 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2543
2544 If we call a function that returns the length of the block, return it. */
2545
2546 rtx
2547 clear_storage (object, size, align)
2548 rtx object;
2549 rtx size;
2550 unsigned int align;
2551 {
2552 #ifdef TARGET_MEM_FUNCTIONS
2553 static tree fn;
2554 tree call_expr, arg_list;
2555 #endif
2556 rtx retval = 0;
2557
2558 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2559 just move a zero. Otherwise, do this a piece at a time. */
2560 if (GET_MODE (object) != BLKmode
2561 && GET_CODE (size) == CONST_INT
2562 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2563 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2564 else
2565 {
2566 object = protect_from_queue (object, 1);
2567 size = protect_from_queue (size, 0);
2568
2569 if (GET_CODE (size) == CONST_INT
2570 && MOVE_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
2572 else
2573 {
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2577
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2580
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2583 {
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2586
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2600 {
2601 rtx op1;
2602 rtx last = get_last_insn ();
2603 rtx pat;
2604
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2609
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2611 if (pat)
2612 {
2613 emit_insn (pat);
2614 return 0;
2615 }
2616 else
2617 delete_insns_since (last);
2618 }
2619 }
2620
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2622
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2626
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2630
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos. We can then place those new pseudos
2633 into an RTL_EXPR and use them later, even after a call to
2634 emit_queue.
2635
2636 Note this is not strictly needed for library calls since they
2637 do not call emit_queue before loading their arguments. However,
2638 we may need to have library calls call emit_queue in the future
2639 since failing to do so could cause problems for targets which
2640 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2641 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2642
2643 #ifdef TARGET_MEM_FUNCTIONS
2644 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2645 #else
2646 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2647 TREE_UNSIGNED (integer_type_node));
2648 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2649 #endif
2650
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 /* It is incorrect to use the libcall calling conventions to call
2653 memset in this context.
2654
2655 This could be a user call to memset and the user may wish to
2656 examine the return value from memset.
2657
2658 For targets where libcalls and normal calls have different
2659 conventions for returning pointers, we could end up generating
2660 incorrect code.
2661
2662 So instead of using a libcall sequence we build up a suitable
2663 CALL_EXPR and expand the call in the normal fashion. */
2664 if (fn == NULL_TREE)
2665 {
2666 tree fntype;
2667
2668 /* This was copied from except.c; I don't know whether all of it is
2669 necessary in this context or not.  */
2670 fn = get_identifier ("memset");
2671 fntype = build_pointer_type (void_type_node);
2672 fntype = build_function_type (fntype, NULL_TREE);
2673 fn = build_decl (FUNCTION_DECL, fn, fntype);
2674 ggc_add_tree_root (&fn, 1);
2675 DECL_EXTERNAL (fn) = 1;
2676 TREE_PUBLIC (fn) = 1;
2677 DECL_ARTIFICIAL (fn) = 1;
2678 TREE_NOTHROW (fn) = 1;
2679 make_decl_rtl (fn, NULL);
2680 assemble_external (fn);
2681 }
2682
2683 /* We need to make an argument list for the function call.
2684
2685 memset has three arguments: the first is a void * address, the
2686 second an integer with the initialization value, and the last is a
2687 size_t count of bytes to set.  */
2688 arg_list
2689 = build_tree_list (NULL_TREE,
2690 make_tree (build_pointer_type (void_type_node),
2691 object));
2692 TREE_CHAIN (arg_list)
2693 = build_tree_list (NULL_TREE,
2694 make_tree (integer_type_node, const0_rtx));
2695 TREE_CHAIN (TREE_CHAIN (arg_list))
2696 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2697 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2698
2699 /* Now we have to build up the CALL_EXPR itself. */
2700 call_expr = build1 (ADDR_EXPR,
2701 build_pointer_type (TREE_TYPE (fn)), fn);
2702 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2703 call_expr, arg_list, NULL_TREE);
2704 TREE_SIDE_EFFECTS (call_expr) = 1;
2705
2706 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2707 #else
2708 emit_library_call (bzero_libfunc, LCT_NORMAL,
2709 VOIDmode, 2, object, Pmode, size,
2710 TYPE_MODE (integer_type_node));
2711 #endif
2712 }
2713 }
2714
2715 return retval;
2716 }
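/* Typical use (illustrative): zeroing a BLKmode object whose size and
   alignment are known to the caller, e.g.

       clear_storage (target, GEN_INT (int_size_in_bytes (type)),
                      TYPE_ALIGN (type));

   TARGET and TYPE stand for the caller's own operands here.  */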
2717
2718 /* Generate code to copy Y into X.
2719 Both Y and X must have the same mode, except that
2720 Y can be a constant with VOIDmode.
2721 This mode cannot be BLKmode; use emit_block_move for that.
2722
2723 Return the last instruction emitted. */
2724
2725 rtx
2726 emit_move_insn (x, y)
2727 rtx x, y;
2728 {
2729 enum machine_mode mode = GET_MODE (x);
2730 rtx y_cst = NULL_RTX;
2731 rtx last_insn;
2732
2733 x = protect_from_queue (x, 1);
2734 y = protect_from_queue (y, 0);
2735
2736 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2737 abort ();
2738
2739 /* Never force constant_p_rtx to memory. */
2740 if (GET_CODE (y) == CONSTANT_P_RTX)
2741 ;
2742 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2743 {
2744 y_cst = y;
2745 y = force_const_mem (mode, y);
2746 }
2747
2748 /* If X or Y are memory references, verify that their addresses are valid
2749 for the machine. */
2750 if (GET_CODE (x) == MEM
2751 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2752 && ! push_operand (x, GET_MODE (x)))
2753 || (flag_force_addr
2754 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2755 x = validize_mem (x);
2756
2757 if (GET_CODE (y) == MEM
2758 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2759 || (flag_force_addr
2760 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2761 y = validize_mem (y);
2762
2763 if (mode == BLKmode)
2764 abort ();
2765
2766 last_insn = emit_move_insn_1 (x, y);
2767
2768 if (y_cst && GET_CODE (x) == REG)
2769 REG_NOTES (last_insn)
2770 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2771
2772 return last_insn;
2773 }
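/* Simplest possible use (illustrative): copy a constant into a fresh
   pseudo; if the target cannot move the constant directly, it is forced
   into memory automatically:

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, GEN_INT (42));
 */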
2774
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
2778
2779 rtx
2780 emit_move_insn_1 (x, y)
2781 rtx x, y;
2782 {
2783 enum machine_mode mode = GET_MODE (x);
2784 enum machine_mode submode;
2785 enum mode_class class = GET_MODE_CLASS (mode);
2786 unsigned int i;
2787
2788 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2789 abort ();
2790
2791 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2792 return
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2794
2795 /* Expand complex moves by moving real part and imag part, if possible. */
2796 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2797 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2798 * BITS_PER_UNIT),
2799 (class == MODE_COMPLEX_INT
2800 ? MODE_INT : MODE_FLOAT),
2801 0))
2802 && (mov_optab->handlers[(int) submode].insn_code
2803 != CODE_FOR_nothing))
2804 {
2805 /* Don't split destination if it is a stack push. */
2806 int stack = push_operand (x, GET_MODE (x));
2807
2808 #ifdef PUSH_ROUNDING
2809 /* In case we output to the stack, but the size is smaller than the machine
2810 can push exactly, we need to use move instructions.  */
2811 if (stack
2812 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2813 {
2814 rtx temp;
2815 int offset1, offset2;
2816
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp = expand_binop (Pmode,
2820 #ifdef STACK_GROWS_DOWNWARD
2821 sub_optab,
2822 #else
2823 add_optab,
2824 #endif
2825 stack_pointer_rtx,
2826 GEN_INT
2827 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2828 stack_pointer_rtx,
2829 0,
2830 OPTAB_LIB_WIDEN);
2831 if (temp != stack_pointer_rtx)
2832 emit_move_insn (stack_pointer_rtx, temp);
2833 #ifdef STACK_GROWS_DOWNWARD
2834 offset1 = 0;
2835 offset2 = GET_MODE_SIZE (submode);
2836 #else
2837 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2838 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2839 + GET_MODE_SIZE (submode));
2840 #endif
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2843 stack_pointer_rtx,
2844 GEN_INT (offset1))),
2845 gen_realpart (submode, y));
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2848 stack_pointer_rtx,
2849 GEN_INT (offset2))),
2850 gen_imagpart (submode, y));
2851 }
2852 else
2853 #endif
2854 /* If this is a stack push, push the highpart first, so it
2855 will be in the argument order.
2856
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2859 if (stack)
2860 {
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of the machine's endianness.  */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_imagpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_realpart (submode, y)));
2870 #else
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_imagpart (submode, y)));
2877 #endif
2878 }
2879 else
2880 {
2881 rtx realpart_x, realpart_y;
2882 rtx imagpart_x, imagpart_y;
2883
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2894 && (reload_in_progress | reload_completed) == 0)
2895 {
2896 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2897 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2898
2899 if (packed_dest_p || packed_src_p)
2900 {
2901 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2902 ? MODE_FLOAT : MODE_INT);
2903
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2906
2907 if (reg_mode != BLKmode)
2908 {
2909 rtx mem = assign_stack_temp (reg_mode,
2910 GET_MODE_SIZE (mode), 0);
2911 rtx cmem = adjust_address (mem, mode, 0);
2912
2913 cfun->cannot_inline
2914 = N_("function using short complex types cannot be inline");
2915
2916 if (packed_dest_p)
2917 {
2918 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2919 emit_move_insn_1 (cmem, y);
2920 return emit_move_insn_1 (sreg, mem);
2921 }
2922 else
2923 {
2924 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2925 emit_move_insn_1 (mem, sreg);
2926 return emit_move_insn_1 (x, cmem);
2927 }
2928 }
2929 }
2930 }
2931
2932 realpart_x = gen_realpart (submode, x);
2933 realpart_y = gen_realpart (submode, y);
2934 imagpart_x = gen_imagpart (submode, x);
2935 imagpart_y = gen_imagpart (submode, y);
2936
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
2941 if (x != y
2942 && ! (reload_in_progress || reload_completed)
2943 && (GET_CODE (realpart_x) == SUBREG
2944 || GET_CODE (imagpart_x) == SUBREG))
2945 {
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2947 }
2948
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (realpart_x, realpart_y));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (imagpart_x, imagpart_y));
2953 }
2954
2955 return get_last_insn ();
2956 }
2957
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
2961 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2962 {
2963 rtx last_insn = 0;
2964 rtx seq, inner;
2965 int need_clobber;
2966
2967 #ifdef PUSH_ROUNDING
2968
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x, GET_MODE (x)))
2972 {
2973 rtx temp;
2974 enum rtx_code code;
2975
2976 /* Do not use anti_adjust_stack, since we don't want to update
2977 stack_pointer_delta. */
2978 temp = expand_binop (Pmode,
2979 #ifdef STACK_GROWS_DOWNWARD
2980 sub_optab,
2981 #else
2982 add_optab,
2983 #endif
2984 stack_pointer_rtx,
2985 GEN_INT
2986 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2987 stack_pointer_rtx,
2988 0,
2989 OPTAB_LIB_WIDEN);
2990 if (temp != stack_pointer_rtx)
2991 emit_move_insn (stack_pointer_rtx, temp);
2992
2993 code = GET_CODE (XEXP (x, 0));
2994 /* Just hope that small offsets off SP are OK. */
2995 if (code == POST_INC)
2996 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2997 GEN_INT (-(HOST_WIDE_INT)
2998 GET_MODE_SIZE (GET_MODE (x))));
2999 else if (code == POST_DEC)
3000 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3001 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3002 else
3003 temp = stack_pointer_rtx;
3004
3005 x = change_address (x, VOIDmode, temp);
3006 }
3007 #endif
3008
3009 /* If we are in reload, see if either operand is a MEM whose address
3010 is scheduled for replacement. */
3011 if (reload_in_progress && GET_CODE (x) == MEM
3012 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3013 x = replace_equiv_address_nv (x, inner);
3014 if (reload_in_progress && GET_CODE (y) == MEM
3015 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3016 y = replace_equiv_address_nv (y, inner);
3017
3018 start_sequence ();
3019
3020 need_clobber = 0;
3021 for (i = 0;
3022 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3023 i++)
3024 {
3025 rtx xpart = operand_subword (x, i, 1, mode);
3026 rtx ypart = operand_subword (y, i, 1, mode);
3027
3028 /* If we can't get a part of Y, put Y into memory if it is a
3029 constant. Otherwise, force it into a register. If we still
3030 can't get a part of Y, abort. */
3031 if (ypart == 0 && CONSTANT_P (y))
3032 {
3033 y = force_const_mem (mode, y);
3034 ypart = operand_subword (y, i, 1, mode);
3035 }
3036 else if (ypart == 0)
3037 ypart = operand_subword_force (y, i, mode);
3038
3039 if (xpart == 0 || ypart == 0)
3040 abort ();
3041
3042 need_clobber |= (GET_CODE (xpart) == SUBREG);
3043
3044 last_insn = emit_move_insn (xpart, ypart);
3045 }
3046
3047 seq = gen_sequence ();
3048 end_sequence ();
3049
3050 /* Show the output dies here. This is necessary for SUBREGs
3051 of pseudos since we cannot track their lifetimes correctly;
3052 hard regs shouldn't appear here except as return values.
3053 We never want to emit such a clobber after reload. */
3054 if (x != y
3055 && ! (reload_in_progress || reload_completed)
3056 && need_clobber != 0)
3057 {
3058 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3059 }
3060
3061 emit_insn (seq);
3062
3063 return last_insn;
3064 }
3065 else
3066 abort ();
3067 }
3068 \f
3069 /* Pushing data onto the stack. */
3070
3071 /* Push a block of length SIZE (perhaps variable)
3072 and return an rtx to address the beginning of the block.
3073 Note that it is not possible for the value returned to be a QUEUED.
3074 The value may be virtual_outgoing_args_rtx.
3075
3076 EXTRA is the number of bytes of padding to push in addition to SIZE.
3077 BELOW nonzero means this padding comes at low addresses;
3078 otherwise, the padding comes at high addresses. */
3079
3080 rtx
3081 push_block (size, extra, below)
3082 rtx size;
3083 int extra, below;
3084 {
3085 register rtx temp;
3086
3087 size = convert_modes (Pmode, ptr_mode, size, 1);
3088 if (CONSTANT_P (size))
3089 anti_adjust_stack (plus_constant (size, extra));
3090 else if (GET_CODE (size) == REG && extra == 0)
3091 anti_adjust_stack (size);
3092 else
3093 {
3094 temp = copy_to_mode_reg (Pmode, size);
3095 if (extra != 0)
3096 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3097 temp, 0, OPTAB_LIB_WIDEN);
3098 anti_adjust_stack (temp);
3099 }
3100
3101 #ifndef STACK_GROWS_DOWNWARD
3102 if (0)
3103 #else
3104 if (1)
3105 #endif
3106 {
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3110 }
3111 else
3112 {
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3119 else
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3122 }
3123
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3125 }
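/* Illustrative use (mirrors the BLKmode path of emit_push_insn below):
   reserve room for a variable-sized argument and address it as memory:

       rtx addr = push_block (size_rtx, 0, 0);
       rtx dest = gen_rtx_MEM (BLKmode, addr);

   size_rtx is the caller's size expression; EXTRA and BELOW are zero in
   this sketch.  */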
3126
3127
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3130
3131 static rtx
3132 get_push_address (size)
3133 int size;
3134 {
3135 register rtx temp;
3136
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3141 else
3142 temp = stack_pointer_rtx;
3143
3144 return copy_to_reg (temp);
3145 }
3146
3147 #ifdef PUSH_ROUNDING
3148
3149 /* Emit single push insn. */
3150
3151 static void
3152 emit_single_push_insn (mode, x, type)
3153 rtx x;
3154 enum machine_mode mode;
3155 tree type;
3156 {
3157 rtx dest_addr;
3158 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3159 rtx dest;
3160 enum insn_code icode;
3161 insn_operand_predicate_fn pred;
3162
3163 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3164 /* If there is a push pattern, use it.  Otherwise try the old way of
3165 passing a MEM representing the push operation to the move expander.  */
3166 icode = push_optab->handlers[(int) mode].insn_code;
3167 if (icode != CODE_FOR_nothing)
3168 {
3169 if (((pred = insn_data[(int) icode].operand[0].predicate)
3170 && !((*pred) (x, mode))))
3171 x = force_reg (mode, x);
3172 emit_insn (GEN_FCN (icode) (x));
3173 return;
3174 }
3175 if (GET_MODE_SIZE (mode) == rounded_size)
3176 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3177 else
3178 {
3179 #ifdef STACK_GROWS_DOWNWARD
3180 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3181 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3182 #else
3183 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3184 GEN_INT (rounded_size));
3185 #endif
3186 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3187 }
3188
3189 dest = gen_rtx_MEM (mode, dest_addr);
3190
3191 if (type != 0)
3192 {
3193 set_mem_attributes (dest, type, 1);
3194 /* Function incoming arguments may overlap with sibling call
3195 outgoing arguments and we cannot allow reordering of reads
3196 from function arguments with stores to outgoing arguments
3197 of sibling calls. */
3198 set_mem_alias_set (dest, 0);
3199 }
3200 emit_move_insn (dest, x);
3201 }
3202 #endif
3203
3204 /* Generate code to push X onto the stack, assuming it has mode MODE and
3205 type TYPE.
3206 MODE is redundant except when X is a CONST_INT (since they don't
3207 carry mode info).
3208 SIZE is an rtx for the size of data to be copied (in bytes),
3209 needed only if X is BLKmode.
3210
3211 ALIGN (in bits) is maximum alignment we can assume.
3212
3213 If PARTIAL and REG are both nonzero, then copy that many of the first
3214 words of X into registers starting with REG, and push the rest of X.
3215 The amount of space pushed is decreased by PARTIAL words,
3216 rounded *down* to a multiple of PARM_BOUNDARY.
3217 REG must be a hard register in this case.
3218 If REG is zero but PARTIAL is not, take all other actions for an
3219 argument partially in registers, but do not actually load any
3220 registers.
3221
3222 EXTRA is the amount in bytes of extra space to leave next to this arg.
3223 This is ignored if an argument block has already been allocated.
3224
3225 On a machine that lacks real push insns, ARGS_ADDR is the address of
3226 the bottom of the argument block for this call. We use indexing off there
3227 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3228 argument block has not been preallocated.
3229
3230 ARGS_SO_FAR is the size of args previously pushed for this call.
3231
3232 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3233 for arguments passed in registers. If nonzero, it will be the number
3234 of bytes required. */
3235
3236 void
3237 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3238 args_addr, args_so_far, reg_parm_stack_space,
3239 alignment_pad)
3240 register rtx x;
3241 enum machine_mode mode;
3242 tree type;
3243 rtx size;
3244 unsigned int align;
3245 int partial;
3246 rtx reg;
3247 int extra;
3248 rtx args_addr;
3249 rtx args_so_far;
3250 int reg_parm_stack_space;
3251 rtx alignment_pad;
3252 {
3253 rtx xinner;
3254 enum direction stack_direction
3255 #ifdef STACK_GROWS_DOWNWARD
3256 = downward;
3257 #else
3258 = upward;
3259 #endif
3260
3261 /* Decide where to pad the argument: `downward' for below,
3262 `upward' for above, or `none' for don't pad it.
3263 Default is below for small data on big-endian machines; else above. */
3264 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3265
3266 /* Invert direction if stack is post-decrement.
3267 FIXME: why? */
3268 if (STACK_PUSH_CODE == POST_DEC)
3269 if (where_pad != none)
3270 where_pad = (where_pad == downward ? upward : downward);
3271
3272 xinner = x = protect_from_queue (x, 0);
3273
3274 if (mode == BLKmode)
3275 {
3276 /* Copy a block into the stack, entirely or partially. */
3277
3278 register rtx temp;
3279 int used = partial * UNITS_PER_WORD;
3280 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3281 int skip;
3282
3283 if (size == 0)
3284 abort ();
3285
3286 used -= offset;
3287
3288 /* USED is now the # of bytes we need not copy to the stack
3289 because registers will take care of them. */
3290
3291 if (partial != 0)
3292 xinner = adjust_address (xinner, BLKmode, used);
3293
3294 /* If the partial register-part of the arg counts in its stack size,
3295 skip the part of stack space corresponding to the registers.
3296 Otherwise, start copying to the beginning of the stack space,
3297 by setting SKIP to 0. */
3298 skip = (reg_parm_stack_space == 0) ? 0 : used;
3299
3300 #ifdef PUSH_ROUNDING
3301 /* Do it with several push insns if that doesn't take lots of insns
3302 and if there is no difficulty with push insns that skip bytes
3303 on the stack for alignment purposes. */
3304 if (args_addr == 0
3305 && PUSH_ARGS
3306 && GET_CODE (size) == CONST_INT
3307 && skip == 0
3308 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3309 /* Here we avoid the case of a structure whose weak alignment
3310 forces many pushes of a small amount of data,
3311 and such small pushes do rounding that causes trouble. */
3312 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3313 || align >= BIGGEST_ALIGNMENT
3314 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3315 == (align / BITS_PER_UNIT)))
3316 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3317 {
3318 /* Push padding now if padding above and stack grows down,
3319 or if padding below and stack grows up.
3320 But if space already allocated, this has already been done. */
3321 if (extra && args_addr == 0
3322 && where_pad != none && where_pad != stack_direction)
3323 anti_adjust_stack (GEN_INT (extra));
3324
3325 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3326
3327 if (current_function_check_memory_usage && ! in_check_memory_usage)
3328 {
3329 rtx temp;
3330
3331 in_check_memory_usage = 1;
3332 temp = get_push_address (INTVAL (size) - used);
3333 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3334 emit_library_call (chkr_copy_bitmap_libfunc,
3335 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3336 Pmode, XEXP (xinner, 0), Pmode,
3337 GEN_INT (INTVAL (size) - used),
3338 TYPE_MODE (sizetype));
3339 else
3340 emit_library_call (chkr_set_right_libfunc,
3341 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3342 Pmode, GEN_INT (INTVAL (size) - used),
3343 TYPE_MODE (sizetype),
3344 GEN_INT (MEMORY_USE_RW),
3345 TYPE_MODE (integer_type_node));
3346 in_check_memory_usage = 0;
3347 }
3348 }
3349 else
3350 #endif /* PUSH_ROUNDING */
3351 {
3352 rtx target;
3353
3354 /* Otherwise make space on the stack and copy the data
3355 to the address of that space. */
3356
3357 /* Deduct words put into registers from the size we must copy. */
3358 if (partial != 0)
3359 {
3360 if (GET_CODE (size) == CONST_INT)
3361 size = GEN_INT (INTVAL (size) - used);
3362 else
3363 size = expand_binop (GET_MODE (size), sub_optab, size,
3364 GEN_INT (used), NULL_RTX, 0,
3365 OPTAB_LIB_WIDEN);
3366 }
3367
3368 /* Get the address of the stack space.
3369 In this case, we do not deal with EXTRA separately.
3370 A single stack adjust will do. */
3371 if (! args_addr)
3372 {
3373 temp = push_block (size, extra, where_pad == downward);
3374 extra = 0;
3375 }
3376 else if (GET_CODE (args_so_far) == CONST_INT)
3377 temp = memory_address (BLKmode,
3378 plus_constant (args_addr,
3379 skip + INTVAL (args_so_far)));
3380 else
3381 temp = memory_address (BLKmode,
3382 plus_constant (gen_rtx_PLUS (Pmode,
3383 args_addr,
3384 args_so_far),
3385 skip));
3386 if (current_function_check_memory_usage && ! in_check_memory_usage)
3387 {
3388 in_check_memory_usage = 1;
3389 target = copy_to_reg (temp);
3390 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3391 emit_library_call (chkr_copy_bitmap_libfunc,
3392 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3393 target, Pmode,
3394 XEXP (xinner, 0), Pmode,
3395 size, TYPE_MODE (sizetype));
3396 else
3397 emit_library_call (chkr_set_right_libfunc,
3398 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3399 target, Pmode,
3400 size, TYPE_MODE (sizetype),
3401 GEN_INT (MEMORY_USE_RW),
3402 TYPE_MODE (integer_type_node));
3403 in_check_memory_usage = 0;
3404 }
3405
3406 target = gen_rtx_MEM (BLKmode, temp);
3407
3408 if (type != 0)
3409 {
3410 set_mem_attributes (target, type, 1);
3411 /* Function incoming arguments may overlap with sibling call
3412 outgoing arguments and we cannot allow reordering of reads
3413 from function arguments with stores to outgoing arguments
3414 of sibling calls. */
3415 set_mem_alias_set (target, 0);
3416 }
3417
3418 /* TEMP is the address of the block. Copy the data there. */
3419 if (GET_CODE (size) == CONST_INT
3420 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3421 {
3422 move_by_pieces (target, xinner, INTVAL (size), align);
3423 goto ret;
3424 }
3425 else
3426 {
3427 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3428 enum machine_mode mode;
3429
3430 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3431 mode != VOIDmode;
3432 mode = GET_MODE_WIDER_MODE (mode))
3433 {
3434 enum insn_code code = movstr_optab[(int) mode];
3435 insn_operand_predicate_fn pred;
3436
3437 if (code != CODE_FOR_nothing
3438 && ((GET_CODE (size) == CONST_INT
3439 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3440 <= (GET_MODE_MASK (mode) >> 1)))
3441 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3442 && (!(pred = insn_data[(int) code].operand[0].predicate)
3443 || ((*pred) (target, BLKmode)))
3444 && (!(pred = insn_data[(int) code].operand[1].predicate)
3445 || ((*pred) (xinner, BLKmode)))
3446 && (!(pred = insn_data[(int) code].operand[3].predicate)
3447 || ((*pred) (opalign, VOIDmode))))
3448 {
3449 rtx op2 = convert_to_mode (mode, size, 1);
3450 rtx last = get_last_insn ();
3451 rtx pat;
3452
3453 pred = insn_data[(int) code].operand[2].predicate;
3454 if (pred != 0 && ! (*pred) (op2, mode))
3455 op2 = copy_to_mode_reg (mode, op2);
3456
3457 pat = GEN_FCN ((int) code) (target, xinner,
3458 op2, opalign);
3459 if (pat)
3460 {
3461 emit_insn (pat);
3462 goto ret;
3463 }
3464 else
3465 delete_insns_since (last);
3466 }
3467 }
3468 }
3469
3470 if (!ACCUMULATE_OUTGOING_ARGS)
3471 {
3472 /* If the source is referenced relative to the stack pointer,
3473 copy it to another register to stabilize it. We do not need
3474 to do this if we know that we won't be changing sp. */
3475
3476 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3477 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3478 temp = copy_to_reg (temp);
3479 }
3480
3481 /* Make inhibit_defer_pop nonzero around the library call
3482 to force it to pop the bcopy-arguments right away. */
3483 NO_DEFER_POP;
3484 #ifdef TARGET_MEM_FUNCTIONS
3485 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3486 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3487 convert_to_mode (TYPE_MODE (sizetype),
3488 size, TREE_UNSIGNED (sizetype)),
3489 TYPE_MODE (sizetype));
3490 #else
3491 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3492 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3493 convert_to_mode (TYPE_MODE (integer_type_node),
3494 size,
3495 TREE_UNSIGNED (integer_type_node)),
3496 TYPE_MODE (integer_type_node));
3497 #endif
3498 OK_DEFER_POP;
3499 }
3500 }
3501 else if (partial > 0)
3502 {
3503 /* Scalar partly in registers. */
3504
3505 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3506 int i;
3507 int not_stack;
3508 /* # words of start of argument
3509 that we must make space for but need not store. */
3510 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3511 int args_offset = INTVAL (args_so_far);
3512 int skip;
3513
3514 /* Push padding now if padding above and stack grows down,
3515 or if padding below and stack grows up.
3516 But if space already allocated, this has already been done. */
3517 if (extra && args_addr == 0
3518 && where_pad != none && where_pad != stack_direction)
3519 anti_adjust_stack (GEN_INT (extra));
3520
3521 /* If we make space by pushing it, we might as well push
3522 the real data. Otherwise, we can leave OFFSET nonzero
3523 and leave the space uninitialized. */
3524 if (args_addr == 0)
3525 offset = 0;
3526
3527 /* Now NOT_STACK gets the number of words that we don't need to
3528 allocate on the stack. */
3529 not_stack = partial - offset;
3530
3531 /* If the partial register-part of the arg counts in its stack size,
3532 skip the part of stack space corresponding to the registers.
3533 Otherwise, start copying to the beginning of the stack space,
3534 by setting SKIP to 0. */
3535 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3536
3537 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3538 x = validize_mem (force_const_mem (mode, x));
3539
3540 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3541 SUBREGs of such registers are not allowed. */
3542 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3543 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3544 x = copy_to_reg (x);
3545
3546 /* Loop over all the words allocated on the stack for this arg. */
3547 /* We can do it by words, because any scalar bigger than a word
3548 has a size a multiple of a word. */
3549 #ifndef PUSH_ARGS_REVERSED
3550 for (i = not_stack; i < size; i++)
3551 #else
3552 for (i = size - 1; i >= not_stack; i--)
3553 #endif
3554 if (i >= not_stack + offset)
3555 emit_push_insn (operand_subword_force (x, i, mode),
3556 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3557 0, args_addr,
3558 GEN_INT (args_offset + ((i - not_stack + skip)
3559 * UNITS_PER_WORD)),
3560 reg_parm_stack_space, alignment_pad);
3561 }
3562 else
3563 {
3564 rtx addr;
3565 rtx target = NULL_RTX;
3566 rtx dest;
3567
3568 /* Push padding now if padding above and stack grows down,
3569 or if padding below and stack grows up.
3570 But if space already allocated, this has already been done. */
3571 if (extra && args_addr == 0
3572 && where_pad != none && where_pad != stack_direction)
3573 anti_adjust_stack (GEN_INT (extra));
3574
3575 #ifdef PUSH_ROUNDING
3576 if (args_addr == 0 && PUSH_ARGS)
3577 emit_single_push_insn (mode, x, type);
3578 else
3579 #endif
3580 {
3581 if (GET_CODE (args_so_far) == CONST_INT)
3582 addr
3583 = memory_address (mode,
3584 plus_constant (args_addr,
3585 INTVAL (args_so_far)));
3586 else
3587 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3588 args_so_far));
3589 target = addr;
3590 dest = gen_rtx_MEM (mode, addr);
3591 if (type != 0)
3592 {
3593 set_mem_attributes (dest, type, 1);
3594 /* Function incoming arguments may overlap with sibling call
3595 outgoing arguments and we cannot allow reordering of reads
3596 from function arguments with stores to outgoing arguments
3597 of sibling calls. */
3598 set_mem_alias_set (dest, 0);
3599 }
3600
3601 emit_move_insn (dest, x);
3602
3603 }
3604
3605 if (current_function_check_memory_usage && ! in_check_memory_usage)
3606 {
3607 in_check_memory_usage = 1;
3608 if (target == 0)
3609 target = get_push_address (GET_MODE_SIZE (mode));
3610
3611 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3612 emit_library_call (chkr_copy_bitmap_libfunc,
3613 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3614 Pmode, XEXP (x, 0), Pmode,
3615 GEN_INT (GET_MODE_SIZE (mode)),
3616 TYPE_MODE (sizetype));
3617 else
3618 emit_library_call (chkr_set_right_libfunc,
3619 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3620 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3621 TYPE_MODE (sizetype),
3622 GEN_INT (MEMORY_USE_RW),
3623 TYPE_MODE (integer_type_node));
3624 in_check_memory_usage = 0;
3625 }
3626 }
3627
3628 ret:
3629 /* If part should go in registers, copy that part
3630 into the appropriate registers. Do this now, at the end,
3631 since mem-to-mem copies above may do function calls. */
3632 if (partial > 0 && reg != 0)
3633 {
3634 /* Handle calls that pass values in multiple non-contiguous locations.
3635 The Irix 6 ABI has examples of this. */
3636 if (GET_CODE (reg) == PARALLEL)
3637 emit_group_load (reg, x, -1, align); /* ??? size? */
3638 else
3639 move_block_to_reg (REGNO (reg), x, partial, mode);
3640 }
3641
3642 if (extra && args_addr == 0 && where_pad == stack_direction)
3643 anti_adjust_stack (GEN_INT (extra));
3644
3645 if (alignment_pad && args_addr == 0)
3646 anti_adjust_stack (alignment_pad);
3647 }
3648 \f
3649 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3650 operations. */
3651
3652 static rtx
3653 get_subtarget (x)
3654 rtx x;
3655 {
3656 return ((x == 0
3657 /* Only registers can be subtargets. */
3658 || GET_CODE (x) != REG
3659 /* If the register is readonly, it can't be set more than once. */
3660 || RTX_UNCHANGING_P (x)
3661 /* Don't use hard regs to avoid extending their life. */
3662 || REGNO (x) < FIRST_PSEUDO_REGISTER
3663 /* Avoid subtargets inside loops,
3664 since they hide some invariant expressions. */
3665 || preserve_subexpressions_p ())
3666 ? 0 : x);
3667 }
3668
3669 /* Expand an assignment that stores the value of FROM into TO.
3670 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3671 (This may contain a QUEUED rtx;
3672 if the value is constant, this rtx is a constant.)
3673 Otherwise, the returned value is NULL_RTX.
3674
3675 SUGGEST_REG is no longer actually used.
3676 It used to mean, copy the value through a register
3677 and return that register, if that is possible.
3678 We now use WANT_VALUE to decide whether to do this. */
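/* An illustrative sketch (the type and names below are hypothetical):
   for C source such as

       struct s { int count; char tag; } *p;
       p->tag = 'x';

   the lhs is a COMPONENT_REF, so the code below decomposes it with
   get_inner_reference and stores through store_field instead of simply
   expanding TO to a MEM and calling store_expr.  */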
3679
3680 rtx
3681 expand_assignment (to, from, want_value, suggest_reg)
3682 tree to, from;
3683 int want_value;
3684 int suggest_reg ATTRIBUTE_UNUSED;
3685 {
3686 register rtx to_rtx = 0;
3687 rtx result;
3688
3689 /* Don't crash if the lhs of the assignment was erroneous. */
3690
3691 if (TREE_CODE (to) == ERROR_MARK)
3692 {
3693 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3694 return want_value ? result : NULL_RTX;
3695 }
3696
3697 /* Assignment of a structure component needs special treatment
3698 if the structure component's rtx is not simply a MEM.
3699 Assignment of an array element at a constant index, and assignment of
3700	     an array element in an unaligned packed structure field, have the same
3701 problem. */
3702
3703 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3704 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3705 {
3706 enum machine_mode mode1;
3707 HOST_WIDE_INT bitsize, bitpos;
3708 tree offset;
3709 int unsignedp;
3710 int volatilep = 0;
3711 tree tem;
3712 unsigned int alignment;
3713
3714 push_temp_slots ();
3715 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3716 &unsignedp, &volatilep, &alignment);
3717
3718 /* If we are going to use store_bit_field and extract_bit_field,
3719 make sure to_rtx will be safe for multiple use. */
3720
3721 if (mode1 == VOIDmode && want_value)
3722 tem = stabilize_reference (tem);
3723
3724 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3725 if (offset != 0)
3726 {
3727 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3728
3729 if (GET_CODE (to_rtx) != MEM)
3730 abort ();
3731
3732 if (GET_MODE (offset_rtx) != ptr_mode)
3733 {
3734 #ifdef POINTERS_EXTEND_UNSIGNED
3735 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3736 #else
3737 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3738 #endif
3739 }
3740
3741	  /* A constant address in TO_RTX can have VOIDmode; we must not
3742	     call force_reg in that case, so avoid it.  */
3743 if (GET_CODE (to_rtx) == MEM
3744 && GET_MODE (to_rtx) == BLKmode
3745 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3746 && bitsize
3747 && (bitpos % bitsize) == 0
3748 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3749 && alignment == GET_MODE_ALIGNMENT (mode1))
3750 {
3751 rtx temp
3752 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3753
3754 if (GET_CODE (XEXP (temp, 0)) == REG)
3755 to_rtx = temp;
3756 else
3757 to_rtx = (replace_equiv_address
3758 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3759 XEXP (temp, 0))));
3760 bitpos = 0;
3761 }
3762
3763 to_rtx = change_address (to_rtx, VOIDmode,
3764 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3765 force_reg (ptr_mode,
3766 offset_rtx)));
3767 }
3768
3769 if (volatilep)
3770 {
3771 if (GET_CODE (to_rtx) == MEM)
3772 {
3773 /* When the offset is zero, to_rtx is the address of the
3774 structure we are storing into, and hence may be shared.
3775 We must make a new MEM before setting the volatile bit. */
3776 if (offset == 0)
3777 to_rtx = copy_rtx (to_rtx);
3778
3779 MEM_VOLATILE_P (to_rtx) = 1;
3780 }
3781 #if 0 /* This was turned off because, when a field is volatile
3782 in an object which is not volatile, the object may be in a register,
3783 and then we would abort over here. */
3784 else
3785 abort ();
3786 #endif
3787 }
3788
3789 if (TREE_CODE (to) == COMPONENT_REF
3790 && TREE_READONLY (TREE_OPERAND (to, 1)))
3791 {
3792 if (offset == 0)
3793 to_rtx = copy_rtx (to_rtx);
3794
3795 RTX_UNCHANGING_P (to_rtx) = 1;
3796 }
3797
3798 /* Check the access. */
3799 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3800 {
3801 rtx to_addr;
3802 int size;
3803 int best_mode_size;
3804 enum machine_mode best_mode;
3805
3806 best_mode = get_best_mode (bitsize, bitpos,
3807 TYPE_ALIGN (TREE_TYPE (tem)),
3808 mode1, volatilep);
3809 if (best_mode == VOIDmode)
3810 best_mode = QImode;
3811
3812 best_mode_size = GET_MODE_BITSIZE (best_mode);
3813 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3814 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3815 size *= GET_MODE_SIZE (best_mode);
3816
3817 /* Check the access right of the pointer. */
3818 in_check_memory_usage = 1;
3819 if (size)
3820 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3821 VOIDmode, 3, to_addr, Pmode,
3822 GEN_INT (size), TYPE_MODE (sizetype),
3823 GEN_INT (MEMORY_USE_WO),
3824 TYPE_MODE (integer_type_node));
3825 in_check_memory_usage = 0;
3826 }
3827
3828 /* If this is a varying-length object, we must get the address of
3829 the source and do an explicit block move. */
3830 if (bitsize < 0)
3831 {
3832 unsigned int from_align;
3833 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3834 rtx inner_to_rtx
3835 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3836
3837 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3838 MIN (alignment, from_align));
3839 free_temp_slots ();
3840 pop_temp_slots ();
3841 return to_rtx;
3842 }
3843 else
3844 {
3845 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3846 (want_value
3847 /* Spurious cast for HPUX compiler. */
3848 ? ((enum machine_mode)
3849 TYPE_MODE (TREE_TYPE (to)))
3850 : VOIDmode),
3851 unsignedp,
3852 alignment,
3853 int_size_in_bytes (TREE_TYPE (tem)),
3854 get_alias_set (to));
3855
3856 preserve_temp_slots (result);
3857 free_temp_slots ();
3858 pop_temp_slots ();
3859
3860 /* If the value is meaningful, convert RESULT to the proper mode.
3861 Otherwise, return nothing. */
3862 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3863 TYPE_MODE (TREE_TYPE (from)),
3864 result,
3865 TREE_UNSIGNED (TREE_TYPE (to)))
3866 : NULL_RTX);
3867 }
3868 }
3869
3870 /* If the rhs is a function call and its value is not an aggregate,
3871 call the function before we start to compute the lhs.
3872 This is needed for correct code for cases such as
3873 val = setjmp (buf) on machines where reference to val
3874 requires loading up part of an address in a separate insn.
3875
3876 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3877 since it might be a promoted variable where the zero- or sign- extension
3878 needs to be done. Handling this in the normal way is safe because no
3879 computation is done before the call. */
3880 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3881 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3882 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3883 && GET_CODE (DECL_RTL (to)) == REG))
3884 {
3885 rtx value;
3886
3887 push_temp_slots ();
3888 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3889 if (to_rtx == 0)
3890 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3891
3892 /* Handle calls that return values in multiple non-contiguous locations.
3893 The Irix 6 ABI has examples of this. */
3894 if (GET_CODE (to_rtx) == PARALLEL)
3895 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3896 TYPE_ALIGN (TREE_TYPE (from)));
3897 else if (GET_MODE (to_rtx) == BLKmode)
3898 emit_block_move (to_rtx, value, expr_size (from),
3899 TYPE_ALIGN (TREE_TYPE (from)));
3900 else
3901 {
3902 #ifdef POINTERS_EXTEND_UNSIGNED
3903 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3904 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3905 value = convert_memory_address (GET_MODE (to_rtx), value);
3906 #endif
3907 emit_move_insn (to_rtx, value);
3908 }
3909 preserve_temp_slots (to_rtx);
3910 free_temp_slots ();
3911 pop_temp_slots ();
3912 return want_value ? to_rtx : NULL_RTX;
3913 }
3914
3915 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3916 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3917
3918 if (to_rtx == 0)
3919 {
3920 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3921 if (GET_CODE (to_rtx) == MEM)
3922 set_mem_alias_set (to_rtx, get_alias_set (to));
3923 }
3924
3925 /* Don't move directly into a return register. */
3926 if (TREE_CODE (to) == RESULT_DECL
3927 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3928 {
3929 rtx temp;
3930
3931 push_temp_slots ();
3932 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3933
3934 if (GET_CODE (to_rtx) == PARALLEL)
3935 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3936 TYPE_ALIGN (TREE_TYPE (from)));
3937 else
3938 emit_move_insn (to_rtx, temp);
3939
3940 preserve_temp_slots (to_rtx);
3941 free_temp_slots ();
3942 pop_temp_slots ();
3943 return want_value ? to_rtx : NULL_RTX;
3944 }
3945
3946 /* In case we are returning the contents of an object which overlaps
3947 the place the value is being stored, use a safe function when copying
3948 a value through a pointer into a structure value return block. */
3949 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3950 && current_function_returns_struct
3951 && !current_function_returns_pcc_struct)
3952 {
3953 rtx from_rtx, size;
3954
3955 push_temp_slots ();
3956 size = expr_size (from);
3957 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3958 EXPAND_MEMORY_USE_DONT);
3959
3960 /* Copy the rights of the bitmap. */
3961 if (current_function_check_memory_usage)
3962 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3963 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3964 XEXP (from_rtx, 0), Pmode,
3965 convert_to_mode (TYPE_MODE (sizetype),
3966 size, TREE_UNSIGNED (sizetype)),
3967 TYPE_MODE (sizetype));
3968
3969 #ifdef TARGET_MEM_FUNCTIONS
3970 emit_library_call (memmove_libfunc, LCT_NORMAL,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
3974 size, TREE_UNSIGNED (sizetype)),
3975 TYPE_MODE (sizetype));
3976 #else
3977 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3979 XEXP (to_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (integer_type_node),
3981 size, TREE_UNSIGNED (integer_type_node)),
3982 TYPE_MODE (integer_type_node));
3983 #endif
3984
3985 preserve_temp_slots (to_rtx);
3986 free_temp_slots ();
3987 pop_temp_slots ();
3988 return want_value ? to_rtx : NULL_RTX;
3989 }
3990
3991 /* Compute FROM and store the value in the rtx we got. */
3992
3993 push_temp_slots ();
3994 result = store_expr (from, to_rtx, want_value);
3995 preserve_temp_slots (result);
3996 free_temp_slots ();
3997 pop_temp_slots ();
3998 return want_value ? result : NULL_RTX;
3999 }
4000
4001 /* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
4003 TARGET may contain a QUEUED rtx.
4004
4005 If WANT_VALUE is nonzero, return a copy of the value
4006 not in TARGET, so that we can be sure to use the proper
4007 value in a containing expression even if TARGET has something
4008 else stored in it. If possible, we copy the value through a pseudo
4009 and return that pseudo. Or, if the value is constant, we try to
4010 return the constant. In some cases, we return a pseudo
4011 copied *from* TARGET.
4012
4013 If the mode is BLKmode then we may return TARGET itself.
4014    It turns out that in BLKmode it doesn't cause a problem,
4015    because C has no operators that could combine two different
4016 assignments into the same BLKmode object with different values
4017 with no sequence point. Will other languages need this to
4018 be more thorough?
4019
4020 If WANT_VALUE is 0, we return NULL, to make sure
4021 to catch quickly any cases where the caller uses the value
4022 and fails to set WANT_VALUE. */
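/* Illustrative example (hypothetical source): when expanding

       x = y = f ();

   the inner assignment is expanded with WANT_VALUE nonzero, so the value
   stored into `y' comes back in a pseudo (or as a constant) and can then
   be stored into `x' without re-reading `y'.  */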
4023
4024 rtx
4025 store_expr (exp, target, want_value)
4026 register tree exp;
4027 register rtx target;
4028 int want_value;
4029 {
4030 register rtx temp;
4031 int dont_return_target = 0;
4032 int dont_store_target = 0;
4033
4034 if (TREE_CODE (exp) == COMPOUND_EXPR)
4035 {
4036 /* Perform first part of compound expression, then assign from second
4037 part. */
4038 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4039 emit_queue ();
4040 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4041 }
4042 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4043 {
4044 /* For conditional expression, get safe form of the target. Then
4045 test the condition, doing the appropriate assignment on either
4046 side. This avoids the creation of unnecessary temporaries.
4047 For non-BLKmode, it is more efficient not to do this. */
4048
4049 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4050
4051 emit_queue ();
4052 target = protect_from_queue (target, 1);
4053
4054 do_pending_stack_adjust ();
4055 NO_DEFER_POP;
4056 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4057 start_cleanup_deferral ();
4058 store_expr (TREE_OPERAND (exp, 1), target, 0);
4059 end_cleanup_deferral ();
4060 emit_queue ();
4061 emit_jump_insn (gen_jump (lab2));
4062 emit_barrier ();
4063 emit_label (lab1);
4064 start_cleanup_deferral ();
4065 store_expr (TREE_OPERAND (exp, 2), target, 0);
4066 end_cleanup_deferral ();
4067 emit_queue ();
4068 emit_label (lab2);
4069 OK_DEFER_POP;
4070
4071 return want_value ? target : NULL_RTX;
4072 }
4073 else if (queued_subexp_p (target))
4074 /* If target contains a postincrement, let's not risk
4075 using it as the place to generate the rhs. */
4076 {
4077 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4078 {
4079 /* Expand EXP into a new pseudo. */
4080 temp = gen_reg_rtx (GET_MODE (target));
4081 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4082 }
4083 else
4084 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4085
4086 /* If target is volatile, ANSI requires accessing the value
4087 *from* the target, if it is accessed. So make that happen.
4088 In no case return the target itself. */
4089 if (! MEM_VOLATILE_P (target) && want_value)
4090 dont_return_target = 1;
4091 }
4092 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4093 && GET_MODE (target) != BLKmode)
4094 /* If target is in memory and caller wants value in a register instead,
4095 arrange that. Pass TARGET as target for expand_expr so that,
4096 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4097 We know expand_expr will not use the target in that case.
4098 Don't do this if TARGET is volatile because we are supposed
4099 to write it and then read it. */
4100 {
4101 temp = expand_expr (exp, target, GET_MODE (target), 0);
4102 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4103 {
4104 /* If TEMP is already in the desired TARGET, only copy it from
4105 memory and don't store it there again. */
4106 if (temp == target
4107 || (rtx_equal_p (temp, target)
4108 && ! side_effects_p (temp) && ! side_effects_p (target)))
4109 dont_store_target = 1;
4110 temp = copy_to_reg (temp);
4111 }
4112 dont_return_target = 1;
4113 }
4114 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4115    /* If this is a scalar in a register that is stored in a wider mode
4116 than the declared mode, compute the result into its declared mode
4117 and then convert to the wider mode. Our value is the computed
4118 expression. */
4119 {
4120 /* If we don't want a value, we can do the conversion inside EXP,
4121 which will often result in some optimizations. Do the conversion
4122 in two steps: first change the signedness, if needed, then
4123 the extend. But don't do this if the type of EXP is a subtype
4124 of something else since then the conversion might involve
4125 more than just converting modes. */
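      /* For instance (illustrative): storing an `int' expression into a
	 `short' variable promoted to a word-wide pseudo would first be
	 converted to `short' (fixing up signedness if needed) and then to
	 the word-wide type, so the whole conversion happens inside EXP.  */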
4126 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4127 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4128 {
4129 if (TREE_UNSIGNED (TREE_TYPE (exp))
4130 != SUBREG_PROMOTED_UNSIGNED_P (target))
4131 exp
4132 = convert
4133 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4134 TREE_TYPE (exp)),
4135 exp);
4136
4137 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4138 SUBREG_PROMOTED_UNSIGNED_P (target)),
4139 exp);
4140 }
4141
4142 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4143
4144 /* If TEMP is a volatile MEM and we want a result value, make
4145 the access now so it gets done only once. Likewise if
4146 it contains TARGET. */
4147 if (GET_CODE (temp) == MEM && want_value
4148 && (MEM_VOLATILE_P (temp)
4149 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4150 temp = copy_to_reg (temp);
4151
4152 /* If TEMP is a VOIDmode constant, use convert_modes to make
4153 sure that we properly convert it. */
4154 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4155 {
4156 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4157 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4158 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4159 GET_MODE (target), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4161 }
4162
4163 convert_move (SUBREG_REG (target), temp,
4164 SUBREG_PROMOTED_UNSIGNED_P (target));
4165
4166 /* If we promoted a constant, change the mode back down to match
4167 target. Otherwise, the caller might get confused by a result whose
4168 mode is larger than expected. */
4169
4170 if (want_value && GET_MODE (temp) != GET_MODE (target)
4171 && GET_MODE (temp) != VOIDmode)
4172 {
4173 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4174 SUBREG_PROMOTED_VAR_P (temp) = 1;
4175 SUBREG_PROMOTED_UNSIGNED_P (temp)
4176 = SUBREG_PROMOTED_UNSIGNED_P (target);
4177 }
4178
4179 return want_value ? temp : NULL_RTX;
4180 }
4181 else
4182 {
4183 temp = expand_expr (exp, target, GET_MODE (target), 0);
4184 /* Return TARGET if it's a specified hardware register.
4185 If TARGET is a volatile mem ref, either return TARGET
4186 or return a reg copied *from* TARGET; ANSI requires this.
4187
4188 Otherwise, if TEMP is not TARGET, return TEMP
4189 if it is constant (for efficiency),
4190 or if we really want the correct value. */
4191 if (!(target && GET_CODE (target) == REG
4192 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4193 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4194 && ! rtx_equal_p (temp, target)
4195 && (CONSTANT_P (temp) || want_value))
4196 dont_return_target = 1;
4197 }
4198
4199 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4200 the same as that of TARGET, adjust the constant. This is needed, for
4201 example, in case it is a CONST_DOUBLE and we want only a word-sized
4202 value. */
4203 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4204 && TREE_CODE (exp) != ERROR_MARK
4205 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4206 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4207 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4208
4209 if (current_function_check_memory_usage
4210 && GET_CODE (target) == MEM
4211 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4212 {
4213 in_check_memory_usage = 1;
4214 if (GET_CODE (temp) == MEM)
4215 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4216 VOIDmode, 3, XEXP (target, 0), Pmode,
4217 XEXP (temp, 0), Pmode,
4218 expr_size (exp), TYPE_MODE (sizetype));
4219 else
4220 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4221 VOIDmode, 3, XEXP (target, 0), Pmode,
4222 expr_size (exp), TYPE_MODE (sizetype),
4223 GEN_INT (MEMORY_USE_WO),
4224 TYPE_MODE (integer_type_node));
4225 in_check_memory_usage = 0;
4226 }
4227
4228 /* If value was not generated in the target, store it there.
4229      Convert the value to TARGET's type first if necessary.  */
4230 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4231 one or both of them are volatile memory refs, we have to distinguish
4232 two cases:
4233 - expand_expr has used TARGET. In this case, we must not generate
4234 another copy. This can be detected by TARGET being equal according
4235 to == .
4236 - expand_expr has not used TARGET - that means that the source just
4237 happens to have the same RTX form. Since temp will have been created
4238 by expand_expr, it will compare unequal according to == .
4239 We must generate a copy in this case, to reach the correct number
4240 of volatile memory references. */
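  /* An illustrative case: for `volatile int *p; ... *p = *p;' the source
     and destination expand to distinct MEMs that are rtx_equal_p but not
     ==, and both are volatile, so the move is still emitted and both the
     read and the write of `*p' take place.  */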
4241
4242 if ((! rtx_equal_p (temp, target)
4243 || (temp != target && (side_effects_p (temp)
4244 || side_effects_p (target))))
4245 && TREE_CODE (exp) != ERROR_MARK
4246 && ! dont_store_target)
4247 {
4248 target = protect_from_queue (target, 1);
4249 if (GET_MODE (temp) != GET_MODE (target)
4250 && GET_MODE (temp) != VOIDmode)
4251 {
4252 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4253 if (dont_return_target)
4254 {
4255 /* In this case, we will return TEMP,
4256 so make sure it has the proper mode.
4257 But don't forget to store the value into TARGET. */
4258 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4259 emit_move_insn (target, temp);
4260 }
4261 else
4262 convert_move (target, temp, unsignedp);
4263 }
4264
4265 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4266 {
4267 /* Handle copying a string constant into an array.
4268 The string constant may be shorter than the array.
4269 So copy just the string's actual length, and clear the rest. */
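	  /* E.g. (illustrative): for `char buf[8] = "abc";' the STRING_CST
	     supplies 4 bytes ("abc" plus the terminating NUL), so 4 bytes
	     are block-copied and the remaining 4 bytes of BUF are cleared
	     below.  */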
4270 rtx size;
4271 rtx addr;
4272
4273 /* Get the size of the data type of the string,
4274 which is actually the size of the target. */
4275 size = expr_size (exp);
4276 if (GET_CODE (size) == CONST_INT
4277 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4278 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4279 else
4280 {
4281 /* Compute the size of the data to copy from the string. */
4282 tree copy_size
4283 = size_binop (MIN_EXPR,
4284 make_tree (sizetype, size),
4285 size_int (TREE_STRING_LENGTH (exp)));
4286 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4287 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4288 VOIDmode, 0);
4289 rtx label = 0;
4290
4291 /* Copy that much. */
4292 emit_block_move (target, temp, copy_size_rtx,
4293 TYPE_ALIGN (TREE_TYPE (exp)));
4294
4295 /* Figure out how much is left in TARGET that we have to clear.
4296 Do all calculations in ptr_mode. */
4297
4298 addr = XEXP (target, 0);
4299 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4300
4301 if (GET_CODE (copy_size_rtx) == CONST_INT)
4302 {
4303 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4304 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4305 align = MIN (align,
4306 (unsigned int) (BITS_PER_UNIT
4307 * (INTVAL (copy_size_rtx)
4308 & - INTVAL (copy_size_rtx))));
4309 }
4310 else
4311 {
4312 addr = force_reg (ptr_mode, addr);
4313 addr = expand_binop (ptr_mode, add_optab, addr,
4314 copy_size_rtx, NULL_RTX, 0,
4315 OPTAB_LIB_WIDEN);
4316
4317 size = expand_binop (ptr_mode, sub_optab, size,
4318 copy_size_rtx, NULL_RTX, 0,
4319 OPTAB_LIB_WIDEN);
4320
4321 align = BITS_PER_UNIT;
4322 label = gen_label_rtx ();
4323 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4324 GET_MODE (size), 0, 0, label);
4325 }
4326 align = MIN (align, expr_align (copy_size));
4327
4328 if (size != const0_rtx)
4329 {
4330 rtx dest = gen_rtx_MEM (BLKmode, addr);
4331
4332 MEM_COPY_ATTRIBUTES (dest, target);
4333
4334 /* Be sure we can write on ADDR. */
4335 in_check_memory_usage = 1;
4336 if (current_function_check_memory_usage)
4337 emit_library_call (chkr_check_addr_libfunc,
4338 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4339 addr, Pmode,
4340 size, TYPE_MODE (sizetype),
4341 GEN_INT (MEMORY_USE_WO),
4342 TYPE_MODE (integer_type_node));
4343 in_check_memory_usage = 0;
4344 clear_storage (dest, size, align);
4345 }
4346
4347 if (label)
4348 emit_label (label);
4349 }
4350 }
4351 /* Handle calls that return values in multiple non-contiguous locations.
4352 The Irix 6 ABI has examples of this. */
4353 else if (GET_CODE (target) == PARALLEL)
4354 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4355 TYPE_ALIGN (TREE_TYPE (exp)));
4356 else if (GET_MODE (temp) == BLKmode)
4357 emit_block_move (target, temp, expr_size (exp),
4358 TYPE_ALIGN (TREE_TYPE (exp)));
4359 else
4360 emit_move_insn (target, temp);
4361 }
4362
4363 /* If we don't want a value, return NULL_RTX. */
4364 if (! want_value)
4365 return NULL_RTX;
4366
4367 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4368 ??? The latter test doesn't seem to make sense. */
4369 else if (dont_return_target && GET_CODE (temp) != MEM)
4370 return temp;
4371
4372 /* Return TARGET itself if it is a hard register. */
4373 else if (want_value && GET_MODE (target) != BLKmode
4374 && ! (GET_CODE (target) == REG
4375 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4376 return copy_to_reg (target);
4377
4378 else
4379 return target;
4380 }
4381 \f
4382 /* Return 1 if EXP just contains zeros. */
4383
4384 static int
4385 is_zeros_p (exp)
4386 tree exp;
4387 {
4388 tree elt;
4389
4390 switch (TREE_CODE (exp))
4391 {
4392 case CONVERT_EXPR:
4393 case NOP_EXPR:
4394 case NON_LVALUE_EXPR:
4395 return is_zeros_p (TREE_OPERAND (exp, 0));
4396
4397 case INTEGER_CST:
4398 return integer_zerop (exp);
4399
4400 case COMPLEX_CST:
4401 return
4402 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4403
4404 case REAL_CST:
4405 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4406
4407 case CONSTRUCTOR:
4408 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4409 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4410 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4411 if (! is_zeros_p (TREE_VALUE (elt)))
4412 return 0;
4413
4414 return 1;
4415
4416 default:
4417 return 0;
4418 }
4419 }
4420
4421 /* Return 1 if EXP contains mostly (3/4) zeros. */
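/* For instance (illustrative): a CONSTRUCTOR with 16 elements of which 12
   are zero satisfies 4 * 12 >= 3 * 16 and so counts as "mostly zeros";
   with only 11 zero elements (44 < 48) it would not.  */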
4422
4423 static int
4424 mostly_zeros_p (exp)
4425 tree exp;
4426 {
4427 if (TREE_CODE (exp) == CONSTRUCTOR)
4428 {
4429 int elts = 0, zeros = 0;
4430 tree elt = CONSTRUCTOR_ELTS (exp);
4431 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4432 {
4433 /* If there are no ranges of true bits, it is all zero. */
4434 return elt == NULL_TREE;
4435 }
4436 for (; elt; elt = TREE_CHAIN (elt))
4437 {
4438 /* We do not handle the case where the index is a RANGE_EXPR,
4439 so the statistic will be somewhat inaccurate.
4440 We do make a more accurate count in store_constructor itself,
4441	     and since this function is used only for nested array elements,
4442 this should be close enough. */
4443 if (mostly_zeros_p (TREE_VALUE (elt)))
4444 zeros++;
4445 elts++;
4446 }
4447
4448 return 4 * zeros >= 3 * elts;
4449 }
4450
4451 return is_zeros_p (exp);
4452 }
4453 \f
4454 /* Helper function for store_constructor.
4455 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4456 TYPE is the type of the CONSTRUCTOR, not the element type.
4457 ALIGN and CLEARED are as for store_constructor.
4458 ALIAS_SET is the alias set to use for any stores.
4459
4460 This provides a recursive shortcut back to store_constructor when it isn't
4461 necessary to go through store_field. This is so that we can pass through
4462 the cleared field to let store_constructor know that we may not have to
4463 clear a substructure if the outer structure has already been cleared. */
4464
4465 static void
4466 store_constructor_field (target, bitsize, bitpos,
4467 mode, exp, type, align, cleared, alias_set)
4468 rtx target;
4469 unsigned HOST_WIDE_INT bitsize;
4470 HOST_WIDE_INT bitpos;
4471 enum machine_mode mode;
4472 tree exp, type;
4473 unsigned int align;
4474 int cleared;
4475 int alias_set;
4476 {
4477 if (TREE_CODE (exp) == CONSTRUCTOR
4478 && bitpos % BITS_PER_UNIT == 0
4479 /* If we have a non-zero bitpos for a register target, then we just
4480 let store_field do the bitfield handling. This is unlikely to
4481	 generate unnecessary clear instructions anyway.  */
4482 && (bitpos == 0 || GET_CODE (target) == MEM))
4483 {
4484 if (bitpos != 0)
4485 target
4486 = adjust_address (target,
4487 GET_MODE (target) == BLKmode
4488 || 0 != (bitpos
4489 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4490 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4491
4492
4493 /* Show the alignment may no longer be what it was and update the alias
4494 set, if required. */
4495 if (bitpos != 0)
4496 align = MIN (align, (unsigned int) bitpos & - bitpos);
4497 if (GET_CODE (target) == MEM)
4498 set_mem_alias_set (target, alias_set);
4499
4500 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4501 }
4502 else
4503 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4504 int_size_in_bytes (type), alias_set);
4505 }
4506
4507 /* Store the value of constructor EXP into the rtx TARGET.
4508 TARGET is either a REG or a MEM.
4509 ALIGN is the maximum known alignment for TARGET.
4510 CLEARED is true if TARGET is known to have been zero'd.
4511 SIZE is the number of bytes of TARGET we are allowed to modify: this
4512 may not be the same as the size of EXP if we are assigning to a field
4513 which has been packed to exclude padding bits. */
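/* An illustrative example (hypothetical source): for an automatic variable

       struct { int a, b, c; } s = { 1, 2 };

   the CONSTRUCTOR lists fewer elements than the type has fields, so the
   RECORD_TYPE case below clears the whole object first and then stores
   the elements that are present.  */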
4514
4515 static void
4516 store_constructor (exp, target, align, cleared, size)
4517 tree exp;
4518 rtx target;
4519 unsigned int align;
4520 int cleared;
4521 HOST_WIDE_INT size;
4522 {
4523 tree type = TREE_TYPE (exp);
4524 #ifdef WORD_REGISTER_OPERATIONS
4525 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4526 #endif
4527
4528 /* We know our target cannot conflict, since safe_from_p has been called. */
4529 #if 0
4530 /* Don't try copying piece by piece into a hard register
4531 since that is vulnerable to being clobbered by EXP.
4532 Instead, construct in a pseudo register and then copy it all. */
4533 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4534 {
4535 rtx temp = gen_reg_rtx (GET_MODE (target));
4536 store_constructor (exp, temp, align, cleared, size);
4537 emit_move_insn (target, temp);
4538 return;
4539 }
4540 #endif
4541
4542 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4543 || TREE_CODE (type) == QUAL_UNION_TYPE)
4544 {
4545 register tree elt;
4546
4547 /* Inform later passes that the whole union value is dead. */
4548 if ((TREE_CODE (type) == UNION_TYPE
4549 || TREE_CODE (type) == QUAL_UNION_TYPE)
4550 && ! cleared)
4551 {
4552 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4553
4554 /* If the constructor is empty, clear the union. */
4555 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4556 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4557 }
4558
4559 /* If we are building a static constructor into a register,
4560 set the initial value as zero so we can fold the value into
4561 a constant. But if more than one register is involved,
4562 this probably loses. */
4563 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4564 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4565 {
4566 if (! cleared)
4567 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4568
4569 cleared = 1;
4570 }
4571
4572 /* If the constructor has fewer fields than the structure
4573 or if we are initializing the structure to mostly zeros,
4574 clear the whole structure first. Don't do this if TARGET is a
4575 register whose mode size isn't equal to SIZE since clear_storage
4576 can't handle this case. */
4577 else if (size > 0
4578 && ((list_length (CONSTRUCTOR_ELTS (exp))
4579 != fields_length (type))
4580 || mostly_zeros_p (exp))
4581 && (GET_CODE (target) != REG
4582 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4583 {
4584 if (! cleared)
4585 clear_storage (target, GEN_INT (size), align);
4586
4587 cleared = 1;
4588 }
4589 else if (! cleared)
4590 /* Inform later passes that the old value is dead. */
4591 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4592
4593 /* Store each element of the constructor into
4594 the corresponding field of TARGET. */
4595
4596 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4597 {
4598 register tree field = TREE_PURPOSE (elt);
4599 #ifdef WORD_REGISTER_OPERATIONS
4600 tree value = TREE_VALUE (elt);
4601 #endif
4602 register enum machine_mode mode;
4603 HOST_WIDE_INT bitsize;
4604 HOST_WIDE_INT bitpos = 0;
4605 int unsignedp;
4606 tree offset;
4607 rtx to_rtx = target;
4608
4609 /* Just ignore missing fields.
4610 We cleared the whole structure, above,
4611 if any fields are missing. */
4612 if (field == 0)
4613 continue;
4614
4615 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4616 continue;
4617
4618 if (host_integerp (DECL_SIZE (field), 1))
4619 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4620 else
4621 bitsize = -1;
4622
4623 unsignedp = TREE_UNSIGNED (field);
4624 mode = DECL_MODE (field);
4625 if (DECL_BIT_FIELD (field))
4626 mode = VOIDmode;
4627
4628 offset = DECL_FIELD_OFFSET (field);
4629 if (host_integerp (offset, 0)
4630 && host_integerp (bit_position (field), 0))
4631 {
4632 bitpos = int_bit_position (field);
4633 offset = 0;
4634 }
4635 else
4636 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4637
4638 if (offset)
4639 {
4640 rtx offset_rtx;
4641
4642 if (contains_placeholder_p (offset))
4643 offset = build (WITH_RECORD_EXPR, sizetype,
4644 offset, make_tree (TREE_TYPE (exp), target));
4645
4646 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4647 if (GET_CODE (to_rtx) != MEM)
4648 abort ();
4649
4650 if (GET_MODE (offset_rtx) != ptr_mode)
4651 {
4652 #ifdef POINTERS_EXTEND_UNSIGNED
4653 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4654 #else
4655 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4656 #endif
4657 }
4658
4659 to_rtx
4660 = change_address (to_rtx, VOIDmode,
4661 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4662 force_reg (ptr_mode,
4663 offset_rtx)));
4664 align = DECL_OFFSET_ALIGN (field);
4665 }
4666
4667 if (TREE_READONLY (field))
4668 {
4669 if (GET_CODE (to_rtx) == MEM)
4670 to_rtx = copy_rtx (to_rtx);
4671
4672 RTX_UNCHANGING_P (to_rtx) = 1;
4673 }
4674
4675 #ifdef WORD_REGISTER_OPERATIONS
4676 /* If this initializes a field that is smaller than a word, at the
4677 start of a word, try to widen it to a full word.
4678 This special case allows us to output C++ member function
4679 initializations in a form that the optimizers can understand. */
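	  /* E.g. (illustrative, assuming 32-bit words): an INTEGER_CST
	     stored into a 16-bit field at bit position 0 of a register-held
	     structure is widened here to a full-word integer store (shifted
	     to the top of the word if BYTES_BIG_ENDIAN), which avoids a
	     narrow bit-field insertion.  */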
4680 if (GET_CODE (target) == REG
4681 && bitsize < BITS_PER_WORD
4682 && bitpos % BITS_PER_WORD == 0
4683 && GET_MODE_CLASS (mode) == MODE_INT
4684 && TREE_CODE (value) == INTEGER_CST
4685 && exp_size >= 0
4686 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4687 {
4688 tree type = TREE_TYPE (value);
4689 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4690 {
4691 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4692 value = convert (type, value);
4693 }
4694 if (BYTES_BIG_ENDIAN)
4695 value
4696 = fold (build (LSHIFT_EXPR, type, value,
4697 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4698 bitsize = BITS_PER_WORD;
4699 mode = word_mode;
4700 }
4701 #endif
4702 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4703 TREE_VALUE (elt), type, align, cleared,
4704 (DECL_NONADDRESSABLE_P (field)
4705 && GET_CODE (to_rtx) == MEM)
4706 ? MEM_ALIAS_SET (to_rtx)
4707 : get_alias_set (TREE_TYPE (field)));
4708 }
4709 }
4710 else if (TREE_CODE (type) == ARRAY_TYPE)
4711 {
4712 register tree elt;
4713 register int i;
4714 int need_to_clear;
4715 tree domain = TYPE_DOMAIN (type);
4716 tree elttype = TREE_TYPE (type);
4717 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4718 && TYPE_MAX_VALUE (domain)
4719 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4720 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4721 HOST_WIDE_INT minelt = 0;
4722 HOST_WIDE_INT maxelt = 0;
4723
4724 /* If we have constant bounds for the range of the type, get them. */
4725 if (const_bounds_p)
4726 {
4727 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4728 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4729 }
4730
4731 /* If the constructor has fewer elements than the array,
4732	 clear the whole array first.  Similarly if this is a
4733	 static constructor of a non-BLKmode object.  */
4734 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4735 need_to_clear = 1;
4736 else
4737 {
4738 HOST_WIDE_INT count = 0, zero_count = 0;
4739 need_to_clear = ! const_bounds_p;
4740
4741 /* This loop is a more accurate version of the loop in
4742 mostly_zeros_p (it handles RANGE_EXPR in an index).
4743 It is also needed to check for missing elements. */
4744 for (elt = CONSTRUCTOR_ELTS (exp);
4745 elt != NULL_TREE && ! need_to_clear;
4746 elt = TREE_CHAIN (elt))
4747 {
4748 tree index = TREE_PURPOSE (elt);
4749 HOST_WIDE_INT this_node_count;
4750
4751 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4752 {
4753 tree lo_index = TREE_OPERAND (index, 0);
4754 tree hi_index = TREE_OPERAND (index, 1);
4755
4756 if (! host_integerp (lo_index, 1)
4757 || ! host_integerp (hi_index, 1))
4758 {
4759 need_to_clear = 1;
4760 break;
4761 }
4762
4763 this_node_count = (tree_low_cst (hi_index, 1)
4764 - tree_low_cst (lo_index, 1) + 1);
4765 }
4766 else
4767 this_node_count = 1;
4768
4769 count += this_node_count;
4770 if (mostly_zeros_p (TREE_VALUE (elt)))
4771 zero_count += this_node_count;
4772 }
4773
4774 /* Clear the entire array first if there are any missing elements,
4775 or if the incidence of zero elements is >= 75%. */
4776 if (! need_to_clear
4777 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4778 need_to_clear = 1;
4779 }
4780
4781 if (need_to_clear && size > 0)
4782 {
4783 if (! cleared)
4784 clear_storage (target, GEN_INT (size), align);
4785 cleared = 1;
4786 }
4787 else if (REG_P (target))
4788 /* Inform later passes that the old value is dead. */
4789 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4790
4791 /* Store each element of the constructor into
4792 the corresponding element of TARGET, determined
4793 by counting the elements. */
4794 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4795 elt;
4796 elt = TREE_CHAIN (elt), i++)
4797 {
4798 register enum machine_mode mode;
4799 HOST_WIDE_INT bitsize;
4800 HOST_WIDE_INT bitpos;
4801 int unsignedp;
4802 tree value = TREE_VALUE (elt);
4803 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4804 tree index = TREE_PURPOSE (elt);
4805 rtx xtarget = target;
4806
4807 if (cleared && is_zeros_p (value))
4808 continue;
4809
4810 unsignedp = TREE_UNSIGNED (elttype);
4811 mode = TYPE_MODE (elttype);
4812 if (mode == BLKmode)
4813 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4814 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4815 : -1);
4816 else
4817 bitsize = GET_MODE_BITSIZE (mode);
4818
4819 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4820 {
4821 tree lo_index = TREE_OPERAND (index, 0);
4822 tree hi_index = TREE_OPERAND (index, 1);
4823 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4824 struct nesting *loop;
4825 HOST_WIDE_INT lo, hi, count;
4826 tree position;
4827
4828 /* If the range is constant and "small", unroll the loop. */
4829 if (const_bounds_p
4830 && host_integerp (lo_index, 0)
4831 && host_integerp (hi_index, 0)
4832 && (lo = tree_low_cst (lo_index, 0),
4833 hi = tree_low_cst (hi_index, 0),
4834 count = hi - lo + 1,
4835 (GET_CODE (target) != MEM
4836 || count <= 2
4837 || (host_integerp (TYPE_SIZE (elttype), 1)
4838 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4839 <= 40 * 8)))))
4840 {
4841 lo -= minelt; hi -= minelt;
4842 for (; lo <= hi; lo++)
4843 {
4844 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4845 store_constructor_field
4846 (target, bitsize, bitpos, mode, value, type, align,
4847 cleared,
4848 TYPE_NONALIASED_COMPONENT (type)
4849 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4850 }
4851 }
4852 else
4853 {
4854 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4855 loop_top = gen_label_rtx ();
4856 loop_end = gen_label_rtx ();
4857
4858 unsignedp = TREE_UNSIGNED (domain);
4859
4860 index = build_decl (VAR_DECL, NULL_TREE, domain);
4861
4862 index_r
4863 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4864 &unsignedp, 0));
4865 SET_DECL_RTL (index, index_r);
4866 if (TREE_CODE (value) == SAVE_EXPR
4867 && SAVE_EXPR_RTL (value) == 0)
4868 {
4869 /* Make sure value gets expanded once before the
4870 loop. */
4871 expand_expr (value, const0_rtx, VOIDmode, 0);
4872 emit_queue ();
4873 }
4874 store_expr (lo_index, index_r, 0);
4875 loop = expand_start_loop (0);
4876
4877 /* Assign value to element index. */
4878 position
4879 = convert (ssizetype,
4880 fold (build (MINUS_EXPR, TREE_TYPE (index),
4881 index, TYPE_MIN_VALUE (domain))));
4882 position = size_binop (MULT_EXPR, position,
4883 convert (ssizetype,
4884 TYPE_SIZE_UNIT (elttype)));
4885
4886 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4887 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4888 xtarget = change_address (target, mode, addr);
4889 if (TREE_CODE (value) == CONSTRUCTOR)
4890 store_constructor (value, xtarget, align, cleared,
4891 bitsize / BITS_PER_UNIT);
4892 else
4893 store_expr (value, xtarget, 0);
4894
4895 expand_exit_loop_if_false (loop,
4896 build (LT_EXPR, integer_type_node,
4897 index, hi_index));
4898
4899 expand_increment (build (PREINCREMENT_EXPR,
4900 TREE_TYPE (index),
4901 index, integer_one_node), 0, 0);
4902 expand_end_loop ();
4903 emit_label (loop_end);
4904 }
4905 }
4906 else if ((index != 0 && ! host_integerp (index, 0))
4907 || ! host_integerp (TYPE_SIZE (elttype), 1))
4908 {
4909 rtx pos_rtx, addr;
4910 tree position;
4911
4912 if (index == 0)
4913		index = ssize_int (i);
4914
4915 if (minelt)
4916 index = convert (ssizetype,
4917				     fold (build (MINUS_EXPR, TREE_TYPE (index),
4918						  index, TYPE_MIN_VALUE (domain))));
4919
4920 position = size_binop (MULT_EXPR, index,
4921 convert (ssizetype,
4922 TYPE_SIZE_UNIT (elttype)));
4923 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4924 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4925 xtarget = change_address (target, mode, addr);
4926 store_expr (value, xtarget, 0);
4927 }
4928 else
4929 {
4930 if (index != 0)
4931 bitpos = ((tree_low_cst (index, 0) - minelt)
4932 * tree_low_cst (TYPE_SIZE (elttype), 1));
4933 else
4934 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4935
4936 store_constructor_field (target, bitsize, bitpos, mode, value,
4937 type, align, cleared,
4938 TYPE_NONALIASED_COMPONENT (type)
4939 && GET_CODE (target) == MEM
4940 ? MEM_ALIAS_SET (target) :
4941 get_alias_set (elttype));
4942
4943 }
4944 }
4945 }
4946
4947 /* Set constructor assignments. */
4948 else if (TREE_CODE (type) == SET_TYPE)
4949 {
4950 tree elt = CONSTRUCTOR_ELTS (exp);
4951 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4952 tree domain = TYPE_DOMAIN (type);
4953 tree domain_min, domain_max, bitlength;
4954
4955 /* The default implementation strategy is to extract the constant
4956 parts of the constructor, use that to initialize the target,
4957 and then "or" in whatever non-constant ranges we need in addition.
4958
4959 If a large set is all zero or all ones, it is
4960 probably better to set it using memset (if available) or bzero.
4961	 Also, if a large set has just a single range, it may be better
4962	 to first clear the whole set (using bzero/memset) and then set
4963	 the bits we want.  */
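      /* Illustrative arithmetic (hypothetical set type): for a set whose
	 domain is 3..66, bitlength is 66 - 3 + 1 = 64 bits, so with 32-bit
	 words the constant part is emitted below as two word-sized
	 stores.  */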
4964
4965 /* Check for all zeros. */
4966 if (elt == NULL_TREE && size > 0)
4967 {
4968 if (!cleared)
4969 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4970 return;
4971 }
4972
4973 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4974 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4975 bitlength = size_binop (PLUS_EXPR,
4976 size_diffop (domain_max, domain_min),
4977 ssize_int (1));
4978
4979 nbits = tree_low_cst (bitlength, 1);
4980
4981 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4982 are "complicated" (more than one range), initialize (the
4983 constant parts) by copying from a constant. */
4984 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4985 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4986 {
4987 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4988 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4989 char *bit_buffer = (char *) alloca (nbits);
4990 HOST_WIDE_INT word = 0;
4991 unsigned int bit_pos = 0;
4992 unsigned int ibit = 0;
4993 unsigned int offset = 0; /* In bytes from beginning of set. */
4994
4995 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4996 for (;;)
4997 {
4998 if (bit_buffer[ibit])
4999 {
5000 if (BYTES_BIG_ENDIAN)
5001 word |= (1 << (set_word_size - 1 - bit_pos));
5002 else
5003 word |= 1 << bit_pos;
5004 }
5005
5006 bit_pos++; ibit++;
5007 if (bit_pos >= set_word_size || ibit == nbits)
5008 {
5009 if (word != 0 || ! cleared)
5010 {
5011 rtx datum = GEN_INT (word);
5012 rtx to_rtx;
5013
5014 /* The assumption here is that it is safe to use
5015 XEXP if the set is multi-word, but not if
5016 it's single-word. */
5017 if (GET_CODE (target) == MEM)
5018 to_rtx = adjust_address (target, mode, offset);
5019 else if (offset == 0)
5020 to_rtx = target;
5021 else
5022 abort ();
5023 emit_move_insn (to_rtx, datum);
5024 }
5025
5026 if (ibit == nbits)
5027 break;
5028 word = 0;
5029 bit_pos = 0;
5030 offset += set_word_size / BITS_PER_UNIT;
5031 }
5032 }
5033 }
5034 else if (!cleared)
5035 /* Don't bother clearing storage if the set is all ones. */
5036 if (TREE_CHAIN (elt) != NULL_TREE
5037 || (TREE_PURPOSE (elt) == NULL_TREE
5038 ? nbits != 1
5039 : ( ! host_integerp (TREE_VALUE (elt), 0)
5040 || ! host_integerp (TREE_PURPOSE (elt), 0)
5041 || (tree_low_cst (TREE_VALUE (elt), 0)
5042 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5043 != (HOST_WIDE_INT) nbits))))
5044 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5045
5046 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5047 {
5048 /* Start of range of element or NULL. */
5049 tree startbit = TREE_PURPOSE (elt);
5050 /* End of range of element, or element value. */
5051 tree endbit = TREE_VALUE (elt);
5052 #ifdef TARGET_MEM_FUNCTIONS
5053 HOST_WIDE_INT startb, endb;
5054 #endif
5055 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5056
5057 bitlength_rtx = expand_expr (bitlength,
5058 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5059
5060 /* Handle non-range tuple element like [ expr ]. */
5061 if (startbit == NULL_TREE)
5062 {
5063 startbit = save_expr (endbit);
5064 endbit = startbit;
5065 }
5066
5067 startbit = convert (sizetype, startbit);
5068 endbit = convert (sizetype, endbit);
5069 if (! integer_zerop (domain_min))
5070 {
5071 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5072 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5073 }
5074 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5075 EXPAND_CONST_ADDRESS);
5076 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5077 EXPAND_CONST_ADDRESS);
5078
5079 if (REG_P (target))
5080 {
5081 targetx
5082 = assign_temp
5083 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5084 TYPE_QUAL_CONST)),
5085 0, 1, 1);
5086 emit_move_insn (targetx, target);
5087 }
5088
5089 else if (GET_CODE (target) == MEM)
5090 targetx = target;
5091 else
5092 abort ();
5093
5094 #ifdef TARGET_MEM_FUNCTIONS
5095 /* Optimization: If startbit and endbit are
5096 constants divisible by BITS_PER_UNIT,
5097 call memset instead. */
5098 if (TREE_CODE (startbit) == INTEGER_CST
5099 && TREE_CODE (endbit) == INTEGER_CST
5100 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5101 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5102 {
5103 emit_library_call (memset_libfunc, LCT_NORMAL,
5104 VOIDmode, 3,
5105 plus_constant (XEXP (targetx, 0),
5106 startb / BITS_PER_UNIT),
5107 Pmode,
5108 constm1_rtx, TYPE_MODE (integer_type_node),
5109 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5110 TYPE_MODE (sizetype));
5111 }
5112 else
5113 #endif
5114 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5115 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5116 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5117 startbit_rtx, TYPE_MODE (sizetype),
5118 endbit_rtx, TYPE_MODE (sizetype));
5119
5120 if (REG_P (target))
5121 emit_move_insn (target, targetx);
5122 }
5123 }
5124
5125 else
5126 abort ();
5127 }
5128
5129 /* Store the value of EXP (an expression tree)
5130 into a subfield of TARGET which has mode MODE and occupies
5131 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5132 If MODE is VOIDmode, it means that we are storing into a bit-field.
5133
5134 If VALUE_MODE is VOIDmode, return nothing in particular.
5135 UNSIGNEDP is not used in this case.
5136
5137 Otherwise, return an rtx for the value stored. This rtx
5138 has mode VALUE_MODE if that is convenient to do.
5139 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5140
5141 ALIGN is the alignment that TARGET is known to have.
5142 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5143
5144 ALIAS_SET is the alias set for the destination. This value will
5145 (in general) be different from that for TARGET, since TARGET is a
5146 reference to the containing structure. */
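/* Illustrative example (hypothetical source): for

       struct { unsigned int f : 5; } x;
       x.f = 9;

   store_field is called with BITSIZE 5, BITPOS 0 and MODE == VOIDmode,
   so the value is inserted with store_bit_field; for an ordinary aligned
   `int' member, MODE would be SImode on a typical 32-bit target and the
   store becomes a plain move into an adjusted MEM.  */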
5147
5148 static rtx
5149 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5150 unsignedp, align, total_size, alias_set)
5151 rtx target;
5152 HOST_WIDE_INT bitsize;
5153 HOST_WIDE_INT bitpos;
5154 enum machine_mode mode;
5155 tree exp;
5156 enum machine_mode value_mode;
5157 int unsignedp;
5158 unsigned int align;
5159 HOST_WIDE_INT total_size;
5160 int alias_set;
5161 {
5162 HOST_WIDE_INT width_mask = 0;
5163
5164 if (TREE_CODE (exp) == ERROR_MARK)
5165 return const0_rtx;
5166
5167 /* If we have nothing to store, do nothing unless the expression has
5168 side-effects. */
5169 if (bitsize == 0)
5170 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5171
5172 if (bitsize < HOST_BITS_PER_WIDE_INT)
5173 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
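  /* E.g. for a 5-bit field this leaves width_mask == 0x1f; it is used
     below to re-extract the stored value without re-reading the bit-field
     from memory.  */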
5174
5175 /* If we are storing into an unaligned field of an aligned union that is
5176 in a register, we may have the mode of TARGET being an integer mode but
5177 MODE == BLKmode. In that case, get an aligned object whose size and
5178 alignment are the same as TARGET and store TARGET into it (we can avoid
5179 the store if the field being stored is the entire width of TARGET). Then
5180 call ourselves recursively to store the field into a BLKmode version of
5181 that object. Finally, load from the object into TARGET. This is not
5182 very efficient in general, but should only be slightly more expensive
5183 than the otherwise-required unaligned accesses. Perhaps this can be
5184 cleaned up later. */
5185
5186 if (mode == BLKmode
5187 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5188 {
5189 rtx object
5190 = assign_temp
5191 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5192 TYPE_QUAL_CONST),
5193 0, 1, 1);
5194 rtx blk_object = copy_rtx (object);
5195
5196 PUT_MODE (blk_object, BLKmode);
5197
5198 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5199 emit_move_insn (object, target);
5200
5201 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5202 align, total_size, alias_set);
5203
5204 /* Even though we aren't returning target, we need to
5205 give it the updated value. */
5206 emit_move_insn (target, object);
5207
5208 return blk_object;
5209 }
5210
5211 if (GET_CODE (target) == CONCAT)
5212 {
5213 /* We're storing into a struct containing a single __complex. */
5214
5215 if (bitpos != 0)
5216 abort ();
5217 return store_expr (exp, target, 0);
5218 }
5219
5220 /* If the structure is in a register or if the component
5221 is a bit field, we cannot use addressing to access it.
5222 Use bit-field techniques or SUBREG to store in it. */
5223
5224 if (mode == VOIDmode
5225 || (mode != BLKmode && ! direct_store[(int) mode]
5226 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5227 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5228 || GET_CODE (target) == REG
5229 || GET_CODE (target) == SUBREG
5230 /* If the field isn't aligned enough to store as an ordinary memref,
5231 store it as a bit field. */
5232 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5233 && (align < GET_MODE_ALIGNMENT (mode)
5234 || bitpos % GET_MODE_ALIGNMENT (mode)))
5235 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5236 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5237 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5238 /* If the RHS and field are a constant size and the size of the
5239 RHS isn't the same size as the bitfield, we must use bitfield
5240 operations. */
5241 || (bitsize >= 0
5242 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5243 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5244 {
5245 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5246
5247 /* If BITSIZE is narrower than the size of the type of EXP
5248 we will be narrowing TEMP. Normally, what's wanted are the
5249 low-order bits. However, if EXP's type is a record and this is
5250	 a big-endian machine, we want the upper BITSIZE bits.  */
5251 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5252 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5253 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5254 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5255 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5256 - bitsize),
5257 temp, 1);
5258
5259 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5260 MODE. */
5261 if (mode != VOIDmode && mode != BLKmode
5262 && mode != TYPE_MODE (TREE_TYPE (exp)))
5263 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5264
5265 /* If the modes of TARGET and TEMP are both BLKmode, both
5266 must be in memory and BITPOS must be aligned on a byte
5267 boundary. If so, we simply do a block copy. */
5268 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5269 {
5270 unsigned int exp_align = expr_align (exp);
5271
5272 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5273 || bitpos % BITS_PER_UNIT != 0)
5274 abort ();
5275
5276 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5277
5278 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5279 align = MIN (exp_align, align);
5280
5281 /* Find an alignment that is consistent with the bit position. */
5282 while ((bitpos % align) != 0)
5283 align >>= 1;
5284
5285 emit_block_move (target, temp,
5286 bitsize == -1 ? expr_size (exp)
5287 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5288 / BITS_PER_UNIT),
5289 align);
5290
5291 return value_mode == VOIDmode ? const0_rtx : target;
5292 }
5293
5294 /* Store the value in the bitfield. */
5295 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5296 if (value_mode != VOIDmode)
5297 {
5298 /* The caller wants an rtx for the value. */
5299 /* If possible, avoid refetching from the bitfield itself. */
5300 if (width_mask != 0
5301 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5302 {
5303 tree count;
5304 enum machine_mode tmode;
5305
5306 if (unsignedp)
5307 return expand_and (temp,
5308 GEN_INT
5309 (trunc_int_for_mode
5310 (width_mask,
5311 GET_MODE (temp) == VOIDmode
5312 ? value_mode
5313 : GET_MODE (temp))), NULL_RTX);
5314 tmode = GET_MODE (temp);
5315 if (tmode == VOIDmode)
5316 tmode = value_mode;
5317 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5318 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5319 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5320 }
5321 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5322 NULL_RTX, value_mode, 0, align,
5323 total_size);
5324 }
5325 return const0_rtx;
5326 }
5327 else
5328 {
5329 rtx addr = XEXP (target, 0);
5330 rtx to_rtx;
5331
5332 /* If a value is wanted, it must be the lhs;
5333 so make the address stable for multiple use. */
5334
5335 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5336 && ! CONSTANT_ADDRESS_P (addr)
5337 /* A frame-pointer reference is already stable. */
5338 && ! (GET_CODE (addr) == PLUS
5339 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5340 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5341 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5342 target = replace_equiv_address (target, copy_to_reg (addr));
5343
5344 /* Now build a reference to just the desired component. */
5345
5346 to_rtx = copy_rtx (adjust_address (target, mode,
5347 bitpos / BITS_PER_UNIT));
5348
5349 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5350 /* If the address of the structure varies, then it might be on
5351 the stack. And, stack slots may be shared across scopes.
5352 So, two different structures, of different types, can end up
5353 at the same location. We will give the structures alias set
5354 zero; here we must be careful not to give non-zero alias sets
5355 to their fields. */
5356 set_mem_alias_set (to_rtx,
5357 rtx_varies_p (addr, /*for_alias=*/0)
5358 ? 0 : alias_set);
5359
5360 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5361 }
5362 }
5363 \f
5364 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5365 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5366 codes and find the ultimate containing object, which we return.
5367
5368 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5369 bit position, and *PUNSIGNEDP to the signedness of the field.
5370 If the position of the field is variable, we store a tree
5371 giving the variable offset (in units) in *POFFSET.
5372 This offset is in addition to the bit position.
5373 If the position is not variable, we store 0 in *POFFSET.
5374 We set *PALIGNMENT to the alignment of the address that will be
5375 computed. This is the alignment of the thing we return if *POFFSET
5376 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
5377
5378 If any of the extraction expressions is volatile,
5379 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5380
5381 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5382 is a mode that can be used to access the field. In that case, *PBITSIZE
5383 is redundant.
5384
5385 If the field describes a variable-sized object, *PMODE is set to
5386 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5387 this case, but the address of the object can be found. */
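
/* Illustrative sketch (not part of the original commentary): for a
   COMPONENT_REF such as `s.f', where F is a 3-bit bit-field starting at
   bit 5 of S, this returns the tree for S and sets *PBITSIZE to 3,
   *PBITPOS to 5, *POFFSET to 0, and *PMODE to VOIDmode since F is a
   bit-field.  */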
5388
5389 tree
5390 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5391 punsignedp, pvolatilep, palignment)
5392 tree exp;
5393 HOST_WIDE_INT *pbitsize;
5394 HOST_WIDE_INT *pbitpos;
5395 tree *poffset;
5396 enum machine_mode *pmode;
5397 int *punsignedp;
5398 int *pvolatilep;
5399 unsigned int *palignment;
5400 {
5401 tree size_tree = 0;
5402 enum machine_mode mode = VOIDmode;
5403 tree offset = size_zero_node;
5404 tree bit_offset = bitsize_zero_node;
5405 unsigned int alignment = BIGGEST_ALIGNMENT;
5406 tree tem;
5407
5408 /* First get the mode, signedness, and size. We do this from just the
5409 outermost expression. */
5410 if (TREE_CODE (exp) == COMPONENT_REF)
5411 {
5412 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5413 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5414 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5415
5416 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5417 }
5418 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5419 {
5420 size_tree = TREE_OPERAND (exp, 1);
5421 *punsignedp = TREE_UNSIGNED (exp);
5422 }
5423 else
5424 {
5425 mode = TYPE_MODE (TREE_TYPE (exp));
5426 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5427
5428 if (mode == BLKmode)
5429 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5430 else
5431 *pbitsize = GET_MODE_BITSIZE (mode);
5432 }
5433
5434 if (size_tree != 0)
5435 {
5436 if (! host_integerp (size_tree, 1))
5437 mode = BLKmode, *pbitsize = -1;
5438 else
5439 *pbitsize = tree_low_cst (size_tree, 1);
5440 }
5441
5442 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5443 and find the ultimate containing object. */
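  /* Descriptive note: throughout the loop below, OFFSET accumulates the
     byte offset (possibly variable) and BIT_OFFSET the bit offset, so the
     position of the reference within the object ultimately returned is
     OFFSET * BITS_PER_UNIT + BIT_OFFSET bits.  */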
5444 while (1)
5445 {
5446 if (TREE_CODE (exp) == BIT_FIELD_REF)
5447 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5448 else if (TREE_CODE (exp) == COMPONENT_REF)
5449 {
5450 tree field = TREE_OPERAND (exp, 1);
5451 tree this_offset = DECL_FIELD_OFFSET (field);
5452
5453 /* If this field hasn't been filled in yet, don't go
5454 past it. This should only happen when folding expressions
5455 made during type construction. */
5456 if (this_offset == 0)
5457 break;
5458 else if (! TREE_CONSTANT (this_offset)
5459 && contains_placeholder_p (this_offset))
5460 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5461
5462 offset = size_binop (PLUS_EXPR, offset, this_offset);
5463 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5464 DECL_FIELD_BIT_OFFSET (field));
5465
5466 if (! host_integerp (offset, 0))
5467 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5468 }
5469
5470 else if (TREE_CODE (exp) == ARRAY_REF
5471 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5472 {
5473 tree index = TREE_OPERAND (exp, 1);
5474 tree array = TREE_OPERAND (exp, 0);
5475 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5476 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5477 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5478
5479 /* We assume all arrays have sizes that are a multiple of a byte.
5480 First subtract the lower bound, if any, in the type of the
5481 index, then convert to sizetype and multiply by the size of the
5482 array element. */
5483 if (low_bound != 0 && ! integer_zerop (low_bound))
5484 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5485 index, low_bound));
5486
5487 	    /* If the index has a self-referential type, wrap it in a
5488 	       WITH_RECORD_EXPR referring to this component ref; if the
5489 	       component size does, wrap it in one referring to the array.  */
5490 if (! TREE_CONSTANT (index)
5491 && contains_placeholder_p (index))
5492 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5493 if (! TREE_CONSTANT (unit_size)
5494 && contains_placeholder_p (unit_size))
5495 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5496
5497 offset = size_binop (PLUS_EXPR, offset,
5498 size_binop (MULT_EXPR,
5499 convert (sizetype, index),
5500 unit_size));
5501 }
5502
5503 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5504 && ! ((TREE_CODE (exp) == NOP_EXPR
5505 || TREE_CODE (exp) == CONVERT_EXPR)
5506 && (TYPE_MODE (TREE_TYPE (exp))
5507 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5508 break;
5509
5510 /* If any reference in the chain is volatile, the effect is volatile. */
5511 if (TREE_THIS_VOLATILE (exp))
5512 *pvolatilep = 1;
5513
5514 /* If the offset is non-constant already, then we can't assume any
5515 alignment more than the alignment here. */
5516 if (! TREE_CONSTANT (offset))
5517 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5518
5519 exp = TREE_OPERAND (exp, 0);
5520 }
5521
5522 if (DECL_P (exp))
5523 alignment = MIN (alignment, DECL_ALIGN (exp));
5524 else if (TREE_TYPE (exp) != 0)
5525 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5526
5527 /* If OFFSET is constant, see if we can return the whole thing as a
5528 constant bit position. Otherwise, split it up. */
5529 if (host_integerp (offset, 0)
5530 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5531 bitsize_unit_node))
5532 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5533 && host_integerp (tem, 0))
5534 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5535 else
5536 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5537
5538 *pmode = mode;
5539 *palignment = alignment;
5540 return exp;
5541 }
5542
5543 /* Subroutine of expand_expr: compute memory_usage from modifier.  */
5544
5545 static enum memory_use_mode
5546 get_memory_usage_from_modifier (modifier)
5547 enum expand_modifier modifier;
5548 {
5549 switch (modifier)
5550 {
5551 case EXPAND_NORMAL:
5552 case EXPAND_SUM:
5553 return MEMORY_USE_RO;
5554 break;
5555 case EXPAND_MEMORY_USE_WO:
5556 return MEMORY_USE_WO;
5557 break;
5558 case EXPAND_MEMORY_USE_RW:
5559 return MEMORY_USE_RW;
5560 break;
5561 case EXPAND_MEMORY_USE_DONT:
5562 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5563 MEMORY_USE_DONT, because they are modifiers to a call of
5564 expand_expr in the ADDR_EXPR case of expand_expr. */
5565 case EXPAND_CONST_ADDRESS:
5566 case EXPAND_INITIALIZER:
5567 return MEMORY_USE_DONT;
5568 case EXPAND_MEMORY_USE_BAD:
5569 default:
5570 abort ();
5571 }
5572 }
5573 \f
5574 /* Given an rtx VALUE that may contain additions and multiplications, return
5575 an equivalent value that just refers to a register, memory, or constant.
5576 This is done by generating instructions to perform the arithmetic and
5577 returning a pseudo-register containing the value.
5578
5579 The returned value may be a REG, SUBREG, MEM or constant. */
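
/* Illustrative example (not part of the original commentary): given
   something like (plus:SI (reg:SI 100) (const_int 4)), force_operand
   emits an add and typically returns a pseudo register holding the sum,
   while a value that is already a plain register, memory reference or
   constant is returned unchanged.  */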
5580
5581 rtx
5582 force_operand (value, target)
5583 rtx value, target;
5584 {
5585 register optab binoptab = 0;
5586 /* Use a temporary to force order of execution of calls to
5587 `force_operand'. */
5588 rtx tmp;
5589 register rtx op2;
5590 /* Use subtarget as the target for operand 0 of a binary operation. */
5591 register rtx subtarget = get_subtarget (target);
5592
5593 /* Check for a PIC address load. */
5594 if (flag_pic
5595 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5596 && XEXP (value, 0) == pic_offset_table_rtx
5597 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5598 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5599 || GET_CODE (XEXP (value, 1)) == CONST))
5600 {
5601 if (!subtarget)
5602 subtarget = gen_reg_rtx (GET_MODE (value));
5603 emit_move_insn (subtarget, value);
5604 return subtarget;
5605 }
5606
5607 if (GET_CODE (value) == PLUS)
5608 binoptab = add_optab;
5609 else if (GET_CODE (value) == MINUS)
5610 binoptab = sub_optab;
5611 else if (GET_CODE (value) == MULT)
5612 {
5613 op2 = XEXP (value, 1);
5614 if (!CONSTANT_P (op2)
5615 && !(GET_CODE (op2) == REG && op2 != subtarget))
5616 subtarget = 0;
5617 tmp = force_operand (XEXP (value, 0), subtarget);
5618 return expand_mult (GET_MODE (value), tmp,
5619 force_operand (op2, NULL_RTX),
5620 target, 1);
5621 }
5622
5623 if (binoptab)
5624 {
5625 op2 = XEXP (value, 1);
5626 if (!CONSTANT_P (op2)
5627 && !(GET_CODE (op2) == REG && op2 != subtarget))
5628 subtarget = 0;
5629 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5630 {
5631 binoptab = add_optab;
5632 op2 = negate_rtx (GET_MODE (value), op2);
5633 }
5634
5635 /* Check for an addition with OP2 a constant integer and our first
5636 operand a PLUS of a virtual register and something else. In that
5637 case, we want to emit the sum of the virtual register and the
5638 constant first and then add the other value. This allows virtual
5639 register instantiation to simply modify the constant rather than
5640 creating another one around this addition. */
5641 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5642 && GET_CODE (XEXP (value, 0)) == PLUS
5643 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5644 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5645 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5646 {
5647 rtx temp = expand_binop (GET_MODE (value), binoptab,
5648 XEXP (XEXP (value, 0), 0), op2,
5649 subtarget, 0, OPTAB_LIB_WIDEN);
5650 return expand_binop (GET_MODE (value), binoptab, temp,
5651 force_operand (XEXP (XEXP (value, 0), 1), 0),
5652 target, 0, OPTAB_LIB_WIDEN);
5653 }
5654
5655 tmp = force_operand (XEXP (value, 0), subtarget);
5656 return expand_binop (GET_MODE (value), binoptab, tmp,
5657 force_operand (op2, NULL_RTX),
5658 target, 0, OPTAB_LIB_WIDEN);
5659 /* We give UNSIGNEDP = 0 to expand_binop
5660 because the only operations we are expanding here are signed ones. */
5661 }
5662 return value;
5663 }
5664 \f
5665 /* Subroutine of expand_expr: return nonzero iff there is no way that
5666 EXP can reference X, which is being modified. TOP_P is nonzero if this
5667 call is going to be used to determine whether we need a temporary
5668 for EXP, as opposed to a recursive call to this function.
5669
5670 It is always safe for this routine to return zero since it merely
5671 searches for optimization opportunities. */
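
/* Illustrative note (not part of the original commentary): a nonzero
   return means a store into X cannot affect the value later computed for
   EXP, so EXP may safely be expanded with X as its target.  For instance,
   if EXP is a constant we always return 1, while a SUBREG of a hard
   register makes us conservatively return 0.  */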
5672
5673 int
5674 safe_from_p (x, exp, top_p)
5675 rtx x;
5676 tree exp;
5677 int top_p;
5678 {
5679 rtx exp_rtl = 0;
5680 int i, nops;
5681 static tree save_expr_list;
5682
5683 if (x == 0
5684 /* If EXP has varying size, we MUST use a target since we currently
5685 have no way of allocating temporaries of variable size
5686 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5687 So we assume here that something at a higher level has prevented a
5688 clash. This is somewhat bogus, but the best we can do. Only
5689 do this when X is BLKmode and when we are at the top level. */
5690 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5691 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5692 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5693 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5694 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5695 != INTEGER_CST)
5696 && GET_MODE (x) == BLKmode)
5697 /* If X is in the outgoing argument area, it is always safe. */
5698 || (GET_CODE (x) == MEM
5699 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5700 || (GET_CODE (XEXP (x, 0)) == PLUS
5701 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5702 return 1;
5703
5704 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5705 find the underlying pseudo. */
5706 if (GET_CODE (x) == SUBREG)
5707 {
5708 x = SUBREG_REG (x);
5709 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5710 return 0;
5711 }
5712
5713 /* A SAVE_EXPR might appear many times in the expression passed to the
5714 top-level safe_from_p call, and if it has a complex subexpression,
5715 examining it multiple times could result in a combinatorial explosion.
5716 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5717 with optimization took about 28 minutes to compile -- even though it was
5718 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5719 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5720 we have processed. Note that the only test of top_p was above. */
5721
5722 if (top_p)
5723 {
5724 int rtn;
5725 tree t;
5726
5727 save_expr_list = 0;
5728
5729 rtn = safe_from_p (x, exp, 0);
5730
5731 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5732 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5733
5734 return rtn;
5735 }
5736
5737 /* Now look at our tree code and possibly recurse. */
5738 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5739 {
5740 case 'd':
5741 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5742 break;
5743
5744 case 'c':
5745 return 1;
5746
5747 case 'x':
5748 if (TREE_CODE (exp) == TREE_LIST)
5749 return ((TREE_VALUE (exp) == 0
5750 || safe_from_p (x, TREE_VALUE (exp), 0))
5751 && (TREE_CHAIN (exp) == 0
5752 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5753 else if (TREE_CODE (exp) == ERROR_MARK)
5754 return 1; /* An already-visited SAVE_EXPR? */
5755 else
5756 return 0;
5757
5758 case '1':
5759 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5760
5761 case '2':
5762 case '<':
5763 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5764 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5765
5766 case 'e':
5767 case 'r':
5768 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5769 the expression. If it is set, we conflict iff we are that rtx or
5770 both are in memory. Otherwise, we check all operands of the
5771 expression recursively. */
5772
5773 switch (TREE_CODE (exp))
5774 {
5775 case ADDR_EXPR:
5776 return (staticp (TREE_OPERAND (exp, 0))
5777 || TREE_STATIC (exp)
5778 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5779
5780 case INDIRECT_REF:
5781 if (GET_CODE (x) == MEM
5782 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5783 get_alias_set (exp)))
5784 return 0;
5785 break;
5786
5787 case CALL_EXPR:
5788 /* Assume that the call will clobber all hard registers and
5789 all of memory. */
5790 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5791 || GET_CODE (x) == MEM)
5792 return 0;
5793 break;
5794
5795 case RTL_EXPR:
5796 /* If a sequence exists, we would have to scan every instruction
5797 in the sequence to see if it was safe. This is probably not
5798 worthwhile. */
5799 if (RTL_EXPR_SEQUENCE (exp))
5800 return 0;
5801
5802 exp_rtl = RTL_EXPR_RTL (exp);
5803 break;
5804
5805 case WITH_CLEANUP_EXPR:
5806 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5807 break;
5808
5809 case CLEANUP_POINT_EXPR:
5810 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5811
5812 case SAVE_EXPR:
5813 exp_rtl = SAVE_EXPR_RTL (exp);
5814 if (exp_rtl)
5815 break;
5816
5817 /* If we've already scanned this, don't do it again. Otherwise,
5818 	     mark it as scanned and record it so that the flag can be
5819 	     cleared when we are done.  */
5820 if (TREE_PRIVATE (exp))
5821 return 1;
5822
5823 TREE_PRIVATE (exp) = 1;
5824 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5825 {
5826 TREE_PRIVATE (exp) = 0;
5827 return 0;
5828 }
5829
5830 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5831 return 1;
5832
5833 case BIND_EXPR:
5834 /* The only operand we look at is operand 1. The rest aren't
5835 part of the expression. */
5836 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5837
5838 case METHOD_CALL_EXPR:
5839 /* This takes an rtx argument, but shouldn't appear here. */
5840 abort ();
5841
5842 default:
5843 break;
5844 }
5845
5846 /* If we have an rtx, we do not need to scan our operands. */
5847 if (exp_rtl)
5848 break;
5849
5850 nops = first_rtl_op (TREE_CODE (exp));
5851 for (i = 0; i < nops; i++)
5852 if (TREE_OPERAND (exp, i) != 0
5853 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5854 return 0;
5855
5856 /* If this is a language-specific tree code, it may require
5857 special handling. */
5858 if ((unsigned int) TREE_CODE (exp)
5859 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5860 && lang_safe_from_p
5861 && !(*lang_safe_from_p) (x, exp))
5862 return 0;
5863 }
5864
5865 /* If we have an rtl, find any enclosed object. Then see if we conflict
5866 with it. */
5867 if (exp_rtl)
5868 {
5869 if (GET_CODE (exp_rtl) == SUBREG)
5870 {
5871 exp_rtl = SUBREG_REG (exp_rtl);
5872 if (GET_CODE (exp_rtl) == REG
5873 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5874 return 0;
5875 }
5876
5877 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5878 are memory and they conflict. */
5879 return ! (rtx_equal_p (x, exp_rtl)
5880 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5881 && true_dependence (exp_rtl, GET_MODE (x), x,
5882 rtx_addr_varies_p)));
5883 }
5884
5885 /* If we reach here, it is safe. */
5886 return 1;
5887 }
5888
5889 /* Subroutine of expand_expr: return rtx if EXP is a
5890 variable or parameter; else return 0. */
5891
5892 static rtx
5893 var_rtx (exp)
5894 tree exp;
5895 {
5896 STRIP_NOPS (exp);
5897 switch (TREE_CODE (exp))
5898 {
5899 case PARM_DECL:
5900 case VAR_DECL:
5901 return DECL_RTL (exp);
5902 default:
5903 return 0;
5904 }
5905 }
5906
5907 #ifdef MAX_INTEGER_COMPUTATION_MODE
5908
5909 void
5910 check_max_integer_computation_mode (exp)
5911 tree exp;
5912 {
5913 enum tree_code code;
5914 enum machine_mode mode;
5915
5916 /* Strip any NOPs that don't change the mode. */
5917 STRIP_NOPS (exp);
5918 code = TREE_CODE (exp);
5919
5920 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5921 if (code == NOP_EXPR
5922 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5923 return;
5924
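  /* Descriptive note (not in the original sources): integer machine modes
     are declared in order of increasing width, so comparing MODE against
     MAX_INTEGER_COMPUTATION_MODE with `>' below asks whether the mode is
     wider than the widest supported integer computation mode.  */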
5925 /* First check the type of the overall operation. We need only look at
5926 unary, binary and relational operations. */
5927 if (TREE_CODE_CLASS (code) == '1'
5928 || TREE_CODE_CLASS (code) == '2'
5929 || TREE_CODE_CLASS (code) == '<')
5930 {
5931 mode = TYPE_MODE (TREE_TYPE (exp));
5932 if (GET_MODE_CLASS (mode) == MODE_INT
5933 && mode > MAX_INTEGER_COMPUTATION_MODE)
5934 internal_error ("unsupported wide integer operation");
5935 }
5936
5937 /* Check operand of a unary op. */
5938 if (TREE_CODE_CLASS (code) == '1')
5939 {
5940 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5941 if (GET_MODE_CLASS (mode) == MODE_INT
5942 && mode > MAX_INTEGER_COMPUTATION_MODE)
5943 internal_error ("unsupported wide integer operation");
5944 }
5945
5946 /* Check operands of a binary/comparison op. */
5947 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5948 {
5949 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5950 if (GET_MODE_CLASS (mode) == MODE_INT
5951 && mode > MAX_INTEGER_COMPUTATION_MODE)
5952 internal_error ("unsupported wide integer operation");
5953
5954 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5955 if (GET_MODE_CLASS (mode) == MODE_INT
5956 && mode > MAX_INTEGER_COMPUTATION_MODE)
5957 internal_error ("unsupported wide integer operation");
5958 }
5959 }
5960 #endif
5961 \f
5962 /* Return an object on the placeholder list that matches EXP, a
5963 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5964 PLACEHOLDER_EXPR or a pointer type to it. For further information,
5965 see tree.def. If no such object is found, abort. If PLIST is nonzero,
5966    it is the location in which to store a pointer to the entry of the
5967    placeholder list at which the object was found.  */
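
/* Illustrative note (not from the original commentary): placeholder_list
   is pushed by the WITH_RECORD_EXPR case of expand_expr, so a
   PLACEHOLDER_EXPR occurring in, e.g., the size expression of a
   variable-sized record type is resolved here to the particular object of
   that record type currently being operated on.  */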
5968
5969 tree
5970 find_placeholder (exp, plist)
5971 tree exp;
5972 tree *plist;
5973 {
5974 tree type = TREE_TYPE (exp);
5975 tree placeholder_expr;
5976
5977 for (placeholder_expr = placeholder_list; placeholder_expr != 0;
5978 placeholder_expr = TREE_CHAIN (placeholder_expr))
5979 {
5980 tree need_type = TYPE_MAIN_VARIANT (type);
5981 tree elt;
5982
5983 /* Find the outermost reference that is of the type we want. If none,
5984 see if any object has a type that is a pointer to the type we
5985 want. */
5986 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5987 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5988 || TREE_CODE (elt) == COND_EXPR)
5989 ? TREE_OPERAND (elt, 1)
5990 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5991 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5992 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5993 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5994 ? TREE_OPERAND (elt, 0) : 0))
5995 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5996 {
5997 if (plist)
5998 *plist = placeholder_expr;
5999 return elt;
6000 }
6001
6002 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6003 elt
6004 = ((TREE_CODE (elt) == COMPOUND_EXPR
6005 || TREE_CODE (elt) == COND_EXPR)
6006 ? TREE_OPERAND (elt, 1)
6007 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6008 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6009 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6010 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6011 ? TREE_OPERAND (elt, 0) : 0))
6012 if (POINTER_TYPE_P (TREE_TYPE (elt))
6013 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6014 == need_type))
6015 {
6016 if (plist)
6017 *plist = placeholder_expr;
6018 return build1 (INDIRECT_REF, need_type, elt);
6019 }
6020 }
6021
6022 abort ();
6023 }
6024 \f
6025 /* expand_expr: generate code for computing expression EXP.
6026 An rtx for the computed value is returned. The value is never null.
6027 In the case of a void EXP, const0_rtx is returned.
6028
6029 The value may be stored in TARGET if TARGET is nonzero.
6030 TARGET is just a suggestion; callers must assume that
6031 the rtx returned may not be the same as TARGET.
6032
6033 If TARGET is CONST0_RTX, it means that the value will be ignored.
6034
6035 If TMODE is not VOIDmode, it suggests generating the
6036 result in mode TMODE. But this is done only when convenient.
6037    Otherwise, TMODE is ignored and the value is generated in its natural mode.
6038 TMODE is just a suggestion; callers must assume that
6039 the rtx returned may not have mode TMODE.
6040
6041 Note that TARGET may have neither TMODE nor MODE. In that case, it
6042 probably will not be used.
6043
6044 If MODIFIER is EXPAND_SUM then when EXP is an addition
6045 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6046 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6047 products as above, or REG or MEM, or constant.
6048 Ordinarily in such cases we would output mul or add instructions
6049 and then return a pseudo reg containing the sum.
6050
6051 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6052 it also marks a label as absolutely required (it can't be dead).
6053 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6054 This is used for outputting expressions used in initializers.
6055
6056 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6057 with a constant address even if that address is not normally legitimate.
6058 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
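
/* Illustrative usage (not part of the original commentary): most callers
   simply do something like

	rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   letting expand_expr choose both the target and the mode; 0 here is
   EXPAND_NORMAL.  */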
6059
6060 rtx
6061 expand_expr (exp, target, tmode, modifier)
6062 register tree exp;
6063 rtx target;
6064 enum machine_mode tmode;
6065 enum expand_modifier modifier;
6066 {
6067 register rtx op0, op1, temp;
6068 tree type = TREE_TYPE (exp);
6069 int unsignedp = TREE_UNSIGNED (type);
6070 register enum machine_mode mode;
6071 register enum tree_code code = TREE_CODE (exp);
6072 optab this_optab;
6073 rtx subtarget, original_target;
6074 int ignore;
6075 tree context;
6076 /* Used by check-memory-usage to make modifier read only. */
6077 enum expand_modifier ro_modifier;
6078
6079 /* Handle ERROR_MARK before anybody tries to access its type. */
6080 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6081 {
6082 op0 = CONST0_RTX (tmode);
6083 if (op0 != 0)
6084 return op0;
6085 return const0_rtx;
6086 }
6087
6088 mode = TYPE_MODE (type);
6089 /* Use subtarget as the target for operand 0 of a binary operation. */
6090 subtarget = get_subtarget (target);
6091 original_target = target;
6092 ignore = (target == const0_rtx
6093 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6094 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6095 || code == COND_EXPR)
6096 && TREE_CODE (type) == VOID_TYPE));
6097
6098 /* Make a read-only version of the modifier. */
6099 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6100 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6101 ro_modifier = modifier;
6102 else
6103 ro_modifier = EXPAND_NORMAL;
6104
6105 /* If we are going to ignore this result, we need only do something
6106 if there is a side-effect somewhere in the expression. If there
6107 is, short-circuit the most common cases here. Note that we must
6108 not call expand_expr with anything but const0_rtx in case this
6109 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6110
6111 if (ignore)
6112 {
6113 if (! TREE_SIDE_EFFECTS (exp))
6114 return const0_rtx;
6115
6116 /* Ensure we reference a volatile object even if value is ignored, but
6117 don't do this if all we are doing is taking its address. */
6118 if (TREE_THIS_VOLATILE (exp)
6119 && TREE_CODE (exp) != FUNCTION_DECL
6120 && mode != VOIDmode && mode != BLKmode
6121 && modifier != EXPAND_CONST_ADDRESS)
6122 {
6123 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6124 if (GET_CODE (temp) == MEM)
6125 temp = copy_to_reg (temp);
6126 return const0_rtx;
6127 }
6128
6129 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6130 || code == INDIRECT_REF || code == BUFFER_REF)
6131 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6132 VOIDmode, ro_modifier);
6133 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6134 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6135 {
6136 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6137 ro_modifier);
6138 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6139 ro_modifier);
6140 return const0_rtx;
6141 }
6142 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6143 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6144 /* If the second operand has no side effects, just evaluate
6145 the first. */
6146 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6147 VOIDmode, ro_modifier);
6148 else if (code == BIT_FIELD_REF)
6149 {
6150 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6151 ro_modifier);
6152 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6153 ro_modifier);
6154 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6155 ro_modifier);
6156 return const0_rtx;
6157 }
6159 target = 0;
6160 }
6161
6162 #ifdef MAX_INTEGER_COMPUTATION_MODE
6163 /* Only check stuff here if the mode we want is different from the mode
6164      of the expression; if it's the same, check_max_integer_computation_mode
6165 will handle it. Do we really need to check this stuff at all? */
6166
6167 if (target
6168 && GET_MODE (target) != mode
6169 && TREE_CODE (exp) != INTEGER_CST
6170 && TREE_CODE (exp) != PARM_DECL
6171 && TREE_CODE (exp) != ARRAY_REF
6172 && TREE_CODE (exp) != ARRAY_RANGE_REF
6173 && TREE_CODE (exp) != COMPONENT_REF
6174 && TREE_CODE (exp) != BIT_FIELD_REF
6175 && TREE_CODE (exp) != INDIRECT_REF
6176 && TREE_CODE (exp) != CALL_EXPR
6177 && TREE_CODE (exp) != VAR_DECL
6178 && TREE_CODE (exp) != RTL_EXPR)
6179 {
6180 enum machine_mode mode = GET_MODE (target);
6181
6182 if (GET_MODE_CLASS (mode) == MODE_INT
6183 && mode > MAX_INTEGER_COMPUTATION_MODE)
6184 internal_error ("unsupported wide integer operation");
6185 }
6186
6187 if (tmode != mode
6188 && TREE_CODE (exp) != INTEGER_CST
6189 && TREE_CODE (exp) != PARM_DECL
6190 && TREE_CODE (exp) != ARRAY_REF
6191 && TREE_CODE (exp) != ARRAY_RANGE_REF
6192 && TREE_CODE (exp) != COMPONENT_REF
6193 && TREE_CODE (exp) != BIT_FIELD_REF
6194 && TREE_CODE (exp) != INDIRECT_REF
6195 && TREE_CODE (exp) != VAR_DECL
6196 && TREE_CODE (exp) != CALL_EXPR
6197 && TREE_CODE (exp) != RTL_EXPR
6198 && GET_MODE_CLASS (tmode) == MODE_INT
6199 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6200 internal_error ("unsupported wide integer operation");
6201
6202 check_max_integer_computation_mode (exp);
6203 #endif
6204
6205 /* If will do cse, generate all results into pseudo registers
6206 since 1) that allows cse to find more things
6207 and 2) otherwise cse could produce an insn the machine
6208 cannot support. */
6209
6210 if (! cse_not_expected && mode != BLKmode && target
6211 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6212 target = subtarget;
6213
6214 switch (code)
6215 {
6216 case LABEL_DECL:
6217 {
6218 tree function = decl_function_context (exp);
6219 /* Handle using a label in a containing function. */
6220 if (function != current_function_decl
6221 && function != inline_function_decl && function != 0)
6222 {
6223 struct function *p = find_function_data (function);
6224 p->expr->x_forced_labels
6225 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6226 p->expr->x_forced_labels);
6227 }
6228 else
6229 {
6230 if (modifier == EXPAND_INITIALIZER)
6231 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6232 label_rtx (exp),
6233 forced_labels);
6234 }
6235
6236 temp = gen_rtx_MEM (FUNCTION_MODE,
6237 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6238 if (function != current_function_decl
6239 && function != inline_function_decl && function != 0)
6240 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6241 return temp;
6242 }
6243
6244 case PARM_DECL:
6245 if (DECL_RTL (exp) == 0)
6246 {
6247 error_with_decl (exp, "prior parameter's size depends on `%s'");
6248 return CONST0_RTX (mode);
6249 }
6250
6251 /* ... fall through ... */
6252
6253 case VAR_DECL:
6254 /* If a static var's type was incomplete when the decl was written,
6255 but the type is complete now, lay out the decl now. */
6256 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6257 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6258 {
6259 layout_decl (exp, 0);
6260 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6261 }
6262
6263 /* Although static-storage variables start off initialized, according to
6264 ANSI C, a memcpy could overwrite them with uninitialized values. So
6265 we check them too. This also lets us check for read-only variables
6266 accessed via a non-const declaration, in case it won't be detected
6267 any other way (e.g., in an embedded system or OS kernel without
6268 memory protection).
6269
6270 Aggregates are not checked here; they're handled elsewhere. */
6271 if (cfun && current_function_check_memory_usage
6272 && code == VAR_DECL
6273 && GET_CODE (DECL_RTL (exp)) == MEM
6274 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6275 {
6276 enum memory_use_mode memory_usage;
6277 memory_usage = get_memory_usage_from_modifier (modifier);
6278
6279 in_check_memory_usage = 1;
6280 if (memory_usage != MEMORY_USE_DONT)
6281 emit_library_call (chkr_check_addr_libfunc,
6282 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6283 XEXP (DECL_RTL (exp), 0), Pmode,
6284 GEN_INT (int_size_in_bytes (type)),
6285 TYPE_MODE (sizetype),
6286 GEN_INT (memory_usage),
6287 TYPE_MODE (integer_type_node));
6288 in_check_memory_usage = 0;
6289 }
6290
6291 /* ... fall through ... */
6292
6293 case FUNCTION_DECL:
6294 case RESULT_DECL:
6295 if (DECL_RTL (exp) == 0)
6296 abort ();
6297
6298       /* Ensure the variable is marked as used even if it doesn't go
6299 	 through a parser.  If it hasn't been used yet, write out an
6300 	 external definition.  */
6301 if (! TREE_USED (exp))
6302 {
6303 assemble_external (exp);
6304 TREE_USED (exp) = 1;
6305 }
6306
6307 /* Show we haven't gotten RTL for this yet. */
6308 temp = 0;
6309
6310 /* Handle variables inherited from containing functions. */
6311 context = decl_function_context (exp);
6312
6313 /* We treat inline_function_decl as an alias for the current function
6314 because that is the inline function whose vars, types, etc.
6315 are being merged into the current function.
6316 See expand_inline_function. */
6317
6318 if (context != 0 && context != current_function_decl
6319 && context != inline_function_decl
6320 /* If var is static, we don't need a static chain to access it. */
6321 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6322 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6323 {
6324 rtx addr;
6325
6326 /* Mark as non-local and addressable. */
6327 DECL_NONLOCAL (exp) = 1;
6328 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6329 abort ();
6330 mark_addressable (exp);
6331 if (GET_CODE (DECL_RTL (exp)) != MEM)
6332 abort ();
6333 addr = XEXP (DECL_RTL (exp), 0);
6334 if (GET_CODE (addr) == MEM)
6335 addr
6336 = replace_equiv_address (addr,
6337 fix_lexical_addr (XEXP (addr, 0), exp));
6338 else
6339 addr = fix_lexical_addr (addr, exp);
6340
6341 temp = replace_equiv_address (DECL_RTL (exp), addr);
6342 }
6343
6344 /* This is the case of an array whose size is to be determined
6345 from its initializer, while the initializer is still being parsed.
6346 See expand_decl. */
6347
6348 else if (GET_CODE (DECL_RTL (exp)) == MEM
6349 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6350 temp = validize_mem (DECL_RTL (exp));
6351
6352 /* If DECL_RTL is memory, we are in the normal case and either
6353 the address is not valid or it is not a register and -fforce-addr
6354 is specified, get the address into a register. */
6355
6356 else if (GET_CODE (DECL_RTL (exp)) == MEM
6357 && modifier != EXPAND_CONST_ADDRESS
6358 && modifier != EXPAND_SUM
6359 && modifier != EXPAND_INITIALIZER
6360 && (! memory_address_p (DECL_MODE (exp),
6361 XEXP (DECL_RTL (exp), 0))
6362 || (flag_force_addr
6363 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6364 temp = replace_equiv_address (DECL_RTL (exp),
6365 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6366
6367 /* If we got something, return it. But first, set the alignment
6368 if the address is a register. */
6369 if (temp != 0)
6370 {
6371 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6372 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6373
6374 return temp;
6375 }
6376
6377 /* If the mode of DECL_RTL does not match that of the decl, it
6378 must be a promoted value. We return a SUBREG of the wanted mode,
6379 but mark it so that we know that it was already extended. */
6380
6381 if (GET_CODE (DECL_RTL (exp)) == REG
6382 && GET_MODE (DECL_RTL (exp)) != mode)
6383 {
6384 /* Get the signedness used for this variable. Ensure we get the
6385 same mode we got when the variable was declared. */
6386 if (GET_MODE (DECL_RTL (exp))
6387 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6388 abort ();
6389
6390 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6391 SUBREG_PROMOTED_VAR_P (temp) = 1;
6392 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6393 return temp;
6394 }
6395
6396 return DECL_RTL (exp);
6397
6398 case INTEGER_CST:
6399 return immed_double_const (TREE_INT_CST_LOW (exp),
6400 TREE_INT_CST_HIGH (exp), mode);
6401
6402 case CONST_DECL:
6403 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6404 EXPAND_MEMORY_USE_BAD);
6405
6406 case REAL_CST:
6407 /* If optimized, generate immediate CONST_DOUBLE
6408 which will be turned into memory by reload if necessary.
6409
6410 We used to force a register so that loop.c could see it. But
6411 this does not allow gen_* patterns to perform optimizations with
6412 the constants. It also produces two insns in cases like "x = 1.0;".
6413 On most machines, floating-point constants are not permitted in
6414 many insns, so we'd end up copying it to a register in any case.
6415
6416 Now, we do the copying in expand_binop, if appropriate. */
6417 return immed_real_const (exp);
6418
6419 case COMPLEX_CST:
6420 case STRING_CST:
6421 if (! TREE_CST_RTL (exp))
6422 output_constant_def (exp, 1);
6423
6424 /* TREE_CST_RTL probably contains a constant address.
6425 On RISC machines where a constant address isn't valid,
6426 make some insns to get that address into a register. */
6427 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6428 && modifier != EXPAND_CONST_ADDRESS
6429 && modifier != EXPAND_INITIALIZER
6430 && modifier != EXPAND_SUM
6431 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6432 || (flag_force_addr
6433 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6434 return replace_equiv_address (TREE_CST_RTL (exp),
6435 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6436 return TREE_CST_RTL (exp);
6437
6438 case EXPR_WITH_FILE_LOCATION:
6439 {
6440 rtx to_return;
6441 const char *saved_input_filename = input_filename;
6442 int saved_lineno = lineno;
6443 input_filename = EXPR_WFL_FILENAME (exp);
6444 lineno = EXPR_WFL_LINENO (exp);
6445 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6446 emit_line_note (input_filename, lineno);
6447 /* Possibly avoid switching back and forth here. */
6448 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6449 input_filename = saved_input_filename;
6450 lineno = saved_lineno;
6451 return to_return;
6452 }
6453
6454 case SAVE_EXPR:
6455 context = decl_function_context (exp);
6456
6457 /* If this SAVE_EXPR was at global context, assume we are an
6458 initialization function and move it into our context. */
6459 if (context == 0)
6460 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6461
6462 /* We treat inline_function_decl as an alias for the current function
6463 because that is the inline function whose vars, types, etc.
6464 are being merged into the current function.
6465 See expand_inline_function. */
6466 if (context == current_function_decl || context == inline_function_decl)
6467 context = 0;
6468
6469 /* If this is non-local, handle it. */
6470 if (context)
6471 {
6472 /* The following call just exists to abort if the context is
6473 not of a containing function. */
6474 find_function_data (context);
6475
6476 temp = SAVE_EXPR_RTL (exp);
6477 if (temp && GET_CODE (temp) == REG)
6478 {
6479 put_var_into_stack (exp);
6480 temp = SAVE_EXPR_RTL (exp);
6481 }
6482 if (temp == 0 || GET_CODE (temp) != MEM)
6483 abort ();
6484 return
6485 replace_equiv_address (temp,
6486 fix_lexical_addr (XEXP (temp, 0), exp));
6487 }
6488 if (SAVE_EXPR_RTL (exp) == 0)
6489 {
6490 if (mode == VOIDmode)
6491 temp = const0_rtx;
6492 else
6493 temp = assign_temp (build_qualified_type (type,
6494 (TYPE_QUALS (type)
6495 | TYPE_QUAL_CONST)),
6496 3, 0, 0);
6497
6498 SAVE_EXPR_RTL (exp) = temp;
6499 if (!optimize && GET_CODE (temp) == REG)
6500 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6501 save_expr_regs);
6502
6503 /* If the mode of TEMP does not match that of the expression, it
6504 must be a promoted value. We pass store_expr a SUBREG of the
6505 wanted mode but mark it so that we know that it was already
6506 extended. Note that `unsignedp' was modified above in
6507 this case. */
6508
6509 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6510 {
6511 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6512 SUBREG_PROMOTED_VAR_P (temp) = 1;
6513 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6514 }
6515
6516 if (temp == const0_rtx)
6517 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6518 EXPAND_MEMORY_USE_BAD);
6519 else
6520 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6521
6522 TREE_USED (exp) = 1;
6523 }
6524
6525 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6526 must be a promoted value. We return a SUBREG of the wanted mode,
6527 but mark it so that we know that it was already extended. */
6528
6529 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6530 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6531 {
6532 /* Compute the signedness and make the proper SUBREG. */
6533 promote_mode (type, mode, &unsignedp, 0);
6534 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6535 SUBREG_PROMOTED_VAR_P (temp) = 1;
6536 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6537 return temp;
6538 }
6539
6540 return SAVE_EXPR_RTL (exp);
6541
6542 case UNSAVE_EXPR:
6543 {
6544 rtx temp;
6545 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6546 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6547 return temp;
6548 }
6549
6550 case PLACEHOLDER_EXPR:
6551 {
6552 tree old_list = placeholder_list;
6553 tree placeholder_expr;
6554
6555 exp = find_placeholder (exp, &placeholder_expr);
6556 placeholder_list = TREE_CHAIN (placeholder_expr);
6557 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6558 placeholder_list = old_list;
6559 return temp;
6560 }
6561
6562 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6563 abort ();
6564
6565 case WITH_RECORD_EXPR:
6566 /* Put the object on the placeholder list, expand our first operand,
6567 and pop the list. */
6568 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6569 placeholder_list);
6570 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6571 tmode, ro_modifier);
6572 placeholder_list = TREE_CHAIN (placeholder_list);
6573 return target;
6574
6575 case GOTO_EXPR:
6576 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6577 expand_goto (TREE_OPERAND (exp, 0));
6578 else
6579 expand_computed_goto (TREE_OPERAND (exp, 0));
6580 return const0_rtx;
6581
6582 case EXIT_EXPR:
6583 expand_exit_loop_if_false (NULL,
6584 invert_truthvalue (TREE_OPERAND (exp, 0)));
6585 return const0_rtx;
6586
6587 case LABELED_BLOCK_EXPR:
6588 if (LABELED_BLOCK_BODY (exp))
6589 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6590 /* Should perhaps use expand_label, but this is simpler and safer. */
6591 do_pending_stack_adjust ();
6592 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6593 return const0_rtx;
6594
6595 case EXIT_BLOCK_EXPR:
6596 if (EXIT_BLOCK_RETURN (exp))
6597 sorry ("returned value in block_exit_expr");
6598 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6599 return const0_rtx;
6600
6601 case LOOP_EXPR:
6602 push_temp_slots ();
6603 expand_start_loop (1);
6604 expand_expr_stmt (TREE_OPERAND (exp, 0));
6605 expand_end_loop ();
6606 pop_temp_slots ();
6607
6608 return const0_rtx;
6609
6610 case BIND_EXPR:
6611 {
6612 tree vars = TREE_OPERAND (exp, 0);
6613 int vars_need_expansion = 0;
6614
6615 /* Need to open a binding contour here because
6616 if there are any cleanups they must be contained here. */
6617 expand_start_bindings (2);
6618
6619 /* Mark the corresponding BLOCK for output in its proper place. */
6620 if (TREE_OPERAND (exp, 2) != 0
6621 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6622 insert_block (TREE_OPERAND (exp, 2));
6623
6624 /* If VARS have not yet been expanded, expand them now. */
6625 while (vars)
6626 {
6627 if (!DECL_RTL_SET_P (vars))
6628 {
6629 vars_need_expansion = 1;
6630 expand_decl (vars);
6631 }
6632 expand_decl_init (vars);
6633 vars = TREE_CHAIN (vars);
6634 }
6635
6636 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6637
6638 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6639
6640 return temp;
6641 }
6642
6643 case RTL_EXPR:
6644 if (RTL_EXPR_SEQUENCE (exp))
6645 {
6646 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6647 abort ();
6648 emit_insns (RTL_EXPR_SEQUENCE (exp));
6649 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6650 }
6651 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6652 free_temps_for_rtl_expr (exp);
6653 return RTL_EXPR_RTL (exp);
6654
6655 case CONSTRUCTOR:
6656 /* If we don't need the result, just ensure we evaluate any
6657 subexpressions. */
6658 if (ignore)
6659 {
6660 tree elt;
6661 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6662 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6663 EXPAND_MEMORY_USE_BAD);
6664 return const0_rtx;
6665 }
6666
6667 /* All elts simple constants => refer to a constant in memory. But
6668 if this is a non-BLKmode mode, let it store a field at a time
6669 since that should make a CONST_INT or CONST_DOUBLE when we
6670 fold. Likewise, if we have a target we can use, it is best to
6671 store directly into the target unless the type is large enough
6672 that memcpy will be used. If we are making an initializer and
6673 all operands are constant, put it in memory as well. */
6674 else if ((TREE_STATIC (exp)
6675 && ((mode == BLKmode
6676 && ! (target != 0 && safe_from_p (target, exp, 1)))
6677 || TREE_ADDRESSABLE (exp)
6678 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6679 && (! MOVE_BY_PIECES_P
6680 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6681 TYPE_ALIGN (type)))
6682 && ! mostly_zeros_p (exp))))
6683 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6684 {
6685 rtx constructor = output_constant_def (exp, 1);
6686
6687 if (modifier != EXPAND_CONST_ADDRESS
6688 && modifier != EXPAND_INITIALIZER
6689 && modifier != EXPAND_SUM)
6690 constructor = validize_mem (constructor);
6691
6692 return constructor;
6693 }
6694 else
6695 {
6696 /* Handle calls that pass values in multiple non-contiguous
6697 locations. The Irix 6 ABI has examples of this. */
6698 if (target == 0 || ! safe_from_p (target, exp, 1)
6699 || GET_CODE (target) == PARALLEL)
6700 target
6701 = assign_temp (build_qualified_type (type,
6702 (TYPE_QUALS (type)
6703 | (TREE_READONLY (exp)
6704 * TYPE_QUAL_CONST))),
6705 TREE_ADDRESSABLE (exp), 1, 1);
6706
6707 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6708 int_size_in_bytes (TREE_TYPE (exp)));
6709 return target;
6710 }
6711
6712 case INDIRECT_REF:
6713 {
6714 tree exp1 = TREE_OPERAND (exp, 0);
6715 tree index;
6716 tree string = string_constant (exp1, &index);
6717
6718 /* Try to optimize reads from const strings. */
6719 if (string
6720 && TREE_CODE (string) == STRING_CST
6721 && TREE_CODE (index) == INTEGER_CST
6722 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6723 && GET_MODE_CLASS (mode) == MODE_INT
6724 && GET_MODE_SIZE (mode) == 1
6725 && modifier != EXPAND_MEMORY_USE_WO)
6726 return
6727 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6728
6729 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6730 op0 = memory_address (mode, op0);
6731
6732 if (cfun && current_function_check_memory_usage
6733 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6734 {
6735 enum memory_use_mode memory_usage;
6736 memory_usage = get_memory_usage_from_modifier (modifier);
6737
6738 if (memory_usage != MEMORY_USE_DONT)
6739 {
6740 in_check_memory_usage = 1;
6741 emit_library_call (chkr_check_addr_libfunc,
6742 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6743 Pmode, GEN_INT (int_size_in_bytes (type)),
6744 TYPE_MODE (sizetype),
6745 GEN_INT (memory_usage),
6746 TYPE_MODE (integer_type_node));
6747 in_check_memory_usage = 0;
6748 }
6749 }
6750
6751 temp = gen_rtx_MEM (mode, op0);
6752 set_mem_attributes (temp, exp, 0);
6753
6754 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6755 here, because, in C and C++, the fact that a location is accessed
6756 through a pointer to const does not mean that the value there can
6757 never change. Languages where it can never change should
6758 also set TREE_STATIC. */
6759 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6760
6761 /* If we are writing to this object and its type is a record with
6762 readonly fields, we must mark it as readonly so it will
6763 conflict with readonly references to those fields. */
6764 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6765 RTX_UNCHANGING_P (temp) = 1;
6766
6767 return temp;
6768 }
6769
6770 case ARRAY_REF:
6771 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6772 abort ();
6773
6774 {
6775 tree array = TREE_OPERAND (exp, 0);
6776 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6777 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6778 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6779 HOST_WIDE_INT i;
6780
6781 /* Optimize the special-case of a zero lower bound.
6782
6783 We convert the low_bound to sizetype to avoid some problems
6784 with constant folding. (E.g. suppose the lower bound is 1,
6785 and its mode is QI. Without the conversion, (ARRAY
6786 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6787 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6788
6789 if (! integer_zerop (low_bound))
6790 index = size_diffop (index, convert (sizetype, low_bound));
6791
6792 /* Fold an expression like: "foo"[2].
6793 This is not done in fold so it won't happen inside &.
6794 Don't fold if this is for wide characters since it's too
6795 difficult to do correctly and this is a very rare case. */
6796
6797 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6798 && TREE_CODE (array) == STRING_CST
6799 && TREE_CODE (index) == INTEGER_CST
6800 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6801 && GET_MODE_CLASS (mode) == MODE_INT
6802 && GET_MODE_SIZE (mode) == 1)
6803 return
6804 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6805
6806 /* If this is a constant index into a constant array,
6807 just get the value from the array. Handle both the cases when
6808 we have an explicit constructor and when our operand is a variable
6809 that was declared const. */
6810
6811 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6812 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6813 && TREE_CODE (index) == INTEGER_CST
6814 && 0 > compare_tree_int (index,
6815 list_length (CONSTRUCTOR_ELTS
6816 (TREE_OPERAND (exp, 0)))))
6817 {
6818 tree elem;
6819
6820 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6821 i = TREE_INT_CST_LOW (index);
6822 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6823 ;
6824
6825 if (elem)
6826 return expand_expr (fold (TREE_VALUE (elem)), target,
6827 tmode, ro_modifier);
6828 }
6829
6830 else if (optimize >= 1
6831 && modifier != EXPAND_CONST_ADDRESS
6832 && modifier != EXPAND_INITIALIZER
6833 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6834 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6835 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6836 {
6837 if (TREE_CODE (index) == INTEGER_CST)
6838 {
6839 tree init = DECL_INITIAL (array);
6840
6841 if (TREE_CODE (init) == CONSTRUCTOR)
6842 {
6843 tree elem;
6844
6845 for (elem = CONSTRUCTOR_ELTS (init);
6846 (elem
6847 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6848 elem = TREE_CHAIN (elem))
6849 ;
6850
6851 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6852 return expand_expr (fold (TREE_VALUE (elem)), target,
6853 tmode, ro_modifier);
6854 }
6855 else if (TREE_CODE (init) == STRING_CST
6856 && 0 > compare_tree_int (index,
6857 TREE_STRING_LENGTH (init)))
6858 {
6859 tree type = TREE_TYPE (TREE_TYPE (init));
6860 enum machine_mode mode = TYPE_MODE (type);
6861
6862 if (GET_MODE_CLASS (mode) == MODE_INT
6863 && GET_MODE_SIZE (mode) == 1)
6864 return (GEN_INT
6865 (TREE_STRING_POINTER
6866 (init)[TREE_INT_CST_LOW (index)]));
6867 }
6868 }
6869 }
6870 }
6871 /* Fall through. */
6872
6873 case COMPONENT_REF:
6874 case BIT_FIELD_REF:
6875 case ARRAY_RANGE_REF:
6876 /* If the operand is a CONSTRUCTOR, we can just extract the
6877 appropriate field if it is present. Don't do this if we have
6878 already written the data since we want to refer to that copy
6879 and varasm.c assumes that's what we'll do. */
6880 if (code == COMPONENT_REF
6881 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6882 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6883 {
6884 tree elt;
6885
6886 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6887 elt = TREE_CHAIN (elt))
6888 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6889 /* We can normally use the value of the field in the
6890 CONSTRUCTOR. However, if this is a bitfield in
6891 an integral mode that we can fit in a HOST_WIDE_INT,
6892 we must mask only the number of bits in the bitfield,
6893 since this is done implicitly by the constructor. If
6894 the bitfield does not meet either of those conditions,
6895 we can't do this optimization. */
6896 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6897 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6898 == MODE_INT)
6899 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6900 <= HOST_BITS_PER_WIDE_INT))))
6901 {
6902 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6903 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6904 {
6905 HOST_WIDE_INT bitsize
6906 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6907
6908 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6909 {
6910 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6911 op0 = expand_and (op0, op1, target);
6912 }
6913 else
6914 {
6915 enum machine_mode imode
6916 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6917 tree count
6918 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6919 0);
6920
6921 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6922 target, 0);
6923 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6924 target, 0);
6925 }
6926 }
6927
6928 return op0;
6929 }
6930 }
6931
6932 {
6933 enum machine_mode mode1;
6934 HOST_WIDE_INT bitsize, bitpos;
6935 tree offset;
6936 int volatilep = 0;
6937 unsigned int alignment;
6938 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6939 &mode1, &unsignedp, &volatilep,
6940 &alignment);
6941 rtx orig_op0;
6942
6943 /* If we got back the original object, something is wrong. Perhaps
6944 we are evaluating an expression too early. In any event, don't
6945 infinitely recurse. */
6946 if (tem == exp)
6947 abort ();
6948
6949 /* If TEM's type is a union of variable size, pass TARGET to the inner
6950 	       computation, since it will need a temporary and TARGET is known
6951 	       to be usable as one.  This occurs in unchecked conversion in Ada.  */
6952
6953 orig_op0 = op0
6954 = expand_expr (tem,
6955 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6956 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6957 != INTEGER_CST)
6958 ? target : NULL_RTX),
6959 VOIDmode,
6960 (modifier == EXPAND_INITIALIZER
6961 || modifier == EXPAND_CONST_ADDRESS)
6962 ? modifier : EXPAND_NORMAL);
6963
6964 /* If this is a constant, put it into a register if it is a
6965 	 legitimate constant and OFFSET is 0, and into memory if it isn't.  */
6966 if (CONSTANT_P (op0))
6967 {
6968 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6969 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6970 && offset == 0)
6971 op0 = force_reg (mode, op0);
6972 else
6973 op0 = validize_mem (force_const_mem (mode, op0));
6974 }
6975
6976 if (offset != 0)
6977 {
6978 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6979
6980 /* If this object is in a register, put it into memory.
6981 This case can't occur in C, but can in Ada if we have
6982 unchecked conversion of an expression from a scalar type to
6983 an array or record type. */
6984 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6985 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6986 {
6987 /* If the operand is a SAVE_EXPR, we can deal with this by
6988 forcing the SAVE_EXPR into memory. */
6989 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6990 {
6991 put_var_into_stack (TREE_OPERAND (exp, 0));
6992 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6993 }
6994 else
6995 {
6996 tree nt
6997 = build_qualified_type (TREE_TYPE (tem),
6998 (TYPE_QUALS (TREE_TYPE (tem))
6999 | TYPE_QUAL_CONST));
7000 rtx memloc = assign_temp (nt, 1, 1, 1);
7001
7002 mark_temp_addr_taken (memloc);
7003 emit_move_insn (memloc, op0);
7004 op0 = memloc;
7005 }
7006 }
7007
7008 if (GET_CODE (op0) != MEM)
7009 abort ();
7010
7011 if (GET_MODE (offset_rtx) != ptr_mode)
7012 {
7013 #ifdef POINTERS_EXTEND_UNSIGNED
7014 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7015 #else
7016 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7017 #endif
7018 }
7019
7020 	    /* A constant address in OP0 can have VOIDmode; we must not
7021 	       call force_reg in that case, so avoid it here.  */
7022 if (GET_CODE (op0) == MEM
7023 && GET_MODE (op0) == BLKmode
7024 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7025 && bitsize != 0
7026 && (bitpos % bitsize) == 0
7027 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7028 && alignment == GET_MODE_ALIGNMENT (mode1))
7029 {
7030 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7031
7032 if (GET_CODE (XEXP (temp, 0)) == REG)
7033 op0 = temp;
7034 else
7035 op0 = (replace_equiv_address
7036 (op0,
7037 force_reg (GET_MODE (XEXP (temp, 0)),
7038 XEXP (temp, 0))));
7039 bitpos = 0;
7040 }
7041
7042 op0 = change_address (op0, VOIDmode,
7043 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7044 force_reg (ptr_mode,
7045 offset_rtx)));
7046 }
7047
7048 /* Don't forget about volatility even if this is a bitfield. */
7049 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7050 {
7051 if (op0 == orig_op0)
7052 op0 = copy_rtx (op0);
7053
7054 MEM_VOLATILE_P (op0) = 1;
7055 }
7056
7057 /* Check the access. */
7058 if (cfun != 0 && current_function_check_memory_usage
7059 && GET_CODE (op0) == MEM)
7060 {
7061 enum memory_use_mode memory_usage;
7062 memory_usage = get_memory_usage_from_modifier (modifier);
7063
7064 if (memory_usage != MEMORY_USE_DONT)
7065 {
7066 rtx to;
7067 int size;
7068
7069 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7070 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7071
7072 /* Check the access right of the pointer. */
7073 in_check_memory_usage = 1;
7074 if (size > BITS_PER_UNIT)
7075 emit_library_call (chkr_check_addr_libfunc,
7076 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7077 Pmode, GEN_INT (size / BITS_PER_UNIT),
7078 TYPE_MODE (sizetype),
7079 GEN_INT (memory_usage),
7080 TYPE_MODE (integer_type_node));
7081 in_check_memory_usage = 0;
7082 }
7083 }
7084
7085 /* In cases where an aligned union has an unaligned object
7086 as a field, we might be extracting a BLKmode value from
7087 an integer-mode (e.g., SImode) object. Handle this case
7088 by doing the extract into an object as wide as the field
7089 (which we know to be the width of a basic mode), then
7090 storing into memory, and changing the mode to BLKmode. */
7091 if (mode1 == VOIDmode
7092 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7093 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7094 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7095 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7096 && modifier != EXPAND_CONST_ADDRESS
7097 && modifier != EXPAND_INITIALIZER)
7098 /* If the field isn't aligned enough to fetch as a memref,
7099 fetch it as a bit field. */
7100 || (mode1 != BLKmode
7101 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7102 && ((TYPE_ALIGN (TREE_TYPE (tem))
7103 < GET_MODE_ALIGNMENT (mode))
7104 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7105 /* If the type and the field are a constant size and the
7106 size of the type isn't the same size as the bitfield,
7107 we must use bitfield operations. */
7108 || (bitsize >= 0
7109 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7110 == INTEGER_CST)
7111 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7112 bitsize))
7113 || (mode == BLKmode
7114 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7115 && (TYPE_ALIGN (type) > alignment
7116 || bitpos % TYPE_ALIGN (type) != 0)))
7117 {
7118 enum machine_mode ext_mode = mode;
7119
7120 if (ext_mode == BLKmode
7121 && ! (target != 0 && GET_CODE (op0) == MEM
7122 && GET_CODE (target) == MEM
7123 && bitpos % BITS_PER_UNIT == 0))
7124 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7125
7126 if (ext_mode == BLKmode)
7127 {
7128 /* In this case, BITPOS must start at a byte boundary and
7129 TARGET, if specified, must be a MEM. */
7130 if (GET_CODE (op0) != MEM
7131 || (target != 0 && GET_CODE (target) != MEM)
7132 || bitpos % BITS_PER_UNIT != 0)
7133 abort ();
7134
7135 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7136 if (target == 0)
7137 target = assign_temp (type, 0, 1, 1);
7138
7139 emit_block_move (target, op0,
7140 bitsize == -1 ? expr_size (exp)
7141 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7142 / BITS_PER_UNIT),
7143 BITS_PER_UNIT);
7144
7145 return target;
7146 }
7147
7148 op0 = validize_mem (op0);
7149
7150 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7151 mark_reg_pointer (XEXP (op0, 0), alignment);
7152
7153 op0 = extract_bit_field (op0, bitsize, bitpos,
7154 unsignedp, target, ext_mode, ext_mode,
7155 alignment,
7156 int_size_in_bytes (TREE_TYPE (tem)));
7157
7158 /* If the result is a record type and BITSIZE is narrower than
7159 the mode of OP0, an integral mode, and this is a big endian
7160 machine, we must put the field into the high-order bits. */
7161 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7162 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7163 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7164 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7165 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7166 - bitsize),
7167 op0, 1);
7168
7169 if (mode == BLKmode)
7170 {
7171 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7172 TYPE_QUAL_CONST);
7173 rtx new = assign_temp (nt, 0, 1, 1);
7174
7175 emit_move_insn (new, op0);
7176 op0 = copy_rtx (new);
7177 PUT_MODE (op0, BLKmode);
7178 }
7179
7180 return op0;
7181 }
7182
7183 /* If the result is BLKmode, use that to access the object
7184 now as well. */
7185 if (mode == BLKmode)
7186 mode1 = BLKmode;
7187
7188 /* Get a reference to just this component. */
7189 if (modifier == EXPAND_CONST_ADDRESS
7190 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7191 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7192 else
7193 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7194
7195 if (op0 == orig_op0)
7196 op0 = copy_rtx (op0);
7197
7198 set_mem_attributes (op0, exp, 0);
7199 if (GET_CODE (XEXP (op0, 0)) == REG)
7200 mark_reg_pointer (XEXP (op0, 0), alignment);
7201
7202 MEM_VOLATILE_P (op0) |= volatilep;
7203 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7204 || modifier == EXPAND_CONST_ADDRESS
7205 || modifier == EXPAND_INITIALIZER)
7206 return op0;
7207 else if (target == 0)
7208 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7209
7210 convert_move (target, op0, unsignedp);
7211 return target;
7212 }
7213
7214 /* Intended for a reference to a buffer of a file-object in Pascal.
7215 But it's not certain that a special tree code will really be
7216 necessary for these. INDIRECT_REF might work for them. */
7217 case BUFFER_REF:
7218 abort ();
7219
7220 case IN_EXPR:
7221 {
7222 /* Pascal set IN expression.
7223
7224 Algorithm:
7225 rlo = set_low - (set_low%bits_per_word);
7226 the_word = set [ (index - rlo)/bits_per_word ];
7227 bit_index = index % bits_per_word;
7228 bitmask = 1 << bit_index;
7229 return !!(the_word & bitmask); */
7230
7231 tree set = TREE_OPERAND (exp, 0);
7232 tree index = TREE_OPERAND (exp, 1);
7233 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7234 tree set_type = TREE_TYPE (set);
7235 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7236 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7237 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7238 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7239 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7240 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7241 rtx setaddr = XEXP (setval, 0);
7242 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7243 rtx rlow;
7244 rtx diff, quo, rem, addr, bit, result;
7245
7246 /* If domain is empty, answer is no. Likewise if index is constant
7247 and out of bounds. */
7248 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7249 && TREE_CODE (set_low_bound) == INTEGER_CST
7250 && tree_int_cst_lt (set_high_bound, set_low_bound))
7251 || (TREE_CODE (index) == INTEGER_CST
7252 && TREE_CODE (set_low_bound) == INTEGER_CST
7253 && tree_int_cst_lt (index, set_low_bound))
7254 || (TREE_CODE (set_high_bound) == INTEGER_CST
7255 && TREE_CODE (index) == INTEGER_CST
7256 && tree_int_cst_lt (set_high_bound, index))))
7257 return const0_rtx;
7258
7259 if (target == 0)
7260 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7261
7262 /* If we get here, we have to generate the code for both cases
7263 (in range and out of range). */
7264
7265 op0 = gen_label_rtx ();
7266 op1 = gen_label_rtx ();
7267
7268 if (! (GET_CODE (index_val) == CONST_INT
7269 && GET_CODE (lo_r) == CONST_INT))
7270 {
7271 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7272 GET_MODE (index_val), iunsignedp, 0, op1);
7273 }
7274
7275 if (! (GET_CODE (index_val) == CONST_INT
7276 && GET_CODE (hi_r) == CONST_INT))
7277 {
7278 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7279 GET_MODE (index_val), iunsignedp, 0, op1);
7280 }
7281
7282 /* Calculate the element number of bit zero in the first word
7283 of the set. */
7284 if (GET_CODE (lo_r) == CONST_INT)
7285 rlow = GEN_INT (INTVAL (lo_r)
7286 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7287 else
7288 rlow = expand_binop (index_mode, and_optab, lo_r,
7289 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7290 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7291
7292 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7293 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7294
7295 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7296 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7297 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7298 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7299
7300 addr = memory_address (byte_mode,
7301 expand_binop (index_mode, add_optab, diff,
7302 setaddr, NULL_RTX, iunsignedp,
7303 OPTAB_LIB_WIDEN));
7304
7305 /* Extract the bit we want to examine. */
7306 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7307 gen_rtx_MEM (byte_mode, addr),
7308 make_tree (TREE_TYPE (index), rem),
7309 NULL_RTX, 1);
7310 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7311 GET_MODE (target) == byte_mode ? target : 0,
7312 1, OPTAB_LIB_WIDEN);
7313
7314 if (result != target)
7315 convert_move (target, result, 1);
7316
7317 /* Output the code to handle the out-of-range case. */
7318 emit_jump (op0);
7319 emit_label (op1);
7320 emit_move_insn (target, const0_rtx);
7321 emit_label (op0);
7322 return target;
7323 }
7324
7325 case WITH_CLEANUP_EXPR:
7326 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7327 {
7328 WITH_CLEANUP_EXPR_RTL (exp)
7329 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7330 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7331
7332 /* That's it for this cleanup. */
7333 TREE_OPERAND (exp, 1) = 0;
7334 }
7335 return WITH_CLEANUP_EXPR_RTL (exp);
7336
7337 case CLEANUP_POINT_EXPR:
7338 {
7339 /* Start a new binding layer that will keep track of all cleanup
7340 actions to be performed. */
7341 expand_start_bindings (2);
7342
7343 target_temp_slot_level = temp_slot_level;
7344
7345 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7346 /* If we're going to use this value, load it up now. */
7347 if (! ignore)
7348 op0 = force_not_mem (op0);
7349 preserve_temp_slots (op0);
7350 expand_end_bindings (NULL_TREE, 0, 0);
7351 }
7352 return op0;
7353
7354 case CALL_EXPR:
7355 /* Check for a built-in function. */
7356 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7357 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7358 == FUNCTION_DECL)
7359 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7360 {
7361 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7362 == BUILT_IN_FRONTEND)
7363 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7364 else
7365 return expand_builtin (exp, target, subtarget, tmode, ignore);
7366 }
7367
7368 return expand_call (exp, target, ignore);
7369
7370 case NON_LVALUE_EXPR:
7371 case NOP_EXPR:
7372 case CONVERT_EXPR:
7373 case REFERENCE_EXPR:
7374 if (TREE_OPERAND (exp, 0) == error_mark_node)
7375 return const0_rtx;
7376
7377 if (TREE_CODE (type) == UNION_TYPE)
7378 {
7379 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7380
7381 /* If both input and output are BLKmode, this conversion
7382 isn't actually doing anything unless we need to make the
7383 alignment stricter. */
7384 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7385 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7386 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7387 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7388 modifier);
7389
7390 if (target == 0)
7391 target = assign_temp (type, 0, 1, 1);
7392
7393 if (GET_CODE (target) == MEM)
7394 /* Store data into beginning of memory target. */
7395 store_expr (TREE_OPERAND (exp, 0),
7396 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7397
7398 else if (GET_CODE (target) == REG)
7399 /* Store this field into a union of the proper type. */
7400 store_field (target,
7401 MIN ((int_size_in_bytes (TREE_TYPE
7402 (TREE_OPERAND (exp, 0)))
7403 * BITS_PER_UNIT),
7404 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7405 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7406 VOIDmode, 0, BITS_PER_UNIT,
7407 int_size_in_bytes (type), 0);
7408 else
7409 abort ();
7410
7411 /* Return the entire union. */
7412 return target;
7413 }
7414
7415 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7416 {
7417 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7418 ro_modifier);
7419
7420 /* If the signedness of the conversion differs and OP0 is
7421 a promoted SUBREG, clear that indication since we now
7422 have to do the proper extension. */
7423 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7424 && GET_CODE (op0) == SUBREG)
7425 SUBREG_PROMOTED_VAR_P (op0) = 0;
7426
7427 return op0;
7428 }
7429
7430 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7431 if (GET_MODE (op0) == mode)
7432 return op0;
7433
7434 /* If OP0 is a constant, just convert it into the proper mode. */
7435 if (CONSTANT_P (op0))
7436 return
7437 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7438 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7439
7440 if (modifier == EXPAND_INITIALIZER)
7441 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7442
7443 if (target == 0)
7444 return
7445 convert_to_mode (mode, op0,
7446 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7447 else
7448 convert_move (target, op0,
7449 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7450 return target;
7451
7452 case PLUS_EXPR:
7453 /* We come here from MINUS_EXPR when the second operand is a
7454 constant. */
7455 plus_expr:
7456 this_optab = ! unsignedp && flag_trapv
7457 && (GET_MODE_CLASS(mode) == MODE_INT)
7458 ? addv_optab : add_optab;
7459
7460 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7461 something else, make sure we add the register to the constant and
7462 then to the other thing. This case can occur during strength
7463 reduction and doing it this way will produce better code if the
7464 frame pointer or argument pointer is eliminated.
7465
7466 fold-const.c will ensure that the constant is always in the inner
7467 PLUS_EXPR, so the only case we need to do anything about is if
7468 sp, ap, or fp is our second argument, in which case we must swap
7469 the innermost first argument and our second argument. */
7470
7471 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7472 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7473 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7474 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7475 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7476 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7477 {
7478 tree t = TREE_OPERAND (exp, 1);
7479
7480 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7481 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7482 }
7483
7484 /* If the result is to be ptr_mode and we are adding an integer to
7485 something, we might be forming a constant. So try to use
7486 plus_constant. If it produces a sum and we can't accept it,
7487 use force_operand. This allows P = &ARR[const] to generate
7488 efficient code on machines where a SYMBOL_REF is not a valid
7489 address.
7490
7491 If this is an EXPAND_SUM call, always return the sum. */
7492 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7493 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7494 {
7495 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7496 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7497 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7498 {
7499 rtx constant_part;
7500
7501 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7502 EXPAND_SUM);
7503 /* Use immed_double_const to ensure that the constant is
7504 truncated according to the mode of OP1, then sign extended
7505 to a HOST_WIDE_INT. Using the constant directly can result
7506 in non-canonical RTL in a 64x32 cross compile. */
7507 constant_part
7508 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7509 (HOST_WIDE_INT) 0,
7510 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7511 op1 = plus_constant (op1, INTVAL (constant_part));
7512 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7513 op1 = force_operand (op1, target);
7514 return op1;
7515 }
7516
7517 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7518 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7519 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7520 {
7521 rtx constant_part;
7522
7523 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7524 EXPAND_SUM);
7525 if (! CONSTANT_P (op0))
7526 {
7527 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7528 VOIDmode, modifier);
7529 /* Don't go to both_summands if modifier
7530 says it's not right to return a PLUS. */
7531 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7532 goto binop2;
7533 goto both_summands;
7534 }
7535 /* Use immed_double_const to ensure that the constant is
7536 	       truncated according to the mode of OP0, then sign extended
7537 to a HOST_WIDE_INT. Using the constant directly can result
7538 in non-canonical RTL in a 64x32 cross compile. */
7539 constant_part
7540 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7541 (HOST_WIDE_INT) 0,
7542 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7543 op0 = plus_constant (op0, INTVAL (constant_part));
7544 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7545 op0 = force_operand (op0, target);
7546 return op0;
7547 }
7548 }
7549
7550 /* No sense saving up arithmetic to be done
7551 if it's all in the wrong mode to form part of an address.
7552 And force_operand won't know whether to sign-extend or
7553 zero-extend. */
7554 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7555 || mode != ptr_mode)
7556 goto binop;
7557
7558 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7559 subtarget = 0;
7560
7561 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7562 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7563
7564 both_summands:
7565 /* Make sure any term that's a sum with a constant comes last. */
7566 if (GET_CODE (op0) == PLUS
7567 && CONSTANT_P (XEXP (op0, 1)))
7568 {
7569 temp = op0;
7570 op0 = op1;
7571 op1 = temp;
7572 }
7573 /* If adding to a sum including a constant,
7574 associate it to put the constant outside. */
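      /* For example, OP0 + (X + C) is rearranged below as (OP0 + X) + C,
	 with any constant term of OP0 folded into C as well (an
	 illustrative sketch of the association done here).  */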
7575 if (GET_CODE (op1) == PLUS
7576 && CONSTANT_P (XEXP (op1, 1)))
7577 {
7578 rtx constant_term = const0_rtx;
7579
7580 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7581 if (temp != 0)
7582 op0 = temp;
7583 /* Ensure that MULT comes first if there is one. */
7584 else if (GET_CODE (op0) == MULT)
7585 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7586 else
7587 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7588
7589 /* Let's also eliminate constants from op0 if possible. */
7590 op0 = eliminate_constant_term (op0, &constant_term);
7591
7592 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7593 their sum should be a constant. Form it into OP1, since the
7594 result we want will then be OP0 + OP1. */
7595
7596 temp = simplify_binary_operation (PLUS, mode, constant_term,
7597 XEXP (op1, 1));
7598 if (temp != 0)
7599 op1 = temp;
7600 else
7601 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7602 }
7603
7604 /* Put a constant term last and put a multiplication first. */
7605 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7606 temp = op1, op1 = op0, op0 = temp;
7607
7608 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7609 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7610
7611 case MINUS_EXPR:
7612 /* For initializers, we are allowed to return a MINUS of two
7613 symbolic constants. Here we handle all cases when both operands
7614 are constant. */
7615 /* Handle difference of two symbolic constants,
7616 for the sake of an initializer. */
7617 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7618 && really_constant_p (TREE_OPERAND (exp, 0))
7619 && really_constant_p (TREE_OPERAND (exp, 1)))
7620 {
7621 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7622 VOIDmode, ro_modifier);
7623 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7624 VOIDmode, ro_modifier);
7625
7626 /* If the last operand is a CONST_INT, use plus_constant of
7627 the negated constant. Else make the MINUS. */
7628 if (GET_CODE (op1) == CONST_INT)
7629 return plus_constant (op0, - INTVAL (op1));
7630 else
7631 return gen_rtx_MINUS (mode, op0, op1);
7632 }
7633 /* Convert A - const to A + (-const). */
7634 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7635 {
7636 tree negated = fold (build1 (NEGATE_EXPR, type,
7637 TREE_OPERAND (exp, 1)));
7638
7639 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7640 /* If we can't negate the constant in TYPE, leave it alone and
7641 expand_binop will negate it for us. We used to try to do it
7642 here in the signed version of TYPE, but that doesn't work
7643 on POINTER_TYPEs. */;
7644 else
7645 {
7646 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7647 goto plus_expr;
7648 }
7649 }
7650 this_optab = ! unsignedp && flag_trapv
7651 && (GET_MODE_CLASS(mode) == MODE_INT)
7652 ? subv_optab : sub_optab;
7653 goto binop;
7654
7655 case MULT_EXPR:
7656 /* If first operand is constant, swap them.
7657 Thus the following special case checks need only
7658 check the second operand. */
7659 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7660 {
7661 register tree t1 = TREE_OPERAND (exp, 0);
7662 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7663 TREE_OPERAND (exp, 1) = t1;
7664 }
7665
7666 /* Attempt to return something suitable for generating an
7667 indexed address, for machines that support that. */
7668
7669 if (modifier == EXPAND_SUM && mode == ptr_mode
7670 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7671 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7672 {
7673 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7674 EXPAND_SUM);
7675
7676 /* Apply distributive law if OP0 is x+c. */
7677 if (GET_CODE (op0) == PLUS
7678 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7679 return
7680 gen_rtx_PLUS
7681 (mode,
7682 gen_rtx_MULT
7683 (mode, XEXP (op0, 0),
7684 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7685 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7686 * INTVAL (XEXP (op0, 1))));
7687
7688 if (GET_CODE (op0) != REG)
7689 op0 = force_operand (op0, NULL_RTX);
7690 if (GET_CODE (op0) != REG)
7691 op0 = copy_to_mode_reg (mode, op0);
7692
7693 return
7694 gen_rtx_MULT (mode, op0,
7695 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7696 }
7697
7698 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7699 subtarget = 0;
7700
7701 /* Check for multiplying things that have been extended
7702 from a narrower type. If this machine supports multiplying
7703 in that narrower type with a result in the desired type,
7704 do it that way, and avoid the explicit type-conversion. */
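      /* Illustrative sketch: with 32-bit int operands I and J and a
	 widening multiply pattern producing the wider result, a tree like
	     (long long) I * (long long) J
	 can use that pattern directly, skipping the explicit conversions.
	 The exact modes involved depend on the target.  */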
7705 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7706 && TREE_CODE (type) == INTEGER_TYPE
7707 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7708 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7709 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7710 && int_fits_type_p (TREE_OPERAND (exp, 1),
7711 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7712 /* Don't use a widening multiply if a shift will do. */
7713 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7714 > HOST_BITS_PER_WIDE_INT)
7715 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7716 ||
7717 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7718 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7719 ==
7720 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7721 /* If both operands are extended, they must either both
7722 be zero-extended or both be sign-extended. */
7723 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7724 ==
7725 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7726 {
7727 enum machine_mode innermode
7728 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7729 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7730 ? smul_widen_optab : umul_widen_optab);
7731 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7732 ? umul_widen_optab : smul_widen_optab);
7733 if (mode == GET_MODE_WIDER_MODE (innermode))
7734 {
7735 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7736 {
7737 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7738 NULL_RTX, VOIDmode, 0);
7739 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7740 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7741 VOIDmode, 0);
7742 else
7743 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7744 NULL_RTX, VOIDmode, 0);
7745 goto binop2;
7746 }
7747 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7748 && innermode == word_mode)
7749 {
7750 rtx htem;
7751 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7752 NULL_RTX, VOIDmode, 0);
7753 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7754 op1 = convert_modes (innermode, mode,
7755 expand_expr (TREE_OPERAND (exp, 1),
7756 NULL_RTX, VOIDmode, 0),
7757 unsignedp);
7758 else
7759 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7760 NULL_RTX, VOIDmode, 0);
7761 temp = expand_binop (mode, other_optab, op0, op1, target,
7762 unsignedp, OPTAB_LIB_WIDEN);
7763 htem = expand_mult_highpart_adjust (innermode,
7764 gen_highpart (innermode, temp),
7765 op0, op1,
7766 gen_highpart (innermode, temp),
7767 unsignedp);
7768 emit_move_insn (gen_highpart (innermode, temp), htem);
7769 return temp;
7770 }
7771 }
7772 }
7773 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7774 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7775 return expand_mult (mode, op0, op1, target, unsignedp);
7776
7777 case TRUNC_DIV_EXPR:
7778 case FLOOR_DIV_EXPR:
7779 case CEIL_DIV_EXPR:
7780 case ROUND_DIV_EXPR:
7781 case EXACT_DIV_EXPR:
7782 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7783 subtarget = 0;
7784 /* Possible optimization: compute the dividend with EXPAND_SUM
7785 	 and then, if the divisor is constant, optimize the case
7786 	 where some terms of the dividend have coefficients divisible by it.  */
7787 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7788 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7789 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7790
7791 case RDIV_EXPR:
7792       /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
7793 	 saving an expensive divide.  If not, combine will rebuild the original
7794 computation. */
7795 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7796 && !real_onep (TREE_OPERAND (exp, 0)))
7797 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7798 build (RDIV_EXPR, type,
7799 build_real (type, dconst1),
7800 TREE_OPERAND (exp, 1))),
7801 target, tmode, unsignedp);
7802 this_optab = sdiv_optab;
7803 goto binop;
7804
7805 case TRUNC_MOD_EXPR:
7806 case FLOOR_MOD_EXPR:
7807 case CEIL_MOD_EXPR:
7808 case ROUND_MOD_EXPR:
7809 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7810 subtarget = 0;
7811 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7812 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7813 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7814
7815 case FIX_ROUND_EXPR:
7816 case FIX_FLOOR_EXPR:
7817 case FIX_CEIL_EXPR:
7818 abort (); /* Not used for C. */
7819
7820 case FIX_TRUNC_EXPR:
7821 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7822 if (target == 0)
7823 target = gen_reg_rtx (mode);
7824 expand_fix (target, op0, unsignedp);
7825 return target;
7826
7827 case FLOAT_EXPR:
7828 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7829 if (target == 0)
7830 target = gen_reg_rtx (mode);
7831 /* expand_float can't figure out what to do if FROM has VOIDmode.
7832 So give it the correct mode. With -O, cse will optimize this. */
7833 if (GET_MODE (op0) == VOIDmode)
7834 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7835 op0);
7836 expand_float (target, op0,
7837 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7838 return target;
7839
7840 case NEGATE_EXPR:
7841 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7842 temp = expand_unop (mode,
7843 ! unsignedp && flag_trapv
7844 && (GET_MODE_CLASS(mode) == MODE_INT)
7845 ? negv_optab : neg_optab, op0, target, 0);
7846 if (temp == 0)
7847 abort ();
7848 return temp;
7849
7850 case ABS_EXPR:
7851 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7852
7853 /* Handle complex values specially. */
7854 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7855 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7856 return expand_complex_abs (mode, op0, target, unsignedp);
7857
7858 /* Unsigned abs is simply the operand. Testing here means we don't
7859 risk generating incorrect code below. */
7860 if (TREE_UNSIGNED (type))
7861 return op0;
7862
7863 return expand_abs (mode, op0, target, unsignedp,
7864 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7865
7866 case MAX_EXPR:
7867 case MIN_EXPR:
7868 target = original_target;
7869 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7870 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7871 || GET_MODE (target) != mode
7872 || (GET_CODE (target) == REG
7873 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7874 target = gen_reg_rtx (mode);
7875 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7876 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7877
7878 /* First try to do it with a special MIN or MAX instruction.
7879 If that does not win, use a conditional jump to select the proper
7880 value. */
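      /* Schematically, the jump-based fallback below is, for MAX_EXPR,
	     target = op0;
	     if (target >= op1) goto done;
	     target = op1;
	   done:
	 and uses <= instead of >= for MIN_EXPR.  */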
7881 this_optab = (TREE_UNSIGNED (type)
7882 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7883 : (code == MIN_EXPR ? smin_optab : smax_optab));
7884
7885 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7886 OPTAB_WIDEN);
7887 if (temp != 0)
7888 return temp;
7889
7890 /* At this point, a MEM target is no longer useful; we will get better
7891 code without it. */
7892
7893 if (GET_CODE (target) == MEM)
7894 target = gen_reg_rtx (mode);
7895
7896 if (target != op0)
7897 emit_move_insn (target, op0);
7898
7899 op0 = gen_label_rtx ();
7900
7901 /* If this mode is an integer too wide to compare properly,
7902 compare word by word. Rely on cse to optimize constant cases. */
7903 if (GET_MODE_CLASS (mode) == MODE_INT
7904 && ! can_compare_p (GE, mode, ccp_jump))
7905 {
7906 if (code == MAX_EXPR)
7907 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7908 target, op1, NULL_RTX, op0);
7909 else
7910 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7911 op1, target, NULL_RTX, op0);
7912 }
7913 else
7914 {
7915 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7916 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7917 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7918 op0);
7919 }
7920 emit_move_insn (target, op1);
7921 emit_label (op0);
7922 return target;
7923
7924 case BIT_NOT_EXPR:
7925 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7926 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7927 if (temp == 0)
7928 abort ();
7929 return temp;
7930
7931 case FFS_EXPR:
7932 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7933 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7934 if (temp == 0)
7935 abort ();
7936 return temp;
7937
7938 /* ??? Can optimize bitwise operations with one arg constant.
7939 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7940 and (a bitwise1 b) bitwise2 b (etc)
7941 but that is probably not worth while. */
7942
7943 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7944 boolean values when we want in all cases to compute both of them. In
7945 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7946 as actual zero-or-1 values and then bitwise anding. In cases where
7947 there cannot be any side effects, better code would be made by
7948 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7949 how to recognize those cases. */
7950
7951 case TRUTH_AND_EXPR:
7952 case BIT_AND_EXPR:
7953 this_optab = and_optab;
7954 goto binop;
7955
7956 case TRUTH_OR_EXPR:
7957 case BIT_IOR_EXPR:
7958 this_optab = ior_optab;
7959 goto binop;
7960
7961 case TRUTH_XOR_EXPR:
7962 case BIT_XOR_EXPR:
7963 this_optab = xor_optab;
7964 goto binop;
7965
7966 case LSHIFT_EXPR:
7967 case RSHIFT_EXPR:
7968 case LROTATE_EXPR:
7969 case RROTATE_EXPR:
7970 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7971 subtarget = 0;
7972 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7973 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7974 unsignedp);
7975
7976 /* Could determine the answer when only additive constants differ. Also,
7977 the addition of one can be handled by changing the condition. */
7978 case LT_EXPR:
7979 case LE_EXPR:
7980 case GT_EXPR:
7981 case GE_EXPR:
7982 case EQ_EXPR:
7983 case NE_EXPR:
7984 case UNORDERED_EXPR:
7985 case ORDERED_EXPR:
7986 case UNLT_EXPR:
7987 case UNLE_EXPR:
7988 case UNGT_EXPR:
7989 case UNGE_EXPR:
7990 case UNEQ_EXPR:
7991 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7992 if (temp != 0)
7993 return temp;
7994
7995 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7996 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7997 && original_target
7998 && GET_CODE (original_target) == REG
7999 && (GET_MODE (original_target)
8000 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8001 {
8002 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8003 VOIDmode, 0);
8004
8005 if (temp != original_target)
8006 temp = copy_to_reg (temp);
8007
8008 op1 = gen_label_rtx ();
8009 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8010 GET_MODE (temp), unsignedp, 0, op1);
8011 emit_move_insn (temp, const1_rtx);
8012 emit_label (op1);
8013 return temp;
8014 }
8015
8016 /* If no set-flag instruction, must generate a conditional
8017 store into a temporary variable. Drop through
8018 and handle this like && and ||. */
8019
8020 case TRUTH_ANDIF_EXPR:
8021 case TRUTH_ORIF_EXPR:
8022 if (! ignore
8023 && (target == 0 || ! safe_from_p (target, exp, 1)
8024 	      /* Make sure we don't have a hard reg (such as the function's return
8025 value) live across basic blocks, if not optimizing. */
8026 || (!optimize && GET_CODE (target) == REG
8027 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8028 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8029
8030 if (target)
8031 emit_clr_insn (target);
8032
8033 op1 = gen_label_rtx ();
8034 jumpifnot (exp, op1);
8035
8036 if (target)
8037 emit_0_to_1_insn (target);
8038
8039 emit_label (op1);
8040 return ignore ? const0_rtx : target;
8041
8042 case TRUTH_NOT_EXPR:
8043 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8044 /* The parser is careful to generate TRUTH_NOT_EXPR
8045 only with operands that are always zero or one. */
8046 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8047 target, 1, OPTAB_LIB_WIDEN);
8048 if (temp == 0)
8049 abort ();
8050 return temp;
8051
8052 case COMPOUND_EXPR:
8053 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8054 emit_queue ();
8055 return expand_expr (TREE_OPERAND (exp, 1),
8056 (ignore ? const0_rtx : target),
8057 VOIDmode, 0);
8058
8059 case COND_EXPR:
8060 /* If we would have a "singleton" (see below) were it not for a
8061 conversion in each arm, bring that conversion back out. */
8062 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8063 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8064 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8065 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8066 {
8067 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8068 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8069
8070 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8071 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8072 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8073 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8074 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8075 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8076 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8077 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8078 return expand_expr (build1 (NOP_EXPR, type,
8079 build (COND_EXPR, TREE_TYPE (iftrue),
8080 TREE_OPERAND (exp, 0),
8081 iftrue, iffalse)),
8082 target, tmode, modifier);
8083 }
8084
8085 {
8086 /* Note that COND_EXPRs whose type is a structure or union
8087 are required to be constructed to contain assignments of
8088 a temporary variable, so that we can evaluate them here
8089 for side effect only. If type is void, we must do likewise. */
8090
8091 /* If an arm of the branch requires a cleanup,
8092 only that cleanup is performed. */
8093
8094 tree singleton = 0;
8095 tree binary_op = 0, unary_op = 0;
8096
8097 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8098 convert it to our mode, if necessary. */
8099 if (integer_onep (TREE_OPERAND (exp, 1))
8100 && integer_zerop (TREE_OPERAND (exp, 2))
8101 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8102 {
8103 if (ignore)
8104 {
8105 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8106 ro_modifier);
8107 return const0_rtx;
8108 }
8109
8110 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8111 if (GET_MODE (op0) == mode)
8112 return op0;
8113
8114 if (target == 0)
8115 target = gen_reg_rtx (mode);
8116 convert_move (target, op0, unsignedp);
8117 return target;
8118 }
8119
8120 /* Check for X ? A + B : A. If we have this, we can copy A to the
8121 output and conditionally add B. Similarly for unary operations.
8122 Don't do this if X has side-effects because those side effects
8123 might affect A or B and the "?" operation is a sequence point in
8124 ANSI. (operand_equal_p tests for side effects.) */
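	/* E.g. for X ? A + B : A, the code below copies A to the output and
	   then adds B to it only when X is true; schematically:
	      temp = A;  if (! X) goto lab;  temp = temp + B;  lab:  */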
8125
8126 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8127 && operand_equal_p (TREE_OPERAND (exp, 2),
8128 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8129 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8130 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8131 && operand_equal_p (TREE_OPERAND (exp, 1),
8132 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8133 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8134 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8135 && operand_equal_p (TREE_OPERAND (exp, 2),
8136 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8137 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8138 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8139 && operand_equal_p (TREE_OPERAND (exp, 1),
8140 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8141 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8142
8143 /* If we are not to produce a result, we have no target. Otherwise,
8144 if a target was specified use it; it will not be used as an
8145 intermediate target unless it is safe. If no target, use a
8146 temporary. */
8147
8148 if (ignore)
8149 temp = 0;
8150 else if (original_target
8151 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8152 || (singleton && GET_CODE (original_target) == REG
8153 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8154 && original_target == var_rtx (singleton)))
8155 && GET_MODE (original_target) == mode
8156 #ifdef HAVE_conditional_move
8157 && (! can_conditionally_move_p (mode)
8158 || GET_CODE (original_target) == REG
8159 || TREE_ADDRESSABLE (type))
8160 #endif
8161 && (GET_CODE (original_target) != MEM
8162 || TREE_ADDRESSABLE (type)))
8163 temp = original_target;
8164 else if (TREE_ADDRESSABLE (type))
8165 abort ();
8166 else
8167 temp = assign_temp (type, 0, 0, 1);
8168
8169 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8170 do the test of X as a store-flag operation, do this as
8171 A + ((X != 0) << log C). Similarly for other simple binary
8172 operators. Only do for C == 1 if BRANCH_COST is low. */
8173 if (temp && singleton && binary_op
8174 && (TREE_CODE (binary_op) == PLUS_EXPR
8175 || TREE_CODE (binary_op) == MINUS_EXPR
8176 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8177 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8178 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8179 : integer_onep (TREE_OPERAND (binary_op, 1)))
8180 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8181 {
8182 rtx result;
8183 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8184 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8185 ? addv_optab : add_optab)
8186 : TREE_CODE (binary_op) == MINUS_EXPR
8187 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8188 ? subv_optab : sub_optab)
8189 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8190 : xor_optab);
8191
8192 /* If we had X ? A : A + 1, do this as A + (X == 0).
8193
8194 We have to invert the truth value here and then put it
8195 back later if do_store_flag fails. We cannot simply copy
8196 TREE_OPERAND (exp, 0) to another variable and modify that
8197 because invert_truthvalue can modify the tree pointed to
8198 by its argument. */
8199 if (singleton == TREE_OPERAND (exp, 1))
8200 TREE_OPERAND (exp, 0)
8201 = invert_truthvalue (TREE_OPERAND (exp, 0));
8202
8203 result = do_store_flag (TREE_OPERAND (exp, 0),
8204 (safe_from_p (temp, singleton, 1)
8205 ? temp : NULL_RTX),
8206 mode, BRANCH_COST <= 1);
8207
8208 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8209 result = expand_shift (LSHIFT_EXPR, mode, result,
8210 build_int_2 (tree_log2
8211 (TREE_OPERAND
8212 (binary_op, 1)),
8213 0),
8214 (safe_from_p (temp, singleton, 1)
8215 ? temp : NULL_RTX), 0);
8216
8217 if (result)
8218 {
8219 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8220 return expand_binop (mode, boptab, op1, result, temp,
8221 unsignedp, OPTAB_LIB_WIDEN);
8222 }
8223 else if (singleton == TREE_OPERAND (exp, 1))
8224 TREE_OPERAND (exp, 0)
8225 = invert_truthvalue (TREE_OPERAND (exp, 0));
8226 }
8227
8228 do_pending_stack_adjust ();
8229 NO_DEFER_POP;
8230 op0 = gen_label_rtx ();
8231
8232 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8233 {
8234 if (temp != 0)
8235 {
8236 /* If the target conflicts with the other operand of the
8237 binary op, we can't use it. Also, we can't use the target
8238 if it is a hard register, because evaluating the condition
8239 might clobber it. */
8240 if ((binary_op
8241 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8242 || (GET_CODE (temp) == REG
8243 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8244 temp = gen_reg_rtx (mode);
8245 store_expr (singleton, temp, 0);
8246 }
8247 else
8248 expand_expr (singleton,
8249 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8250 if (singleton == TREE_OPERAND (exp, 1))
8251 jumpif (TREE_OPERAND (exp, 0), op0);
8252 else
8253 jumpifnot (TREE_OPERAND (exp, 0), op0);
8254
8255 start_cleanup_deferral ();
8256 if (binary_op && temp == 0)
8257 /* Just touch the other operand. */
8258 expand_expr (TREE_OPERAND (binary_op, 1),
8259 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8260 else if (binary_op)
8261 store_expr (build (TREE_CODE (binary_op), type,
8262 make_tree (type, temp),
8263 TREE_OPERAND (binary_op, 1)),
8264 temp, 0);
8265 else
8266 store_expr (build1 (TREE_CODE (unary_op), type,
8267 make_tree (type, temp)),
8268 temp, 0);
8269 op1 = op0;
8270 }
8271 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8272 comparison operator. If we have one of these cases, set the
8273 output to A, branch on A (cse will merge these two references),
8274 then set the output to FOO. */
8275 else if (temp
8276 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8277 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8278 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8279 TREE_OPERAND (exp, 1), 0)
8280 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8281 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8282 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8283 {
8284 if (GET_CODE (temp) == REG
8285 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8286 temp = gen_reg_rtx (mode);
8287 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8288 jumpif (TREE_OPERAND (exp, 0), op0);
8289
8290 start_cleanup_deferral ();
8291 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8292 op1 = op0;
8293 }
8294 else if (temp
8295 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8296 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8297 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8298 TREE_OPERAND (exp, 2), 0)
8299 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8300 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8301 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8302 {
8303 if (GET_CODE (temp) == REG
8304 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8305 temp = gen_reg_rtx (mode);
8306 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8307 jumpifnot (TREE_OPERAND (exp, 0), op0);
8308
8309 start_cleanup_deferral ();
8310 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8311 op1 = op0;
8312 }
8313 else
8314 {
8315 op1 = gen_label_rtx ();
8316 jumpifnot (TREE_OPERAND (exp, 0), op0);
8317
8318 start_cleanup_deferral ();
8319
8320 /* One branch of the cond can be void, if it never returns. For
8321 example A ? throw : E */
8322 if (temp != 0
8323 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8324 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8325 else
8326 expand_expr (TREE_OPERAND (exp, 1),
8327 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8328 end_cleanup_deferral ();
8329 emit_queue ();
8330 emit_jump_insn (gen_jump (op1));
8331 emit_barrier ();
8332 emit_label (op0);
8333 start_cleanup_deferral ();
8334 if (temp != 0
8335 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8336 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8337 else
8338 expand_expr (TREE_OPERAND (exp, 2),
8339 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8340 }
8341
8342 end_cleanup_deferral ();
8343
8344 emit_queue ();
8345 emit_label (op1);
8346 OK_DEFER_POP;
8347
8348 return temp;
8349 }
8350
8351 case TARGET_EXPR:
8352 {
8353 /* Something needs to be initialized, but we didn't know
8354 where that thing was when building the tree. For example,
8355 it could be the return value of a function, or a parameter
8356 	 to a function which is laid out on the stack, or a temporary
8357 variable which must be passed by reference.
8358
8359 We guarantee that the expression will either be constructed
8360 or copied into our original target. */
8361
8362 tree slot = TREE_OPERAND (exp, 0);
8363 tree cleanups = NULL_TREE;
8364 tree exp1;
8365
8366 if (TREE_CODE (slot) != VAR_DECL)
8367 abort ();
8368
8369 if (! ignore)
8370 target = original_target;
8371
8372 /* Set this here so that if we get a target that refers to a
8373 register variable that's already been used, put_reg_into_stack
8374 knows that it should fix up those uses. */
8375 TREE_USED (slot) = 1;
8376
8377 if (target == 0)
8378 {
8379 if (DECL_RTL_SET_P (slot))
8380 {
8381 target = DECL_RTL (slot);
8382 	      /* We have already expanded the slot, so don't do
8383 it again. (mrs) */
8384 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8385 return target;
8386 }
8387 else
8388 {
8389 target = assign_temp (type, 2, 0, 1);
8390 /* All temp slots at this level must not conflict. */
8391 preserve_temp_slots (target);
8392 SET_DECL_RTL (slot, target);
8393 if (TREE_ADDRESSABLE (slot))
8394 put_var_into_stack (slot);
8395
8396 /* Since SLOT is not known to the called function
8397 to belong to its stack frame, we must build an explicit
8398 cleanup. This case occurs when we must build up a reference
8399 to pass the reference as an argument. In this case,
8400 it is very likely that such a reference need not be
8401 built here. */
8402
8403 if (TREE_OPERAND (exp, 2) == 0)
8404 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8405 cleanups = TREE_OPERAND (exp, 2);
8406 }
8407 }
8408 else
8409 {
8410 /* This case does occur, when expanding a parameter which
8411 needs to be constructed on the stack. The target
8412 is the actual stack address that we want to initialize.
8413 The function we call will perform the cleanup in this case. */
8414
8415 /* If we have already assigned it space, use that space,
8416 	     not the target that we were passed in, as our target
8417 parameter is only a hint. */
8418 if (DECL_RTL_SET_P (slot))
8419 {
8420 target = DECL_RTL (slot);
8421 	      /* We have already expanded the slot, so don't do
8422 it again. (mrs) */
8423 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8424 return target;
8425 }
8426 else
8427 {
8428 SET_DECL_RTL (slot, target);
8429 /* If we must have an addressable slot, then make sure that
8430 the RTL that we just stored in slot is OK. */
8431 if (TREE_ADDRESSABLE (slot))
8432 put_var_into_stack (slot);
8433 }
8434 }
8435
8436 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8437 /* Mark it as expanded. */
8438 TREE_OPERAND (exp, 1) = NULL_TREE;
8439
8440 store_expr (exp1, target, 0);
8441
8442 expand_decl_cleanup (NULL_TREE, cleanups);
8443
8444 return target;
8445 }
8446
8447 case INIT_EXPR:
8448 {
8449 tree lhs = TREE_OPERAND (exp, 0);
8450 tree rhs = TREE_OPERAND (exp, 1);
8451
8452 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8453 return temp;
8454 }
8455
8456 case MODIFY_EXPR:
8457 {
8458 /* If lhs is complex, expand calls in rhs before computing it.
8459 That's so we don't compute a pointer and save it over a
8460 call. If lhs is simple, compute it first so we can give it
8461 as a target if the rhs is just a call. This avoids an
8462 extra temp and copy and that prevents a partial-subsumption
8463 which makes bad code. Actually we could treat
8464 component_ref's of vars like vars. */
8465
8466 tree lhs = TREE_OPERAND (exp, 0);
8467 tree rhs = TREE_OPERAND (exp, 1);
8468
8469 temp = 0;
8470
8471 /* Check for |= or &= of a bitfield of size one into another bitfield
8472 of size 1. In this case, (unless we need the result of the
8473 assignment) we can do this more efficiently with a
8474 test followed by an assignment, if necessary.
8475
8476 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8477 things change so we do, this code should be enhanced to
8478 support it. */
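	/* For instance, with one-bit fields a and b and the result unused,
	      s.a |= s.b;   becomes   if (s.b) s.a = 1;
	      s.a &= s.b;   becomes   if (! s.b) s.a = 0;
	   (a source-level sketch of the jump and store emitted below).  */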
8479 if (ignore
8480 && TREE_CODE (lhs) == COMPONENT_REF
8481 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8482 || TREE_CODE (rhs) == BIT_AND_EXPR)
8483 && TREE_OPERAND (rhs, 0) == lhs
8484 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8485 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8486 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8487 {
8488 rtx label = gen_label_rtx ();
8489
8490 do_jump (TREE_OPERAND (rhs, 1),
8491 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8492 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8493 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8494 (TREE_CODE (rhs) == BIT_IOR_EXPR
8495 ? integer_one_node
8496 : integer_zero_node)),
8497 0, 0);
8498 do_pending_stack_adjust ();
8499 emit_label (label);
8500 return const0_rtx;
8501 }
8502
8503 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8504
8505 return temp;
8506 }
8507
8508 case RETURN_EXPR:
8509 if (!TREE_OPERAND (exp, 0))
8510 expand_null_return ();
8511 else
8512 expand_return (TREE_OPERAND (exp, 0));
8513 return const0_rtx;
8514
8515 case PREINCREMENT_EXPR:
8516 case PREDECREMENT_EXPR:
8517 return expand_increment (exp, 0, ignore);
8518
8519 case POSTINCREMENT_EXPR:
8520 case POSTDECREMENT_EXPR:
8521 /* Faster to treat as pre-increment if result is not used. */
8522 return expand_increment (exp, ! ignore, ignore);
8523
8524 case ADDR_EXPR:
8525 /* If nonzero, TEMP will be set to the address of something that might
8526 be a MEM corresponding to a stack slot. */
8527 temp = 0;
8528
8529 /* Are we taking the address of a nested function? */
8530 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8531 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8532 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8533 && ! TREE_STATIC (exp))
8534 {
8535 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8536 op0 = force_operand (op0, target);
8537 }
8538 /* If we are taking the address of something erroneous, just
8539 return a zero. */
8540 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8541 return const0_rtx;
8542 else
8543 {
8544 /* We make sure to pass const0_rtx down if we came in with
8545 ignore set, to avoid doing the cleanups twice for something. */
8546 op0 = expand_expr (TREE_OPERAND (exp, 0),
8547 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8548 (modifier == EXPAND_INITIALIZER
8549 ? modifier : EXPAND_CONST_ADDRESS));
8550
8551 /* If we are going to ignore the result, OP0 will have been set
8552 to const0_rtx, so just return it. Don't get confused and
8553 think we are taking the address of the constant. */
8554 if (ignore)
8555 return op0;
8556
8557 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8558 	   clever and return a REG when given a MEM.  */
8559 op0 = protect_from_queue (op0, 1);
8560
8561 /* We would like the object in memory. If it is a constant, we can
8562 have it be statically allocated into memory. For a non-constant,
8563 we need to allocate some memory and store the value into it. */
8564
8565 if (CONSTANT_P (op0))
8566 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8567 op0);
8568 else if (GET_CODE (op0) == MEM)
8569 {
8570 mark_temp_addr_taken (op0);
8571 temp = XEXP (op0, 0);
8572 }
8573
8574 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8575 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8576 || GET_CODE (op0) == PARALLEL)
8577 {
8578 	      /* If this object is in a register, it must not
8579 		 be BLKmode.  */
8580 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8581 tree nt = build_qualified_type (inner_type,
8582 (TYPE_QUALS (inner_type)
8583 | TYPE_QUAL_CONST));
8584 rtx memloc = assign_temp (nt, 1, 1, 1);
8585
8586 mark_temp_addr_taken (memloc);
8587 if (GET_CODE (op0) == PARALLEL)
8588 /* Handle calls that pass values in multiple non-contiguous
8589 locations. The Irix 6 ABI has examples of this. */
8590 emit_group_store (memloc, op0,
8591 int_size_in_bytes (inner_type),
8592 TYPE_ALIGN (inner_type));
8593 else
8594 emit_move_insn (memloc, op0);
8595 op0 = memloc;
8596 }
8597
8598 if (GET_CODE (op0) != MEM)
8599 abort ();
8600
8601 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8602 {
8603 temp = XEXP (op0, 0);
8604 #ifdef POINTERS_EXTEND_UNSIGNED
8605 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8606 && mode == ptr_mode)
8607 temp = convert_memory_address (ptr_mode, temp);
8608 #endif
8609 return temp;
8610 }
8611
8612 op0 = force_operand (XEXP (op0, 0), target);
8613 }
8614
8615 if (flag_force_addr && GET_CODE (op0) != REG)
8616 op0 = force_reg (Pmode, op0);
8617
8618 if (GET_CODE (op0) == REG
8619 && ! REG_USERVAR_P (op0))
8620 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8621
8622 /* If we might have had a temp slot, add an equivalent address
8623 for it. */
8624 if (temp != 0)
8625 update_temp_slot_address (temp, op0);
8626
8627 #ifdef POINTERS_EXTEND_UNSIGNED
8628 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8629 && mode == ptr_mode)
8630 op0 = convert_memory_address (ptr_mode, op0);
8631 #endif
8632
8633 return op0;
8634
8635 case ENTRY_VALUE_EXPR:
8636 abort ();
8637
8638 /* COMPLEX type for Extended Pascal & Fortran */
8639 case COMPLEX_EXPR:
8640 {
8641 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8642 rtx insns;
8643
8644 /* Get the rtx code of the operands. */
8645 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8646 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8647
8648 if (! target)
8649 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8650
8651 start_sequence ();
8652
8653 /* Move the real (op0) and imaginary (op1) parts to their location. */
8654 emit_move_insn (gen_realpart (mode, target), op0);
8655 emit_move_insn (gen_imagpart (mode, target), op1);
8656
8657 insns = get_insns ();
8658 end_sequence ();
8659
8660 /* Complex construction should appear as a single unit. */
8661 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8662 each with a separate pseudo as destination.
8663 It's not correct for flow to treat them as a unit. */
8664 if (GET_CODE (target) != CONCAT)
8665 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8666 else
8667 emit_insns (insns);
8668
8669 return target;
8670 }
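/* Illustrative sketch (editorial, not compiler code): for a source
   construct such as the Fortran CMPLX (A, B), the front end hands us a
   COMPLEX_EXPR with operands A and B, and the case above expands it
   roughly as

	emit_move_insn (gen_realpart (DFmode, target), a_rtx);
	emit_move_insn (gen_imagpart (DFmode, target), b_rtx);

   (DFmode, a_rtx and b_rtx are placeholders here), wrapped in
   emit_no_conflict_block so later passes see the pair of moves as one
   store of the whole complex value.  */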
8671
8672 case REALPART_EXPR:
8673 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8674 return gen_realpart (mode, op0);
8675
8676 case IMAGPART_EXPR:
8677 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8678 return gen_imagpart (mode, op0);
8679
8680 case CONJ_EXPR:
8681 {
8682 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8683 rtx imag_t;
8684 rtx insns;
8685
8686 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8687
8688 if (! target)
8689 target = gen_reg_rtx (mode);
8690
8691 start_sequence ();
8692
8693 /* Store the realpart and the negated imagpart to target. */
8694 emit_move_insn (gen_realpart (partmode, target),
8695 gen_realpart (partmode, op0));
8696
8697 imag_t = gen_imagpart (partmode, target);
8698 temp = expand_unop (partmode,
8699 ! unsignedp && flag_trapv
8700 && (GET_MODE_CLASS(partmode) == MODE_INT)
8701 ? negv_optab : neg_optab,
8702 gen_imagpart (partmode, op0), imag_t, 0);
8703 if (temp != imag_t)
8704 emit_move_insn (imag_t, temp);
8705
8706 insns = get_insns ();
8707 end_sequence ();
8708
8709 /* Conjugate should appear as a single unit.
8710 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8711 each with a separate pseudo as destination.
8712 It's not correct for flow to treat them as a unit. */
8713 if (GET_CODE (target) != CONCAT)
8714 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8715 else
8716 emit_insns (insns);
8717
8718 return target;
8719 }
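/* Illustrative note (editorial): CONJ_EXPR of X = a + bi produces
   a - bi; the real part is copied unchanged and the imaginary part is
   negated, using negv_optab instead of neg_optab when -ftrapv asks for
   trapping negation of an integral part mode.  */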
8720
8721 case TRY_CATCH_EXPR:
8722 {
8723 tree handler = TREE_OPERAND (exp, 1);
8724
8725 expand_eh_region_start ();
8726
8727 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8728
8729 expand_eh_region_end_cleanup (handler);
8730
8731 return op0;
8732 }
8733
8734 case TRY_FINALLY_EXPR:
8735 {
8736 tree try_block = TREE_OPERAND (exp, 0);
8737 tree finally_block = TREE_OPERAND (exp, 1);
8738 rtx finally_label = gen_label_rtx ();
8739 rtx done_label = gen_label_rtx ();
8740 rtx return_link = gen_reg_rtx (Pmode);
8741 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8742 (tree) finally_label, (tree) return_link);
8743 TREE_SIDE_EFFECTS (cleanup) = 1;
8744
8745 /* Start a new binding layer that will keep track of all cleanup
8746 actions to be performed. */
8747 expand_start_bindings (2);
8748
8749 target_temp_slot_level = temp_slot_level;
8750
8751 expand_decl_cleanup (NULL_TREE, cleanup);
8752 op0 = expand_expr (try_block, target, tmode, modifier);
8753
8754 preserve_temp_slots (op0);
8755 expand_end_bindings (NULL_TREE, 0, 0);
8756 emit_jump (done_label);
8757 emit_label (finally_label);
8758 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8759 emit_indirect_jump (return_link);
8760 emit_label (done_label);
8761 return op0;
8762 }
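/* Illustrative sketch (editorial, not compiler code): the RTL emitted
   for TRY_FINALLY_EXPR has roughly this shape:

	<try block>
	return_link = &&resume;		(from the registered cleanup)
	goto finally_label;
     resume:
	goto done_label;
     finally_label:
	<finally block>
	goto *return_link;
     done_label:

   The GOTO_SUBROUTINE_EXPR cleanup registered above is what sets
   RETURN_LINK and jumps to FINALLY_LABEL; exceptional exits from the
   binding contour reach the finally block the same way.  */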
8763
8764 case GOTO_SUBROUTINE_EXPR:
8765 {
8766 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8767 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8768 rtx return_address = gen_label_rtx ();
8769 emit_move_insn (return_link,
8770 gen_rtx_LABEL_REF (Pmode, return_address));
8771 emit_jump (subr);
8772 emit_label (return_address);
8773 return const0_rtx;
8774 }
8775
8776 case VA_ARG_EXPR:
8777 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8778
8779 case EXC_PTR_EXPR:
8780 return get_exception_pointer (cfun);
8781
8782 case FDESC_EXPR:
8783 /* Function descriptors are not valid except as
8784 initialization constants, and should not be expanded. */
8785 abort ();
8786
8787 default:
8788 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8789 }
8790
8791 /* Here to do an ordinary binary operator, generating an instruction
8792 from the optab already placed in `this_optab'. */
8793 binop:
8794 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8795 subtarget = 0;
8796 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8797 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8798 binop2:
8799 temp = expand_binop (mode, this_optab, op0, op1, target,
8800 unsignedp, OPTAB_LIB_WIDEN);
8801 if (temp == 0)
8802 abort ();
8803 return temp;
8804 }
8805 \f
8806 /* Similar to expand_expr, except that we don't specify a target, target
8807 mode, or modifier and we return the alignment of the inner type. This is
8808 used in cases where it is not necessary to align the result to the
8809 alignment of its type as long as we know the alignment of the result, for
8810 example for comparisons of BLKmode values. */
8811
8812 static rtx
8813 expand_expr_unaligned (exp, palign)
8814 register tree exp;
8815 unsigned int *palign;
8816 {
8817 register rtx op0;
8818 tree type = TREE_TYPE (exp);
8819 register enum machine_mode mode = TYPE_MODE (type);
8820
8821 /* Default the alignment we return to that of the type. */
8822 *palign = TYPE_ALIGN (type);
8823
8824 /* The only case in which we do anything special is when the resulting mode
8825 is BLKmode. */
8826 if (mode != BLKmode)
8827 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8828
8829 switch (TREE_CODE (exp))
8830 {
8831 case CONVERT_EXPR:
8832 case NOP_EXPR:
8833 case NON_LVALUE_EXPR:
8834 /* Conversions between BLKmode values don't change the underlying
8835 alignment or value. */
8836 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8837 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8838 break;
8839
8840 case ARRAY_REF:
8841 /* Much of the code for this case is copied directly from expand_expr.
8842 We need to duplicate it here because we will do something different
8843 in the fall-through case, so we need to handle the same exceptions
8844 it does. */
8845 {
8846 tree array = TREE_OPERAND (exp, 0);
8847 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8848 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8849 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8850 HOST_WIDE_INT i;
8851
8852 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8853 abort ();
8854
8855 /* Optimize the special-case of a zero lower bound.
8856
8857 We convert the low_bound to sizetype to avoid some problems
8858 with constant folding. (E.g. suppose the lower bound is 1,
8859 and its mode is QI. Without the conversion, (ARRAY
8860 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8861 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8862
8863 if (! integer_zerop (low_bound))
8864 index = size_diffop (index, convert (sizetype, low_bound));
8865
8866 /* If this is a constant index into a constant array,
8867 just get the value from the array. Handle both the cases when
8868 we have an explicit constructor and when our operand is a variable
8869 that was declared const. */
8870
8871 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8872 && host_integerp (index, 0)
8873 && 0 > compare_tree_int (index,
8874 list_length (CONSTRUCTOR_ELTS
8875 (TREE_OPERAND (exp, 0)))))
8876 {
8877 tree elem;
8878
8879 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8880 i = tree_low_cst (index, 0);
8881 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8882 ;
8883
8884 if (elem)
8885 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8886 }
8887
8888 else if (optimize >= 1
8889 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8890 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8891 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8892 {
8893 if (TREE_CODE (index) == INTEGER_CST)
8894 {
8895 tree init = DECL_INITIAL (array);
8896
8897 if (TREE_CODE (init) == CONSTRUCTOR)
8898 {
8899 tree elem;
8900
8901 for (elem = CONSTRUCTOR_ELTS (init);
8902 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8903 elem = TREE_CHAIN (elem))
8904 ;
8905
8906 if (elem)
8907 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8908 palign);
8909 }
8910 }
8911 }
8912 }
8913 /* Fall through. */
8914
8915 case COMPONENT_REF:
8916 case BIT_FIELD_REF:
8917 case ARRAY_RANGE_REF:
8918 /* If the operand is a CONSTRUCTOR, we can just extract the
8919 appropriate field if it is present. Don't do this if we have
8920 already written the data since we want to refer to that copy
8921 and varasm.c assumes that's what we'll do. */
8922 if (TREE_CODE (exp) == COMPONENT_REF
8923 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8924 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8925 {
8926 tree elt;
8927
8928 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8929 elt = TREE_CHAIN (elt))
8930 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8931 /* Note that unlike the case in expand_expr, we know this is
8932 BLKmode and hence not an integer. */
8933 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8934 }
8935
8936 {
8937 enum machine_mode mode1;
8938 HOST_WIDE_INT bitsize, bitpos;
8939 tree offset;
8940 int volatilep = 0;
8941 unsigned int alignment;
8942 int unsignedp;
8943 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8944 &mode1, &unsignedp, &volatilep,
8945 &alignment);
8946
8947 /* If we got back the original object, something is wrong. Perhaps
8948 we are evaluating an expression too early. In any event, don't
8949 infinitely recurse. */
8950 if (tem == exp)
8951 abort ();
8952
8953 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8954
8955 /* If this is a constant, put it into a register if it is a
8956 legitimate constant and OFFSET is 0; otherwise force it into memory.  */
8957 if (CONSTANT_P (op0))
8958 {
8959 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8960
8961 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8962 && offset == 0)
8963 op0 = force_reg (inner_mode, op0);
8964 else
8965 op0 = validize_mem (force_const_mem (inner_mode, op0));
8966 }
8967
8968 if (offset != 0)
8969 {
8970 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8971
8972 /* If this object is in a register, put it into memory.
8973 This case can't occur in C, but can in Ada if we have
8974 unchecked conversion of an expression from a scalar type to
8975 an array or record type. */
8976 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8977 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8978 {
8979 tree nt = build_qualified_type (TREE_TYPE (tem),
8980 (TYPE_QUALS (TREE_TYPE (tem))
8981 | TYPE_QUAL_CONST));
8982 rtx memloc = assign_temp (nt, 1, 1, 1);
8983
8984 mark_temp_addr_taken (memloc);
8985 emit_move_insn (memloc, op0);
8986 op0 = memloc;
8987 }
8988
8989 if (GET_CODE (op0) != MEM)
8990 abort ();
8991
8992 if (GET_MODE (offset_rtx) != ptr_mode)
8993 {
8994 #ifdef POINTERS_EXTEND_UNSIGNED
8995 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8996 #else
8997 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8998 #endif
8999 }
9000
9001 op0 = change_address (op0, VOIDmode,
9002 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9003 force_reg (ptr_mode,
9004 offset_rtx)));
9005 }
9006
9007 /* Don't forget about volatility even if this is a bitfield. */
9008 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9009 {
9010 op0 = copy_rtx (op0);
9011 MEM_VOLATILE_P (op0) = 1;
9012 }
9013
9014 /* Check the access. */
9015 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9016 {
9017 rtx to;
9018 int size;
9019
9020 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9021 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9022
9023 /* Check the access right of the pointer. */
9024 in_check_memory_usage = 1;
9025 if (size > BITS_PER_UNIT)
9026 emit_library_call (chkr_check_addr_libfunc,
9027 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9028 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9029 TYPE_MODE (sizetype),
9030 GEN_INT (MEMORY_USE_RO),
9031 TYPE_MODE (integer_type_node));
9032 in_check_memory_usage = 0;
9033 }
9034
9035 /* In cases where an aligned union has an unaligned object
9036 as a field, we might be extracting a BLKmode value from
9037 an integer-mode (e.g., SImode) object. Handle this case
9038 by doing the extract into an object as wide as the field
9039 (which we know to be the width of a basic mode), then
9040 storing into memory, and changing the mode to BLKmode.
9041 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9042 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9043 if (mode1 == VOIDmode
9044 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9045 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9046 && (TYPE_ALIGN (type) > alignment
9047 || bitpos % TYPE_ALIGN (type) != 0)))
9048 {
9049 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9050
9051 if (ext_mode == BLKmode)
9052 {
9053 /* In this case, BITPOS must start at a byte boundary. */
9054 if (GET_CODE (op0) != MEM
9055 || bitpos % BITS_PER_UNIT != 0)
9056 abort ();
9057
9058 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9059 }
9060 else
9061 {
9062 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9063 TYPE_QUAL_CONST);
9064 rtx new = assign_temp (nt, 0, 1, 1);
9065
9066 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9067 unsignedp, NULL_RTX, ext_mode,
9068 ext_mode, alignment,
9069 int_size_in_bytes (TREE_TYPE (tem)));
9070
9071 /* If the result is a record type and BITSIZE is narrower than
9072 the mode of OP0, an integral mode, and this is a big endian
9073 machine, we must put the field into the high-order bits. */
9074 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9075 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9076 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9077 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9078 size_int (GET_MODE_BITSIZE
9079 (GET_MODE (op0))
9080 - bitsize),
9081 op0, 1);
9082
9083 emit_move_insn (new, op0);
9084 op0 = copy_rtx (new);
9085 PUT_MODE (op0, BLKmode);
9086 }
9087 }
9088 else
9089 /* Get a reference to just this component. */
9090 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9091
9092 set_mem_alias_set (op0, get_alias_set (exp));
9093
9094 /* Adjust the alignment in case the bit position is not
9095 a multiple of the alignment of the inner object. */
9096 while (bitpos % alignment != 0)
9097 alignment >>= 1;
9098
9099 if (GET_CODE (XEXP (op0, 0)) == REG)
9100 mark_reg_pointer (XEXP (op0, 0), alignment);
9101
9102 MEM_IN_STRUCT_P (op0) = 1;
9103 MEM_VOLATILE_P (op0) |= volatilep;
9104
9105 *palign = alignment;
9106 return op0;
9107 }
9108
9109 default:
9110 break;
9111
9112 }
9113
9114 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9115 }
9116 \f
9117 /* Return the tree node if ARG corresponds to a string constant, or zero
9118 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9119 in bytes within the string that ARG is accessing. The type of the
9120 offset will be `sizetype'. */
9121
9122 tree
9123 string_constant (arg, ptr_offset)
9124 tree arg;
9125 tree *ptr_offset;
9126 {
9127 STRIP_NOPS (arg);
9128
9129 if (TREE_CODE (arg) == ADDR_EXPR
9130 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9131 {
9132 *ptr_offset = size_zero_node;
9133 return TREE_OPERAND (arg, 0);
9134 }
9135 else if (TREE_CODE (arg) == PLUS_EXPR)
9136 {
9137 tree arg0 = TREE_OPERAND (arg, 0);
9138 tree arg1 = TREE_OPERAND (arg, 1);
9139
9140 STRIP_NOPS (arg0);
9141 STRIP_NOPS (arg1);
9142
9143 if (TREE_CODE (arg0) == ADDR_EXPR
9144 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9145 {
9146 *ptr_offset = convert (sizetype, arg1);
9147 return TREE_OPERAND (arg0, 0);
9148 }
9149 else if (TREE_CODE (arg1) == ADDR_EXPR
9150 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9151 {
9152 *ptr_offset = convert (sizetype, arg0);
9153 return TREE_OPERAND (arg1, 0);
9154 }
9155 }
9156
9157 return 0;
9158 }
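/* Illustrative example (editorial): for a call such as
   strlen ("hello" + 2) the argument typically reaches us as
   (PLUS_EXPR (ADDR_EXPR (STRING_CST "hello")) 2); string_constant
   returns the STRING_CST and sets *PTR_OFFSET to a sizetype 2.  For a
   bare (ADDR_EXPR (STRING_CST ...)) the offset is size_zero_node.  */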
9159 \f
9160 /* Expand code for a post- or pre-increment or decrement
9161 and return the RTX for the result.
9162 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9163
9164 static rtx
9165 expand_increment (exp, post, ignore)
9166 register tree exp;
9167 int post, ignore;
9168 {
9169 register rtx op0, op1;
9170 register rtx temp, value;
9171 register tree incremented = TREE_OPERAND (exp, 0);
9172 optab this_optab = add_optab;
9173 int icode;
9174 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9175 int op0_is_copy = 0;
9176 int single_insn = 0;
9177 /* 1 means we can't store into OP0 directly,
9178 because it is a subreg narrower than a word,
9179 and we don't dare clobber the rest of the word. */
9180 int bad_subreg = 0;
9181
9182 /* Stabilize any component ref that might need to be
9183 evaluated more than once below. */
9184 if (!post
9185 || TREE_CODE (incremented) == BIT_FIELD_REF
9186 || (TREE_CODE (incremented) == COMPONENT_REF
9187 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9188 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9189 incremented = stabilize_reference (incremented);
9190 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9191 ones into SAVE_EXPRs so that they don't accidentally get evaluated
9192 more than once by the code below. */
9193 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9194 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9195 incremented = save_expr (incremented);
9196
9197 /* Compute the operands as RTX.
9198 Note whether OP0 is the actual lvalue or a copy of it:
9199 I believe it is a copy iff it is a register or subreg
9200 and insns were generated in computing it. */
9201
9202 temp = get_last_insn ();
9203 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9204
9205 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9206 in place but instead must do sign- or zero-extension during assignment,
9207 so we copy it into a new register and let the code below use it as
9208 a copy.
9209
9210 Note that we can safely modify this SUBREG since it is known not to be
9211 shared (it was made by the expand_expr call above). */
9212
9213 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9214 {
9215 if (post)
9216 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9217 else
9218 bad_subreg = 1;
9219 }
9220 else if (GET_CODE (op0) == SUBREG
9221 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9222 {
9223 /* We cannot increment this SUBREG in place. If we are
9224 post-incrementing, get a copy of the old value. Otherwise,
9225 just mark that we cannot increment in place. */
9226 if (post)
9227 op0 = copy_to_reg (op0);
9228 else
9229 bad_subreg = 1;
9230 }
9231
9232 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9233 && temp != get_last_insn ());
9234 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9235 EXPAND_MEMORY_USE_BAD);
9236
9237 /* Decide whether incrementing or decrementing. */
9238 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9239 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9240 this_optab = sub_optab;
9241
9242 /* Convert decrement by a constant into a negative increment. */
9243 if (this_optab == sub_optab
9244 && GET_CODE (op1) == CONST_INT)
9245 {
9246 op1 = GEN_INT (-INTVAL (op1));
9247 this_optab = add_optab;
9248 }
9249
9250 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9251 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9252
9253 /* For a preincrement, see if we can do this with a single instruction. */
9254 if (!post)
9255 {
9256 icode = (int) this_optab->handlers[(int) mode].insn_code;
9257 if (icode != (int) CODE_FOR_nothing
9258 /* Make sure that OP0 is valid for operands 0 and 1
9259 of the insn we want to queue. */
9260 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9261 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9262 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9263 single_insn = 1;
9264 }
9265
9266 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9267 then we cannot just increment OP0. We must therefore contrive to
9268 increment the original value. Then, for postincrement, we can return
9269 OP0 since it is a copy of the old value. For preincrement, expand here
9270 unless we can do it with a single insn.
9271
9272 Likewise if storing directly into OP0 would clobber high bits
9273 we need to preserve (bad_subreg). */
9274 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9275 {
9276 /* This is the easiest way to increment the value wherever it is.
9277 Problems with multiple evaluation of INCREMENTED are prevented
9278 because either (1) it is a component_ref or preincrement,
9279 in which case it was stabilized above, or (2) it is an array_ref
9280 with constant index in an array in a register, which is
9281 safe to reevaluate. */
9282 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9283 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9284 ? MINUS_EXPR : PLUS_EXPR),
9285 TREE_TYPE (exp),
9286 incremented,
9287 TREE_OPERAND (exp, 1));
9288
9289 while (TREE_CODE (incremented) == NOP_EXPR
9290 || TREE_CODE (incremented) == CONVERT_EXPR)
9291 {
9292 newexp = convert (TREE_TYPE (incremented), newexp);
9293 incremented = TREE_OPERAND (incremented, 0);
9294 }
9295
9296 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9297 return post ? op0 : temp;
9298 }
9299
9300 if (post)
9301 {
9302 /* We have a true reference to the value in OP0.
9303 If there is an insn to add or subtract in this mode, queue it.
9304 Queueing the increment insn avoids the register shuffling
9305 that often results if we must increment now and first save
9306 the old value for subsequent use. */
9307
9308 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9309 op0 = stabilize (op0);
9310 #endif
9311
9312 icode = (int) this_optab->handlers[(int) mode].insn_code;
9313 if (icode != (int) CODE_FOR_nothing
9314 /* Make sure that OP0 is valid for operands 0 and 1
9315 of the insn we want to queue. */
9316 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9317 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9318 {
9319 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9320 op1 = force_reg (mode, op1);
9321
9322 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9323 }
9324 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9325 {
9326 rtx addr = (general_operand (XEXP (op0, 0), mode)
9327 ? force_reg (Pmode, XEXP (op0, 0))
9328 : copy_to_reg (XEXP (op0, 0)));
9329 rtx temp, result;
9330
9331 op0 = replace_equiv_address (op0, addr);
9332 temp = force_reg (GET_MODE (op0), op0);
9333 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9334 op1 = force_reg (mode, op1);
9335
9336 /* The increment queue is LIFO, thus we have to `queue'
9337 the instructions in reverse order. */
9338 enqueue_insn (op0, gen_move_insn (op0, temp));
9339 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9340 return result;
9341 }
9342 }
9343
9344 /* Preincrement, or we can't increment with one simple insn. */
9345 if (post)
9346 /* Save a copy of the value before inc or dec, to return it later. */
9347 temp = value = copy_to_reg (op0);
9348 else
9349 /* Arrange to return the incremented value. */
9350 /* Copy the rtx because expand_binop will protect from the queue,
9351 and the results of that would be invalid for us to return
9352 if our caller does emit_queue before using our result. */
9353 temp = copy_rtx (value = op0);
9354
9355 /* Increment however we can. */
9356 op1 = expand_binop (mode, this_optab, value, op1,
9357 current_function_check_memory_usage ? NULL_RTX : op0,
9358 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9359 /* Make sure the value is stored into OP0. */
9360 if (op1 != op0)
9361 emit_move_insn (op0, op1);
9362
9363 return temp;
9364 }
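/* Illustrative example (editorial): for a post-increment `i++' whose
   value is used, the add is not emitted immediately; it is entered on
   the pending queue with enqueue_insn so the caller can use the old
   value of `i' first, and it only reaches the insn stream when
   emit_queue is called.  A pre-increment `++i' is instead expanded in
   place when a single add insn of the right mode exists.  */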
9365 \f
9366 /* At the start of a function, record that we have no previously-pushed
9367 arguments waiting to be popped. */
9368
9369 void
9370 init_pending_stack_adjust ()
9371 {
9372 pending_stack_adjust = 0;
9373 }
9374
9375 /* When exiting from function, if safe, clear out any pending stack adjust
9376 so the adjustment won't get done.
9377
9378 Note, if the current function calls alloca, then it must have a
9379 frame pointer regardless of the value of flag_omit_frame_pointer. */
9380
9381 void
9382 clear_pending_stack_adjust ()
9383 {
9384 #ifdef EXIT_IGNORE_STACK
9385 if (optimize > 0
9386 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9387 && EXIT_IGNORE_STACK
9388 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9389 && ! flag_inline_functions)
9390 {
9391 stack_pointer_delta -= pending_stack_adjust;
9392 pending_stack_adjust = 0;
9393 }
9394 #endif
9395 }
9396
9397 /* Pop any previously-pushed arguments that have not been popped yet. */
9398
9399 void
9400 do_pending_stack_adjust ()
9401 {
9402 if (inhibit_defer_pop == 0)
9403 {
9404 if (pending_stack_adjust != 0)
9405 adjust_stack (GEN_INT (pending_stack_adjust));
9406 pending_stack_adjust = 0;
9407 }
9408 }
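/* Illustrative note (editorial): if, say, two earlier calls each left
   16 bytes of pushed arguments to be popped, those pops accumulate in
   pending_stack_adjust and a single 32-byte adjust_stack is emitted
   here, rather than one adjustment per call.  */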
9409 \f
9410 /* Expand conditional expressions. */
9411
9412 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9413 LABEL is an rtx of code CODE_LABEL, in this function and all the
9414 functions here. */
9415
9416 void
9417 jumpifnot (exp, label)
9418 tree exp;
9419 rtx label;
9420 {
9421 do_jump (exp, label, NULL_RTX);
9422 }
9423
9424 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9425
9426 void
9427 jumpif (exp, label)
9428 tree exp;
9429 rtx label;
9430 {
9431 do_jump (exp, NULL_RTX, label);
9432 }
9433
9434 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9435 the result is zero, or IF_TRUE_LABEL if the result is one.
9436 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9437 meaning fall through in that case.
9438
9439 do_jump always does any pending stack adjust except when it does not
9440 actually perform a jump. An example where there is no jump
9441 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9442
9443 This function is responsible for optimizing cases such as
9444 &&, || and comparison operators in EXP. */
9445
9446 void
9447 do_jump (exp, if_false_label, if_true_label)
9448 tree exp;
9449 rtx if_false_label, if_true_label;
9450 {
9451 register enum tree_code code = TREE_CODE (exp);
9452 /* Some cases need to create a label to jump to
9453 in order to properly fall through.
9454 These cases set DROP_THROUGH_LABEL nonzero. */
9455 rtx drop_through_label = 0;
9456 rtx temp;
9457 int i;
9458 tree type;
9459 enum machine_mode mode;
9460
9461 #ifdef MAX_INTEGER_COMPUTATION_MODE
9462 check_max_integer_computation_mode (exp);
9463 #endif
9464
9465 emit_queue ();
9466
9467 switch (code)
9468 {
9469 case ERROR_MARK:
9470 break;
9471
9472 case INTEGER_CST:
9473 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9474 if (temp)
9475 emit_jump (temp);
9476 break;
9477
9478 #if 0
9479 /* This is not true with #pragma weak */
9480 case ADDR_EXPR:
9481 /* The address of something can never be zero. */
9482 if (if_true_label)
9483 emit_jump (if_true_label);
9484 break;
9485 #endif
9486
9487 case NOP_EXPR:
9488 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9489 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9490 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9491 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9492 goto normal;
9493 case CONVERT_EXPR:
9494 /* If we are narrowing the operand, we have to do the compare in the
9495 narrower mode. */
9496 if ((TYPE_PRECISION (TREE_TYPE (exp))
9497 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9498 goto normal;
9499 case NON_LVALUE_EXPR:
9500 case REFERENCE_EXPR:
9501 case ABS_EXPR:
9502 case NEGATE_EXPR:
9503 case LROTATE_EXPR:
9504 case RROTATE_EXPR:
9505 /* These cannot change zero->non-zero or vice versa. */
9506 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9507 break;
9508
9509 case WITH_RECORD_EXPR:
9510 /* Put the object on the placeholder list, recurse through our first
9511 operand, and pop the list. */
9512 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9513 placeholder_list);
9514 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9515 placeholder_list = TREE_CHAIN (placeholder_list);
9516 break;
9517
9518 #if 0
9519 /* This is never less insns than evaluating the PLUS_EXPR followed by
9520 a test and can be longer if the test is eliminated. */
9521 case PLUS_EXPR:
9522 /* Reduce to minus. */
9523 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9524 TREE_OPERAND (exp, 0),
9525 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9526 TREE_OPERAND (exp, 1))));
9527 /* Process as MINUS. */
9528 #endif
9529
9530 case MINUS_EXPR:
9531 /* Non-zero iff operands of minus differ. */
9532 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9533 TREE_OPERAND (exp, 0),
9534 TREE_OPERAND (exp, 1)),
9535 NE, NE, if_false_label, if_true_label);
9536 break;
9537
9538 case BIT_AND_EXPR:
9539 /* If we are AND'ing with a small constant, do this comparison in the
9540 smallest type that fits. If the machine doesn't have comparisons
9541 that small, it will be converted back to the wider comparison.
9542 This helps if we are testing the sign bit of a narrower object.
9543 combine can't do this for us because it can't know whether a
9544 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9545
9546 if (! SLOW_BYTE_ACCESS
9547 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9548 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9549 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9550 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9551 && (type = type_for_mode (mode, 1)) != 0
9552 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9553 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9554 != CODE_FOR_nothing))
9555 {
9556 do_jump (convert (type, exp), if_false_label, if_true_label);
9557 break;
9558 }
9559 goto normal;
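/* Illustrative example (editorial): for `if (x & 0x80)' with X of
   SImode, the conversion above retries the jump on
   (unsigned char) (x & 0x80), so the test becomes a QImode compare
   against zero -- on many targets a single byte test of the sign
   bit.  */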
9560
9561 case TRUTH_NOT_EXPR:
9562 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9563 break;
9564
9565 case TRUTH_ANDIF_EXPR:
9566 if (if_false_label == 0)
9567 if_false_label = drop_through_label = gen_label_rtx ();
9568 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9569 start_cleanup_deferral ();
9570 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9571 end_cleanup_deferral ();
9572 break;
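/* Illustrative sketch (editorial, not compiler code): for
   `if (a && b) stmt;' the case above emits

	do_jump (a, if_false_label, NULL)	(fall through when A is true)
	do_jump (b, if_false_label, if_true_label)

   so the && never materializes a boolean value; TRUTH_ORIF_EXPR below
   is the mirror image using IF_TRUE_LABEL.  */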
9573
9574 case TRUTH_ORIF_EXPR:
9575 if (if_true_label == 0)
9576 if_true_label = drop_through_label = gen_label_rtx ();
9577 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9578 start_cleanup_deferral ();
9579 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9580 end_cleanup_deferral ();
9581 break;
9582
9583 case COMPOUND_EXPR:
9584 push_temp_slots ();
9585 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9586 preserve_temp_slots (NULL_RTX);
9587 free_temp_slots ();
9588 pop_temp_slots ();
9589 emit_queue ();
9590 do_pending_stack_adjust ();
9591 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9592 break;
9593
9594 case COMPONENT_REF:
9595 case BIT_FIELD_REF:
9596 case ARRAY_REF:
9597 case ARRAY_RANGE_REF:
9598 {
9599 HOST_WIDE_INT bitsize, bitpos;
9600 int unsignedp;
9601 enum machine_mode mode;
9602 tree type;
9603 tree offset;
9604 int volatilep = 0;
9605 unsigned int alignment;
9606
9607 /* Get description of this reference. We don't actually care
9608 about the underlying object here. */
9609 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9610 &unsignedp, &volatilep, &alignment);
9611
9612 type = type_for_size (bitsize, unsignedp);
9613 if (! SLOW_BYTE_ACCESS
9614 && type != 0 && bitsize >= 0
9615 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9616 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9617 != CODE_FOR_nothing))
9618 {
9619 do_jump (convert (type, exp), if_false_label, if_true_label);
9620 break;
9621 }
9622 goto normal;
9623 }
9624
9625 case COND_EXPR:
9626 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9627 if (integer_onep (TREE_OPERAND (exp, 1))
9628 && integer_zerop (TREE_OPERAND (exp, 2)))
9629 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9630
9631 else if (integer_zerop (TREE_OPERAND (exp, 1))
9632 && integer_onep (TREE_OPERAND (exp, 2)))
9633 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9634
9635 else
9636 {
9637 register rtx label1 = gen_label_rtx ();
9638 drop_through_label = gen_label_rtx ();
9639
9640 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9641
9642 start_cleanup_deferral ();
9643 /* Now the THEN-expression. */
9644 do_jump (TREE_OPERAND (exp, 1),
9645 if_false_label ? if_false_label : drop_through_label,
9646 if_true_label ? if_true_label : drop_through_label);
9647 /* In case the do_jump just above never jumps. */
9648 do_pending_stack_adjust ();
9649 emit_label (label1);
9650
9651 /* Now the ELSE-expression. */
9652 do_jump (TREE_OPERAND (exp, 2),
9653 if_false_label ? if_false_label : drop_through_label,
9654 if_true_label ? if_true_label : drop_through_label);
9655 end_cleanup_deferral ();
9656 }
9657 break;
9658
9659 case EQ_EXPR:
9660 {
9661 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9662
9663 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9664 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9665 {
9666 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9667 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9668 do_jump
9669 (fold
9670 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9671 fold (build (EQ_EXPR, TREE_TYPE (exp),
9672 fold (build1 (REALPART_EXPR,
9673 TREE_TYPE (inner_type),
9674 exp0)),
9675 fold (build1 (REALPART_EXPR,
9676 TREE_TYPE (inner_type),
9677 exp1)))),
9678 fold (build (EQ_EXPR, TREE_TYPE (exp),
9679 fold (build1 (IMAGPART_EXPR,
9680 TREE_TYPE (inner_type),
9681 exp0)),
9682 fold (build1 (IMAGPART_EXPR,
9683 TREE_TYPE (inner_type),
9684 exp1)))))),
9685 if_false_label, if_true_label);
9686 }
9687
9688 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9689 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9690
9691 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9692 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9693 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9694 else
9695 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9696 break;
9697 }
9698
9699 case NE_EXPR:
9700 {
9701 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9702
9703 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9704 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9705 {
9706 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9707 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9708 do_jump
9709 (fold
9710 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9711 fold (build (NE_EXPR, TREE_TYPE (exp),
9712 fold (build1 (REALPART_EXPR,
9713 TREE_TYPE (inner_type),
9714 exp0)),
9715 fold (build1 (REALPART_EXPR,
9716 TREE_TYPE (inner_type),
9717 exp1)))),
9718 fold (build (NE_EXPR, TREE_TYPE (exp),
9719 fold (build1 (IMAGPART_EXPR,
9720 TREE_TYPE (inner_type),
9721 exp0)),
9722 fold (build1 (IMAGPART_EXPR,
9723 TREE_TYPE (inner_type),
9724 exp1)))))),
9725 if_false_label, if_true_label);
9726 }
9727
9728 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9729 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9730
9731 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9732 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9733 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9734 else
9735 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9736 break;
9737 }
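/* Illustrative note (editorial): for complex operands, `a == b' is
   rewritten above as
	REALPART (a) == REALPART (b) && IMAGPART (a) == IMAGPART (b)
   and `a != b' as the corresponding || of != tests, after which
   do_jump is re-entered on the rewritten expression.  */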
9738
9739 case LT_EXPR:
9740 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9741 if (GET_MODE_CLASS (mode) == MODE_INT
9742 && ! can_compare_p (LT, mode, ccp_jump))
9743 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9744 else
9745 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9746 break;
9747
9748 case LE_EXPR:
9749 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9750 if (GET_MODE_CLASS (mode) == MODE_INT
9751 && ! can_compare_p (LE, mode, ccp_jump))
9752 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9753 else
9754 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9755 break;
9756
9757 case GT_EXPR:
9758 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9759 if (GET_MODE_CLASS (mode) == MODE_INT
9760 && ! can_compare_p (GT, mode, ccp_jump))
9761 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9762 else
9763 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9764 break;
9765
9766 case GE_EXPR:
9767 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9768 if (GET_MODE_CLASS (mode) == MODE_INT
9769 && ! can_compare_p (GE, mode, ccp_jump))
9770 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9771 else
9772 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9773 break;
9774
9775 case UNORDERED_EXPR:
9776 case ORDERED_EXPR:
9777 {
9778 enum rtx_code cmp, rcmp;
9779 int do_rev;
9780
9781 if (code == UNORDERED_EXPR)
9782 cmp = UNORDERED, rcmp = ORDERED;
9783 else
9784 cmp = ORDERED, rcmp = UNORDERED;
9785 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9786
9787 do_rev = 0;
9788 if (! can_compare_p (cmp, mode, ccp_jump)
9789 && (can_compare_p (rcmp, mode, ccp_jump)
9790 /* If the target doesn't provide either UNORDERED or ORDERED
9791 comparisons, canonicalize on UNORDERED for the library. */
9792 || rcmp == UNORDERED))
9793 do_rev = 1;
9794
9795 if (! do_rev)
9796 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9797 else
9798 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9799 }
9800 break;
9801
9802 {
9803 enum rtx_code rcode1;
9804 enum tree_code tcode2;
9805
9806 case UNLT_EXPR:
9807 rcode1 = UNLT;
9808 tcode2 = LT_EXPR;
9809 goto unordered_bcc;
9810 case UNLE_EXPR:
9811 rcode1 = UNLE;
9812 tcode2 = LE_EXPR;
9813 goto unordered_bcc;
9814 case UNGT_EXPR:
9815 rcode1 = UNGT;
9816 tcode2 = GT_EXPR;
9817 goto unordered_bcc;
9818 case UNGE_EXPR:
9819 rcode1 = UNGE;
9820 tcode2 = GE_EXPR;
9821 goto unordered_bcc;
9822 case UNEQ_EXPR:
9823 rcode1 = UNEQ;
9824 tcode2 = EQ_EXPR;
9825 goto unordered_bcc;
9826
9827 unordered_bcc:
9828 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9829 if (can_compare_p (rcode1, mode, ccp_jump))
9830 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9831 if_true_label);
9832 else
9833 {
9834 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9835 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9836 tree cmp0, cmp1;
9837
9838 /* If the target doesn't support combined unordered
9839 compares, decompose into UNORDERED + comparison. */
9840 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9841 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9842 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9843 do_jump (exp, if_false_label, if_true_label);
9844 }
9845 }
9846 break;
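/* Illustrative note (editorial): when the target has no combined
   unordered branch, an UNLT_EXPR comparison (unordered or less than)
   is decomposed above into
	UNORDERED (a, b) || a < b
   and handled as an ordinary TRUTH_ORIF_EXPR; likewise for UNLE,
   UNGT, UNGE and UNEQ.  */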
9847
9848 /* Special case:
9849 __builtin_expect (<test>, 0) and
9850 __builtin_expect (<test>, 1)
9851
9852 We need to do this here, so that <test> is not converted to a SCC
9853 operation on machines that use condition code registers and COMPARE
9854 like the PowerPC, and then the jump is done based on whether the SCC
9855 operation produced a 1 or 0. */
9856 case CALL_EXPR:
9857 /* Check for a built-in function. */
9858 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9859 {
9860 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9861 tree arglist = TREE_OPERAND (exp, 1);
9862
9863 if (TREE_CODE (fndecl) == FUNCTION_DECL
9864 && DECL_BUILT_IN (fndecl)
9865 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9866 && arglist != NULL_TREE
9867 && TREE_CHAIN (arglist) != NULL_TREE)
9868 {
9869 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9870 if_true_label);
9871
9872 if (seq != NULL_RTX)
9873 {
9874 emit_insn (seq);
9875 return;
9876 }
9877 }
9878 }
9879 /* Fall through and generate the normal code.  */
9880
9881 default:
9882 normal:
9883 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9884 #if 0
9885 /* This is not needed any more and causes poor code since it causes
9886 comparisons and tests from non-SI objects to have different code
9887 sequences. */
9888 /* Copy to register to avoid generating bad insns by cse
9889 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9890 if (!cse_not_expected && GET_CODE (temp) == MEM)
9891 temp = copy_to_reg (temp);
9892 #endif
9893 do_pending_stack_adjust ();
9894 /* Do any postincrements in the expression that was tested. */
9895 emit_queue ();
9896
9897 if (GET_CODE (temp) == CONST_INT
9898 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9899 || GET_CODE (temp) == LABEL_REF)
9900 {
9901 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9902 if (target)
9903 emit_jump (target);
9904 }
9905 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9906 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9907 /* Note swapping the labels gives us not-equal. */
9908 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9909 else if (GET_MODE (temp) != VOIDmode)
9910 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9911 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9912 GET_MODE (temp), NULL_RTX, 0,
9913 if_false_label, if_true_label);
9914 else
9915 abort ();
9916 }
9917
9918 if (drop_through_label)
9919 {
9920 /* If do_jump produces code that might be jumped around,
9921 do any stack adjusts from that code, before the place
9922 where control merges in. */
9923 do_pending_stack_adjust ();
9924 emit_label (drop_through_label);
9925 }
9926 }
9927 \f
9928 /* Given a comparison expression EXP for values too wide to be compared
9929 with one insn, test the comparison and jump to the appropriate label.
9930 The code of EXP is ignored; we always test GT if SWAP is 0,
9931 and LT if SWAP is 1. */
9932
9933 static void
9934 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9935 tree exp;
9936 int swap;
9937 rtx if_false_label, if_true_label;
9938 {
9939 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9940 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9941 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9942 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9943
9944 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9945 }
9946
9947 /* Compare OP0 with OP1, word at a time, in mode MODE.
9948 UNSIGNEDP says to do unsigned comparison.
9949 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9950
9951 void
9952 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9953 enum machine_mode mode;
9954 int unsignedp;
9955 rtx op0, op1;
9956 rtx if_false_label, if_true_label;
9957 {
9958 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9959 rtx drop_through_label = 0;
9960 int i;
9961
9962 if (! if_true_label || ! if_false_label)
9963 drop_through_label = gen_label_rtx ();
9964 if (! if_true_label)
9965 if_true_label = drop_through_label;
9966 if (! if_false_label)
9967 if_false_label = drop_through_label;
9968
9969 /* Compare a word at a time, high order first. */
9970 for (i = 0; i < nwords; i++)
9971 {
9972 rtx op0_word, op1_word;
9973
9974 if (WORDS_BIG_ENDIAN)
9975 {
9976 op0_word = operand_subword_force (op0, i, mode);
9977 op1_word = operand_subword_force (op1, i, mode);
9978 }
9979 else
9980 {
9981 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9982 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9983 }
9984
9985 /* All but the high-order word must be compared as unsigned.  */
9986 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9987 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9988 NULL_RTX, if_true_label);
9989
9990 /* Consider lower words only if these are equal. */
9991 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9992 NULL_RTX, 0, NULL_RTX, if_false_label);
9993 }
9994
9995 if (if_false_label)
9996 emit_jump (if_false_label);
9997 if (drop_through_label)
9998 emit_label (drop_through_label);
9999 }
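/* Illustrative example (editorial): a signed DImode `a > b' on a
   32-bit target comes out of the loop above roughly as

	if (a_hi > b_hi) goto if_true_label;	(signed compare)
	if (a_hi != b_hi) goto if_false_label;
	if (a_lo > b_lo) goto if_true_label;	(unsigned compare)
	goto if_false_label;

   since only the high-order word carries the sign.  */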
10000
10001 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10002 with one insn, test the comparison and jump to the appropriate label. */
10003
10004 static void
10005 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10006 tree exp;
10007 rtx if_false_label, if_true_label;
10008 {
10009 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10010 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10011 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10012 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10013 int i;
10014 rtx drop_through_label = 0;
10015
10016 if (! if_false_label)
10017 drop_through_label = if_false_label = gen_label_rtx ();
10018
10019 for (i = 0; i < nwords; i++)
10020 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10021 operand_subword_force (op1, i, mode),
10022 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10023 word_mode, NULL_RTX, 0, if_false_label,
10024 NULL_RTX);
10025
10026 if (if_true_label)
10027 emit_jump (if_true_label);
10028 if (drop_through_label)
10029 emit_label (drop_through_label);
10030 }
10031 \f
10032 /* Jump according to whether OP0 is 0.
10033 We assume that OP0 has an integer mode that is too wide
10034 for the available compare insns. */
10035
10036 void
10037 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10038 rtx op0;
10039 rtx if_false_label, if_true_label;
10040 {
10041 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10042 rtx part;
10043 int i;
10044 rtx drop_through_label = 0;
10045
10046 /* The fastest way of doing this comparison on almost any machine is to
10047 "or" all the words and compare the result. If all have to be loaded
10048 from memory and this is a very wide item, it's possible this may
10049 be slower, but that's highly unlikely. */
10050
10051 part = gen_reg_rtx (word_mode);
10052 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10053 for (i = 1; i < nwords && part != 0; i++)
10054 part = expand_binop (word_mode, ior_optab, part,
10055 operand_subword_force (op0, i, GET_MODE (op0)),
10056 part, 1, OPTAB_WIDEN);
10057
10058 if (part != 0)
10059 {
10060 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10061 NULL_RTX, 0, if_false_label, if_true_label);
10062
10063 return;
10064 }
10065
10066 /* If we couldn't do the "or" simply, do this with a series of compares. */
10067 if (! if_false_label)
10068 drop_through_label = if_false_label = gen_label_rtx ();
10069
10070 for (i = 0; i < nwords; i++)
10071 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10072 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10073 if_false_label, NULL_RTX);
10074
10075 if (if_true_label)
10076 emit_jump (if_true_label);
10077
10078 if (drop_through_label)
10079 emit_label (drop_through_label);
10080 }
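/* Illustrative example (editorial): to test a DImode value for zero on
   a 32-bit target, the code above computes

	part = lo | hi;

   and branches to IF_TRUE_LABEL if PART is zero (i.e. OP0 was zero)
   and to IF_FALSE_LABEL otherwise; it falls back to one compare per
   word only if the IOR cannot be expanded.  */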
10081 \f
10082 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10083 (including code to compute the values to be compared)
10084 and set (CC0) according to the result.
10085 The decision as to signed or unsigned comparison must be made by the caller.
10086
10087 We force a stack adjustment unless there are currently
10088 things pushed on the stack that aren't yet used.
10089
10090 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10091 compared.
10092
10093 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10094 size of MODE should be used. */
10095
10096 rtx
10097 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10098 register rtx op0, op1;
10099 enum rtx_code code;
10100 int unsignedp;
10101 enum machine_mode mode;
10102 rtx size;
10103 unsigned int align;
10104 {
10105 rtx tem;
10106
10107 /* If one operand is constant, make it the second one. Only do this
10108 if the other operand is not constant as well. */
10109
10110 if (swap_commutative_operands_p (op0, op1))
10111 {
10112 tem = op0;
10113 op0 = op1;
10114 op1 = tem;
10115 code = swap_condition (code);
10116 }
10117
10118 if (flag_force_mem)
10119 {
10120 op0 = force_not_mem (op0);
10121 op1 = force_not_mem (op1);
10122 }
10123
10124 do_pending_stack_adjust ();
10125
10126 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10127 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10128 return tem;
10129
10130 #if 0
10131 /* There's no need to do this now that combine.c can eliminate lots of
10132 sign extensions. This can be less efficient in certain cases on other
10133 machines. */
10134
10135 /* If this is a signed equality comparison, we can do it as an
10136 unsigned comparison since zero-extension is cheaper than sign
10137 extension and comparisons with zero are done as unsigned. This is
10138 the case even on machines that can do fast sign extension, since
10139 zero-extension is easier to combine with other operations than
10140 sign-extension is. If we are comparing against a constant, we must
10141 convert it to what it would look like unsigned. */
10142 if ((code == EQ || code == NE) && ! unsignedp
10143 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10144 {
10145 if (GET_CODE (op1) == CONST_INT
10146 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10147 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10148 unsignedp = 1;
10149 }
10150 #endif
10151
10152 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10153
10154 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10155 }
10156
10157 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10158 The decision as to signed or unsigned comparison must be made by the caller.
10159
10160 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10161 compared.
10162
10163 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10164 size of MODE should be used. */
10165
10166 void
10167 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10168 if_false_label, if_true_label)
10169 register rtx op0, op1;
10170 enum rtx_code code;
10171 int unsignedp;
10172 enum machine_mode mode;
10173 rtx size;
10174 unsigned int align;
10175 rtx if_false_label, if_true_label;
10176 {
10177 rtx tem;
10178 int dummy_true_label = 0;
10179
10180 /* Reverse the comparison if that is safe and we want to jump if it is
10181 false. */
10182 if (! if_true_label && ! FLOAT_MODE_P (mode))
10183 {
10184 if_true_label = if_false_label;
10185 if_false_label = 0;
10186 code = reverse_condition (code);
10187 }
10188
10189 /* If one operand is constant, make it the second one. Only do this
10190 if the other operand is not constant as well. */
10191
10192 if (swap_commutative_operands_p (op0, op1))
10193 {
10194 tem = op0;
10195 op0 = op1;
10196 op1 = tem;
10197 code = swap_condition (code);
10198 }
10199
10200 if (flag_force_mem)
10201 {
10202 op0 = force_not_mem (op0);
10203 op1 = force_not_mem (op1);
10204 }
10205
10206 do_pending_stack_adjust ();
10207
10208 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10209 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10210 {
10211 if (tem == const_true_rtx)
10212 {
10213 if (if_true_label)
10214 emit_jump (if_true_label);
10215 }
10216 else
10217 {
10218 if (if_false_label)
10219 emit_jump (if_false_label);
10220 }
10221 return;
10222 }
10223
10224 #if 0
10225 /* There's no need to do this now that combine.c can eliminate lots of
10226 sign extensions. This can be less efficient in certain cases on other
10227 machines. */
10228
10229 /* If this is a signed equality comparison, we can do it as an
10230 unsigned comparison since zero-extension is cheaper than sign
10231 extension and comparisons with zero are done as unsigned. This is
10232 the case even on machines that can do fast sign extension, since
10233 zero-extension is easier to combine with other operations than
10234 sign-extension is. If we are comparing against a constant, we must
10235 convert it to what it would look like unsigned. */
10236 if ((code == EQ || code == NE) && ! unsignedp
10237 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10238 {
10239 if (GET_CODE (op1) == CONST_INT
10240 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10241 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10242 unsignedp = 1;
10243 }
10244 #endif
10245
10246 if (! if_true_label)
10247 {
10248 dummy_true_label = 1;
10249 if_true_label = gen_label_rtx ();
10250 }
10251
10252 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10253 if_true_label);
10254
10255 if (if_false_label)
10256 emit_jump (if_false_label);
10257 if (dummy_true_label)
10258 emit_label (if_true_label);
10259 }
10260
10261 /* Generate code for a comparison expression EXP (including code to compute
10262 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10263 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10264 generated code will drop through.
10265 SIGNED_CODE should be the rtx operation for this comparison for
10266 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10267
10268 We force a stack adjustment unless there are currently
10269 things pushed on the stack that aren't yet used. */
10270
10271 static void
10272 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10273 if_true_label)
10274 register tree exp;
10275 enum rtx_code signed_code, unsigned_code;
10276 rtx if_false_label, if_true_label;
10277 {
10278 unsigned int align0, align1;
10279 register rtx op0, op1;
10280 register tree type;
10281 register enum machine_mode mode;
10282 int unsignedp;
10283 enum rtx_code code;
10284
10285 /* Don't crash if the comparison was erroneous. */
10286 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10287 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10288 return;
10289
10290 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10291 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10292 return;
10293
10294 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10295 mode = TYPE_MODE (type);
10296 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10297 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10298 || (GET_MODE_BITSIZE (mode)
10299 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10300 1)))))))
10301 {
10302 /* op0 might have been replaced by a promoted constant, in which
10303 case the type of the second argument should be used. */
10304 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10305 mode = TYPE_MODE (type);
10306 }
10307 unsignedp = TREE_UNSIGNED (type);
10308 code = unsignedp ? unsigned_code : signed_code;
10309
10310 #ifdef HAVE_canonicalize_funcptr_for_compare
10311 /* If function pointers need to be "canonicalized" before they can
10312 be reliably compared, then canonicalize them. */
10313 if (HAVE_canonicalize_funcptr_for_compare
10314 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10315 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10316 == FUNCTION_TYPE))
10317 {
10318 rtx new_op0 = gen_reg_rtx (mode);
10319
10320 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10321 op0 = new_op0;
10322 }
10323
10324 if (HAVE_canonicalize_funcptr_for_compare
10325 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10326 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10327 == FUNCTION_TYPE))
10328 {
10329 rtx new_op1 = gen_reg_rtx (mode);
10330
10331 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10332 op1 = new_op1;
10333 }
10334 #endif
10335
10336 /* Do any postincrements in the expression that was tested. */
10337 emit_queue ();
10338
10339 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10340 ((mode == BLKmode)
10341 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10342 MIN (align0, align1),
10343 if_false_label, if_true_label);
10344 }
10345 \f
10346 /* Generate code to calculate EXP using a store-flag instruction
10347 and return an rtx for the result. EXP is either a comparison
10348 or a TRUTH_NOT_EXPR whose operand is a comparison.
10349
10350 If TARGET is nonzero, store the result there if convenient.
10351
10352    If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10353 cheap.
10354
10355 Return zero if there is no suitable set-flag instruction
10356 available on this machine.
10357
10358 Once expand_expr has been called on the arguments of the comparison,
10359 we are committed to doing the store flag, since it is not safe to
10360 re-evaluate the expression. We emit the store-flag insn by calling
10361 emit_store_flag, but only expand the arguments if we have a reason
10362 to believe that emit_store_flag will be successful. If we think that
10363 it will, but it isn't, we have to simulate the store-flag with a
10364 set/jump/set sequence. */
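/* An illustrative example (insn names are target-dependent): for
   `r = (a == b);' on a machine with a set-if-equal (scc-style) insn,
   this emits a compare followed by a single store-flag into R, rather
   than a conditional branch around alternative stores of 0 and 1.  */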
10365
10366 static rtx
10367 do_store_flag (exp, target, mode, only_cheap)
10368 tree exp;
10369 rtx target;
10370 enum machine_mode mode;
10371 int only_cheap;
10372 {
10373 enum rtx_code code;
10374 tree arg0, arg1, type;
10375 tree tem;
10376 enum machine_mode operand_mode;
10377 int invert = 0;
10378 int unsignedp;
10379 rtx op0, op1;
10380 enum insn_code icode;
10381 rtx subtarget = target;
10382 rtx result, label;
10383
10384 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10385 result at the end. We can't simply invert the test since it would
10386 have already been inverted if it were valid. This case occurs for
10387 some floating-point comparisons. */
10388
10389 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10390 invert = 1, exp = TREE_OPERAND (exp, 0);
10391
10392 arg0 = TREE_OPERAND (exp, 0);
10393 arg1 = TREE_OPERAND (exp, 1);
10394
10395 /* Don't crash if the comparison was erroneous. */
10396 if (arg0 == error_mark_node || arg1 == error_mark_node)
10397 return const0_rtx;
10398
10399 type = TREE_TYPE (arg0);
10400 operand_mode = TYPE_MODE (type);
10401 unsignedp = TREE_UNSIGNED (type);
10402
10403 /* We won't bother with BLKmode store-flag operations because it would mean
10404 passing a lot of information to emit_store_flag. */
10405 if (operand_mode == BLKmode)
10406 return 0;
10407
10408 /* We won't bother with store-flag operations involving function pointers
10409 when function pointers must be canonicalized before comparisons. */
10410 #ifdef HAVE_canonicalize_funcptr_for_compare
10411 if (HAVE_canonicalize_funcptr_for_compare
10412 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10413 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10414 == FUNCTION_TYPE))
10415 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10416 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10417 == FUNCTION_TYPE))))
10418 return 0;
10419 #endif
10420
10421 STRIP_NOPS (arg0);
10422 STRIP_NOPS (arg1);
10423
10424 /* Get the rtx comparison code to use. We know that EXP is a comparison
10425 operation of some type. Some comparisons against 1 and -1 can be
10426 converted to comparisons with zero. Do so here so that the tests
10427 below will be aware that we have a comparison with zero. These
10428 tests will not catch constants in the first operand, but constants
10429 are rarely passed as the first operand. */
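  /* For example: `x < 1' becomes `x <= 0'; for signed X, `x <= -1'
     becomes `x < 0' and `x > -1' becomes `x >= 0'; and `x >= 1'
     becomes `x > 0'.  */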
10430
10431 switch (TREE_CODE (exp))
10432 {
10433 case EQ_EXPR:
10434 code = EQ;
10435 break;
10436 case NE_EXPR:
10437 code = NE;
10438 break;
10439 case LT_EXPR:
10440 if (integer_onep (arg1))
10441 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10442 else
10443 code = unsignedp ? LTU : LT;
10444 break;
10445 case LE_EXPR:
10446 if (! unsignedp && integer_all_onesp (arg1))
10447 arg1 = integer_zero_node, code = LT;
10448 else
10449 code = unsignedp ? LEU : LE;
10450 break;
10451 case GT_EXPR:
10452 if (! unsignedp && integer_all_onesp (arg1))
10453 arg1 = integer_zero_node, code = GE;
10454 else
10455 code = unsignedp ? GTU : GT;
10456 break;
10457 case GE_EXPR:
10458 if (integer_onep (arg1))
10459 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10460 else
10461 code = unsignedp ? GEU : GE;
10462 break;
10463
10464 case UNORDERED_EXPR:
10465 code = UNORDERED;
10466 break;
10467 case ORDERED_EXPR:
10468 code = ORDERED;
10469 break;
10470 case UNLT_EXPR:
10471 code = UNLT;
10472 break;
10473 case UNLE_EXPR:
10474 code = UNLE;
10475 break;
10476 case UNGT_EXPR:
10477 code = UNGT;
10478 break;
10479 case UNGE_EXPR:
10480 code = UNGE;
10481 break;
10482 case UNEQ_EXPR:
10483 code = UNEQ;
10484 break;
10485
10486 default:
10487 abort ();
10488 }
10489
10490 /* Put a constant second. */
10491 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10492 {
10493 tem = arg0; arg0 = arg1; arg1 = tem;
10494 code = swap_condition (code);
10495 }
10496
10497 /* If this is an equality or inequality test of a single bit, we can
10498 do this by shifting the bit being tested to the low-order bit and
10499 masking the result with the constant 1. If the condition was EQ,
10500 we xor it with 1. This does not require an scc insn and is faster
10501 than an scc insn even if we have it. */
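  /* For example: `(x & 8) != 0' is computed as `(x >> 3) & 1', and
     `(x & 8) == 0' as `((x >> 3) & 1) ^ 1' (the xor and the mask may
     be emitted in either order).  */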
10502
10503 if ((code == NE || code == EQ)
10504 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10505 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10506 {
10507 tree inner = TREE_OPERAND (arg0, 0);
10508 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10509 int ops_unsignedp;
10510
10511       /* If INNER is a right shift by a constant and that shift count plus
10512 	 BITNUM does not overflow the precision of TYPE, adjust BITNUM and INNER. */
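      /* For instance, testing bit 0 of `x >> 5' is the same as testing
	 bit 5 of `x', so BITNUM becomes 5 and INNER becomes `x'.  */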
10513
10514 if (TREE_CODE (inner) == RSHIFT_EXPR
10515 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10516 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10517 && bitnum < TYPE_PRECISION (type)
10518 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10519 				   TYPE_PRECISION (type) - bitnum))
10520 {
10521 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10522 inner = TREE_OPERAND (inner, 0);
10523 }
10524
10525 /* If we are going to be able to omit the AND below, we must do our
10526 operations as unsigned. If we must use the AND, we have a choice.
10527 Normally unsigned is faster, but for some machines signed is. */
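      /* E.g. when BITNUM is the sign bit, the shift below must be a
	 logical (unsigned) one so the result is 0 or 1 and the final AND
	 can be omitted; an arithmetic shift would yield 0 or -1.  */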
10528 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10529 #ifdef LOAD_EXTEND_OP
10530 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10531 #else
10532 : 1
10533 #endif
10534 );
10535
10536 if (! get_subtarget (subtarget)
10537 || GET_MODE (subtarget) != operand_mode
10538 || ! safe_from_p (subtarget, inner, 1))
10539 subtarget = 0;
10540
10541 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10542
10543 if (bitnum != 0)
10544 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10545 size_int (bitnum), subtarget, ops_unsignedp);
10546
10547 if (GET_MODE (op0) != mode)
10548 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10549
10550 if ((code == EQ && ! invert) || (code == NE && invert))
10551 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10552 ops_unsignedp, OPTAB_LIB_WIDEN);
10553
10554 /* Put the AND last so it can combine with more things. */
10555 if (bitnum != TYPE_PRECISION (type) - 1)
10556 op0 = expand_and (op0, const1_rtx, subtarget);
10557
10558 return op0;
10559 }
10560
10561 /* Now see if we are likely to be able to do this. Return if not. */
10562 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10563 return 0;
10564
10565 icode = setcc_gen_code[(int) code];
10566 if (icode == CODE_FOR_nothing
10567 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10568 {
10569 /* We can only do this if it is one of the special cases that
10570 can be handled without an scc insn. */
10571 if ((code == LT && integer_zerop (arg1))
10572 || (! only_cheap && code == GE && integer_zerop (arg1)))
10573 ;
10574 else if (BRANCH_COST >= 0
10575 && ! only_cheap && (code == NE || code == EQ)
10576 && TREE_CODE (type) != REAL_TYPE
10577 && ((abs_optab->handlers[(int) operand_mode].insn_code
10578 != CODE_FOR_nothing)
10579 || (ffs_optab->handlers[(int) operand_mode].insn_code
10580 != CODE_FOR_nothing)))
10581 ;
10582 else
10583 return 0;
10584 }
10585
10586 if (! get_subtarget (target)
10587 || GET_MODE (subtarget) != operand_mode
10588 || ! safe_from_p (subtarget, arg1, 1))
10589 subtarget = 0;
10590
10591 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10592 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10593
10594 if (target == 0)
10595 target = gen_reg_rtx (mode);
10596
10597 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10598      because, if emit_store_flag does anything, it will succeed and
10599 OP0 and OP1 will not be used subsequently. */
10600
10601 result = emit_store_flag (target, code,
10602 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10603 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10604 operand_mode, unsignedp, 1);
10605
10606 if (result)
10607 {
10608 if (invert)
10609 result = expand_binop (mode, xor_optab, result, const1_rtx,
10610 result, 0, OPTAB_LIB_WIDEN);
10611 return result;
10612 }
10613
10614 /* If this failed, we have to do this with set/compare/jump/set code. */
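  /* For a non-inverted test the emitted sequence is, schematically:
	target = 1;  if (op0 <code> op1) goto label;  target = 0;  label:
     (with the two constants swapped when INVERT is set).  */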
10615 if (GET_CODE (target) != REG
10616 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10617 target = gen_reg_rtx (GET_MODE (target));
10618
10619 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10620 result = compare_from_rtx (op0, op1, code, unsignedp,
10621 operand_mode, NULL_RTX, 0);
10622 if (GET_CODE (result) == CONST_INT)
10623 return (((result == const0_rtx && ! invert)
10624 || (result != const0_rtx && invert))
10625 ? const0_rtx : const1_rtx);
10626
10627 label = gen_label_rtx ();
10628 if (bcc_gen_fctn[(int) code] == 0)
10629 abort ();
10630
10631 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10632 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10633 emit_label (label);
10634
10635 return target;
10636 }
10637 \f
10638
10639 /* Stubs in case we haven't got a casesi insn. */
10640 #ifndef HAVE_casesi
10641 # define HAVE_casesi 0
10642 # define gen_casesi(a, b, c, d, e) (0)
10643 # define CODE_FOR_casesi CODE_FOR_nothing
10644 #endif
10645
10646 /* If the machine does not have a case insn that compares the bounds,
10647 this means extra overhead for dispatch tables, which raises the
10648 threshold for using them. */
10649 #ifndef CASE_VALUES_THRESHOLD
10650 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10651 #endif /* CASE_VALUES_THRESHOLD */
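/* So, roughly speaking, with the defaults above a switch with only a
   handful of case labels (fewer than four, or five without casesi) is
   expanded as a tree of compares and branches rather than as a
   dispatch table.  */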
10652
10653 unsigned int
10654 case_values_threshold ()
10655 {
10656 return CASE_VALUES_THRESHOLD;
10657 }
10658
10659 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10660 0 otherwise (i.e. if there is no casesi instruction). */
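/* (Sketch of the operands involved: a casesi pattern takes the index,
   the lower bound MINVAL, the RANGE (upper bound minus lower bound),
   the jump-table label and the default label; the code below coerces
   each operand into the mode and predicate its operand slot expects.)  */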
10661 int
10662 try_casesi (index_type, index_expr, minval, range,
10663 table_label, default_label)
10664 tree index_type, index_expr, minval, range;
10665 rtx table_label ATTRIBUTE_UNUSED;
10666 rtx default_label;
10667 {
10668 enum machine_mode index_mode = SImode;
10669 int index_bits = GET_MODE_BITSIZE (index_mode);
10670 rtx op1, op2, index;
10671 enum machine_mode op_mode;
10672
10673 if (! HAVE_casesi)
10674 return 0;
10675
10676 /* Convert the index to SImode. */
10677 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10678 {
10679 enum machine_mode omode = TYPE_MODE (index_type);
10680 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10681
10682 /* We must handle the endpoints in the original mode. */
10683 index_expr = build (MINUS_EXPR, index_type,
10684 index_expr, minval);
10685 minval = integer_zero_node;
10686 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10687 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10688 omode, 1, 0, default_label);
10689 /* Now we can safely truncate. */
10690 index = convert_to_mode (index_mode, index, 0);
10691 }
10692 else
10693 {
10694 if (TYPE_MODE (index_type) != index_mode)
10695 {
10696 index_expr = convert (type_for_size (index_bits, 0),
10697 index_expr);
10698 index_type = TREE_TYPE (index_expr);
10699 }
10700
10701 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10702 }
10703 emit_queue ();
10704 index = protect_from_queue (index, 0);
10705 do_pending_stack_adjust ();
10706
10707 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10708 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10709 (index, op_mode))
10710 index = copy_to_mode_reg (op_mode, index);
10711
10712 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10713
10714 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10715 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10716 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10717 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10718 (op1, op_mode))
10719 op1 = copy_to_mode_reg (op_mode, op1);
10720
10721 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10722
10723 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10724 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10725 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10726 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10727 (op2, op_mode))
10728 op2 = copy_to_mode_reg (op_mode, op2);
10729
10730 emit_jump_insn (gen_casesi (index, op1, op2,
10731 table_label, default_label));
10732 return 1;
10733 }
10734
10735 /* Attempt to generate a tablejump instruction; same concept as try_casesi above. */
10736 #ifndef HAVE_tablejump
10737 #define HAVE_tablejump 0
10738 #define gen_tablejump(x, y) (0)
10739 #endif
10740
10741 /* Subroutine of the next function.
10742
10743 INDEX is the value being switched on, with the lowest value
10744 in the table already subtracted.
10745 MODE is its expected mode (needed if INDEX is constant).
10746 RANGE is the length of the jump table.
10747 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10748
10749 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10750 index value is out of range. */
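/* Schematically (for the non-PIC, absolute-address case) the generated
   code is:

	if ((unsigned) INDEX > RANGE)
	  goto DEFAULT_LABEL;
	pc = *(TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE));

   where the final load fetches the jump target (or, for pc-relative
   vectors, an offset) from the dispatch table.  */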
10751
10752 static void
10753 do_tablejump (index, mode, range, table_label, default_label)
10754 rtx index, range, table_label, default_label;
10755 enum machine_mode mode;
10756 {
10757 register rtx temp, vector;
10758
10759 /* Do an unsigned comparison (in the proper mode) between the index
10760 expression and the value which represents the length of the range.
10761 Since we just finished subtracting the lower bound of the range
10762 from the index expression, this comparison allows us to simultaneously
10763 check that the original index expression value is both greater than
10764 or equal to the minimum value of the range and less than or equal to
10765 the maximum value of the range. */
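  /* For example, for case values 3 through 7 the caller passes
     INDEX - 3 and RANGE = 4; the single unsigned test
     `(unsigned) (INDEX - 3) > 4' rejects both INDEX < 3 (which wraps
     to a large unsigned value) and INDEX > 7.  */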
10766
10767 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10768 0, default_label);
10769
10770 /* If index is in range, it must fit in Pmode.
10771 Convert to Pmode so we can index with it. */
10772 if (mode != Pmode)
10773 index = convert_to_mode (Pmode, index, 1);
10774
10775   /* Don't let a MEM slip through, because then the INDEX that comes
10776 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10777 and break_out_memory_refs will go to work on it and mess it up. */
10778 #ifdef PIC_CASE_VECTOR_ADDRESS
10779 if (flag_pic && GET_CODE (index) != REG)
10780 index = copy_to_mode_reg (Pmode, index);
10781 #endif
10782
10783 /* If flag_force_addr were to affect this address
10784 it could interfere with the tricky assumptions made
10785 about addresses that contain label-refs,
10786 which may be valid only very near the tablejump itself. */
10787 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10788 GET_MODE_SIZE, because this indicates how large insns are. The other
10789 uses should all be Pmode, because they are addresses. This code
10790 could fail if addresses and insns are not the same size. */
10791 index = gen_rtx_PLUS (Pmode,
10792 gen_rtx_MULT (Pmode, index,
10793 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10794 gen_rtx_LABEL_REF (Pmode, table_label));
10795 #ifdef PIC_CASE_VECTOR_ADDRESS
10796 if (flag_pic)
10797 index = PIC_CASE_VECTOR_ADDRESS (index);
10798 else
10799 #endif
10800 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10801 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10802 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10803 RTX_UNCHANGING_P (vector) = 1;
10804 convert_move (temp, vector, 0);
10805
10806 emit_jump_insn (gen_tablejump (temp, table_label));
10807
10808 /* If we are generating PIC code or if the table is PC-relative, the
10809 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10810 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10811 emit_barrier ();
10812 }
10813
10814 int
10815 try_tablejump (index_type, index_expr, minval, range,
10816 table_label, default_label)
10817 tree index_type, index_expr, minval, range;
10818 rtx table_label, default_label;
10819 {
10820 rtx index;
10821
10822 if (! HAVE_tablejump)
10823 return 0;
10824
10825 index_expr = fold (build (MINUS_EXPR, index_type,
10826 convert (index_type, index_expr),
10827 convert (index_type, minval)));
10828 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10829 emit_queue ();
10830 index = protect_from_queue (index, 0);
10831 do_pending_stack_adjust ();
10832
10833 do_tablejump (index, TYPE_MODE (index_type),
10834 convert_modes (TYPE_MODE (index_type),
10835 TYPE_MODE (TREE_TYPE (range)),
10836 expand_expr (range, NULL_RTX,
10837 VOIDmode, 0),
10838 TREE_UNSIGNED (TREE_TYPE (range))),
10839 table_label, default_label);
10840 return 1;
10841 }