1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "intl.h"
45 #include "tm_p.h"
46
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
49
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
52
53 #ifdef PUSH_ROUNDING
54
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first. */
57 #endif
58
59 #endif
60
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
68
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
73
74 /* Hook called by safe_from_p for language-specific tree codes. It is
75 up to the language front-end to install a hook if it has any such
76 codes that safe_from_p needs to know about. Since safe_from_p will
77 recursively explore the TREE_OPERANDs of an expression, this hook
78 should not reexamine those pieces. This routine may recursively
79 call safe_from_p; it should always pass `0' as the TOP_P
80 parameter. */
81 int (*lang_safe_from_p) PARAMS ((rtx, tree));
82
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
90
91 /* Don't check memory usage, since code is being emitted to check memory
92 usage. Used when current_function_check_memory_usage is true, to avoid
93 infinite recursion. */
94 static int in_check_memory_usage;
95
96 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
97 static tree placeholder_list = 0;
98
99 /* This structure is used by move_by_pieces to describe the move to
100 be performed. */
101 struct move_by_pieces
102 {
103 rtx to;
104 rtx to_addr;
105 int autinc_to;
106 int explicit_inc_to;
107 rtx from;
108 rtx from_addr;
109 int autinc_from;
110 int explicit_inc_from;
111 unsigned HOST_WIDE_INT len;
112 HOST_WIDE_INT offset;
113 int reverse;
114 };
115
116 /* This structure is used by store_by_pieces to describe the clear to
117 be performed. */
118
119 struct store_by_pieces
120 {
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
128 PTR constfundata;
129 int reverse;
130 };
131
132 extern struct obstack permanent_obstack;
133
134 static rtx get_push_address PARAMS ((int));
135
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
137 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
138 PARAMS ((unsigned HOST_WIDE_INT,
139 unsigned int));
140 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
141 struct move_by_pieces *));
142 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
143 enum machine_mode));
144 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
145 unsigned int));
146 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
147 unsigned int));
148 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
149 enum machine_mode,
150 struct store_by_pieces *));
151 static rtx get_subtarget PARAMS ((rtx));
152 static int is_zeros_p PARAMS ((tree));
153 static int mostly_zeros_p PARAMS ((tree));
154 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, unsigned int, int,
157 int));
158 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
159 HOST_WIDE_INT));
160 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, enum machine_mode, int,
163 unsigned int, HOST_WIDE_INT, int));
164 static enum memory_use_mode
165 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
166 static rtx var_rtx PARAMS ((tree));
167 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 #ifdef PUSH_ROUNDING
176 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 #endif
178 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
179
180 /* Record for each mode whether we can move a register directly to or
181 from an object of that mode in memory. If we can't, we won't try
182 to use that mode directly when accessing a field of that mode. */
183
184 static char direct_load[NUM_MACHINE_MODES];
185 static char direct_store[NUM_MACHINE_MODES];
186
187 /* If a memory-to-memory move would take MOVE_RATIO or more simple
188 move-instruction sequences, we will do a movstr or libcall instead. */
189
190 #ifndef MOVE_RATIO
191 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
192 #define MOVE_RATIO 2
193 #else
194 /* If we are optimizing for space (-Os), cut down the default move ratio. */
195 #define MOVE_RATIO (optimize_size ? 3 : 15)
196 #endif
197 #endif
198
199 /* This macro is used to determine whether move_by_pieces should be called
200 to perform a structure copy. */
201 #ifndef MOVE_BY_PIECES_P
202 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
203 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
204 #endif
205
206 /* This array records the insn_code of insns to perform block moves. */
207 enum insn_code movstr_optab[NUM_MACHINE_MODES];
208
209 /* This array records the insn_code of insns to perform block clears. */
210 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
211
212 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
213
214 #ifndef SLOW_UNALIGNED_ACCESS
215 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
216 #endif
217 \f
218 /* This is run once per compilation to set up which modes can be used
219 directly in memory and to initialize the block move optab. */
220
221 void
222 init_expr_once ()
223 {
224 rtx insn, pat;
225 enum machine_mode mode;
226 int num_clobbers;
227 rtx mem, mem1;
228
229 start_sequence ();
230
231 /* Try indexing by frame ptr and try by stack ptr.
232 It is known that on the Convex the stack ptr isn't a valid index.
233 With luck, one or the other is valid on any machine. */
234 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
235 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
236
237 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
238 pat = PATTERN (insn);
239
240 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
241 mode = (enum machine_mode) ((int) mode + 1))
242 {
243 int regno;
244 rtx reg;
245
246 direct_load[(int) mode] = direct_store[(int) mode] = 0;
247 PUT_MODE (mem, mode);
248 PUT_MODE (mem1, mode);
249
250 /* See if there is some register that can be used in this mode and
251 directly loaded or stored from memory. */
252
253 if (mode != VOIDmode && mode != BLKmode)
254 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
255 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
256 regno++)
257 {
258 if (! HARD_REGNO_MODE_OK (regno, mode))
259 continue;
260
261 reg = gen_rtx_REG (mode, regno);
262
263 SET_SRC (pat) = mem;
264 SET_DEST (pat) = reg;
265 if (recog (pat, insn, &num_clobbers) >= 0)
266 direct_load[(int) mode] = 1;
267
268 SET_SRC (pat) = mem1;
269 SET_DEST (pat) = reg;
270 if (recog (pat, insn, &num_clobbers) >= 0)
271 direct_load[(int) mode] = 1;
272
273 SET_SRC (pat) = reg;
274 SET_DEST (pat) = mem;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_store[(int) mode] = 1;
277
278 SET_SRC (pat) = reg;
279 SET_DEST (pat) = mem1;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_store[(int) mode] = 1;
282 }
283 }
284
285 end_sequence ();
286 }
287
288 /* This is run at the start of compiling a function. */
289
290 void
291 init_expr ()
292 {
293 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
294
295 pending_chain = 0;
296 pending_stack_adjust = 0;
297 stack_pointer_delta = 0;
298 inhibit_defer_pop = 0;
299 saveregs_value = 0;
300 apply_args_value = 0;
301 forced_labels = 0;
302 }
303
304 void
305 mark_expr_status (p)
306 struct expr_status *p;
307 {
308 if (p == NULL)
309 return;
310
311 ggc_mark_rtx (p->x_saveregs_value);
312 ggc_mark_rtx (p->x_apply_args_value);
313 ggc_mark_rtx (p->x_forced_labels);
314 }
315
316 void
317 free_expr_status (f)
318 struct function *f;
319 {
320 free (f->expr);
321 f->expr = NULL;
322 }
323
324 /* Small sanity check that the queue is empty at the end of a function. */
325
326 void
327 finish_expr_for_function ()
328 {
329 if (pending_chain)
330 abort ();
331 }
332 \f
333 /* Manage the queue of increment instructions to be output
334 for POSTINCREMENT_EXPR expressions, etc. */
335
336 /* Queue up to increment (or change) VAR later. BODY says how:
337 BODY should be the same thing you would pass to emit_insn
338 to increment right away. It will go to emit_insn later on.
339
340 The value is a QUEUED expression to be used in place of VAR
341 where you want to guarantee the pre-incrementation value of VAR. */
342
343 static rtx
344 enqueue_insn (var, body)
345 rtx var, body;
346 {
347 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
348 body, pending_chain);
349 return pending_chain;
350 }
351
352 /* Use protect_from_queue to convert a QUEUED expression
353 into something that you can put immediately into an instruction.
354 If the queued incrementation has not happened yet,
355 protect_from_queue returns the variable itself.
356 If the incrementation has happened, protect_from_queue returns a temp
357 that contains a copy of the old value of the variable.
358
359 Any time an rtx which might possibly be a QUEUED is to be put
360 into an instruction, it must be passed through protect_from_queue first.
361 QUEUED expressions are not meaningful in instructions.
362
363 Do not pass a value through protect_from_queue and then hold
364 on to it for a while before putting it in an instruction!
365 If the queue is flushed in between, incorrect code will result. */
366
367 rtx
368 protect_from_queue (x, modify)
369 rtx x;
370 int modify;
371 {
372 RTX_CODE code = GET_CODE (x);
373
374 #if 0 /* A QUEUED can hang around after the queue is forced out. */
375 /* Shortcut for most common case. */
376 if (pending_chain == 0)
377 return x;
378 #endif
379
380 if (code != QUEUED)
381 {
382 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
383 use of autoincrement. Make a copy of the contents of the memory
384 location rather than a copy of the address, but not if the value is
385 of mode BLKmode. Don't modify X in place since it might be
386 shared. */
387 if (code == MEM && GET_MODE (x) != BLKmode
388 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
389 {
390 rtx y = XEXP (x, 0);
391 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
392
393 if (QUEUED_INSN (y))
394 {
395 rtx temp = gen_reg_rtx (GET_MODE (x));
396
397 emit_insn_before (gen_move_insn (temp, new),
398 QUEUED_INSN (y));
399 return temp;
400 }
401
402 /* Copy the address into a pseudo, so that the returned value
403 remains correct across calls to emit_queue. */
404 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
405 }
406
407 /* Otherwise, recursively protect the subexpressions of all
408 the kinds of rtx's that can contain a QUEUED. */
409 if (code == MEM)
410 {
411 rtx tem = protect_from_queue (XEXP (x, 0), 0);
412 if (tem != XEXP (x, 0))
413 {
414 x = copy_rtx (x);
415 XEXP (x, 0) = tem;
416 }
417 }
418 else if (code == PLUS || code == MULT)
419 {
420 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
421 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
422 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
423 {
424 x = copy_rtx (x);
425 XEXP (x, 0) = new0;
426 XEXP (x, 1) = new1;
427 }
428 }
429 return x;
430 }
431 /* If the increment has not happened, use the variable itself. Copy it
432 into a new pseudo so that the value remains correct across calls to
433 emit_queue. */
434 if (QUEUED_INSN (x) == 0)
435 return copy_to_reg (QUEUED_VAR (x));
436 /* If the increment has happened and a pre-increment copy exists,
437 use that copy. */
438 if (QUEUED_COPY (x) != 0)
439 return QUEUED_COPY (x);
440 /* The increment has happened but we haven't set up a pre-increment copy.
441 Set one up now, and use it. */
442 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
443 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
444 QUEUED_INSN (x));
445 return QUEUED_COPY (x);
446 }
447
448 /* Return nonzero if X contains a QUEUED expression:
449 if it contains anything that will be altered by a queued increment.
450 We handle only combinations of MEM, PLUS, MINUS and MULT operators
451 since memory addresses generally contain only those. */
452
453 int
454 queued_subexp_p (x)
455 rtx x;
456 {
457 enum rtx_code code = GET_CODE (x);
458 switch (code)
459 {
460 case QUEUED:
461 return 1;
462 case MEM:
463 return queued_subexp_p (XEXP (x, 0));
464 case MULT:
465 case PLUS:
466 case MINUS:
467 return (queued_subexp_p (XEXP (x, 0))
468 || queued_subexp_p (XEXP (x, 1)));
469 default:
470 return 0;
471 }
472 }
473
474 /* Perform all the pending incrementations. */
475
476 void
477 emit_queue ()
478 {
479 rtx p;
480 while ((p = pending_chain))
481 {
482 rtx body = QUEUED_BODY (p);
483
484 if (GET_CODE (body) == SEQUENCE)
485 {
486 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
487 emit_insn (QUEUED_BODY (p));
488 }
489 else
490 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
491 pending_chain = QUEUED_NEXT (p);
492 }
493 }
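/* Illustrative sketch, not part of the original file: a hypothetical helper
   showing the usual pattern for the queue machinery above.  An rtx that may
   contain a QUEUED expression is passed through protect_from_queue before it
   is put in an insn, and emit_queue is called once the pre-increment value
   has been captured.  */

static rtx example_copy_preincrement_value PARAMS ((rtx));

static rtx
example_copy_preincrement_value (var)
     rtx var;
{
  /* Get something safe to put in an insn: VAR itself if its increment is
     still pending, or a copy of its old value otherwise.  */
  rtx safe = protect_from_queue (var, 0);
  rtx temp = gen_reg_rtx (GET_MODE (safe));

  emit_move_insn (temp, safe);

  /* Flush any pending increments now that the old value has been copied.  */
  emit_queue ();
  return temp;
}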
494 \f
495 /* Copy data from FROM to TO, where the machine modes are not the same.
496 Both modes may be integer, or both may be floating.
497 UNSIGNEDP should be nonzero if FROM is an unsigned type.
498 This causes zero-extension instead of sign-extension. */
499
500 void
501 convert_move (to, from, unsignedp)
502 rtx to, from;
503 int unsignedp;
504 {
505 enum machine_mode to_mode = GET_MODE (to);
506 enum machine_mode from_mode = GET_MODE (from);
507 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
508 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
509 enum insn_code code;
510 rtx libcall;
511
512 /* rtx code for making an equivalent value. */
513 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
514
515 to = protect_from_queue (to, 1);
516 from = protect_from_queue (from, 0);
517
518 if (to_real != from_real)
519 abort ();
520
521 /* If FROM is a SUBREG that indicates that we have already done at least
522 the required extension, strip it. We don't handle such SUBREGs as
523 TO here. */
524
525 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
526 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
527 >= GET_MODE_SIZE (to_mode))
528 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
529 from = gen_lowpart (to_mode, from), from_mode = to_mode;
530
531 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
532 abort ();
533
534 if (to_mode == from_mode
535 || (from_mode == VOIDmode && CONSTANT_P (from)))
536 {
537 emit_move_insn (to, from);
538 return;
539 }
540
541 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
542 {
543 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
544 abort ();
545
546 if (VECTOR_MODE_P (to_mode))
547 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
548 else
549 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
550
551 emit_move_insn (to, from);
552 return;
553 }
554
555 if (to_real != from_real)
556 abort ();
557
558 if (to_real)
559 {
560 rtx value, insns;
561
562 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
563 {
564 /* Try converting directly if the insn is supported. */
565 if ((code = can_extend_p (to_mode, from_mode, 0))
566 != CODE_FOR_nothing)
567 {
568 emit_unop_insn (code, to, from, UNKNOWN);
569 return;
570 }
571 }
572
573 #ifdef HAVE_trunchfqf2
574 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
575 {
576 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
577 return;
578 }
579 #endif
580 #ifdef HAVE_trunctqfqf2
581 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
582 {
583 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
584 return;
585 }
586 #endif
587 #ifdef HAVE_truncsfqf2
588 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
589 {
590 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
591 return;
592 }
593 #endif
594 #ifdef HAVE_truncdfqf2
595 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
596 {
597 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
598 return;
599 }
600 #endif
601 #ifdef HAVE_truncxfqf2
602 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
603 {
604 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
605 return;
606 }
607 #endif
608 #ifdef HAVE_trunctfqf2
609 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
610 {
611 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615
616 #ifdef HAVE_trunctqfhf2
617 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
618 {
619 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
620 return;
621 }
622 #endif
623 #ifdef HAVE_truncsfhf2
624 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
625 {
626 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
627 return;
628 }
629 #endif
630 #ifdef HAVE_truncdfhf2
631 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
632 {
633 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
634 return;
635 }
636 #endif
637 #ifdef HAVE_truncxfhf2
638 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
639 {
640 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
641 return;
642 }
643 #endif
644 #ifdef HAVE_trunctfhf2
645 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
646 {
647 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651
652 #ifdef HAVE_truncsftqf2
653 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
654 {
655 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
656 return;
657 }
658 #endif
659 #ifdef HAVE_truncdftqf2
660 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
661 {
662 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
663 return;
664 }
665 #endif
666 #ifdef HAVE_truncxftqf2
667 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
668 {
669 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
670 return;
671 }
672 #endif
673 #ifdef HAVE_trunctftqf2
674 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
675 {
676 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680
681 #ifdef HAVE_truncdfsf2
682 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
683 {
684 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
685 return;
686 }
687 #endif
688 #ifdef HAVE_truncxfsf2
689 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
690 {
691 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
692 return;
693 }
694 #endif
695 #ifdef HAVE_trunctfsf2
696 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
697 {
698 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
699 return;
700 }
701 #endif
702 #ifdef HAVE_truncxfdf2
703 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
704 {
705 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
706 return;
707 }
708 #endif
709 #ifdef HAVE_trunctfdf2
710 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
711 {
712 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
713 return;
714 }
715 #endif
716
717 libcall = (rtx) 0;
718 switch (from_mode)
719 {
720 case SFmode:
721 switch (to_mode)
722 {
723 case DFmode:
724 libcall = extendsfdf2_libfunc;
725 break;
726
727 case XFmode:
728 libcall = extendsfxf2_libfunc;
729 break;
730
731 case TFmode:
732 libcall = extendsftf2_libfunc;
733 break;
734
735 default:
736 break;
737 }
738 break;
739
740 case DFmode:
741 switch (to_mode)
742 {
743 case SFmode:
744 libcall = truncdfsf2_libfunc;
745 break;
746
747 case XFmode:
748 libcall = extenddfxf2_libfunc;
749 break;
750
751 case TFmode:
752 libcall = extenddftf2_libfunc;
753 break;
754
755 default:
756 break;
757 }
758 break;
759
760 case XFmode:
761 switch (to_mode)
762 {
763 case SFmode:
764 libcall = truncxfsf2_libfunc;
765 break;
766
767 case DFmode:
768 libcall = truncxfdf2_libfunc;
769 break;
770
771 default:
772 break;
773 }
774 break;
775
776 case TFmode:
777 switch (to_mode)
778 {
779 case SFmode:
780 libcall = trunctfsf2_libfunc;
781 break;
782
783 case DFmode:
784 libcall = trunctfdf2_libfunc;
785 break;
786
787 default:
788 break;
789 }
790 break;
791
792 default:
793 break;
794 }
795
796 if (libcall == (rtx) 0)
797 /* This conversion is not implemented yet. */
798 abort ();
799
800 start_sequence ();
801 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
802 1, from, from_mode);
803 insns = get_insns ();
804 end_sequence ();
805 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
806 from));
807 return;
808 }
809
810 /* Now both modes are integers. */
811
812 /* Handle expanding beyond a word. */
813 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
814 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
815 {
816 rtx insns;
817 rtx lowpart;
818 rtx fill_value;
819 rtx lowfrom;
820 int i;
821 enum machine_mode lowpart_mode;
822 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
823
824 /* Try converting directly if the insn is supported. */
825 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
826 != CODE_FOR_nothing)
827 {
828 /* If FROM is a SUBREG, put it into a register. Do this
829 so that we always generate the same set of insns for
830 better cse'ing; if an intermediate assignment occurred,
831 we won't be doing the operation directly on the SUBREG. */
832 if (optimize > 0 && GET_CODE (from) == SUBREG)
833 from = force_reg (from_mode, from);
834 emit_unop_insn (code, to, from, equiv_code);
835 return;
836 }
837 /* Next, try converting via full word. */
838 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
839 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
840 != CODE_FOR_nothing))
841 {
842 if (GET_CODE (to) == REG)
843 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
844 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
845 emit_unop_insn (code, to,
846 gen_lowpart (word_mode, to), equiv_code);
847 return;
848 }
849
850 /* No special multiword conversion insn; do it by hand. */
851 start_sequence ();
852
853 /* Since we will turn this into a no conflict block, we must ensure
854 that the source does not overlap the target. */
855
856 if (reg_overlap_mentioned_p (to, from))
857 from = force_reg (from_mode, from);
858
859 /* Get a copy of FROM widened to a word, if necessary. */
860 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
861 lowpart_mode = word_mode;
862 else
863 lowpart_mode = from_mode;
864
865 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
866
867 lowpart = gen_lowpart (lowpart_mode, to);
868 emit_move_insn (lowpart, lowfrom);
869
870 /* Compute the value to put in each remaining word. */
871 if (unsignedp)
872 fill_value = const0_rtx;
873 else
874 {
875 #ifdef HAVE_slt
876 if (HAVE_slt
877 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
878 && STORE_FLAG_VALUE == -1)
879 {
880 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
881 lowpart_mode, 0, 0);
882 fill_value = gen_reg_rtx (word_mode);
883 emit_insn (gen_slt (fill_value));
884 }
885 else
886 #endif
887 {
888 fill_value
889 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
890 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
891 NULL_RTX, 0);
892 fill_value = convert_to_mode (word_mode, fill_value, 1);
893 }
894 }
895
896 /* Fill the remaining words. */
897 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
898 {
899 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
900 rtx subword = operand_subword (to, index, 1, to_mode);
901
902 if (subword == 0)
903 abort ();
904
905 if (fill_value != subword)
906 emit_move_insn (subword, fill_value);
907 }
908
909 insns = get_insns ();
910 end_sequence ();
911
912 emit_no_conflict_block (insns, to, from, NULL_RTX,
913 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
914 return;
915 }
916
917 /* Truncating multi-word to a word or less. */
918 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
919 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
920 {
921 if (!((GET_CODE (from) == MEM
922 && ! MEM_VOLATILE_P (from)
923 && direct_load[(int) to_mode]
924 && ! mode_dependent_address_p (XEXP (from, 0)))
925 || GET_CODE (from) == REG
926 || GET_CODE (from) == SUBREG))
927 from = force_reg (from_mode, from);
928 convert_move (to, gen_lowpart (word_mode, from), 0);
929 return;
930 }
931
932 /* Handle pointer conversion. */ /* SPEE 900220. */
933 if (to_mode == PQImode)
934 {
935 if (from_mode != QImode)
936 from = convert_to_mode (QImode, from, unsignedp);
937
938 #ifdef HAVE_truncqipqi2
939 if (HAVE_truncqipqi2)
940 {
941 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
942 return;
943 }
944 #endif /* HAVE_truncqipqi2 */
945 abort ();
946 }
947
948 if (from_mode == PQImode)
949 {
950 if (to_mode != QImode)
951 {
952 from = convert_to_mode (QImode, from, unsignedp);
953 from_mode = QImode;
954 }
955 else
956 {
957 #ifdef HAVE_extendpqiqi2
958 if (HAVE_extendpqiqi2)
959 {
960 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
961 return;
962 }
963 #endif /* HAVE_extendpqiqi2 */
964 abort ();
965 }
966 }
967
968 if (to_mode == PSImode)
969 {
970 if (from_mode != SImode)
971 from = convert_to_mode (SImode, from, unsignedp);
972
973 #ifdef HAVE_truncsipsi2
974 if (HAVE_truncsipsi2)
975 {
976 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
977 return;
978 }
979 #endif /* HAVE_truncsipsi2 */
980 abort ();
981 }
982
983 if (from_mode == PSImode)
984 {
985 if (to_mode != SImode)
986 {
987 from = convert_to_mode (SImode, from, unsignedp);
988 from_mode = SImode;
989 }
990 else
991 {
992 #ifdef HAVE_extendpsisi2
993 if (! unsignedp && HAVE_extendpsisi2)
994 {
995 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
996 return;
997 }
998 #endif /* HAVE_extendpsisi2 */
999 #ifdef HAVE_zero_extendpsisi2
1000 if (unsignedp && HAVE_zero_extendpsisi2)
1001 {
1002 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1003 return;
1004 }
1005 #endif /* HAVE_zero_extendpsisi2 */
1006 abort ();
1007 }
1008 }
1009
1010 if (to_mode == PDImode)
1011 {
1012 if (from_mode != DImode)
1013 from = convert_to_mode (DImode, from, unsignedp);
1014
1015 #ifdef HAVE_truncdipdi2
1016 if (HAVE_truncdipdi2)
1017 {
1018 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1019 return;
1020 }
1021 #endif /* HAVE_truncdipdi2 */
1022 abort ();
1023 }
1024
1025 if (from_mode == PDImode)
1026 {
1027 if (to_mode != DImode)
1028 {
1029 from = convert_to_mode (DImode, from, unsignedp);
1030 from_mode = DImode;
1031 }
1032 else
1033 {
1034 #ifdef HAVE_extendpdidi2
1035 if (HAVE_extendpdidi2)
1036 {
1037 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1038 return;
1039 }
1040 #endif /* HAVE_extendpdidi2 */
1041 abort ();
1042 }
1043 }
1044
1045 /* Now follow all the conversions between integers
1046 no more than a word long. */
1047
1048 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1049 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1050 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1051 GET_MODE_BITSIZE (from_mode)))
1052 {
1053 if (!((GET_CODE (from) == MEM
1054 && ! MEM_VOLATILE_P (from)
1055 && direct_load[(int) to_mode]
1056 && ! mode_dependent_address_p (XEXP (from, 0)))
1057 || GET_CODE (from) == REG
1058 || GET_CODE (from) == SUBREG))
1059 from = force_reg (from_mode, from);
1060 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1061 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1062 from = copy_to_reg (from);
1063 emit_move_insn (to, gen_lowpart (to_mode, from));
1064 return;
1065 }
1066
1067 /* Handle extension. */
1068 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1069 {
1070 /* Convert directly if that works. */
1071 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1072 != CODE_FOR_nothing)
1073 {
1074 emit_unop_insn (code, to, from, equiv_code);
1075 return;
1076 }
1077 else
1078 {
1079 enum machine_mode intermediate;
1080 rtx tmp;
1081 tree shift_amount;
1082
1083 /* Search for a mode to convert via. */
1084 for (intermediate = from_mode; intermediate != VOIDmode;
1085 intermediate = GET_MODE_WIDER_MODE (intermediate))
1086 if (((can_extend_p (to_mode, intermediate, unsignedp)
1087 != CODE_FOR_nothing)
1088 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1089 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1090 GET_MODE_BITSIZE (intermediate))))
1091 && (can_extend_p (intermediate, from_mode, unsignedp)
1092 != CODE_FOR_nothing))
1093 {
1094 convert_move (to, convert_to_mode (intermediate, from,
1095 unsignedp), unsignedp);
1096 return;
1097 }
1098
1099 /* No suitable intermediate mode.
1100 Generate what we need with shifts. */
1101 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1102 - GET_MODE_BITSIZE (from_mode), 0);
1103 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1104 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1105 to, unsignedp);
1106 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1107 to, unsignedp);
1108 if (tmp != to)
1109 emit_move_insn (to, tmp);
1110 return;
1111 }
1112 }
1113
1114 /* Support special truncate insns for certain modes. */
1115
1116 if (from_mode == DImode && to_mode == SImode)
1117 {
1118 #ifdef HAVE_truncdisi2
1119 if (HAVE_truncdisi2)
1120 {
1121 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1122 return;
1123 }
1124 #endif
1125 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 return;
1127 }
1128
1129 if (from_mode == DImode && to_mode == HImode)
1130 {
1131 #ifdef HAVE_truncdihi2
1132 if (HAVE_truncdihi2)
1133 {
1134 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1135 return;
1136 }
1137 #endif
1138 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 return;
1140 }
1141
1142 if (from_mode == DImode && to_mode == QImode)
1143 {
1144 #ifdef HAVE_truncdiqi2
1145 if (HAVE_truncdiqi2)
1146 {
1147 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1148 return;
1149 }
1150 #endif
1151 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 return;
1153 }
1154
1155 if (from_mode == SImode && to_mode == HImode)
1156 {
1157 #ifdef HAVE_truncsihi2
1158 if (HAVE_truncsihi2)
1159 {
1160 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1161 return;
1162 }
1163 #endif
1164 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 return;
1166 }
1167
1168 if (from_mode == SImode && to_mode == QImode)
1169 {
1170 #ifdef HAVE_truncsiqi2
1171 if (HAVE_truncsiqi2)
1172 {
1173 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1174 return;
1175 }
1176 #endif
1177 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 return;
1179 }
1180
1181 if (from_mode == HImode && to_mode == QImode)
1182 {
1183 #ifdef HAVE_trunchiqi2
1184 if (HAVE_trunchiqi2)
1185 {
1186 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1187 return;
1188 }
1189 #endif
1190 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 return;
1192 }
1193
1194 if (from_mode == TImode && to_mode == DImode)
1195 {
1196 #ifdef HAVE_trunctidi2
1197 if (HAVE_trunctidi2)
1198 {
1199 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1200 return;
1201 }
1202 #endif
1203 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 return;
1205 }
1206
1207 if (from_mode == TImode && to_mode == SImode)
1208 {
1209 #ifdef HAVE_trunctisi2
1210 if (HAVE_trunctisi2)
1211 {
1212 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1213 return;
1214 }
1215 #endif
1216 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 return;
1218 }
1219
1220 if (from_mode == TImode && to_mode == HImode)
1221 {
1222 #ifdef HAVE_trunctihi2
1223 if (HAVE_trunctihi2)
1224 {
1225 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1226 return;
1227 }
1228 #endif
1229 convert_move (to, force_reg (from_mode, from), unsignedp);
1230 return;
1231 }
1232
1233 if (from_mode == TImode && to_mode == QImode)
1234 {
1235 #ifdef HAVE_trunctiqi2
1236 if (HAVE_trunctiqi2)
1237 {
1238 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1239 return;
1240 }
1241 #endif
1242 convert_move (to, force_reg (from_mode, from), unsignedp);
1243 return;
1244 }
1245
1246 /* Handle truncation of volatile memrefs, and so on;
1247 the things that couldn't be truncated directly,
1248 and for which there was no special instruction. */
1249 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1250 {
1251 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1252 emit_move_insn (to, temp);
1253 return;
1254 }
1255
1256 /* Mode combination is not recognized. */
1257 abort ();
1258 }
1259
1260 /* Return an rtx for a value that would result
1261 from converting X to mode MODE.
1262 Both X and MODE may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1264 This can be done by referring to a part of X in place
1265 or by copying to a new temporary with conversion.
1266
1267 This function *must not* call protect_from_queue
1268 except when putting X into an insn (in which case convert_move does it). */
1269
1270 rtx
1271 convert_to_mode (mode, x, unsignedp)
1272 enum machine_mode mode;
1273 rtx x;
1274 int unsignedp;
1275 {
1276 return convert_modes (mode, VOIDmode, x, unsignedp);
1277 }
1278
1279 /* Return an rtx for a value that would result
1280 from converting X from mode OLDMODE to mode MODE.
1281 Both modes may be floating, or both integer.
1282 UNSIGNEDP is nonzero if X is an unsigned value.
1283
1284 This can be done by referring to a part of X in place
1285 or by copying to a new temporary with conversion.
1286
1287 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1288
1289 This function *must not* call protect_from_queue
1290 except when putting X into an insn (in which case convert_move does it). */
1291
1292 rtx
1293 convert_modes (mode, oldmode, x, unsignedp)
1294 enum machine_mode mode, oldmode;
1295 rtx x;
1296 int unsignedp;
1297 {
1298 rtx temp;
1299
1300 /* If FROM is a SUBREG that indicates that we have already done at least
1301 the required extension, strip it. */
1302
1303 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1304 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1305 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1306 x = gen_lowpart (mode, x);
1307
1308 if (GET_MODE (x) != VOIDmode)
1309 oldmode = GET_MODE (x);
1310
1311 if (mode == oldmode)
1312 return x;
1313
1314 /* There is one case that we must handle specially: If we are converting
1315 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1316 we are to interpret the constant as unsigned, gen_lowpart will do
1317 the wrong thing if the constant appears negative. What we want to do is
1318 make the high-order word of the constant zero, not all ones. */
1319
1320 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1321 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1322 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1323 {
1324 HOST_WIDE_INT val = INTVAL (x);
1325
1326 if (oldmode != VOIDmode
1327 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1328 {
1329 int width = GET_MODE_BITSIZE (oldmode);
1330
1331 /* We need to zero extend VAL. */
1332 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1333 }
1334
1335 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1336 }
1337
1338 /* We can do this with a gen_lowpart if both desired and current modes
1339 are integer, and this is either a constant integer, a register, or a
1340 non-volatile MEM. Except for the constant case where MODE is no
1341 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1342
1343 if ((GET_CODE (x) == CONST_INT
1344 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1345 || (GET_MODE_CLASS (mode) == MODE_INT
1346 && GET_MODE_CLASS (oldmode) == MODE_INT
1347 && (GET_CODE (x) == CONST_DOUBLE
1348 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1349 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1350 && direct_load[(int) mode])
1351 || (GET_CODE (x) == REG
1352 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1353 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1354 {
1355 /* ?? If we don't know OLDMODE, we have to assume here that
1356 X does not need sign- or zero-extension. This may not be
1357 the case, but it's the best we can do. */
1358 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1359 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1360 {
1361 HOST_WIDE_INT val = INTVAL (x);
1362 int width = GET_MODE_BITSIZE (oldmode);
1363
1364 /* We must sign or zero-extend in this case. Start by
1365 zero-extending, then sign extend if we need to. */
1366 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1367 if (! unsignedp
1368 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1369 val |= (HOST_WIDE_INT) (-1) << width;
1370
1371 return GEN_INT (trunc_int_for_mode (val, mode));
1372 }
1373
1374 return gen_lowpart (mode, x);
1375 }
1376
1377 temp = gen_reg_rtx (mode);
1378 convert_move (temp, x, unsignedp);
1379 return temp;
1380 }
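/* Illustrative sketch, not part of the original file: a hypothetical caller
   of convert_to_mode, widening a QImode value to SImode with zero extension
   (UNSIGNEDP == 1).  The result may share X or be a fresh pseudo, depending
   on what conversion turns out to be required.  */

static rtx example_zero_extend_qi_to_si PARAMS ((rtx));

static rtx
example_zero_extend_qi_to_si (x)
     rtx x;
{
  /* X is assumed to hold a QImode value.  */
  return convert_to_mode (SImode, x, 1);
}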
1381 \f
1382 /* This macro is used to determine what the largest unit size that
1383 move_by_pieces can use is. */
1384
1385 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1386 move efficiently, as opposed to MOVE_MAX which is the maximum
1387 number of bytes we can move with a single instruction. */
1388
1389 #ifndef MOVE_MAX_PIECES
1390 #define MOVE_MAX_PIECES MOVE_MAX
1391 #endif
1392
1393 /* Generate several move instructions to copy LEN bytes from block FROM to
1394 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1395 and TO through protect_from_queue before calling.
1396
1397 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1398 used to push FROM to the stack.
1399
1400 ALIGN is maximum alignment we can assume. */
1401
1402 void
1403 move_by_pieces (to, from, len, align)
1404 rtx to, from;
1405 unsigned HOST_WIDE_INT len;
1406 unsigned int align;
1407 {
1408 struct move_by_pieces data;
1409 rtx to_addr, from_addr = XEXP (from, 0);
1410 unsigned int max_size = MOVE_MAX_PIECES + 1;
1411 enum machine_mode mode = VOIDmode, tmode;
1412 enum insn_code icode;
1413
1414 data.offset = 0;
1415 data.from_addr = from_addr;
1416 if (to)
1417 {
1418 to_addr = XEXP (to, 0);
1419 data.to = to;
1420 data.autinc_to
1421 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1422 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1423 data.reverse
1424 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1425 }
1426 else
1427 {
1428 to_addr = NULL_RTX;
1429 data.to = NULL_RTX;
1430 data.autinc_to = 1;
1431 #ifdef STACK_GROWS_DOWNWARD
1432 data.reverse = 1;
1433 #else
1434 data.reverse = 0;
1435 #endif
1436 }
1437 data.to_addr = to_addr;
1438 data.from = from;
1439 data.autinc_from
1440 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1441 || GET_CODE (from_addr) == POST_INC
1442 || GET_CODE (from_addr) == POST_DEC);
1443
1444 data.explicit_inc_from = 0;
1445 data.explicit_inc_to = 0;
1446 if (data.reverse) data.offset = len;
1447 data.len = len;
1448
1449 /* If copying requires more than two move insns,
1450 copy addresses to registers (to make displacements shorter)
1451 and use post-increment if available. */
1452 if (!(data.autinc_from && data.autinc_to)
1453 && move_by_pieces_ninsns (len, align) > 2)
1454 {
1455 /* Find the mode of the largest move... */
1456 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1457 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1458 if (GET_MODE_SIZE (tmode) < max_size)
1459 mode = tmode;
1460
1461 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1462 {
1463 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1464 data.autinc_from = 1;
1465 data.explicit_inc_from = -1;
1466 }
1467 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1468 {
1469 data.from_addr = copy_addr_to_reg (from_addr);
1470 data.autinc_from = 1;
1471 data.explicit_inc_from = 1;
1472 }
1473 if (!data.autinc_from && CONSTANT_P (from_addr))
1474 data.from_addr = copy_addr_to_reg (from_addr);
1475 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1476 {
1477 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1478 data.autinc_to = 1;
1479 data.explicit_inc_to = -1;
1480 }
1481 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1482 {
1483 data.to_addr = copy_addr_to_reg (to_addr);
1484 data.autinc_to = 1;
1485 data.explicit_inc_to = 1;
1486 }
1487 if (!data.autinc_to && CONSTANT_P (to_addr))
1488 data.to_addr = copy_addr_to_reg (to_addr);
1489 }
1490
1491 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1492 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1493 align = MOVE_MAX * BITS_PER_UNIT;
1494
1495 /* First move what we can in the largest integer mode, then go to
1496 successively smaller modes. */
1497
1498 while (max_size > 1)
1499 {
1500 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1501 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1502 if (GET_MODE_SIZE (tmode) < max_size)
1503 mode = tmode;
1504
1505 if (mode == VOIDmode)
1506 break;
1507
1508 icode = mov_optab->handlers[(int) mode].insn_code;
1509 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1510 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1511
1512 max_size = GET_MODE_SIZE (mode);
1513 }
1514
1515 /* The code above should have handled everything. */
1516 if (data.len > 0)
1517 abort ();
1518 }
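/* Illustrative sketch, not part of the original file: a hypothetical helper
   showing how MOVE_BY_PIECES_P and move_by_pieces fit together.  DEST and
   SRC are assumed to be BLKmode MEMs and LEN a compile-time constant byte
   count; larger or less favorable copies fall back to emit_block_move.  */

static void example_copy_small_block PARAMS ((rtx, rtx, unsigned HOST_WIDE_INT));

static void
example_copy_small_block (dest, src, len)
     rtx dest, src;
     unsigned HOST_WIDE_INT len;
{
  unsigned int align = MIN (MEM_ALIGN (dest), MEM_ALIGN (src));

  if (MOVE_BY_PIECES_P (len, align))
    /* Few enough simple moves: emit them directly.  */
    move_by_pieces (dest, src, len, align);
  else
    /* Otherwise let emit_block_move pick a movstr pattern or a libcall.  */
    emit_block_move (dest, src, GEN_INT (len));
}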
1519
1520 /* Return number of insns required to move L bytes by pieces.
1521 ALIGN (in bits) is maximum alignment we can assume. */
1522
1523 static unsigned HOST_WIDE_INT
1524 move_by_pieces_ninsns (l, align)
1525 unsigned HOST_WIDE_INT l;
1526 unsigned int align;
1527 {
1528 unsigned HOST_WIDE_INT n_insns = 0;
1529 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1530
1531 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1532 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1533 align = MOVE_MAX * BITS_PER_UNIT;
1534
1535 while (max_size > 1)
1536 {
1537 enum machine_mode mode = VOIDmode, tmode;
1538 enum insn_code icode;
1539
1540 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1541 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1542 if (GET_MODE_SIZE (tmode) < max_size)
1543 mode = tmode;
1544
1545 if (mode == VOIDmode)
1546 break;
1547
1548 icode = mov_optab->handlers[(int) mode].insn_code;
1549 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1550 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1551
1552 max_size = GET_MODE_SIZE (mode);
1553 }
1554
1555 if (l)
1556 abort ();
1557 return n_insns;
1558 }
1559
1560 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1561 with move instructions for mode MODE. GENFUN is the gen_... function
1562 to make a move insn for that mode. DATA has all the other info. */
1563
1564 static void
1565 move_by_pieces_1 (genfun, mode, data)
1566 rtx (*genfun) PARAMS ((rtx, ...));
1567 enum machine_mode mode;
1568 struct move_by_pieces *data;
1569 {
1570 unsigned int size = GET_MODE_SIZE (mode);
1571 rtx to1 = NULL_RTX, from1;
1572
1573 while (data->len >= size)
1574 {
1575 if (data->reverse)
1576 data->offset -= size;
1577
1578 if (data->to)
1579 {
1580 if (data->autinc_to)
1581 {
1582 to1 = replace_equiv_address (data->to, data->to_addr);
1583 to1 = adjust_address (to1, mode, 0);
1584 }
1585 else
1586 to1 = adjust_address (data->to, mode, data->offset);
1587 }
1588
1589 if (data->autinc_from)
1590 {
1591 from1 = replace_equiv_address (data->from, data->from_addr);
1592 from1 = adjust_address (from1, mode, 0);
1593 }
1594 else
1595 from1 = adjust_address (data->from, mode, data->offset);
1596
1597 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1598 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1599 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1600 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1601
1602 if (data->to)
1603 emit_insn ((*genfun) (to1, from1));
1604 else
1605 {
1606 #ifdef PUSH_ROUNDING
1607 emit_single_push_insn (mode, from1, NULL);
1608 #else
1609 abort ();
1610 #endif
1611 }
1612
1613 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1614 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1615 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1616 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1617
1618 if (! data->reverse)
1619 data->offset += size;
1620
1621 data->len -= size;
1622 }
1623 }
1624 \f
1625 /* Emit code to move a block Y to a block X.
1626 This may be done with string-move instructions,
1627 with multiple scalar move instructions, or with a library call.
1628
1629 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1630 with mode BLKmode.
1631 SIZE is an rtx that says how long they are.
1632 The alignment to assume is taken from the MEM_ALIGN of X and Y.
1633
1634 Return the address of the new block, if memcpy is called and returns it,
1635 0 otherwise. */
1636
1637 rtx
1638 emit_block_move (x, y, size)
1639 rtx x, y;
1640 rtx size;
1641 {
1642 rtx retval = 0;
1643 #ifdef TARGET_MEM_FUNCTIONS
1644 static tree fn;
1645 tree call_expr, arg_list;
1646 #endif
1647 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1648
1649 if (GET_MODE (x) != BLKmode)
1650 abort ();
1651
1652 if (GET_MODE (y) != BLKmode)
1653 abort ();
1654
1655 x = protect_from_queue (x, 1);
1656 y = protect_from_queue (y, 0);
1657 size = protect_from_queue (size, 0);
1658
1659 if (GET_CODE (x) != MEM)
1660 abort ();
1661 if (GET_CODE (y) != MEM)
1662 abort ();
1663 if (size == 0)
1664 abort ();
1665
1666 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1667 move_by_pieces (x, y, INTVAL (size), align);
1668 else
1669 {
1670 /* Try the most limited insn first, because there's no point
1671 including more than one in the machine description unless
1672 the more limited one has some advantage. */
1673
1674 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1675 enum machine_mode mode;
1676
1677 /* Since this is a move insn, we don't care about volatility. */
1678 volatile_ok = 1;
1679
1680 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1681 mode = GET_MODE_WIDER_MODE (mode))
1682 {
1683 enum insn_code code = movstr_optab[(int) mode];
1684 insn_operand_predicate_fn pred;
1685
1686 if (code != CODE_FOR_nothing
1687 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1688 here because if SIZE is less than the mode mask, as it is
1689 returned by the macro, it will definitely be less than the
1690 actual mode mask. */
1691 && ((GET_CODE (size) == CONST_INT
1692 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1693 <= (GET_MODE_MASK (mode) >> 1)))
1694 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1695 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1696 || (*pred) (x, BLKmode))
1697 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1698 || (*pred) (y, BLKmode))
1699 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1700 || (*pred) (opalign, VOIDmode)))
1701 {
1702 rtx op2;
1703 rtx last = get_last_insn ();
1704 rtx pat;
1705
1706 op2 = convert_to_mode (mode, size, 1);
1707 pred = insn_data[(int) code].operand[2].predicate;
1708 if (pred != 0 && ! (*pred) (op2, mode))
1709 op2 = copy_to_mode_reg (mode, op2);
1710
1711 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1712 if (pat)
1713 {
1714 emit_insn (pat);
1715 volatile_ok = 0;
1716 return 0;
1717 }
1718 else
1719 delete_insns_since (last);
1720 }
1721 }
1722
1723 volatile_ok = 0;
1724
1725 /* X, Y, or SIZE may have been passed through protect_from_queue.
1726
1727 It is unsafe to save the value generated by protect_from_queue
1728 and reuse it later. Consider what happens if emit_queue is
1729 called before the return value from protect_from_queue is used.
1730
1731 Expansion of the CALL_EXPR below will call emit_queue before
1732 we are finished emitting RTL for argument setup. So if we are
1733 not careful we could get the wrong value for an argument.
1734
1735 To avoid this problem we go ahead and emit code to copy X, Y &
1736 SIZE into new pseudos. We can then place those new pseudos
1737 into an RTL_EXPR and use them later, even after a call to
1738 emit_queue.
1739
1740 Note this is not strictly needed for library calls since they
1741 do not call emit_queue before loading their arguments. However,
1742 we may need to have library calls call emit_queue in the future
1743 since failing to do so could cause problems for targets which
1744 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1745 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1746 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1747
1748 #ifdef TARGET_MEM_FUNCTIONS
1749 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1750 #else
1751 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1752 TREE_UNSIGNED (integer_type_node));
1753 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1754 #endif
1755
1756 #ifdef TARGET_MEM_FUNCTIONS
1757 /* It is incorrect to use the libcall calling conventions to call
1758 memcpy in this context.
1759
1760 This could be a user call to memcpy and the user may wish to
1761 examine the return value from memcpy.
1762
1763 For targets where libcalls and normal calls have different conventions
1764 for returning pointers, we could end up generating incorrect code.
1765
1766 So instead of using a libcall sequence we build up a suitable
1767 CALL_EXPR and expand the call in the normal fashion. */
1768 if (fn == NULL_TREE)
1769 {
1770 tree fntype;
1771
1772 /* This was copied from except.c; I don't know whether all of it is
1773 necessary in this context. */
1774 fn = get_identifier ("memcpy");
1775 fntype = build_pointer_type (void_type_node);
1776 fntype = build_function_type (fntype, NULL_TREE);
1777 fn = build_decl (FUNCTION_DECL, fn, fntype);
1778 ggc_add_tree_root (&fn, 1);
1779 DECL_EXTERNAL (fn) = 1;
1780 TREE_PUBLIC (fn) = 1;
1781 DECL_ARTIFICIAL (fn) = 1;
1782 TREE_NOTHROW (fn) = 1;
1783 make_decl_rtl (fn, NULL);
1784 assemble_external (fn);
1785 }
1786
1787 /* We need to make an argument list for the function call.
1788
1789 memcpy has three arguments, the first two are void * addresses and
1790 the last is a size_t byte count for the copy. */
1791 arg_list
1792 = build_tree_list (NULL_TREE,
1793 make_tree (build_pointer_type (void_type_node), x));
1794 TREE_CHAIN (arg_list)
1795 = build_tree_list (NULL_TREE,
1796 make_tree (build_pointer_type (void_type_node), y));
1797 TREE_CHAIN (TREE_CHAIN (arg_list))
1798 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1799 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1800
1801 /* Now we have to build up the CALL_EXPR itself. */
1802 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1803 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1804 call_expr, arg_list, NULL_TREE);
1805 TREE_SIDE_EFFECTS (call_expr) = 1;
1806
1807 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1808 #else
1809 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1810 VOIDmode, 3, y, Pmode, x, Pmode,
1811 convert_to_mode (TYPE_MODE (integer_type_node), size,
1812 TREE_UNSIGNED (integer_type_node)),
1813 TYPE_MODE (integer_type_node));
1814 #endif
1815 }
1816
1817 return retval;
1818 }
1819 \f
1820 /* Copy all or part of a value X into registers starting at REGNO.
1821 The number of registers to be filled is NREGS. */
1822
1823 void
1824 move_block_to_reg (regno, x, nregs, mode)
1825 int regno;
1826 rtx x;
1827 int nregs;
1828 enum machine_mode mode;
1829 {
1830 int i;
1831 #ifdef HAVE_load_multiple
1832 rtx pat;
1833 rtx last;
1834 #endif
1835
1836 if (nregs == 0)
1837 return;
1838
1839 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1840 x = validize_mem (force_const_mem (mode, x));
1841
1842 /* See if the machine can do this with a load multiple insn. */
1843 #ifdef HAVE_load_multiple
1844 if (HAVE_load_multiple)
1845 {
1846 last = get_last_insn ();
1847 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1848 GEN_INT (nregs));
1849 if (pat)
1850 {
1851 emit_insn (pat);
1852 return;
1853 }
1854 else
1855 delete_insns_since (last);
1856 }
1857 #endif
1858
1859 for (i = 0; i < nregs; i++)
1860 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1861 operand_subword_force (x, i, mode));
1862 }
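/* Illustrative sketch, not part of the original file: a hypothetical caller
   of move_block_to_reg that loads a value X of mode MODE into however many
   consecutive word registers it needs, starting at hard register REGNO.  */

static void example_load_value_into_regs PARAMS ((int, rtx, enum machine_mode));

static void
example_load_value_into_regs (regno, x, mode)
     int regno;
     rtx x;
     enum machine_mode mode;
{
  /* Round the mode size up to a whole number of words.  */
  int nregs = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  move_block_to_reg (regno, x, nregs, mode);
}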
1863
1864 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1865 The number of registers to be filled is NREGS. SIZE indicates the number
1866 of bytes in the object X. */
1867
1868 void
1869 move_block_from_reg (regno, x, nregs, size)
1870 int regno;
1871 rtx x;
1872 int nregs;
1873 int size;
1874 {
1875 int i;
1876 #ifdef HAVE_store_multiple
1877 rtx pat;
1878 rtx last;
1879 #endif
1880 enum machine_mode mode;
1881
1882 if (nregs == 0)
1883 return;
1884
1885 /* If SIZE is that of a mode no bigger than a word, just use that
1886 mode's store operation. */
1887 if (size <= UNITS_PER_WORD
1888 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1889 {
1890 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1891 return;
1892 }
1893
1894 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1895 to the left before storing to memory. Note that the previous test
1896 doesn't handle all cases (e.g. SIZE == 3). */
1897 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1898 {
1899 rtx tem = operand_subword (x, 0, 1, BLKmode);
1900 rtx shift;
1901
1902 if (tem == 0)
1903 abort ();
1904
1905 shift = expand_shift (LSHIFT_EXPR, word_mode,
1906 gen_rtx_REG (word_mode, regno),
1907 build_int_2 ((UNITS_PER_WORD - size)
1908 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1909 emit_move_insn (tem, shift);
1910 return;
1911 }
1912
1913 /* See if the machine can do this with a store multiple insn. */
1914 #ifdef HAVE_store_multiple
1915 if (HAVE_store_multiple)
1916 {
1917 last = get_last_insn ();
1918 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1919 GEN_INT (nregs));
1920 if (pat)
1921 {
1922 emit_insn (pat);
1923 return;
1924 }
1925 else
1926 delete_insns_since (last);
1927 }
1928 #endif
1929
1930 for (i = 0; i < nregs; i++)
1931 {
1932 rtx tem = operand_subword (x, i, 1, BLKmode);
1933
1934 if (tem == 0)
1935 abort ();
1936
1937 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1938 }
1939 }
1940
1941 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1942 registers represented by a PARALLEL. SSIZE represents the total size of
1943 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1944 SRC in bits. */
 1945 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1946 the balance will be in what would be the low-order memory addresses, i.e.
1947 left justified for big endian, right justified for little endian. This
1948 happens to be true for the targets currently using this support. If this
1949 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1950 would be needed. */
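/* A rough illustration (not taken from any particular target): a DST used to
   load a 16-byte structure into two 8-byte registers might look like

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   where each EXPR_LIST pairs a destination register with the byte offset of
   the piece it receives; the register numbers and modes here are made up
   for the example.  */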
1951
1952 void
1953 emit_group_load (dst, orig_src, ssize, align)
1954 rtx dst, orig_src;
1955 unsigned int align;
1956 int ssize;
1957 {
1958 rtx *tmps, src;
1959 int start, i;
1960
1961 if (GET_CODE (dst) != PARALLEL)
1962 abort ();
1963
1964 /* Check for a NULL entry, used to indicate that the parameter goes
1965 both on the stack and in registers. */
1966 if (XEXP (XVECEXP (dst, 0, 0), 0))
1967 start = 0;
1968 else
1969 start = 1;
1970
1971 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1972
1973 /* Process the pieces. */
1974 for (i = start; i < XVECLEN (dst, 0); i++)
1975 {
1976 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1977 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1978 unsigned int bytelen = GET_MODE_SIZE (mode);
1979 int shift = 0;
1980
1981 /* Handle trailing fragments that run over the size of the struct. */
1982 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1983 {
1984 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1985 bytelen = ssize - bytepos;
1986 if (bytelen <= 0)
1987 abort ();
1988 }
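      /* E.g. (illustrative numbers only): an 8-byte piece at BYTEPOS 0 of a
	 6-byte SSIZE leaves BYTELEN == 6 and SHIFT == (8 - 6) * 8 == 16; on a
	 big-endian target the extracted bits are then shifted left by 16
	 below so that they end up left-justified in the register.  */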
1989
1990 /* If we won't be loading directly from memory, protect the real source
1991 from strange tricks we might play; but make sure that the source can
1992 be loaded directly into the destination. */
1993 src = orig_src;
1994 if (GET_CODE (orig_src) != MEM
1995 && (!CONSTANT_P (orig_src)
1996 || (GET_MODE (orig_src) != mode
1997 && GET_MODE (orig_src) != VOIDmode)))
1998 {
1999 if (GET_MODE (orig_src) == VOIDmode)
2000 src = gen_reg_rtx (mode);
2001 else
2002 src = gen_reg_rtx (GET_MODE (orig_src));
2003 emit_move_insn (src, orig_src);
2004 }
2005
2006 /* Optimize the access just a bit. */
2007 if (GET_CODE (src) == MEM
2008 && align >= GET_MODE_ALIGNMENT (mode)
2009 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2010 && bytelen == GET_MODE_SIZE (mode))
2011 {
2012 tmps[i] = gen_reg_rtx (mode);
2013 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2014 }
2015 else if (GET_CODE (src) == CONCAT)
2016 {
2017 if (bytepos == 0
2018 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2019 tmps[i] = XEXP (src, 0);
2020 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2021 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2022 tmps[i] = XEXP (src, 1);
2023 else if (bytepos == 0)
2024 {
2025 rtx mem;
2026 mem = assign_stack_temp (GET_MODE (src),
2027 GET_MODE_SIZE (GET_MODE (src)), 0);
2028 emit_move_insn (mem, src);
2029 tmps[i] = change_address (mem, mode, XEXP (mem, 0));
2030 }
2031 else
2032 abort ();
2033 }
2034 else if (CONSTANT_P (src)
2035 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2036 tmps[i] = src;
2037 else
2038 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2039 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2040 mode, mode, align, ssize);
2041
2042 if (BYTES_BIG_ENDIAN && shift)
 2043 	tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
 2044 				tmps[i], 0, OPTAB_WIDEN);
2045 }
2046
2047 emit_queue ();
2048
2049 /* Copy the extracted pieces into the proper (probable) hard regs. */
2050 for (i = start; i < XVECLEN (dst, 0); i++)
2051 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2052 }
2053
2054 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2055 registers represented by a PARALLEL. SSIZE represents the total size of
2056 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2057
2058 void
2059 emit_group_store (orig_dst, src, ssize, align)
2060 rtx orig_dst, src;
2061 int ssize;
2062 unsigned int align;
2063 {
2064 rtx *tmps, dst;
2065 int start, i;
2066
2067 if (GET_CODE (src) != PARALLEL)
2068 abort ();
2069
2070 /* Check for a NULL entry, used to indicate that the parameter goes
2071 both on the stack and in registers. */
2072 if (XEXP (XVECEXP (src, 0, 0), 0))
2073 start = 0;
2074 else
2075 start = 1;
2076
2077 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2078
2079 /* Copy the (probable) hard regs into pseudos. */
2080 for (i = start; i < XVECLEN (src, 0); i++)
2081 {
2082 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2083 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2084 emit_move_insn (tmps[i], reg);
2085 }
2086 emit_queue ();
2087
2088 /* If we won't be storing directly into memory, protect the real destination
2089 from strange tricks we might play. */
2090 dst = orig_dst;
2091 if (GET_CODE (dst) == PARALLEL)
2092 {
2093 rtx temp;
2094
2095 /* We can get a PARALLEL dst if there is a conditional expression in
2096 a return statement. In that case, the dst and src are the same,
2097 so no action is necessary. */
2098 if (rtx_equal_p (dst, src))
2099 return;
2100
2101 /* It is unclear if we can ever reach here, but we may as well handle
2102 it. Allocate a temporary, and split this into a store/load to/from
2103 the temporary. */
2104
2105 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2106 emit_group_store (temp, src, ssize, align);
2107 emit_group_load (dst, temp, ssize, align);
2108 return;
2109 }
2110 else if (GET_CODE (dst) != MEM)
2111 {
2112 dst = gen_reg_rtx (GET_MODE (orig_dst));
2113 /* Make life a bit easier for combine. */
2114 emit_move_insn (dst, const0_rtx);
2115 }
2116
2117 /* Process the pieces. */
2118 for (i = start; i < XVECLEN (src, 0); i++)
2119 {
2120 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2121 enum machine_mode mode = GET_MODE (tmps[i]);
2122 unsigned int bytelen = GET_MODE_SIZE (mode);
2123
2124 /* Handle trailing fragments that run over the size of the struct. */
2125 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2126 {
2127 if (BYTES_BIG_ENDIAN)
2128 {
2129 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
 2130 	      tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
 2131 				      tmps[i], 0, OPTAB_WIDEN);
2132 }
2133 bytelen = ssize - bytepos;
2134 }
2135
2136 /* Optimize the access just a bit. */
2137 if (GET_CODE (dst) == MEM
2138 && align >= GET_MODE_ALIGNMENT (mode)
2139 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2140 && bytelen == GET_MODE_SIZE (mode))
2141 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2142 else
2143 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2144 mode, tmps[i], align, ssize);
2145 }
2146
2147 emit_queue ();
2148
2149 /* Copy from the pseudo into the (probable) hard reg. */
2150 if (GET_CODE (dst) == REG)
2151 emit_move_insn (orig_dst, dst);
2152 }
2153
2154 /* Generate code to copy a BLKmode object of TYPE out of a
2155 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2156 is null, a stack temporary is created. TGTBLK is returned.
2157
2158 The primary purpose of this routine is to handle functions
2159 that return BLKmode structures in registers. Some machines
2160 (the PA for example) want to return all small structures
2161 in registers regardless of the structure's alignment. */
2162
2163 rtx
2164 copy_blkmode_from_reg (tgtblk, srcreg, type)
2165 rtx tgtblk;
2166 rtx srcreg;
2167 tree type;
2168 {
2169 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2170 rtx src = NULL, dst = NULL;
2171 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2172 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2173
2174 if (tgtblk == 0)
2175 {
2176 tgtblk = assign_temp (build_qualified_type (type,
2177 (TYPE_QUALS (type)
2178 | TYPE_QUAL_CONST)),
2179 0, 1, 1);
2180 preserve_temp_slots (tgtblk);
2181 }
2182
2183 /* This code assumes srcreg is at least a full word. If it isn't,
2184 copy it into a new pseudo which is a full word. */
2185 if (GET_MODE (srcreg) != BLKmode
2186 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2187 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2188
2189 /* Structures whose size is not a multiple of a word are aligned
2190 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2191 machine, this means we must skip the empty high order bytes when
2192 calculating the bit offset. */
2193 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2194 big_endian_correction
2195 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
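    /* For instance (purely illustrative numbers): with BITS_PER_WORD == 32,
       UNITS_PER_WORD == 4 and a 6-byte structure, the last word holds only
       2 significant bytes, so the correction is 32 - 2 * 8 == 16 bits.  */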
2196
 2197   /* Copy the structure BITSIZE bits at a time.
2198
2199 We could probably emit more efficient code for machines which do not use
2200 strict alignment, but it doesn't seem worth the effort at the current
2201 time. */
2202 for (bitpos = 0, xbitpos = big_endian_correction;
2203 bitpos < bytes * BITS_PER_UNIT;
2204 bitpos += bitsize, xbitpos += bitsize)
2205 {
2206 /* We need a new source operand each time xbitpos is on a
2207 word boundary and when xbitpos == big_endian_correction
2208 (the first time through). */
2209 if (xbitpos % BITS_PER_WORD == 0
2210 || xbitpos == big_endian_correction)
2211 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2212 GET_MODE (srcreg));
2213
2214 /* We need a new destination operand each time bitpos is on
2215 a word boundary. */
2216 if (bitpos % BITS_PER_WORD == 0)
2217 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2218
2219 /* Use xbitpos for the source extraction (right justified) and
 2220 	 bitpos for the destination store (left justified). */
2221 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2222 extract_bit_field (src, bitsize,
2223 xbitpos % BITS_PER_WORD, 1,
2224 NULL_RTX, word_mode, word_mode,
2225 bitsize, BITS_PER_WORD),
2226 bitsize, BITS_PER_WORD);
2227 }
2228
2229 return tgtblk;
2230 }
2231
2232 /* Add a USE expression for REG to the (possibly empty) list pointed
2233 to by CALL_FUSAGE. REG must denote a hard register. */
2234
2235 void
2236 use_reg (call_fusage, reg)
2237 rtx *call_fusage, reg;
2238 {
2239 if (GET_CODE (reg) != REG
2240 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2241 abort ();
2242
2243 *call_fusage
2244 = gen_rtx_EXPR_LIST (VOIDmode,
2245 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2246 }
2247
2248 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2249 starting at REGNO. All of these registers must be hard registers. */
2250
2251 void
2252 use_regs (call_fusage, regno, nregs)
2253 rtx *call_fusage;
2254 int regno;
2255 int nregs;
2256 {
2257 int i;
2258
2259 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2260 abort ();
2261
2262 for (i = 0; i < nregs; i++)
2263 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2264 }
2265
2266 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2267 PARALLEL REGS. This is for calls that pass values in multiple
2268 non-contiguous locations. The Irix 6 ABI has examples of this. */
2269
2270 void
2271 use_group_regs (call_fusage, regs)
2272 rtx *call_fusage;
2273 rtx regs;
2274 {
2275 int i;
2276
2277 for (i = 0; i < XVECLEN (regs, 0); i++)
2278 {
2279 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2280
2281 /* A NULL entry means the parameter goes both on the stack and in
2282 registers. This can also be a MEM for targets that pass values
2283 partially on the stack and partially in registers. */
2284 if (reg != 0 && GET_CODE (reg) == REG)
2285 use_reg (call_fusage, reg);
2286 }
2287 }
2288 \f
2289
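/* Determine whether the LEN bytes generated by CONSTFUN can be stored to
   memory using several move instructions.  CONSTFUNDATA is a pointer which
   will be passed as argument in every CONSTFUN call; ALIGN is the maximum
   alignment we can assume.  Return nonzero if a call to store_by_pieces
   should succeed.  */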
2290 int
2291 can_store_by_pieces (len, constfun, constfundata, align)
2292 unsigned HOST_WIDE_INT len;
2293 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2294 PTR constfundata;
2295 unsigned int align;
2296 {
2297 unsigned HOST_WIDE_INT max_size, l;
2298 HOST_WIDE_INT offset = 0;
2299 enum machine_mode mode, tmode;
2300 enum insn_code icode;
2301 int reverse;
2302 rtx cst;
2303
2304 if (! MOVE_BY_PIECES_P (len, align))
2305 return 0;
2306
2307 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2308 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2309 align = MOVE_MAX * BITS_PER_UNIT;
2310
2311 /* We would first store what we can in the largest integer mode, then go to
2312 successively smaller modes. */
2313
2314 for (reverse = 0;
2315 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2316 reverse++)
2317 {
2318 l = len;
2319 mode = VOIDmode;
2320 max_size = MOVE_MAX_PIECES + 1;
2321 while (max_size > 1)
2322 {
2323 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2324 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2325 if (GET_MODE_SIZE (tmode) < max_size)
2326 mode = tmode;
2327
2328 if (mode == VOIDmode)
2329 break;
2330
2331 icode = mov_optab->handlers[(int) mode].insn_code;
2332 if (icode != CODE_FOR_nothing
2333 && align >= GET_MODE_ALIGNMENT (mode))
2334 {
2335 unsigned int size = GET_MODE_SIZE (mode);
2336
2337 while (l >= size)
2338 {
2339 if (reverse)
2340 offset -= size;
2341
2342 cst = (*constfun) (constfundata, offset, mode);
2343 if (!LEGITIMATE_CONSTANT_P (cst))
2344 return 0;
2345
2346 if (!reverse)
2347 offset += size;
2348
2349 l -= size;
2350 }
2351 }
2352
2353 max_size = GET_MODE_SIZE (mode);
2354 }
2355
2356 /* The code above should have handled everything. */
2357 if (l != 0)
2358 abort ();
2359 }
2360
2361 return 1;
2362 }
2363
2364 /* Generate several move instructions to store LEN bytes generated by
2365 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2366 pointer which will be passed as argument in every CONSTFUN call.
2367 ALIGN is maximum alignment we can assume. */
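/* A minimal sketch of using the CONSTFUN interface (the callback name and
   the all-ones pattern are made up for illustration; clear_by_pieces_1
   below is the real in-tree example, which simply returns const0_rtx):

     static rtx
     store_ones_piece (data, offset, mode)
	  PTR data ATTRIBUTE_UNUSED;
	  HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
	  enum machine_mode mode ATTRIBUTE_UNUSED;
     {
       return constm1_rtx;
     }

   after which, assuming can_store_by_pieces (len, store_ones_piece, NULL,
   align) returned nonzero, one could call

     store_by_pieces (to, len, store_ones_piece, NULL, align);  */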
2368
2369 void
2370 store_by_pieces (to, len, constfun, constfundata, align)
2371 rtx to;
2372 unsigned HOST_WIDE_INT len;
2373 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2374 PTR constfundata;
2375 unsigned int align;
2376 {
2377 struct store_by_pieces data;
2378
2379 if (! MOVE_BY_PIECES_P (len, align))
2380 abort ();
2381 to = protect_from_queue (to, 1);
2382 data.constfun = constfun;
2383 data.constfundata = constfundata;
2384 data.len = len;
2385 data.to = to;
2386 store_by_pieces_1 (&data, align);
2387 }
2388
2389 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2390 rtx with BLKmode). The caller must pass TO through protect_from_queue
2391 before calling. ALIGN is maximum alignment we can assume. */
2392
2393 static void
2394 clear_by_pieces (to, len, align)
2395 rtx to;
2396 unsigned HOST_WIDE_INT len;
2397 unsigned int align;
2398 {
2399 struct store_by_pieces data;
2400
2401 data.constfun = clear_by_pieces_1;
2402 data.constfundata = NULL;
2403 data.len = len;
2404 data.to = to;
2405 store_by_pieces_1 (&data, align);
2406 }
2407
2408 /* Callback routine for clear_by_pieces.
2409 Return const0_rtx unconditionally. */
2410
2411 static rtx
2412 clear_by_pieces_1 (data, offset, mode)
2413 PTR data ATTRIBUTE_UNUSED;
2414 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2415 enum machine_mode mode ATTRIBUTE_UNUSED;
2416 {
2417 return const0_rtx;
2418 }
2419
2420 /* Subroutine of clear_by_pieces and store_by_pieces.
2421 Generate several move instructions to store LEN bytes of block TO. (A MEM
2422 rtx with BLKmode). The caller must pass TO through protect_from_queue
2423 before calling. ALIGN is maximum alignment we can assume. */
2424
2425 static void
2426 store_by_pieces_1 (data, align)
2427 struct store_by_pieces *data;
2428 unsigned int align;
2429 {
2430 rtx to_addr = XEXP (data->to, 0);
2431 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2432 enum machine_mode mode = VOIDmode, tmode;
2433 enum insn_code icode;
2434
2435 data->offset = 0;
2436 data->to_addr = to_addr;
2437 data->autinc_to
2438 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2439 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2440
2441 data->explicit_inc_to = 0;
2442 data->reverse
2443 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2444 if (data->reverse)
2445 data->offset = data->len;
2446
2447 /* If storing requires more than two move insns,
2448 copy addresses to registers (to make displacements shorter)
2449 and use post-increment if available. */
2450 if (!data->autinc_to
2451 && move_by_pieces_ninsns (data->len, align) > 2)
2452 {
2453 /* Determine the main mode we'll be using. */
2454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2456 if (GET_MODE_SIZE (tmode) < max_size)
2457 mode = tmode;
2458
2459 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2460 {
2461 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = -1;
2464 }
2465
2466 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2467 && ! data->autinc_to)
2468 {
2469 data->to_addr = copy_addr_to_reg (to_addr);
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = 1;
2472 }
2473
2474 if ( !data->autinc_to && CONSTANT_P (to_addr))
2475 data->to_addr = copy_addr_to_reg (to_addr);
2476 }
2477
2478 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2479 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2480 align = MOVE_MAX * BITS_PER_UNIT;
2481
2482 /* First store what we can in the largest integer mode, then go to
2483 successively smaller modes. */
2484
2485 while (max_size > 1)
2486 {
2487 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2488 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2489 if (GET_MODE_SIZE (tmode) < max_size)
2490 mode = tmode;
2491
2492 if (mode == VOIDmode)
2493 break;
2494
2495 icode = mov_optab->handlers[(int) mode].insn_code;
2496 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2497 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2498
2499 max_size = GET_MODE_SIZE (mode);
2500 }
2501
2502 /* The code above should have handled everything. */
2503 if (data->len != 0)
2504 abort ();
2505 }
2506
2507 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2508 with move instructions for mode MODE. GENFUN is the gen_... function
2509 to make a move insn for that mode. DATA has all the other info. */
2510
2511 static void
2512 store_by_pieces_2 (genfun, mode, data)
2513 rtx (*genfun) PARAMS ((rtx, ...));
2514 enum machine_mode mode;
2515 struct store_by_pieces *data;
2516 {
2517 unsigned int size = GET_MODE_SIZE (mode);
2518 rtx to1, cst;
2519
2520 while (data->len >= size)
2521 {
2522 if (data->reverse)
2523 data->offset -= size;
2524
2525 if (data->autinc_to)
2526 {
2527 to1 = replace_equiv_address (data->to, data->to_addr);
2528 to1 = adjust_address (to1, mode, 0);
2529 }
2530 else
2531 to1 = adjust_address (data->to, mode, data->offset);
2532
2533 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2534 emit_insn (gen_add2_insn (data->to_addr,
2535 GEN_INT (-(HOST_WIDE_INT) size)));
2536
2537 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2538 emit_insn ((*genfun) (to1, cst));
2539
2540 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2541 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2542
2543 if (! data->reverse)
2544 data->offset += size;
2545
2546 data->len -= size;
2547 }
2548 }
2549 \f
2550 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2551 its length in bytes. */
2552
2553 rtx
2554 clear_storage (object, size)
2555 rtx object;
2556 rtx size;
2557 {
2558 #ifdef TARGET_MEM_FUNCTIONS
2559 static tree fn;
2560 tree call_expr, arg_list;
2561 #endif
2562 rtx retval = 0;
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2565
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
2568 if (GET_MODE (object) != BLKmode
2569 && GET_CODE (size) == CONST_INT
2570 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2572 else
2573 {
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2576
2577 if (GET_CODE (size) == CONST_INT
2578 && MOVE_BY_PIECES_P (INTVAL (size), align))
2579 clear_by_pieces (object, INTVAL (size), align);
2580 else
2581 {
2582 /* Try the most limited insn first, because there's no point
2583 including more than one in the machine description unless
2584 the more limited one has some advantage. */
2585
2586 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2587 enum machine_mode mode;
2588
2589 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2590 mode = GET_MODE_WIDER_MODE (mode))
2591 {
2592 enum insn_code code = clrstr_optab[(int) mode];
2593 insn_operand_predicate_fn pred;
2594
2595 if (code != CODE_FOR_nothing
2596 /* We don't need MODE to be narrower than
2597 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2598 the mode mask, as it is returned by the macro, it will
2599 definitely be less than the actual mode mask. */
2600 && ((GET_CODE (size) == CONST_INT
2601 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2602 <= (GET_MODE_MASK (mode) >> 1)))
2603 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2604 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2605 || (*pred) (object, BLKmode))
2606 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2607 || (*pred) (opalign, VOIDmode)))
2608 {
2609 rtx op1;
2610 rtx last = get_last_insn ();
2611 rtx pat;
2612
2613 op1 = convert_to_mode (mode, size, 1);
2614 pred = insn_data[(int) code].operand[1].predicate;
2615 if (pred != 0 && ! (*pred) (op1, mode))
2616 op1 = copy_to_mode_reg (mode, op1);
2617
2618 pat = GEN_FCN ((int) code) (object, op1, opalign);
2619 if (pat)
2620 {
2621 emit_insn (pat);
2622 return 0;
2623 }
2624 else
2625 delete_insns_since (last);
2626 }
2627 }
2628
2629 /* OBJECT or SIZE may have been passed through protect_from_queue.
2630
2631 It is unsafe to save the value generated by protect_from_queue
2632 and reuse it later. Consider what happens if emit_queue is
2633 called before the return value from protect_from_queue is used.
2634
2635 Expansion of the CALL_EXPR below will call emit_queue before
2636 we are finished emitting RTL for argument setup. So if we are
2637 not careful we could get the wrong value for an argument.
2638
2639 To avoid this problem we go ahead and emit code to copy OBJECT
2640 and SIZE into new pseudos. We can then place those new pseudos
2641 into an RTL_EXPR and use them later, even after a call to
2642 emit_queue.
2643
2644 Note this is not strictly needed for library calls since they
2645 do not call emit_queue before loading their arguments. However,
2646 we may need to have library calls call emit_queue in the future
2647 since failing to do so could cause problems for targets which
2648 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2649 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2650
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2653 #else
2654 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2655 TREE_UNSIGNED (integer_type_node));
2656 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2657 #endif
2658
2659 #ifdef TARGET_MEM_FUNCTIONS
2660 /* It is incorrect to use the libcall calling conventions to call
2661 memset in this context.
2662
2663 This could be a user call to memset and the user may wish to
2664 examine the return value from memset.
2665
2666 For targets where libcalls and normal calls have different
2667 conventions for returning pointers, we could end up generating
2668 incorrect code.
2669
2670 So instead of using a libcall sequence we build up a suitable
2671 CALL_EXPR and expand the call in the normal fashion. */
2672 if (fn == NULL_TREE)
2673 {
2674 tree fntype;
2675
2676 /* This was copied from except.c, I don't know if all this is
2677 necessary in this context or not. */
2678 fn = get_identifier ("memset");
2679 fntype = build_pointer_type (void_type_node);
2680 fntype = build_function_type (fntype, NULL_TREE);
2681 fn = build_decl (FUNCTION_DECL, fn, fntype);
2682 ggc_add_tree_root (&fn, 1);
2683 DECL_EXTERNAL (fn) = 1;
2684 TREE_PUBLIC (fn) = 1;
2685 DECL_ARTIFICIAL (fn) = 1;
2686 TREE_NOTHROW (fn) = 1;
2687 make_decl_rtl (fn, NULL);
2688 assemble_external (fn);
2689 }
2690
2691 /* We need to make an argument list for the function call.
2692
 2693 	 memset has three arguments: the first is a void * address, the
 2694 	 second an integer with the initialization value, and the last is a
 2695 	 size_t count of bytes to set.  */
2696 arg_list
2697 = build_tree_list (NULL_TREE,
2698 make_tree (build_pointer_type (void_type_node),
2699 object));
2700 TREE_CHAIN (arg_list)
2701 = build_tree_list (NULL_TREE,
2702 make_tree (integer_type_node, const0_rtx));
2703 TREE_CHAIN (TREE_CHAIN (arg_list))
2704 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2705 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2706
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR,
2709 build_pointer_type (TREE_TYPE (fn)), fn);
2710 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2711 call_expr, arg_list, NULL_TREE);
2712 TREE_SIDE_EFFECTS (call_expr) = 1;
2713
2714 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2715 #else
2716 emit_library_call (bzero_libfunc, LCT_NORMAL,
2717 VOIDmode, 2, object, Pmode, size,
2718 TYPE_MODE (integer_type_node));
2719 #endif
2720 }
2721 }
2722
2723 return retval;
2724 }
2725
2726 /* Generate code to copy Y into X.
2727 Both Y and X must have the same mode, except that
2728 Y can be a constant with VOIDmode.
2729 This mode cannot be BLKmode; use emit_block_move for that.
2730
2731 Return the last instruction emitted. */
2732
2733 rtx
2734 emit_move_insn (x, y)
2735 rtx x, y;
2736 {
2737 enum machine_mode mode = GET_MODE (x);
2738 rtx y_cst = NULL_RTX;
2739 rtx last_insn;
2740
2741 x = protect_from_queue (x, 1);
2742 y = protect_from_queue (y, 0);
2743
2744 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2745 abort ();
2746
2747 /* Never force constant_p_rtx to memory. */
2748 if (GET_CODE (y) == CONSTANT_P_RTX)
2749 ;
2750 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2751 {
2752 y_cst = y;
2753 y = force_const_mem (mode, y);
2754 }
2755
2756 /* If X or Y are memory references, verify that their addresses are valid
2757 for the machine. */
2758 if (GET_CODE (x) == MEM
2759 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2760 && ! push_operand (x, GET_MODE (x)))
2761 || (flag_force_addr
2762 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2763 x = validize_mem (x);
2764
2765 if (GET_CODE (y) == MEM
2766 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2767 || (flag_force_addr
2768 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2769 y = validize_mem (y);
2770
2771 if (mode == BLKmode)
2772 abort ();
2773
2774 last_insn = emit_move_insn_1 (x, y);
2775
2776 if (y_cst && GET_CODE (x) == REG)
2777 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2778
2779 return last_insn;
2780 }
2781
2782 /* Low level part of emit_move_insn.
2783 Called just like emit_move_insn, but assumes X and Y
2784 are basically valid. */
2785
2786 rtx
2787 emit_move_insn_1 (x, y)
2788 rtx x, y;
2789 {
2790 enum machine_mode mode = GET_MODE (x);
2791 enum machine_mode submode;
2792 enum mode_class class = GET_MODE_CLASS (mode);
2793 unsigned int i;
2794
2795 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2796 abort ();
2797
2798 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2799 return
2800 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2801
2802 /* Expand complex moves by moving real part and imag part, if possible. */
2803 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2804 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2805 * BITS_PER_UNIT),
2806 (class == MODE_COMPLEX_INT
2807 ? MODE_INT : MODE_FLOAT),
2808 0))
2809 && (mov_optab->handlers[(int) submode].insn_code
2810 != CODE_FOR_nothing))
2811 {
2812 /* Don't split destination if it is a stack push. */
2813 int stack = push_operand (x, GET_MODE (x));
2814
2815 #ifdef PUSH_ROUNDING
 2816       /* In case we output to the stack, but the size is smaller than the
 2817 	 machine can push exactly, we need to use move instructions.  */
2818 if (stack
2819 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2820 {
2821 rtx temp;
2822 int offset1, offset2;
2823
2824 /* Do not use anti_adjust_stack, since we don't want to update
2825 stack_pointer_delta. */
2826 temp = expand_binop (Pmode,
2827 #ifdef STACK_GROWS_DOWNWARD
2828 sub_optab,
2829 #else
2830 add_optab,
2831 #endif
2832 stack_pointer_rtx,
2833 GEN_INT
2834 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2835 stack_pointer_rtx,
2836 0,
2837 OPTAB_LIB_WIDEN);
2838 if (temp != stack_pointer_rtx)
2839 emit_move_insn (stack_pointer_rtx, temp);
2840 #ifdef STACK_GROWS_DOWNWARD
2841 offset1 = 0;
2842 offset2 = GET_MODE_SIZE (submode);
2843 #else
2844 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2845 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2846 + GET_MODE_SIZE (submode));
2847 #endif
2848 emit_move_insn (change_address (x, submode,
2849 gen_rtx_PLUS (Pmode,
2850 stack_pointer_rtx,
2851 GEN_INT (offset1))),
2852 gen_realpart (submode, y));
2853 emit_move_insn (change_address (x, submode,
2854 gen_rtx_PLUS (Pmode,
2855 stack_pointer_rtx,
2856 GEN_INT (offset2))),
2857 gen_imagpart (submode, y));
2858 }
2859 else
2860 #endif
2861 /* If this is a stack, push the highpart first, so it
2862 will be in the argument order.
2863
2864 In that case, change_address is used only to convert
2865 the mode, not to change the address. */
2866 if (stack)
2867 {
2868 /* Note that the real part always precedes the imag part in memory
2869 regardless of machine's endianness. */
2870 #ifdef STACK_GROWS_DOWNWARD
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_imagpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_realpart (submode, y)));
2877 #else
2878 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2879 (gen_rtx_MEM (submode, XEXP (x, 0)),
2880 gen_realpart (submode, y)));
2881 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2882 (gen_rtx_MEM (submode, XEXP (x, 0)),
2883 gen_imagpart (submode, y)));
2884 #endif
2885 }
2886 else
2887 {
2888 rtx realpart_x, realpart_y;
2889 rtx imagpart_x, imagpart_y;
2890
2891 /* If this is a complex value with each part being smaller than a
2892 word, the usual calling sequence will likely pack the pieces into
2893 a single register. Unfortunately, SUBREG of hard registers only
2894 deals in terms of words, so we have a problem converting input
2895 arguments to the CONCAT of two registers that is used elsewhere
2896 for complex values. If this is before reload, we can copy it into
2897 memory and reload. FIXME, we should see about using extract and
2898 insert on integer registers, but complex short and complex char
2899 variables should be rarely used. */
2900 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2901 && (reload_in_progress | reload_completed) == 0)
2902 {
2903 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2904 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2905
2906 if (packed_dest_p || packed_src_p)
2907 {
2908 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2909 ? MODE_FLOAT : MODE_INT);
2910
2911 enum machine_mode reg_mode
2912 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2913
2914 if (reg_mode != BLKmode)
2915 {
2916 rtx mem = assign_stack_temp (reg_mode,
2917 GET_MODE_SIZE (mode), 0);
2918 rtx cmem = adjust_address (mem, mode, 0);
2919
2920 cfun->cannot_inline
2921 = N_("function using short complex types cannot be inline");
2922
2923 if (packed_dest_p)
2924 {
2925 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2926 emit_move_insn_1 (cmem, y);
2927 return emit_move_insn_1 (sreg, mem);
2928 }
2929 else
2930 {
2931 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2932 emit_move_insn_1 (mem, sreg);
2933 return emit_move_insn_1 (x, cmem);
2934 }
2935 }
2936 }
2937 }
2938
2939 realpart_x = gen_realpart (submode, x);
2940 realpart_y = gen_realpart (submode, y);
2941 imagpart_x = gen_imagpart (submode, x);
2942 imagpart_y = gen_imagpart (submode, y);
2943
2944 /* Show the output dies here. This is necessary for SUBREGs
2945 of pseudos since we cannot track their lifetimes correctly;
2946 hard regs shouldn't appear here except as return values.
2947 We never want to emit such a clobber after reload. */
2948 if (x != y
2949 && ! (reload_in_progress || reload_completed)
2950 && (GET_CODE (realpart_x) == SUBREG
2951 || GET_CODE (imagpart_x) == SUBREG))
2952 {
2953 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2954 }
2955
2956 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2957 (realpart_x, realpart_y));
2958 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2959 (imagpart_x, imagpart_y));
2960 }
2961
2962 return get_last_insn ();
2963 }
2964
2965 /* This will handle any multi-word mode that lacks a move_insn pattern.
2966 However, you will get better code if you define such patterns,
2967 even if they must turn into multiple assembler instructions. */
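  /* For instance (assuming a 32-bit target with no DImode move pattern), a
     DImode copy falls through to the loop below and is emitted as two
     word_mode moves, one per 4-byte subword of X and Y.  */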
2968 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2969 {
2970 rtx last_insn = 0;
2971 rtx seq, inner;
2972 int need_clobber;
2973
2974 #ifdef PUSH_ROUNDING
2975
2976 /* If X is a push on the stack, do the push now and replace
2977 X with a reference to the stack pointer. */
2978 if (push_operand (x, GET_MODE (x)))
2979 {
2980 rtx temp;
2981 enum rtx_code code;
2982
2983 /* Do not use anti_adjust_stack, since we don't want to update
2984 stack_pointer_delta. */
2985 temp = expand_binop (Pmode,
2986 #ifdef STACK_GROWS_DOWNWARD
2987 sub_optab,
2988 #else
2989 add_optab,
2990 #endif
2991 stack_pointer_rtx,
2992 GEN_INT
2993 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2994 stack_pointer_rtx,
2995 0,
2996 OPTAB_LIB_WIDEN);
2997 if (temp != stack_pointer_rtx)
2998 emit_move_insn (stack_pointer_rtx, temp);
2999
3000 code = GET_CODE (XEXP (x, 0));
3001 /* Just hope that small offsets off SP are OK. */
3002 if (code == POST_INC)
3003 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3004 GEN_INT (-(HOST_WIDE_INT)
3005 GET_MODE_SIZE (GET_MODE (x))));
3006 else if (code == POST_DEC)
3007 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3008 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3009 else
3010 temp = stack_pointer_rtx;
3011
3012 x = change_address (x, VOIDmode, temp);
3013 }
3014 #endif
3015
3016 /* If we are in reload, see if either operand is a MEM whose address
3017 is scheduled for replacement. */
3018 if (reload_in_progress && GET_CODE (x) == MEM
3019 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3020 x = replace_equiv_address_nv (x, inner);
3021 if (reload_in_progress && GET_CODE (y) == MEM
3022 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3023 y = replace_equiv_address_nv (y, inner);
3024
3025 start_sequence ();
3026
3027 need_clobber = 0;
3028 for (i = 0;
3029 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3030 i++)
3031 {
3032 rtx xpart = operand_subword (x, i, 1, mode);
3033 rtx ypart = operand_subword (y, i, 1, mode);
3034
3035 /* If we can't get a part of Y, put Y into memory if it is a
3036 constant. Otherwise, force it into a register. If we still
3037 can't get a part of Y, abort. */
3038 if (ypart == 0 && CONSTANT_P (y))
3039 {
3040 y = force_const_mem (mode, y);
3041 ypart = operand_subword (y, i, 1, mode);
3042 }
3043 else if (ypart == 0)
3044 ypart = operand_subword_force (y, i, mode);
3045
3046 if (xpart == 0 || ypart == 0)
3047 abort ();
3048
3049 need_clobber |= (GET_CODE (xpart) == SUBREG);
3050
3051 last_insn = emit_move_insn (xpart, ypart);
3052 }
3053
3054 seq = gen_sequence ();
3055 end_sequence ();
3056
3057 /* Show the output dies here. This is necessary for SUBREGs
3058 of pseudos since we cannot track their lifetimes correctly;
3059 hard regs shouldn't appear here except as return values.
3060 We never want to emit such a clobber after reload. */
3061 if (x != y
3062 && ! (reload_in_progress || reload_completed)
3063 && need_clobber != 0)
3064 {
3065 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3066 }
3067
3068 emit_insn (seq);
3069
3070 return last_insn;
3071 }
3072 else
3073 abort ();
3074 }
3075 \f
3076 /* Pushing data onto the stack. */
3077
3078 /* Push a block of length SIZE (perhaps variable)
3079 and return an rtx to address the beginning of the block.
3080 Note that it is not possible for the value returned to be a QUEUED.
3081 The value may be virtual_outgoing_args_rtx.
3082
3083 EXTRA is the number of bytes of padding to push in addition to SIZE.
3084 BELOW nonzero means this padding comes at low addresses;
3085 otherwise, the padding comes at high addresses. */
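/* As a rough sketch (arguments invented for illustration): on a
   STACK_GROWS_DOWNWARD target, push_block (GEN_INT (16), 0, 0) adjusts the
   stack pointer down by 16 bytes and returns an address based on
   virtual_outgoing_args_rtx for the start of the new 16-byte block.  */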
3086
3087 rtx
3088 push_block (size, extra, below)
3089 rtx size;
3090 int extra, below;
3091 {
3092 rtx temp;
3093
3094 size = convert_modes (Pmode, ptr_mode, size, 1);
3095 if (CONSTANT_P (size))
3096 anti_adjust_stack (plus_constant (size, extra));
3097 else if (GET_CODE (size) == REG && extra == 0)
3098 anti_adjust_stack (size);
3099 else
3100 {
3101 temp = copy_to_mode_reg (Pmode, size);
3102 if (extra != 0)
3103 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3104 temp, 0, OPTAB_LIB_WIDEN);
3105 anti_adjust_stack (temp);
3106 }
3107
3108 #ifndef STACK_GROWS_DOWNWARD
3109 if (0)
3110 #else
3111 if (1)
3112 #endif
3113 {
3114 temp = virtual_outgoing_args_rtx;
3115 if (extra != 0 && below)
3116 temp = plus_constant (temp, extra);
3117 }
3118 else
3119 {
3120 if (GET_CODE (size) == CONST_INT)
3121 temp = plus_constant (virtual_outgoing_args_rtx,
3122 -INTVAL (size) - (below ? 0 : extra));
3123 else if (extra != 0 && !below)
3124 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3125 negate_rtx (Pmode, plus_constant (size, extra)));
3126 else
3127 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3128 negate_rtx (Pmode, size));
3129 }
3130
3131 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3132 }
3133
3134
3135 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3136 block of SIZE bytes. */
3137
3138 static rtx
3139 get_push_address (size)
3140 int size;
3141 {
3142 rtx temp;
3143
3144 if (STACK_PUSH_CODE == POST_DEC)
3145 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3146 else if (STACK_PUSH_CODE == POST_INC)
3147 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3148 else
3149 temp = stack_pointer_rtx;
3150
3151 return copy_to_reg (temp);
3152 }
3153
3154 #ifdef PUSH_ROUNDING
3155
3156 /* Emit single push insn. */
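/* For example (illustrative only): if PUSH_ROUNDING rounds every push up to
   4 bytes and MODE is a 2-byte mode, rounded_size below is 4, the stack
   pointer is adjusted by the full 4 bytes via a PRE_MODIFY address, and the
   2-byte value is stored at the resulting address.  */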
3157
3158 static void
3159 emit_single_push_insn (mode, x, type)
3160 rtx x;
3161 enum machine_mode mode;
3162 tree type;
3163 {
3164 rtx dest_addr;
3165 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3166 rtx dest;
3167 enum insn_code icode;
3168 insn_operand_predicate_fn pred;
3169
3170 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
 3171   /* If there is a push pattern, use it.  Otherwise try the old way of
 3172      throwing a MEM representing the push operation to the move expander.  */
3173 icode = push_optab->handlers[(int) mode].insn_code;
3174 if (icode != CODE_FOR_nothing)
3175 {
3176 if (((pred = insn_data[(int) icode].operand[0].predicate)
3177 && !((*pred) (x, mode))))
3178 x = force_reg (mode, x);
3179 emit_insn (GEN_FCN (icode) (x));
3180 return;
3181 }
3182 if (GET_MODE_SIZE (mode) == rounded_size)
3183 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3184 else
3185 {
3186 #ifdef STACK_GROWS_DOWNWARD
3187 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3188 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3189 #else
3190 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3191 GEN_INT (rounded_size));
3192 #endif
3193 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3194 }
3195
3196 dest = gen_rtx_MEM (mode, dest_addr);
3197
3198 if (type != 0)
3199 {
3200 set_mem_attributes (dest, type, 1);
3201 /* Function incoming arguments may overlap with sibling call
3202 outgoing arguments and we cannot allow reordering of reads
3203 from function arguments with stores to outgoing arguments
3204 of sibling calls. */
3205 set_mem_alias_set (dest, 0);
3206 }
3207 emit_move_insn (dest, x);
3208 }
3209 #endif
3210
3211 /* Generate code to push X onto the stack, assuming it has mode MODE and
3212 type TYPE.
3213 MODE is redundant except when X is a CONST_INT (since they don't
3214 carry mode info).
3215 SIZE is an rtx for the size of data to be copied (in bytes),
3216 needed only if X is BLKmode.
3217
3218 ALIGN (in bits) is maximum alignment we can assume.
3219
3220 If PARTIAL and REG are both nonzero, then copy that many of the first
3221 words of X into registers starting with REG, and push the rest of X.
3222 The amount of space pushed is decreased by PARTIAL words,
3223 rounded *down* to a multiple of PARM_BOUNDARY.
3224 REG must be a hard register in this case.
 3225    If REG is zero but PARTIAL is not, take all other actions for an
3226 argument partially in registers, but do not actually load any
3227 registers.
3228
3229 EXTRA is the amount in bytes of extra space to leave next to this arg.
3230 This is ignored if an argument block has already been allocated.
3231
3232 On a machine that lacks real push insns, ARGS_ADDR is the address of
3233 the bottom of the argument block for this call. We use indexing off there
 3234    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
 3235    argument block has not been preallocated.
3236
3237 ARGS_SO_FAR is the size of args previously pushed for this call.
3238
3239 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3240 for arguments passed in registers. If nonzero, it will be the number
3241 of bytes required. */
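/* A small worked example (numbers invented for illustration): with
   UNITS_PER_WORD == 4, PARTIAL == 2 and REG a hard register, the first
   8 bytes of X are loaded into registers starting at REG (see the
   move_block_to_reg call at the end of this function) and only the
   remainder of X is actually pushed or stored to the argument block.  */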
3242
3243 void
3244 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3245 args_addr, args_so_far, reg_parm_stack_space,
3246 alignment_pad)
3247 rtx x;
3248 enum machine_mode mode;
3249 tree type;
3250 rtx size;
3251 unsigned int align;
3252 int partial;
3253 rtx reg;
3254 int extra;
3255 rtx args_addr;
3256 rtx args_so_far;
3257 int reg_parm_stack_space;
3258 rtx alignment_pad;
3259 {
3260 rtx xinner;
3261 enum direction stack_direction
3262 #ifdef STACK_GROWS_DOWNWARD
3263 = downward;
3264 #else
3265 = upward;
3266 #endif
3267
3268 /* Decide where to pad the argument: `downward' for below,
3269 `upward' for above, or `none' for don't pad it.
3270 Default is below for small data on big-endian machines; else above. */
3271 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3272
3273 /* Invert direction if stack is post-decrement.
3274 FIXME: why? */
3275 if (STACK_PUSH_CODE == POST_DEC)
3276 if (where_pad != none)
3277 where_pad = (where_pad == downward ? upward : downward);
3278
3279 xinner = x = protect_from_queue (x, 0);
3280
3281 if (mode == BLKmode)
3282 {
3283 /* Copy a block into the stack, entirely or partially. */
3284
3285 rtx temp;
3286 int used = partial * UNITS_PER_WORD;
3287 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3288 int skip;
3289
3290 if (size == 0)
3291 abort ();
3292
3293 used -= offset;
3294
3295 /* USED is now the # of bytes we need not copy to the stack
3296 because registers will take care of them. */
3297
3298 if (partial != 0)
3299 xinner = adjust_address (xinner, BLKmode, used);
3300
3301 /* If the partial register-part of the arg counts in its stack size,
3302 skip the part of stack space corresponding to the registers.
3303 Otherwise, start copying to the beginning of the stack space,
3304 by setting SKIP to 0. */
3305 skip = (reg_parm_stack_space == 0) ? 0 : used;
3306
3307 #ifdef PUSH_ROUNDING
3308 /* Do it with several push insns if that doesn't take lots of insns
3309 and if there is no difficulty with push insns that skip bytes
3310 on the stack for alignment purposes. */
3311 if (args_addr == 0
3312 && PUSH_ARGS
3313 && GET_CODE (size) == CONST_INT
3314 && skip == 0
3315 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3316 /* Here we avoid the case of a structure whose weak alignment
3317 forces many pushes of a small amount of data,
3318 and such small pushes do rounding that causes trouble. */
3319 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3320 || align >= BIGGEST_ALIGNMENT
3321 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3322 == (align / BITS_PER_UNIT)))
3323 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3324 {
3325 /* Push padding now if padding above and stack grows down,
3326 or if padding below and stack grows up.
3327 But if space already allocated, this has already been done. */
3328 if (extra && args_addr == 0
3329 && where_pad != none && where_pad != stack_direction)
3330 anti_adjust_stack (GEN_INT (extra));
3331
3332 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3333
3334 if (current_function_check_memory_usage && ! in_check_memory_usage)
3335 {
3336 rtx temp;
3337
3338 in_check_memory_usage = 1;
3339 temp = get_push_address (INTVAL (size) - used);
3340 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3341 emit_library_call (chkr_copy_bitmap_libfunc,
3342 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3343 Pmode, XEXP (xinner, 0), Pmode,
3344 GEN_INT (INTVAL (size) - used),
3345 TYPE_MODE (sizetype));
3346 else
3347 emit_library_call (chkr_set_right_libfunc,
3348 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3349 Pmode, GEN_INT (INTVAL (size) - used),
3350 TYPE_MODE (sizetype),
3351 GEN_INT (MEMORY_USE_RW),
3352 TYPE_MODE (integer_type_node));
3353 in_check_memory_usage = 0;
3354 }
3355 }
3356 else
3357 #endif /* PUSH_ROUNDING */
3358 {
3359 rtx target;
3360
3361 /* Otherwise make space on the stack and copy the data
3362 to the address of that space. */
3363
3364 /* Deduct words put into registers from the size we must copy. */
3365 if (partial != 0)
3366 {
3367 if (GET_CODE (size) == CONST_INT)
3368 size = GEN_INT (INTVAL (size) - used);
3369 else
3370 size = expand_binop (GET_MODE (size), sub_optab, size,
3371 GEN_INT (used), NULL_RTX, 0,
3372 OPTAB_LIB_WIDEN);
3373 }
3374
3375 /* Get the address of the stack space.
3376 In this case, we do not deal with EXTRA separately.
3377 A single stack adjust will do. */
3378 if (! args_addr)
3379 {
3380 temp = push_block (size, extra, where_pad == downward);
3381 extra = 0;
3382 }
3383 else if (GET_CODE (args_so_far) == CONST_INT)
3384 temp = memory_address (BLKmode,
3385 plus_constant (args_addr,
3386 skip + INTVAL (args_so_far)));
3387 else
3388 temp = memory_address (BLKmode,
3389 plus_constant (gen_rtx_PLUS (Pmode,
3390 args_addr,
3391 args_so_far),
3392 skip));
3393 if (current_function_check_memory_usage && ! in_check_memory_usage)
3394 {
3395 in_check_memory_usage = 1;
3396 target = copy_to_reg (temp);
3397 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3398 emit_library_call (chkr_copy_bitmap_libfunc,
3399 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3400 target, Pmode,
3401 XEXP (xinner, 0), Pmode,
3402 size, TYPE_MODE (sizetype));
3403 else
3404 emit_library_call (chkr_set_right_libfunc,
3405 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3406 target, Pmode,
3407 size, TYPE_MODE (sizetype),
3408 GEN_INT (MEMORY_USE_RW),
3409 TYPE_MODE (integer_type_node));
3410 in_check_memory_usage = 0;
3411 }
3412
3413 target = gen_rtx_MEM (BLKmode, temp);
3414
3415 if (type != 0)
3416 {
3417 set_mem_attributes (target, type, 1);
3418 /* Function incoming arguments may overlap with sibling call
3419 outgoing arguments and we cannot allow reordering of reads
3420 from function arguments with stores to outgoing arguments
3421 of sibling calls. */
3422 set_mem_alias_set (target, 0);
3423 }
3424 else
3425 set_mem_align (target, align);
3426
3427 /* TEMP is the address of the block. Copy the data there. */
3428 if (GET_CODE (size) == CONST_INT
3429 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3430 {
3431 move_by_pieces (target, xinner, INTVAL (size), align);
3432 goto ret;
3433 }
3434 else
3435 {
3436 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3437 enum machine_mode mode;
3438
3439 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3440 mode != VOIDmode;
3441 mode = GET_MODE_WIDER_MODE (mode))
3442 {
3443 enum insn_code code = movstr_optab[(int) mode];
3444 insn_operand_predicate_fn pred;
3445
3446 if (code != CODE_FOR_nothing
3447 && ((GET_CODE (size) == CONST_INT
3448 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3449 <= (GET_MODE_MASK (mode) >> 1)))
3450 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3451 && (!(pred = insn_data[(int) code].operand[0].predicate)
3452 || ((*pred) (target, BLKmode)))
3453 && (!(pred = insn_data[(int) code].operand[1].predicate)
3454 || ((*pred) (xinner, BLKmode)))
3455 && (!(pred = insn_data[(int) code].operand[3].predicate)
3456 || ((*pred) (opalign, VOIDmode))))
3457 {
3458 rtx op2 = convert_to_mode (mode, size, 1);
3459 rtx last = get_last_insn ();
3460 rtx pat;
3461
3462 pred = insn_data[(int) code].operand[2].predicate;
3463 if (pred != 0 && ! (*pred) (op2, mode))
3464 op2 = copy_to_mode_reg (mode, op2);
3465
3466 pat = GEN_FCN ((int) code) (target, xinner,
3467 op2, opalign);
3468 if (pat)
3469 {
3470 emit_insn (pat);
3471 goto ret;
3472 }
3473 else
3474 delete_insns_since (last);
3475 }
3476 }
3477 }
3478
3479 if (!ACCUMULATE_OUTGOING_ARGS)
3480 {
3481 /* If the source is referenced relative to the stack pointer,
3482 copy it to another register to stabilize it. We do not need
3483 to do this if we know that we won't be changing sp. */
3484
3485 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3486 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3487 temp = copy_to_reg (temp);
3488 }
3489
3490 /* Make inhibit_defer_pop nonzero around the library call
3491 to force it to pop the bcopy-arguments right away. */
3492 NO_DEFER_POP;
3493 #ifdef TARGET_MEM_FUNCTIONS
3494 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3495 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3496 convert_to_mode (TYPE_MODE (sizetype),
3497 size, TREE_UNSIGNED (sizetype)),
3498 TYPE_MODE (sizetype));
3499 #else
3500 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3501 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3502 convert_to_mode (TYPE_MODE (integer_type_node),
3503 size,
3504 TREE_UNSIGNED (integer_type_node)),
3505 TYPE_MODE (integer_type_node));
3506 #endif
3507 OK_DEFER_POP;
3508 }
3509 }
3510 else if (partial > 0)
3511 {
3512 /* Scalar partly in registers. */
3513
3514 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3515 int i;
3516 int not_stack;
3517 /* # words of start of argument
3518 that we must make space for but need not store. */
3519 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3520 int args_offset = INTVAL (args_so_far);
3521 int skip;
3522
3523 /* Push padding now if padding above and stack grows down,
3524 or if padding below and stack grows up.
3525 But if space already allocated, this has already been done. */
3526 if (extra && args_addr == 0
3527 && where_pad != none && where_pad != stack_direction)
3528 anti_adjust_stack (GEN_INT (extra));
3529
3530 /* If we make space by pushing it, we might as well push
3531 the real data. Otherwise, we can leave OFFSET nonzero
3532 and leave the space uninitialized. */
3533 if (args_addr == 0)
3534 offset = 0;
3535
3536 /* Now NOT_STACK gets the number of words that we don't need to
3537 allocate on the stack. */
3538 not_stack = partial - offset;
3539
3540 /* If the partial register-part of the arg counts in its stack size,
3541 skip the part of stack space corresponding to the registers.
3542 Otherwise, start copying to the beginning of the stack space,
3543 by setting SKIP to 0. */
3544 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3545
3546 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3547 x = validize_mem (force_const_mem (mode, x));
3548
3549 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3550 SUBREGs of such registers are not allowed. */
3551 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3552 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3553 x = copy_to_reg (x);
3554
3555 /* Loop over all the words allocated on the stack for this arg. */
3556 /* We can do it by words, because any scalar bigger than a word
 3557 	 has a size that is a multiple of a word.  */
3558 #ifndef PUSH_ARGS_REVERSED
3559 for (i = not_stack; i < size; i++)
3560 #else
3561 for (i = size - 1; i >= not_stack; i--)
3562 #endif
3563 if (i >= not_stack + offset)
3564 emit_push_insn (operand_subword_force (x, i, mode),
3565 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3566 0, args_addr,
3567 GEN_INT (args_offset + ((i - not_stack + skip)
3568 * UNITS_PER_WORD)),
3569 reg_parm_stack_space, alignment_pad);
3570 }
3571 else
3572 {
3573 rtx addr;
3574 rtx target = NULL_RTX;
3575 rtx dest;
3576
3577 /* Push padding now if padding above and stack grows down,
3578 or if padding below and stack grows up.
3579 But if space already allocated, this has already been done. */
3580 if (extra && args_addr == 0
3581 && where_pad != none && where_pad != stack_direction)
3582 anti_adjust_stack (GEN_INT (extra));
3583
3584 #ifdef PUSH_ROUNDING
3585 if (args_addr == 0 && PUSH_ARGS)
3586 emit_single_push_insn (mode, x, type);
3587 else
3588 #endif
3589 {
3590 if (GET_CODE (args_so_far) == CONST_INT)
3591 addr
3592 = memory_address (mode,
3593 plus_constant (args_addr,
3594 INTVAL (args_so_far)));
3595 else
3596 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3597 args_so_far));
3598 target = addr;
3599 dest = gen_rtx_MEM (mode, addr);
3600 if (type != 0)
3601 {
3602 set_mem_attributes (dest, type, 1);
3603 /* Function incoming arguments may overlap with sibling call
3604 outgoing arguments and we cannot allow reordering of reads
3605 from function arguments with stores to outgoing arguments
3606 of sibling calls. */
3607 set_mem_alias_set (dest, 0);
3608 }
3609
3610 emit_move_insn (dest, x);
3611
3612 }
3613
3614 if (current_function_check_memory_usage && ! in_check_memory_usage)
3615 {
3616 in_check_memory_usage = 1;
3617 if (target == 0)
3618 target = get_push_address (GET_MODE_SIZE (mode));
3619
3620 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3621 emit_library_call (chkr_copy_bitmap_libfunc,
3622 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3623 Pmode, XEXP (x, 0), Pmode,
3624 GEN_INT (GET_MODE_SIZE (mode)),
3625 TYPE_MODE (sizetype));
3626 else
3627 emit_library_call (chkr_set_right_libfunc,
3628 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3629 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3630 TYPE_MODE (sizetype),
3631 GEN_INT (MEMORY_USE_RW),
3632 TYPE_MODE (integer_type_node));
3633 in_check_memory_usage = 0;
3634 }
3635 }
3636
3637 ret:
3638 /* If part should go in registers, copy that part
3639 into the appropriate registers. Do this now, at the end,
3640 since mem-to-mem copies above may do function calls. */
3641 if (partial > 0 && reg != 0)
3642 {
3643 /* Handle calls that pass values in multiple non-contiguous locations.
3644 The Irix 6 ABI has examples of this. */
3645 if (GET_CODE (reg) == PARALLEL)
3646 emit_group_load (reg, x, -1, align); /* ??? size? */
3647 else
3648 move_block_to_reg (REGNO (reg), x, partial, mode);
3649 }
3650
3651 if (extra && args_addr == 0 && where_pad == stack_direction)
3652 anti_adjust_stack (GEN_INT (extra));
3653
3654 if (alignment_pad && args_addr == 0)
3655 anti_adjust_stack (alignment_pad);
3656 }
3657 \f
3658 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3659 operations. */
3660
3661 static rtx
3662 get_subtarget (x)
3663 rtx x;
3664 {
3665 return ((x == 0
3666 /* Only registers can be subtargets. */
3667 || GET_CODE (x) != REG
3668 /* If the register is readonly, it can't be set more than once. */
3669 || RTX_UNCHANGING_P (x)
3670 /* Don't use hard regs to avoid extending their life. */
3671 || REGNO (x) < FIRST_PSEUDO_REGISTER
3672 /* Avoid subtargets inside loops,
3673 since they hide some invariant expressions. */
3674 || preserve_subexpressions_p ())
3675 ? 0 : x);
3676 }
3677
3678 /* Expand an assignment that stores the value of FROM into TO.
3679 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3680 (This may contain a QUEUED rtx;
3681 if the value is constant, this rtx is a constant.)
3682 Otherwise, the returned value is NULL_RTX.
3683
3684 SUGGEST_REG is no longer actually used.
3685 It used to mean, copy the value through a register
3686 and return that register, if that is possible.
3687 We now use WANT_VALUE to decide whether to do this. */
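
/* A small example of the kind of reference that needs the special
   treatment below (ordinary C; the exact layout is immaterial):

	struct s { unsigned int flag : 1; unsigned int rest : 31; } obj;
	void f (unsigned int v) { obj.flag = v; }

   Here TO is a COMPONENT_REF whose field is a bit-field, so a plain MEM
   move will not do; the code decomposes the reference with
   get_inner_reference and stores through store_field instead. */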
3688
3689 rtx
3690 expand_assignment (to, from, want_value, suggest_reg)
3691 tree to, from;
3692 int want_value;
3693 int suggest_reg ATTRIBUTE_UNUSED;
3694 {
3695 rtx to_rtx = 0;
3696 rtx result;
3697
3698 /* Don't crash if the lhs of the assignment was erroneous. */
3699
3700 if (TREE_CODE (to) == ERROR_MARK)
3701 {
3702 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3703 return want_value ? result : NULL_RTX;
3704 }
3705
3706 /* Assignment of a structure component needs special treatment
3707 if the structure component's rtx is not simply a MEM.
3708 Assignment of an array element at a constant index, and assignment of
3709 an array element in an unaligned packed structure field, has the same
3710 problem. */
3711
3712 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3713 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3714 {
3715 enum machine_mode mode1;
3716 HOST_WIDE_INT bitsize, bitpos;
3717 tree offset;
3718 int unsignedp;
3719 int volatilep = 0;
3720 tree tem;
3721 unsigned int alignment;
3722
3723 push_temp_slots ();
3724 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3725 &unsignedp, &volatilep, &alignment);
3726
3727 /* If we are going to use store_bit_field and extract_bit_field,
3728 make sure to_rtx will be safe for multiple use. */
3729
3730 if (mode1 == VOIDmode && want_value)
3731 tem = stabilize_reference (tem);
3732
3733 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3734 if (offset != 0)
3735 {
3736 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3737
3738 if (GET_CODE (to_rtx) != MEM)
3739 abort ();
3740
3741 if (GET_MODE (offset_rtx) != ptr_mode)
3742 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3743
3744 #ifdef POINTERS_EXTEND_UNSIGNED
3745 if (GET_MODE (offset_rtx) != Pmode)
3746 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3747 #endif
3748
3749 /* A constant address in TO_RTX can have VOIDmode; we must not try
3750 to call force_reg in that case, so avoid it. */
3751 if (GET_CODE (to_rtx) == MEM
3752 && GET_MODE (to_rtx) == BLKmode
3753 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3754 && bitsize
3755 && (bitpos % bitsize) == 0
3756 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3757 && alignment == GET_MODE_ALIGNMENT (mode1))
3758 {
3759 rtx temp
3760 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3761
3762 if (GET_CODE (XEXP (temp, 0)) == REG)
3763 to_rtx = temp;
3764 else
3765 to_rtx = (replace_equiv_address
3766 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3767 XEXP (temp, 0))));
3768 bitpos = 0;
3769 }
3770
3771 to_rtx = offset_address (to_rtx, offset_rtx,
3772 highest_pow2_factor (offset));
3773 }
3774
3775 if (volatilep)
3776 {
3777 if (GET_CODE (to_rtx) == MEM)
3778 {
3779 /* When the offset is zero, to_rtx is the address of the
3780 structure we are storing into, and hence may be shared.
3781 We must make a new MEM before setting the volatile bit. */
3782 if (offset == 0)
3783 to_rtx = copy_rtx (to_rtx);
3784
3785 MEM_VOLATILE_P (to_rtx) = 1;
3786 }
3787 #if 0 /* This was turned off because, when a field is volatile
3788 in an object which is not volatile, the object may be in a register,
3789 and then we would abort over here. */
3790 else
3791 abort ();
3792 #endif
3793 }
3794
3795 if (TREE_CODE (to) == COMPONENT_REF
3796 && TREE_READONLY (TREE_OPERAND (to, 1)))
3797 {
3798 if (offset == 0)
3799 to_rtx = copy_rtx (to_rtx);
3800
3801 RTX_UNCHANGING_P (to_rtx) = 1;
3802 }
3803
3804 /* Check the access. */
3805 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3806 {
3807 rtx to_addr;
3808 int size;
3809 int best_mode_size;
3810 enum machine_mode best_mode;
3811
3812 best_mode = get_best_mode (bitsize, bitpos,
3813 TYPE_ALIGN (TREE_TYPE (tem)),
3814 mode1, volatilep);
3815 if (best_mode == VOIDmode)
3816 best_mode = QImode;
3817
3818 best_mode_size = GET_MODE_BITSIZE (best_mode);
3819 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3820 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3821 size *= GET_MODE_SIZE (best_mode);
3822
3823 /* Check the access right of the pointer. */
3824 in_check_memory_usage = 1;
3825 if (size)
3826 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3827 VOIDmode, 3, to_addr, Pmode,
3828 GEN_INT (size), TYPE_MODE (sizetype),
3829 GEN_INT (MEMORY_USE_WO),
3830 TYPE_MODE (integer_type_node));
3831 in_check_memory_usage = 0;
3832 }
3833
3834 /* If this is a varying-length object, we must get the address of
3835 the source and do an explicit block move. */
3836 if (bitsize < 0)
3837 {
3838 unsigned int from_align;
3839 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3840 rtx inner_to_rtx
3841 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3842
3843 emit_block_move (inner_to_rtx, from_rtx, expr_size (from));
3844
3845 free_temp_slots ();
3846 pop_temp_slots ();
3847 return to_rtx;
3848 }
3849 else
3850 {
3851 if (! can_address_p (to))
3852 {
3853 to_rtx = copy_rtx (to_rtx);
3854 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3855 }
3856
3857 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3858 (want_value
3859 /* Spurious cast for HPUX compiler. */
3860 ? ((enum machine_mode)
3861 TYPE_MODE (TREE_TYPE (to)))
3862 : VOIDmode),
3863 unsignedp,
3864 alignment,
3865 int_size_in_bytes (TREE_TYPE (tem)),
3866 get_alias_set (to));
3867
3868 preserve_temp_slots (result);
3869 free_temp_slots ();
3870 pop_temp_slots ();
3871
3872 /* If the value is meaningful, convert RESULT to the proper mode.
3873 Otherwise, return nothing. */
3874 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3875 TYPE_MODE (TREE_TYPE (from)),
3876 result,
3877 TREE_UNSIGNED (TREE_TYPE (to)))
3878 : NULL_RTX);
3879 }
3880 }
3881
3882 /* If the rhs is a function call and its value is not an aggregate,
3883 call the function before we start to compute the lhs.
3884 This is needed for correct code for cases such as
3885 val = setjmp (buf) on machines where reference to val
3886 requires loading up part of an address in a separate insn.
3887
3888 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3889 since it might be a promoted variable where the zero- or sign- extension
3890 needs to be done. Handling this in the normal way is safe because no
3891 computation is done before the call. */
3892 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3893 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3894 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3895 && GET_CODE (DECL_RTL (to)) == REG))
3896 {
3897 rtx value;
3898
3899 push_temp_slots ();
3900 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3901 if (to_rtx == 0)
3902 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3903
3904 /* Handle calls that return values in multiple non-contiguous locations.
3905 The Irix 6 ABI has examples of this. */
3906 if (GET_CODE (to_rtx) == PARALLEL)
3907 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3908 TYPE_ALIGN (TREE_TYPE (from)));
3909 else if (GET_MODE (to_rtx) == BLKmode)
3910 emit_block_move (to_rtx, value, expr_size (from));
3911 else
3912 {
3913 #ifdef POINTERS_EXTEND_UNSIGNED
3914 if (POINTER_TYPE_P (TREE_TYPE (to))
3915 && GET_MODE (to_rtx) != GET_MODE (value))
3916 value = convert_memory_address (GET_MODE (to_rtx), value);
3917 #endif
3918 emit_move_insn (to_rtx, value);
3919 }
3920 preserve_temp_slots (to_rtx);
3921 free_temp_slots ();
3922 pop_temp_slots ();
3923 return want_value ? to_rtx : NULL_RTX;
3924 }
3925
3926 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3927 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3928
3929 if (to_rtx == 0)
3930 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3931
3932 /* Don't move directly into a return register. */
3933 if (TREE_CODE (to) == RESULT_DECL
3934 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3935 {
3936 rtx temp;
3937
3938 push_temp_slots ();
3939 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3940
3941 if (GET_CODE (to_rtx) == PARALLEL)
3942 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3943 TYPE_ALIGN (TREE_TYPE (from)));
3944 else
3945 emit_move_insn (to_rtx, temp);
3946
3947 preserve_temp_slots (to_rtx);
3948 free_temp_slots ();
3949 pop_temp_slots ();
3950 return want_value ? to_rtx : NULL_RTX;
3951 }
3952
3953 /* In case we are returning the contents of an object which overlaps
3954 the place the value is being stored, use a safe function when copying
3955 a value through a pointer into a structure value return block. */
3956 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3957 && current_function_returns_struct
3958 && !current_function_returns_pcc_struct)
3959 {
3960 rtx from_rtx, size;
3961
3962 push_temp_slots ();
3963 size = expr_size (from);
3964 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3965 EXPAND_MEMORY_USE_DONT);
3966
3967 /* Copy the rights of the bitmap. */
3968 if (current_function_check_memory_usage)
3969 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3970 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3971 XEXP (from_rtx, 0), Pmode,
3972 convert_to_mode (TYPE_MODE (sizetype),
3973 size, TREE_UNSIGNED (sizetype)),
3974 TYPE_MODE (sizetype));
3975
3976 #ifdef TARGET_MEM_FUNCTIONS
3977 emit_library_call (memmove_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3979 XEXP (from_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (sizetype),
3981 size, TREE_UNSIGNED (sizetype)),
3982 TYPE_MODE (sizetype));
3983 #else
3984 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3985 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3986 XEXP (to_rtx, 0), Pmode,
3987 convert_to_mode (TYPE_MODE (integer_type_node),
3988 size, TREE_UNSIGNED (integer_type_node)),
3989 TYPE_MODE (integer_type_node));
3990 #endif
3991
3992 preserve_temp_slots (to_rtx);
3993 free_temp_slots ();
3994 pop_temp_slots ();
3995 return want_value ? to_rtx : NULL_RTX;
3996 }
3997
3998 /* Compute FROM and store the value in the rtx we got. */
3999
4000 push_temp_slots ();
4001 result = store_expr (from, to_rtx, want_value);
4002 preserve_temp_slots (result);
4003 free_temp_slots ();
4004 pop_temp_slots ();
4005 return want_value ? result : NULL_RTX;
4006 }
4007
4008 /* Generate code for computing expression EXP,
4009 and storing the value into TARGET.
4010 TARGET may contain a QUEUED rtx.
4011
4012 If WANT_VALUE is nonzero, return a copy of the value
4013 not in TARGET, so that we can be sure to use the proper
4014 value in a containing expression even if TARGET has something
4015 else stored in it. If possible, we copy the value through a pseudo
4016 and return that pseudo. Or, if the value is constant, we try to
4017 return the constant. In some cases, we return a pseudo
4018 copied *from* TARGET.
4019
4020 If the mode is BLKmode then we may return TARGET itself.
4021 It turns out that in BLKmode it doesn't cause a problem,
4022 because C has no operators that could combine two different
4023 assignments into the same BLKmode object with different values
4024 with no sequence point. Will other languages need this to
4025 be more thorough?
4026
4027 If WANT_VALUE is 0, we return NULL, to make sure
4028 to catch quickly any cases where the caller uses the value
4029 and fails to set WANT_VALUE. */
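
/* Rough illustration of the WANT_VALUE distinction, in plain C:

	int a, b;
	void f (void) { a = b; }	   -- value discarded: WANT_VALUE == 0
	int  g (void) { return (a = b); }  -- value needed:    WANT_VALUE != 0

   In the second case the caller needs an rtx for the stored value, so
   store_expr returns one that is still valid to use even when the
   target is a volatile or otherwise awkward location. */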
4030
4031 rtx
4032 store_expr (exp, target, want_value)
4033 tree exp;
4034 rtx target;
4035 int want_value;
4036 {
4037 rtx temp;
4038 int dont_return_target = 0;
4039 int dont_store_target = 0;
4040
4041 if (TREE_CODE (exp) == COMPOUND_EXPR)
4042 {
4043 /* Perform first part of compound expression, then assign from second
4044 part. */
4045 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4046 emit_queue ();
4047 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4048 }
4049 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4050 {
4051 /* For a conditional expression, get a safe form of the target. Then
4052 test the condition, doing the appropriate assignment on either
4053 side. This avoids the creation of unnecessary temporaries.
4054 For non-BLKmode, it is more efficient not to do this. */
4055
4056 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4057
4058 emit_queue ();
4059 target = protect_from_queue (target, 1);
4060
4061 do_pending_stack_adjust ();
4062 NO_DEFER_POP;
4063 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4064 start_cleanup_deferral ();
4065 store_expr (TREE_OPERAND (exp, 1), target, 0);
4066 end_cleanup_deferral ();
4067 emit_queue ();
4068 emit_jump_insn (gen_jump (lab2));
4069 emit_barrier ();
4070 emit_label (lab1);
4071 start_cleanup_deferral ();
4072 store_expr (TREE_OPERAND (exp, 2), target, 0);
4073 end_cleanup_deferral ();
4074 emit_queue ();
4075 emit_label (lab2);
4076 OK_DEFER_POP;
4077
4078 return want_value ? target : NULL_RTX;
4079 }
4080 else if (queued_subexp_p (target))
4081 /* If target contains a postincrement, let's not risk
4082 using it as the place to generate the rhs. */
4083 {
4084 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4085 {
4086 /* Expand EXP into a new pseudo. */
4087 temp = gen_reg_rtx (GET_MODE (target));
4088 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4089 }
4090 else
4091 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4092
4093 /* If target is volatile, ANSI requires accessing the value
4094 *from* the target, if it is accessed. So make that happen.
4095 In no case return the target itself. */
4096 if (! MEM_VOLATILE_P (target) && want_value)
4097 dont_return_target = 1;
4098 }
4099 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4100 && GET_MODE (target) != BLKmode)
4101 /* If target is in memory and caller wants value in a register instead,
4102 arrange that. Pass TARGET as target for expand_expr so that,
4103 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4104 We know expand_expr will not use the target in that case.
4105 Don't do this if TARGET is volatile because we are supposed
4106 to write it and then read it. */
4107 {
4108 temp = expand_expr (exp, target, GET_MODE (target), 0);
4109 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4110 {
4111 /* If TEMP is already in the desired TARGET, only copy it from
4112 memory and don't store it there again. */
4113 if (temp == target
4114 || (rtx_equal_p (temp, target)
4115 && ! side_effects_p (temp) && ! side_effects_p (target)))
4116 dont_store_target = 1;
4117 temp = copy_to_reg (temp);
4118 }
4119 dont_return_target = 1;
4120 }
4121 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4122 /* If this is a scalar in a register that is stored in a wider mode
4123 than the declared mode, compute the result into its declared mode
4124 and then convert to the wider mode. Our value is the computed
4125 expression. */
4126 {
4127 /* If we don't want a value, we can do the conversion inside EXP,
4128 which will often result in some optimizations. Do the conversion
4129 in two steps: first change the signedness, if needed, then
4130 the extend. But don't do this if the type of EXP is a subtype
4131 of something else since then the conversion might involve
4132 more than just converting modes. */
4133 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4134 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4135 {
4136 if (TREE_UNSIGNED (TREE_TYPE (exp))
4137 != SUBREG_PROMOTED_UNSIGNED_P (target))
4138 exp
4139 = convert
4140 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4141 TREE_TYPE (exp)),
4142 exp);
4143
4144 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4145 SUBREG_PROMOTED_UNSIGNED_P (target)),
4146 exp);
4147 }
4148
4149 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4150
4151 /* If TEMP is a volatile MEM and we want a result value, make
4152 the access now so it gets done only once. Likewise if
4153 it contains TARGET. */
4154 if (GET_CODE (temp) == MEM && want_value
4155 && (MEM_VOLATILE_P (temp)
4156 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4157 temp = copy_to_reg (temp);
4158
4159 /* If TEMP is a VOIDmode constant, use convert_modes to make
4160 sure that we properly convert it. */
4161 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4162 {
4163 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4164 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4165 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4166 GET_MODE (target), temp,
4167 SUBREG_PROMOTED_UNSIGNED_P (target));
4168 }
4169
4170 convert_move (SUBREG_REG (target), temp,
4171 SUBREG_PROMOTED_UNSIGNED_P (target));
4172
4173 /* If we promoted a constant, change the mode back down to match
4174 target. Otherwise, the caller might get confused by a result whose
4175 mode is larger than expected. */
4176
4177 if (want_value && GET_MODE (temp) != GET_MODE (target)
4178 && GET_MODE (temp) != VOIDmode)
4179 {
4180 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4181 SUBREG_PROMOTED_VAR_P (temp) = 1;
4182 SUBREG_PROMOTED_UNSIGNED_P (temp)
4183 = SUBREG_PROMOTED_UNSIGNED_P (target);
4184 }
4185
4186 return want_value ? temp : NULL_RTX;
4187 }
4188 else
4189 {
4190 temp = expand_expr (exp, target, GET_MODE (target), 0);
4191 /* Return TARGET if it's a specified hardware register.
4192 If TARGET is a volatile mem ref, either return TARGET
4193 or return a reg copied *from* TARGET; ANSI requires this.
4194
4195 Otherwise, if TEMP is not TARGET, return TEMP
4196 if it is constant (for efficiency),
4197 or if we really want the correct value. */
4198 if (!(target && GET_CODE (target) == REG
4199 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4200 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4201 && ! rtx_equal_p (temp, target)
4202 && (CONSTANT_P (temp) || want_value))
4203 dont_return_target = 1;
4204 }
4205
4206 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4207 the same as that of TARGET, adjust the constant. This is needed, for
4208 example, in case it is a CONST_DOUBLE and we want only a word-sized
4209 value. */
4210 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4211 && TREE_CODE (exp) != ERROR_MARK
4212 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4213 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4214 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4215
4216 if (current_function_check_memory_usage
4217 && GET_CODE (target) == MEM
4218 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4219 {
4220 in_check_memory_usage = 1;
4221 if (GET_CODE (temp) == MEM)
4222 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4223 VOIDmode, 3, XEXP (target, 0), Pmode,
4224 XEXP (temp, 0), Pmode,
4225 expr_size (exp), TYPE_MODE (sizetype));
4226 else
4227 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4228 VOIDmode, 3, XEXP (target, 0), Pmode,
4229 expr_size (exp), TYPE_MODE (sizetype),
4230 GEN_INT (MEMORY_USE_WO),
4231 TYPE_MODE (integer_type_node));
4232 in_check_memory_usage = 0;
4233 }
4234
4235 /* If value was not generated in the target, store it there.
4236 Convert the value to TARGET's type first if necessary. */
4237 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4238 one or both of them are volatile memory refs, we have to distinguish
4239 two cases:
4240 - expand_expr has used TARGET. In this case, we must not generate
4241 another copy. This can be detected by TARGET being equal according
4242 to == .
4243 - expand_expr has not used TARGET - that means that the source just
4244 happens to have the same RTX form. Since temp will have been created
4245 by expand_expr, it will compare unequal according to == .
4246 We must generate a copy in this case, to reach the correct number
4247 of volatile memory references. */
4248
4249 if ((! rtx_equal_p (temp, target)
4250 || (temp != target && (side_effects_p (temp)
4251 || side_effects_p (target))))
4252 && TREE_CODE (exp) != ERROR_MARK
4253 && ! dont_store_target)
4254 {
4255 target = protect_from_queue (target, 1);
4256 if (GET_MODE (temp) != GET_MODE (target)
4257 && GET_MODE (temp) != VOIDmode)
4258 {
4259 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4260 if (dont_return_target)
4261 {
4262 /* In this case, we will return TEMP,
4263 so make sure it has the proper mode.
4264 But don't forget to store the value into TARGET. */
4265 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4266 emit_move_insn (target, temp);
4267 }
4268 else
4269 convert_move (target, temp, unsignedp);
4270 }
4271
4272 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4273 {
4274 /* Handle copying a string constant into an array.
4275 The string constant may be shorter than the array.
4276 So copy just the string's actual length, and clear the rest. */
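/* For instance (ordinary C; the sizes are only illustrative):

	char buf[8] = "hi";

   copies the three bytes of the string constant, including its
   terminating nul, and then clears the remaining five bytes of BUF. */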
4277 rtx size;
4278 rtx addr;
4279
4280 /* Get the size of the data type of the string,
4281 which is actually the size of the target. */
4282 size = expr_size (exp);
4283 if (GET_CODE (size) == CONST_INT
4284 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4285 emit_block_move (target, temp, size);
4286 else
4287 {
4288 /* Compute the size of the data to copy from the string. */
4289 tree copy_size
4290 = size_binop (MIN_EXPR,
4291 make_tree (sizetype, size),
4292 size_int (TREE_STRING_LENGTH (exp)));
4293 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4294 VOIDmode, 0);
4295 rtx label = 0;
4296
4297 /* Copy that much. */
4298 emit_block_move (target, temp, copy_size_rtx);
4299
4300 /* Figure out how much is left in TARGET that we have to clear.
4301 Do all calculations in ptr_mode. */
4302
4303 addr = XEXP (target, 0);
4304 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4305
4306 if (GET_CODE (copy_size_rtx) == CONST_INT)
4307 {
4308 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4309 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4310 }
4311 else
4312 {
4313 addr = force_reg (ptr_mode, addr);
4314 addr = expand_binop (ptr_mode, add_optab, addr,
4315 copy_size_rtx, NULL_RTX, 0,
4316 OPTAB_LIB_WIDEN);
4317
4318 size = expand_binop (ptr_mode, sub_optab, size,
4319 copy_size_rtx, NULL_RTX, 0,
4320 OPTAB_LIB_WIDEN);
4321
4322 label = gen_label_rtx ();
4323 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4324 GET_MODE (size), 0, 0, label);
4325 }
4326
4327 if (size != const0_rtx)
4328 {
4329 rtx dest = gen_rtx_MEM (BLKmode, addr);
4330
4331 MEM_COPY_ATTRIBUTES (dest, target);
4332
4333 /* Be sure we can write on ADDR. */
4334 in_check_memory_usage = 1;
4335 if (current_function_check_memory_usage)
4336 emit_library_call (chkr_check_addr_libfunc,
4337 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4338 addr, Pmode,
4339 size, TYPE_MODE (sizetype),
4340 GEN_INT (MEMORY_USE_WO),
4341 TYPE_MODE (integer_type_node));
4342 in_check_memory_usage = 0;
4343 clear_storage (dest, size);
4344 }
4345
4346 if (label)
4347 emit_label (label);
4348 }
4349 }
4350 /* Handle calls that return values in multiple non-contiguous locations.
4351 The Irix 6 ABI has examples of this. */
4352 else if (GET_CODE (target) == PARALLEL)
4353 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4354 TYPE_ALIGN (TREE_TYPE (exp)));
4355 else if (GET_MODE (temp) == BLKmode)
4356 emit_block_move (target, temp, expr_size (exp));
4357 else
4358 emit_move_insn (target, temp);
4359 }
4360
4361 /* If we don't want a value, return NULL_RTX. */
4362 if (! want_value)
4363 return NULL_RTX;
4364
4365 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4366 ??? The latter test doesn't seem to make sense. */
4367 else if (dont_return_target && GET_CODE (temp) != MEM)
4368 return temp;
4369
4370 /* Return TARGET itself if it is a hard register. */
4371 else if (want_value && GET_MODE (target) != BLKmode
4372 && ! (GET_CODE (target) == REG
4373 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4374 return copy_to_reg (target);
4375
4376 else
4377 return target;
4378 }
4379 \f
4380 /* Return 1 if EXP just contains zeros. */
4381
4382 static int
4383 is_zeros_p (exp)
4384 tree exp;
4385 {
4386 tree elt;
4387
4388 switch (TREE_CODE (exp))
4389 {
4390 case CONVERT_EXPR:
4391 case NOP_EXPR:
4392 case NON_LVALUE_EXPR:
4393 return is_zeros_p (TREE_OPERAND (exp, 0));
4394
4395 case INTEGER_CST:
4396 return integer_zerop (exp);
4397
4398 case COMPLEX_CST:
4399 return
4400 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4401
4402 case REAL_CST:
4403 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4404
4405 case CONSTRUCTOR:
4406 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4407 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4408 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4409 if (! is_zeros_p (TREE_VALUE (elt)))
4410 return 0;
4411
4412 return 1;
4413
4414 default:
4415 return 0;
4416 }
4417 }
4418
4419 /* Return 1 if EXP contains mostly (3/4) zeros. */
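
/* A worked example of that threshold (a hypothetical initializer,
   nothing target-specific):

	int v[8] = { 0, 0, 9, 0, 0, 0, 0, 0 };

   has seven zero elements out of eight, so 4 * 7 >= 3 * 8 holds and
   mostly_zeros_p returns 1; store_constructor will then prefer to clear
   the whole object first and store only the one nonzero element. */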
4420
4421 static int
4422 mostly_zeros_p (exp)
4423 tree exp;
4424 {
4425 if (TREE_CODE (exp) == CONSTRUCTOR)
4426 {
4427 int elts = 0, zeros = 0;
4428 tree elt = CONSTRUCTOR_ELTS (exp);
4429 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4430 {
4431 /* If there are no ranges of true bits, it is all zero. */
4432 return elt == NULL_TREE;
4433 }
4434 for (; elt; elt = TREE_CHAIN (elt))
4435 {
4436 /* We do not handle the case where the index is a RANGE_EXPR,
4437 so the statistic will be somewhat inaccurate.
4438 We do make a more accurate count in store_constructor itself,
4439 so, since this function is only used for nested array elements,
4440 this should be close enough. */
4441 if (mostly_zeros_p (TREE_VALUE (elt)))
4442 zeros++;
4443 elts++;
4444 }
4445
4446 return 4 * zeros >= 3 * elts;
4447 }
4448
4449 return is_zeros_p (exp);
4450 }
4451 \f
4452 /* Helper function for store_constructor.
4453 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4454 TYPE is the type of the CONSTRUCTOR, not the element type.
4455 ALIGN and CLEARED are as for store_constructor.
4456 ALIAS_SET is the alias set to use for any stores.
4457
4458 This provides a recursive shortcut back to store_constructor when it isn't
4459 necessary to go through store_field. This is so that we can pass through
4460 the cleared field to let store_constructor know that we may not have to
4461 clear a substructure if the outer structure has already been cleared. */
4462
4463 static void
4464 store_constructor_field (target, bitsize, bitpos,
4465 mode, exp, type, align, cleared, alias_set)
4466 rtx target;
4467 unsigned HOST_WIDE_INT bitsize;
4468 HOST_WIDE_INT bitpos;
4469 enum machine_mode mode;
4470 tree exp, type;
4471 unsigned int align;
4472 int cleared;
4473 int alias_set;
4474 {
4475 if (TREE_CODE (exp) == CONSTRUCTOR
4476 && bitpos % BITS_PER_UNIT == 0
4477 /* If we have a non-zero bitpos for a register target, then we just
4478 let store_field do the bitfield handling. This is unlikely to
4479 generate unnecessary clear instructions anyways. */
4480 && (bitpos == 0 || GET_CODE (target) == MEM))
4481 {
4482 if (GET_CODE (target) == MEM)
4483 target
4484 = adjust_address (target,
4485 GET_MODE (target) == BLKmode
4486 || 0 != (bitpos
4487 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4488 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4489
4490
4491 /* Show the alignment may no longer be what it was and update the alias
4492 set, if required. */
4493 if (bitpos != 0)
4494 align = MIN (align, (unsigned int) bitpos & - bitpos);
4495
4496 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4497 && MEM_ALIAS_SET (target) != 0)
4498 {
4499 target = copy_rtx (target);
4500 set_mem_alias_set (target, alias_set);
4501 }
4502
4503 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4504 }
4505 else
4506 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4507 int_size_in_bytes (type), alias_set);
4508 }
4509
4510 /* Store the value of constructor EXP into the rtx TARGET.
4511 TARGET is either a REG or a MEM.
4512 ALIGN is the maximum known alignment for TARGET.
4513 CLEARED is true if TARGET is known to have been zero'd.
4514 SIZE is the number of bytes of TARGET we are allowed to modify: this
4515 may not be the same as the size of EXP if we are assigning to a field
4516 which has been packed to exclude padding bits. */
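
/* Informal example of the RECORD_TYPE path below (only standard C
   semantics are assumed):

	void f (void)
	{
	  struct s { int a, b, c; } x = { 7 };
	  ...
	}

   The constructor supplies fewer initializers than the structure has
   fields, so X is cleared as a whole first (clear_storage) and only the
   field `a' is then stored explicitly; the other fields keep the zero
   value the language requires. */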
4517
4518 static void
4519 store_constructor (exp, target, align, cleared, size)
4520 tree exp;
4521 rtx target;
4522 unsigned int align;
4523 int cleared;
4524 HOST_WIDE_INT size;
4525 {
4526 tree type = TREE_TYPE (exp);
4527 #ifdef WORD_REGISTER_OPERATIONS
4528 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4529 #endif
4530
4531 /* We know our target cannot conflict, since safe_from_p has been called. */
4532 #if 0
4533 /* Don't try copying piece by piece into a hard register
4534 since that is vulnerable to being clobbered by EXP.
4535 Instead, construct in a pseudo register and then copy it all. */
4536 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4537 {
4538 rtx temp = gen_reg_rtx (GET_MODE (target));
4539 store_constructor (exp, temp, align, cleared, size);
4540 emit_move_insn (target, temp);
4541 return;
4542 }
4543 #endif
4544
4545 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4546 || TREE_CODE (type) == QUAL_UNION_TYPE)
4547 {
4548 tree elt;
4549
4550 /* Inform later passes that the whole union value is dead. */
4551 if ((TREE_CODE (type) == UNION_TYPE
4552 || TREE_CODE (type) == QUAL_UNION_TYPE)
4553 && ! cleared)
4554 {
4555 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4556
4557 /* If the constructor is empty, clear the union. */
4558 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4559 clear_storage (target, expr_size (exp));
4560 }
4561
4562 /* If we are building a static constructor into a register,
4563 set the initial value as zero so we can fold the value into
4564 a constant. But if more than one register is involved,
4565 this probably loses. */
4566 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4567 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4568 {
4569 if (! cleared)
4570 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4571
4572 cleared = 1;
4573 }
4574
4575 /* If the constructor has fewer fields than the structure
4576 or if we are initializing the structure to mostly zeros,
4577 clear the whole structure first. Don't do this if TARGET is a
4578 register whose mode size isn't equal to SIZE since clear_storage
4579 can't handle this case. */
4580 else if (size > 0
4581 && ((list_length (CONSTRUCTOR_ELTS (exp))
4582 != fields_length (type))
4583 || mostly_zeros_p (exp))
4584 && (GET_CODE (target) != REG
4585 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4586 {
4587 if (! cleared)
4588 clear_storage (target, GEN_INT (size));
4589
4590 cleared = 1;
4591 }
4592 else if (! cleared)
4593 /* Inform later passes that the old value is dead. */
4594 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4595
4596 /* Store each element of the constructor into
4597 the corresponding field of TARGET. */
4598
4599 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4600 {
4601 tree field = TREE_PURPOSE (elt);
4602 #ifdef WORD_REGISTER_OPERATIONS
4603 tree value = TREE_VALUE (elt);
4604 #endif
4605 enum machine_mode mode;
4606 HOST_WIDE_INT bitsize;
4607 HOST_WIDE_INT bitpos = 0;
4608 int unsignedp;
4609 tree offset;
4610 rtx to_rtx = target;
4611
4612 /* Just ignore missing fields.
4613 We cleared the whole structure, above,
4614 if any fields are missing. */
4615 if (field == 0)
4616 continue;
4617
4618 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4619 continue;
4620
4621 if (host_integerp (DECL_SIZE (field), 1))
4622 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4623 else
4624 bitsize = -1;
4625
4626 unsignedp = TREE_UNSIGNED (field);
4627 mode = DECL_MODE (field);
4628 if (DECL_BIT_FIELD (field))
4629 mode = VOIDmode;
4630
4631 offset = DECL_FIELD_OFFSET (field);
4632 if (host_integerp (offset, 0)
4633 && host_integerp (bit_position (field), 0))
4634 {
4635 bitpos = int_bit_position (field);
4636 offset = 0;
4637 }
4638 else
4639 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4640
4641 if (offset)
4642 {
4643 rtx offset_rtx;
4644
4645 if (contains_placeholder_p (offset))
4646 offset = build (WITH_RECORD_EXPR, sizetype,
4647 offset, make_tree (TREE_TYPE (exp), target));
4648
4649 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4650 if (GET_CODE (to_rtx) != MEM)
4651 abort ();
4652
4653 if (GET_MODE (offset_rtx) != ptr_mode)
4654 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4655
4656 #ifdef POINTERS_EXTEND_UNSIGNED
4657 if (GET_MODE (offset_rtx) != Pmode)
4658 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4659 #endif
4660
4661 to_rtx = offset_address (to_rtx, offset_rtx,
4662 highest_pow2_factor (offset));
4663
4664 align = DECL_OFFSET_ALIGN (field);
4665 }
4666
4667 if (TREE_READONLY (field))
4668 {
4669 if (GET_CODE (to_rtx) == MEM)
4670 to_rtx = copy_rtx (to_rtx);
4671
4672 RTX_UNCHANGING_P (to_rtx) = 1;
4673 }
4674
4675 #ifdef WORD_REGISTER_OPERATIONS
4676 /* If this initializes a field that is smaller than a word, at the
4677 start of a word, try to widen it to a full word.
4678 This special case allows us to output C++ member function
4679 initializations in a form that the optimizers can understand. */
4680 if (GET_CODE (target) == REG
4681 && bitsize < BITS_PER_WORD
4682 && bitpos % BITS_PER_WORD == 0
4683 && GET_MODE_CLASS (mode) == MODE_INT
4684 && TREE_CODE (value) == INTEGER_CST
4685 && exp_size >= 0
4686 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4687 {
4688 tree type = TREE_TYPE (value);
4689 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4690 {
4691 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4692 value = convert (type, value);
4693 }
4694 if (BYTES_BIG_ENDIAN)
4695 value
4696 = fold (build (LSHIFT_EXPR, type, value,
4697 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4698 bitsize = BITS_PER_WORD;
4699 mode = word_mode;
4700 }
4701 #endif
4702
4703 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4704 && DECL_NONADDRESSABLE_P (field))
4705 {
4706 to_rtx = copy_rtx (to_rtx);
4707 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4708 }
4709
4710 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4711 TREE_VALUE (elt), type, align, cleared,
4712 get_alias_set (TREE_TYPE (field)));
4713 }
4714 }
4715 else if (TREE_CODE (type) == ARRAY_TYPE)
4716 {
4717 tree elt;
4718 int i;
4719 int need_to_clear;
4720 tree domain = TYPE_DOMAIN (type);
4721 tree elttype = TREE_TYPE (type);
4722 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4723 && TYPE_MAX_VALUE (domain)
4724 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4725 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4726 HOST_WIDE_INT minelt = 0;
4727 HOST_WIDE_INT maxelt = 0;
4728
4729 /* If we have constant bounds for the range of the type, get them. */
4730 if (const_bounds_p)
4731 {
4732 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4733 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4734 }
4735
4736 /* If the constructor has fewer elements than the array,
4737 clear the whole array first. Similarly if this is
4738 a static constructor of a non-BLKmode object. */
4739 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4740 need_to_clear = 1;
4741 else
4742 {
4743 HOST_WIDE_INT count = 0, zero_count = 0;
4744 need_to_clear = ! const_bounds_p;
4745
4746 /* This loop is a more accurate version of the loop in
4747 mostly_zeros_p (it handles RANGE_EXPR in an index).
4748 It is also needed to check for missing elements. */
4749 for (elt = CONSTRUCTOR_ELTS (exp);
4750 elt != NULL_TREE && ! need_to_clear;
4751 elt = TREE_CHAIN (elt))
4752 {
4753 tree index = TREE_PURPOSE (elt);
4754 HOST_WIDE_INT this_node_count;
4755
4756 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4757 {
4758 tree lo_index = TREE_OPERAND (index, 0);
4759 tree hi_index = TREE_OPERAND (index, 1);
4760
4761 if (! host_integerp (lo_index, 1)
4762 || ! host_integerp (hi_index, 1))
4763 {
4764 need_to_clear = 1;
4765 break;
4766 }
4767
4768 this_node_count = (tree_low_cst (hi_index, 1)
4769 - tree_low_cst (lo_index, 1) + 1);
4770 }
4771 else
4772 this_node_count = 1;
4773
4774 count += this_node_count;
4775 if (mostly_zeros_p (TREE_VALUE (elt)))
4776 zero_count += this_node_count;
4777 }
4778
4779 /* Clear the entire array first if there are any missing elements,
4780 or if the incidence of zero elements is >= 75%. */
4781 if (! need_to_clear
4782 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4783 need_to_clear = 1;
4784 }
4785
4786 if (need_to_clear && size > 0)
4787 {
4788 if (! cleared)
4789 clear_storage (target, GEN_INT (size));
4790 cleared = 1;
4791 }
4792 else if (REG_P (target))
4793 /* Inform later passes that the old value is dead. */
4794 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4795
4796 /* Store each element of the constructor into
4797 the corresponding element of TARGET, determined
4798 by counting the elements. */
4799 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4800 elt;
4801 elt = TREE_CHAIN (elt), i++)
4802 {
4803 enum machine_mode mode;
4804 HOST_WIDE_INT bitsize;
4805 HOST_WIDE_INT bitpos;
4806 int unsignedp;
4807 tree value = TREE_VALUE (elt);
4808 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4809 tree index = TREE_PURPOSE (elt);
4810 rtx xtarget = target;
4811
4812 if (cleared && is_zeros_p (value))
4813 continue;
4814
4815 unsignedp = TREE_UNSIGNED (elttype);
4816 mode = TYPE_MODE (elttype);
4817 if (mode == BLKmode)
4818 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4819 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4820 : -1);
4821 else
4822 bitsize = GET_MODE_BITSIZE (mode);
4823
4824 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4825 {
4826 tree lo_index = TREE_OPERAND (index, 0);
4827 tree hi_index = TREE_OPERAND (index, 1);
4828 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4829 struct nesting *loop;
4830 HOST_WIDE_INT lo, hi, count;
4831 tree position;
4832
4833 /* If the range is constant and "small", unroll the loop. */
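/* (A RANGE_EXPR index comes from the GNU C range-designator
   extension, e.g. `int a[16] = { [4 ... 7] = 1 };'; that example
   has a constant range of four elements and would be unrolled
   here, under the size test below.) */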
4834 if (const_bounds_p
4835 && host_integerp (lo_index, 0)
4836 && host_integerp (hi_index, 0)
4837 && (lo = tree_low_cst (lo_index, 0),
4838 hi = tree_low_cst (hi_index, 0),
4839 count = hi - lo + 1,
4840 (GET_CODE (target) != MEM
4841 || count <= 2
4842 || (host_integerp (TYPE_SIZE (elttype), 1)
4843 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4844 <= 40 * 8)))))
4845 {
4846 lo -= minelt; hi -= minelt;
4847 for (; lo <= hi; lo++)
4848 {
4849 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4850
4851 if (GET_CODE (target) == MEM
4852 && !MEM_KEEP_ALIAS_SET_P (target)
4853 && TYPE_NONALIASED_COMPONENT (type))
4854 {
4855 target = copy_rtx (target);
4856 MEM_KEEP_ALIAS_SET_P (target) = 1;
4857 }
4858
4859 store_constructor_field
4860 (target, bitsize, bitpos, mode, value, type, align,
4861 cleared, get_alias_set (elttype));
4862 }
4863 }
4864 else
4865 {
4866 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4867 loop_top = gen_label_rtx ();
4868 loop_end = gen_label_rtx ();
4869
4870 unsignedp = TREE_UNSIGNED (domain);
4871
4872 index = build_decl (VAR_DECL, NULL_TREE, domain);
4873
4874 index_r
4875 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4876 &unsignedp, 0));
4877 SET_DECL_RTL (index, index_r);
4878 if (TREE_CODE (value) == SAVE_EXPR
4879 && SAVE_EXPR_RTL (value) == 0)
4880 {
4881 /* Make sure value gets expanded once before the
4882 loop. */
4883 expand_expr (value, const0_rtx, VOIDmode, 0);
4884 emit_queue ();
4885 }
4886 store_expr (lo_index, index_r, 0);
4887 loop = expand_start_loop (0);
4888
4889 /* Assign value to element index. */
4890 position
4891 = convert (ssizetype,
4892 fold (build (MINUS_EXPR, TREE_TYPE (index),
4893 index, TYPE_MIN_VALUE (domain))));
4894 position = size_binop (MULT_EXPR, position,
4895 convert (ssizetype,
4896 TYPE_SIZE_UNIT (elttype)));
4897
4898 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4899 xtarget = offset_address (target, pos_rtx,
4900 highest_pow2_factor (position));
4901 xtarget = adjust_address (xtarget, mode, 0);
4902 if (TREE_CODE (value) == CONSTRUCTOR)
4903 store_constructor (value, xtarget, align, cleared,
4904 bitsize / BITS_PER_UNIT);
4905 else
4906 store_expr (value, xtarget, 0);
4907
4908 expand_exit_loop_if_false (loop,
4909 build (LT_EXPR, integer_type_node,
4910 index, hi_index));
4911
4912 expand_increment (build (PREINCREMENT_EXPR,
4913 TREE_TYPE (index),
4914 index, integer_one_node), 0, 0);
4915 expand_end_loop ();
4916 emit_label (loop_end);
4917 }
4918 }
4919 else if ((index != 0 && ! host_integerp (index, 0))
4920 || ! host_integerp (TYPE_SIZE (elttype), 1))
4921 {
4922 tree position;
4923
4924 if (index == 0)
4925 index = ssize_int (1);
4926
4927 if (minelt)
4928 index = convert (ssizetype,
4929 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4930 TYPE_MIN_VALUE (domain))));
4931
4932 position = size_binop (MULT_EXPR, index,
4933 convert (ssizetype,
4934 TYPE_SIZE_UNIT (elttype)));
4935 xtarget = offset_address (target,
4936 expand_expr (position, 0, VOIDmode, 0),
4937 highest_pow2_factor (position));
4938 xtarget = adjust_address (xtarget, mode, 0);
4939 store_expr (value, xtarget, 0);
4940 }
4941 else
4942 {
4943 if (index != 0)
4944 bitpos = ((tree_low_cst (index, 0) - minelt)
4945 * tree_low_cst (TYPE_SIZE (elttype), 1));
4946 else
4947 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4948
4949 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4950 && TYPE_NONALIASED_COMPONENT (type))
4951 {
4952 target = copy_rtx (target);
4953 MEM_KEEP_ALIAS_SET_P (target) = 1;
4954 }
4955
4956 store_constructor_field (target, bitsize, bitpos, mode, value,
4957 type, align, cleared,
4958 get_alias_set (elttype));
4959
4960 }
4961 }
4962 }
4963
4964 /* Set constructor assignments. */
4965 else if (TREE_CODE (type) == SET_TYPE)
4966 {
4967 tree elt = CONSTRUCTOR_ELTS (exp);
4968 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4969 tree domain = TYPE_DOMAIN (type);
4970 tree domain_min, domain_max, bitlength;
4971
4972 /* The default implementation strategy is to extract the constant
4973 parts of the constructor, use that to initialize the target,
4974 and then "or" in whatever non-constant ranges we need in addition.
4975
4976 If a large set is all zero or all ones, it is
4977 probably better to set it using memset (if available) or bzero.
4978 Also, if a large set has just a single range, it may also be
4979 better to first clear the set (using
4980 bzero/memset), and then set the bits we want. */
4981
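/* A hedged illustration of that strategy (using a Pascal/CHILL-style
   set, since C itself has no SET_TYPE): for a set over 0..31 whose
   constructor names only the constant members 1 and 5, the loop below
   packs a single word whose value is (1 << 1) | (1 << 5), i.e. 0x22 on
   a !BYTES_BIG_ENDIAN target, and stores it with one move; members
   given by non-constant ranges are instead handled by the memset and
   __setbits calls at the end of this block. */
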
4982 /* Check for all zeros. */
4983 if (elt == NULL_TREE && size > 0)
4984 {
4985 if (!cleared)
4986 clear_storage (target, GEN_INT (size));
4987 return;
4988 }
4989
4990 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4991 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4992 bitlength = size_binop (PLUS_EXPR,
4993 size_diffop (domain_max, domain_min),
4994 ssize_int (1));
4995
4996 nbits = tree_low_cst (bitlength, 1);
4997
4998 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4999 are "complicated" (more than one range), initialize (the
5000 constant parts) by copying from a constant. */
5001 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5002 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5003 {
5004 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5005 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5006 char *bit_buffer = (char *) alloca (nbits);
5007 HOST_WIDE_INT word = 0;
5008 unsigned int bit_pos = 0;
5009 unsigned int ibit = 0;
5010 unsigned int offset = 0; /* In bytes from beginning of set. */
5011
5012 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5013 for (;;)
5014 {
5015 if (bit_buffer[ibit])
5016 {
5017 if (BYTES_BIG_ENDIAN)
5018 word |= (1 << (set_word_size - 1 - bit_pos));
5019 else
5020 word |= 1 << bit_pos;
5021 }
5022
5023 bit_pos++; ibit++;
5024 if (bit_pos >= set_word_size || ibit == nbits)
5025 {
5026 if (word != 0 || ! cleared)
5027 {
5028 rtx datum = GEN_INT (word);
5029 rtx to_rtx;
5030
5031 /* The assumption here is that it is safe to use
5032 XEXP if the set is multi-word, but not if
5033 it's single-word. */
5034 if (GET_CODE (target) == MEM)
5035 to_rtx = adjust_address (target, mode, offset);
5036 else if (offset == 0)
5037 to_rtx = target;
5038 else
5039 abort ();
5040 emit_move_insn (to_rtx, datum);
5041 }
5042
5043 if (ibit == nbits)
5044 break;
5045 word = 0;
5046 bit_pos = 0;
5047 offset += set_word_size / BITS_PER_UNIT;
5048 }
5049 }
5050 }
5051 else if (!cleared)
5052 /* Don't bother clearing storage if the set is all ones. */
5053 if (TREE_CHAIN (elt) != NULL_TREE
5054 || (TREE_PURPOSE (elt) == NULL_TREE
5055 ? nbits != 1
5056 : ( ! host_integerp (TREE_VALUE (elt), 0)
5057 || ! host_integerp (TREE_PURPOSE (elt), 0)
5058 || (tree_low_cst (TREE_VALUE (elt), 0)
5059 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5060 != (HOST_WIDE_INT) nbits))))
5061 clear_storage (target, expr_size (exp));
5062
5063 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5064 {
5065 /* Start of range of element or NULL. */
5066 tree startbit = TREE_PURPOSE (elt);
5067 /* End of range of element, or element value. */
5068 tree endbit = TREE_VALUE (elt);
5069 #ifdef TARGET_MEM_FUNCTIONS
5070 HOST_WIDE_INT startb, endb;
5071 #endif
5072 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5073
5074 bitlength_rtx = expand_expr (bitlength,
5075 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5076
5077 /* Handle non-range tuple element like [ expr ]. */
5078 if (startbit == NULL_TREE)
5079 {
5080 startbit = save_expr (endbit);
5081 endbit = startbit;
5082 }
5083
5084 startbit = convert (sizetype, startbit);
5085 endbit = convert (sizetype, endbit);
5086 if (! integer_zerop (domain_min))
5087 {
5088 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5089 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5090 }
5091 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5092 EXPAND_CONST_ADDRESS);
5093 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5094 EXPAND_CONST_ADDRESS);
5095
5096 if (REG_P (target))
5097 {
5098 targetx
5099 = assign_temp
5100 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5101 TYPE_QUAL_CONST)),
5102 0, 1, 1);
5103 emit_move_insn (targetx, target);
5104 }
5105
5106 else if (GET_CODE (target) == MEM)
5107 targetx = target;
5108 else
5109 abort ();
5110
5111 #ifdef TARGET_MEM_FUNCTIONS
5112 /* Optimization: If startbit and endbit are
5113 constants divisible by BITS_PER_UNIT,
5114 call memset instead. */
5115 if (TREE_CODE (startbit) == INTEGER_CST
5116 && TREE_CODE (endbit) == INTEGER_CST
5117 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5118 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5119 {
5120 emit_library_call (memset_libfunc, LCT_NORMAL,
5121 VOIDmode, 3,
5122 plus_constant (XEXP (targetx, 0),
5123 startb / BITS_PER_UNIT),
5124 Pmode,
5125 constm1_rtx, TYPE_MODE (integer_type_node),
5126 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5127 TYPE_MODE (sizetype));
5128 }
5129 else
5130 #endif
5131 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5132 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5133 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5134 startbit_rtx, TYPE_MODE (sizetype),
5135 endbit_rtx, TYPE_MODE (sizetype));
5136
5137 if (REG_P (target))
5138 emit_move_insn (target, targetx);
5139 }
5140 }
5141
5142 else
5143 abort ();
5144 }
5145
5146 /* Store the value of EXP (an expression tree)
5147 into a subfield of TARGET which has mode MODE and occupies
5148 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5149 If MODE is VOIDmode, it means that we are storing into a bit-field.
5150
5151 If VALUE_MODE is VOIDmode, return nothing in particular.
5152 UNSIGNEDP is not used in this case.
5153
5154 Otherwise, return an rtx for the value stored. This rtx
5155 has mode VALUE_MODE if that is convenient to do.
5156 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5157
5158 ALIGN is the alignment that TARGET is known to have.
5159 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5160
5161 ALIAS_SET is the alias set for the destination. This value will
5162 (in general) be different from that for TARGET, since TARGET is a
5163 reference to the containing structure. */
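
/* Sketch of a typical call, under common but unchecked layout
   assumptions: for

	struct s { unsigned int lo : 3, hi : 5; } u;
	void f (unsigned int n) { u.hi = n; }

   expand_assignment arrives here with BITSIZE == 5, BITPOS == 3 and
   MODE == VOIDmode, so the VOIDmode test below routes the store through
   store_bit_field rather than an ordinary MEM move. */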
5164
5165 static rtx
5166 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5167 unsignedp, align, total_size, alias_set)
5168 rtx target;
5169 HOST_WIDE_INT bitsize;
5170 HOST_WIDE_INT bitpos;
5171 enum machine_mode mode;
5172 tree exp;
5173 enum machine_mode value_mode;
5174 int unsignedp;
5175 unsigned int align;
5176 HOST_WIDE_INT total_size;
5177 int alias_set;
5178 {
5179 HOST_WIDE_INT width_mask = 0;
5180
5181 if (TREE_CODE (exp) == ERROR_MARK)
5182 return const0_rtx;
5183
5184 /* If we have nothing to store, do nothing unless the expression has
5185 side-effects. */
5186 if (bitsize == 0)
5187 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5188
5189 if (bitsize < HOST_BITS_PER_WIDE_INT)
5190 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5191
5192 /* If we are storing into an unaligned field of an aligned union that is
5193 in a register, we may have the mode of TARGET being an integer mode but
5194 MODE == BLKmode. In that case, get an aligned object whose size and
5195 alignment are the same as TARGET and store TARGET into it (we can avoid
5196 the store if the field being stored is the entire width of TARGET). Then
5197 call ourselves recursively to store the field into a BLKmode version of
5198 that object. Finally, load from the object into TARGET. This is not
5199 very efficient in general, but should only be slightly more expensive
5200 than the otherwise-required unaligned accesses. Perhaps this can be
5201 cleaned up later. */
5202
5203 if (mode == BLKmode
5204 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5205 {
5206 rtx object
5207 = assign_temp
5208 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5209 TYPE_QUAL_CONST),
5210 0, 1, 1);
5211 rtx blk_object = copy_rtx (object);
5212
5213 PUT_MODE (blk_object, BLKmode);
5214
5215 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5216 emit_move_insn (object, target);
5217
5218 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5219 align, total_size, alias_set);
5220
5221 /* Even though we aren't returning target, we need to
5222 give it the updated value. */
5223 emit_move_insn (target, object);
5224
5225 return blk_object;
5226 }
5227
5228 if (GET_CODE (target) == CONCAT)
5229 {
5230 /* We're storing into a struct containing a single __complex. */
5231
5232 if (bitpos != 0)
5233 abort ();
5234 return store_expr (exp, target, 0);
5235 }
5236
5237 /* If the structure is in a register or if the component
5238 is a bit field, we cannot use addressing to access it.
5239 Use bit-field techniques or SUBREG to store in it. */
5240
5241 if (mode == VOIDmode
5242 || (mode != BLKmode && ! direct_store[(int) mode]
5243 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5244 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5245 || GET_CODE (target) == REG
5246 || GET_CODE (target) == SUBREG
5247 /* If the field isn't aligned enough to store as an ordinary memref,
5248 store it as a bit field. */
5249 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5250 && (align < GET_MODE_ALIGNMENT (mode)
5251 || bitpos % GET_MODE_ALIGNMENT (mode)))
5252 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5253 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5254 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5255 /* If the RHS and field are a constant size and the size of the
5256 RHS isn't the same size as the bitfield, we must use bitfield
5257 operations. */
5258 || (bitsize >= 0
5259 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5260 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5261 {
5262 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5263
5264 /* If BITSIZE is narrower than the size of the type of EXP
5265 we will be narrowing TEMP. Normally, what's wanted are the
5266 low-order bits. However, if EXP's type is a record and this is a
5267 big-endian machine, we want the upper BITSIZE bits. */
5268 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5269 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5270 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5271 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5272 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5273 - bitsize),
5274 temp, 1);
5275
5276 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5277 MODE. */
5278 if (mode != VOIDmode && mode != BLKmode
5279 && mode != TYPE_MODE (TREE_TYPE (exp)))
5280 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5281
5282 /* If the modes of TARGET and TEMP are both BLKmode, both
5283 must be in memory and BITPOS must be aligned on a byte
5284 boundary. If so, we simply do a block copy. */
5285 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5286 {
5287 unsigned int exp_align = expr_align (exp);
5288
5289 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5290 || bitpos % BITS_PER_UNIT != 0)
5291 abort ();
5292
5293 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5294
5295 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5296 align = MIN (exp_align, align);
5297
5298 /* Find an alignment that is consistent with the bit position. */
5299 while ((bitpos % align) != 0)
5300 align >>= 1;
5301
5302 emit_block_move (target, temp,
5303 bitsize == -1 ? expr_size (exp)
5304 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5305 / BITS_PER_UNIT));
5306
5307 return value_mode == VOIDmode ? const0_rtx : target;
5308 }
5309
5310 /* Store the value in the bitfield. */
5311 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5312 if (value_mode != VOIDmode)
5313 {
5314 /* The caller wants an rtx for the value. */
5315 /* If possible, avoid refetching from the bitfield itself. */
5316 if (width_mask != 0
5317 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5318 {
5319 tree count;
5320 enum machine_mode tmode;
5321
5322 if (unsignedp)
5323 return expand_and (temp,
5324 GEN_INT
5325 (trunc_int_for_mode
5326 (width_mask,
5327 GET_MODE (temp) == VOIDmode
5328 ? value_mode
5329 : GET_MODE (temp))), NULL_RTX);
5330 tmode = GET_MODE (temp);
5331 if (tmode == VOIDmode)
5332 tmode = value_mode;
5333 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5334 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5335 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5336 }
5337 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5338 NULL_RTX, value_mode, 0, align,
5339 total_size);
5340 }
5341 return const0_rtx;
5342 }
5343 else
5344 {
5345 rtx addr = XEXP (target, 0);
5346 rtx to_rtx;
5347
5348 /* If a value is wanted, it must be the lhs;
5349 so make the address stable for multiple use. */
5350
5351 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5352 && ! CONSTANT_ADDRESS_P (addr)
5353 /* A frame-pointer reference is already stable. */
5354 && ! (GET_CODE (addr) == PLUS
5355 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5356 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5357 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5358 target = replace_equiv_address (target, copy_to_reg (addr));
5359
5360 /* Now build a reference to just the desired component. */
5361
5362 to_rtx = copy_rtx (adjust_address (target, mode,
5363 bitpos / BITS_PER_UNIT));
5364
5365 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5366 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5367 {
5368 to_rtx = copy_rtx (to_rtx);
5369 set_mem_alias_set (to_rtx, alias_set);
5370 }
5371
5372 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5373 }
5374 }
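/* Illustrative sketch, not part of GCC: the store_bit_field path above
   avoids refetching the stored value by masking (unsigned fields) or by
   a left shift followed by an arithmetic right shift (signed fields).
   The plain-C fragment below, assuming a hypothetical 32-bit mode, shows
   the same shift trick that the two expand_shift calls perform at rtl
   level.  */
#if 0
static int
sign_extend_field (raw, bitsize)
     unsigned int raw;
     int bitsize;
{
  /* GET_MODE_BITSIZE (mode) - bitsize, with an assumed 32-bit mode.  */
  int shift = 32 - bitsize;

  /* LSHIFT_EXPR then RSHIFT_EXPR in a signed type propagates the
     field's sign bit through the upper bits.  */
  return ((int) (raw << shift)) >> shift;
}
#endif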
5375 \f
5376 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5377 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5378 codes and find the ultimate containing object, which we return.
5379
5380 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5381 bit position, and *PUNSIGNEDP to the signedness of the field.
5382 If the position of the field is variable, we store a tree
5383 giving the variable offset (in units) in *POFFSET.
5384 This offset is in addition to the bit position.
5385 If the position is not variable, we store 0 in *POFFSET.
5386 We set *PALIGNMENT to the alignment of the address that will be
5387 computed. This is the alignment of the thing we return if *POFFSET
5388 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5389
5390 If any of the extraction expressions is volatile,
5391 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5392
5393 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5394 is a mode that can be used to access the field. In that case, *PBITSIZE
5395 is redundant.
5396
5397 If the field describes a variable-sized object, *PMODE is set to
5398 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5399 this case, but the address of the object can be found. */
5400
5401 tree
5402 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5403 punsignedp, pvolatilep, palignment)
5404 tree exp;
5405 HOST_WIDE_INT *pbitsize;
5406 HOST_WIDE_INT *pbitpos;
5407 tree *poffset;
5408 enum machine_mode *pmode;
5409 int *punsignedp;
5410 int *pvolatilep;
5411 unsigned int *palignment;
5412 {
5413 tree size_tree = 0;
5414 enum machine_mode mode = VOIDmode;
5415 tree offset = size_zero_node;
5416 tree bit_offset = bitsize_zero_node;
5417 unsigned int alignment = BIGGEST_ALIGNMENT;
5418 tree placeholder_ptr = 0;
5419 tree tem;
5420
5421 /* First get the mode, signedness, and size. We do this from just the
5422 outermost expression. */
5423 if (TREE_CODE (exp) == COMPONENT_REF)
5424 {
5425 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5426 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5427 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5428
5429 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5430 }
5431 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5432 {
5433 size_tree = TREE_OPERAND (exp, 1);
5434 *punsignedp = TREE_UNSIGNED (exp);
5435 }
5436 else
5437 {
5438 mode = TYPE_MODE (TREE_TYPE (exp));
5439 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5440
5441 if (mode == BLKmode)
5442 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5443 else
5444 *pbitsize = GET_MODE_BITSIZE (mode);
5445 }
5446
5447 if (size_tree != 0)
5448 {
5449 if (! host_integerp (size_tree, 1))
5450 mode = BLKmode, *pbitsize = -1;
5451 else
5452 *pbitsize = tree_low_cst (size_tree, 1);
5453 }
5454
5455 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5456 and find the ultimate containing object. */
5457 while (1)
5458 {
5459 if (TREE_CODE (exp) == BIT_FIELD_REF)
5460 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5461 else if (TREE_CODE (exp) == COMPONENT_REF)
5462 {
5463 tree field = TREE_OPERAND (exp, 1);
5464 tree this_offset = DECL_FIELD_OFFSET (field);
5465
5466 /* If this field hasn't been filled in yet, don't go
5467 past it. This should only happen when folding expressions
5468 made during type construction. */
5469 if (this_offset == 0)
5470 break;
5471 else if (! TREE_CONSTANT (this_offset)
5472 && contains_placeholder_p (this_offset))
5473 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5474
5475 offset = size_binop (PLUS_EXPR, offset, this_offset);
5476 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5477 DECL_FIELD_BIT_OFFSET (field));
5478
5479 if (! host_integerp (offset, 0))
5480 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5481 }
5482
5483 else if (TREE_CODE (exp) == ARRAY_REF
5484 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5485 {
5486 tree index = TREE_OPERAND (exp, 1);
5487 tree array = TREE_OPERAND (exp, 0);
5488 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5489 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5490 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5491
5492 /* We assume all arrays have sizes that are a multiple of a byte.
5493 First subtract the lower bound, if any, in the type of the
5494 index, then convert to sizetype and multiply by the size of the
5495 array element. */
5496 if (low_bound != 0 && ! integer_zerop (low_bound))
5497 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5498 index, low_bound));
5499
5500 /* If the index has a self-referential type, pass it to a
5501 WITH_RECORD_EXPR; if the component size does, pass our
5502 component to one. */
5503 if (! TREE_CONSTANT (index)
5504 && contains_placeholder_p (index))
5505 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5506 if (! TREE_CONSTANT (unit_size)
5507 && contains_placeholder_p (unit_size))
5508 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5509
5510 offset = size_binop (PLUS_EXPR, offset,
5511 size_binop (MULT_EXPR,
5512 convert (sizetype, index),
5513 unit_size));
5514 }
5515
5516 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5517 {
5518 tree new = find_placeholder (exp, &placeholder_ptr);
5519
5520 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5521 We might have been called from tree optimization where we
5522 haven't set up an object yet. */
5523 if (new == 0)
5524 break;
5525 else
5526 exp = new;
5527
5528 continue;
5529 }
5530 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5531 && ! ((TREE_CODE (exp) == NOP_EXPR
5532 || TREE_CODE (exp) == CONVERT_EXPR)
5533 && (TYPE_MODE (TREE_TYPE (exp))
5534 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5535 break;
5536
5537 /* If any reference in the chain is volatile, the effect is volatile. */
5538 if (TREE_THIS_VOLATILE (exp))
5539 *pvolatilep = 1;
5540
5541 /* If the offset is non-constant already, then we can't assume any
5542 alignment more than the alignment here. */
5543 if (! TREE_CONSTANT (offset))
5544 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5545
5546 exp = TREE_OPERAND (exp, 0);
5547 }
5548
5549 if (DECL_P (exp))
5550 alignment = MIN (alignment, DECL_ALIGN (exp));
5551 else if (TREE_TYPE (exp) != 0)
5552 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5553
5554 /* If OFFSET is constant, see if we can return the whole thing as a
5555 constant bit position. Otherwise, split it up. */
5556 if (host_integerp (offset, 0)
5557 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5558 bitsize_unit_node))
5559 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5560 && host_integerp (tem, 0))
5561 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5562 else
5563 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5564
5565 *pmode = mode;
5566 *palignment = alignment;
5567 return exp;
5568 }
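/* Illustrative sketch, not part of GCC: how a caller typically uses
   get_inner_reference.  The names below are hypothetical; the real
   callers in this file appear in expand_expr and the assignment
   expanders.  For an expression such as `s.f', TEM ends up being `s',
   BITSIZE/BITPOS describe field `f', and OFFSET is nonzero only when
   the field position is variable.  */
#if 0
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;
  unsigned int alignment;
  tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				  &mode1, &unsignedp, &volatilep,
				  &alignment);
}
#endif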
5569
5570 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5571
5572 static enum memory_use_mode
5573 get_memory_usage_from_modifier (modifier)
5574 enum expand_modifier modifier;
5575 {
5576 switch (modifier)
5577 {
5578 case EXPAND_NORMAL:
5579 case EXPAND_SUM:
5580 return MEMORY_USE_RO;
5581 break;
5582 case EXPAND_MEMORY_USE_WO:
5583 return MEMORY_USE_WO;
5584 break;
5585 case EXPAND_MEMORY_USE_RW:
5586 return MEMORY_USE_RW;
5587 break;
5588 case EXPAND_MEMORY_USE_DONT:
5589 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5590 MEMORY_USE_DONT, because they are modifiers to a call of
5591 expand_expr in the ADDR_EXPR case of expand_expr. */
5592 case EXPAND_CONST_ADDRESS:
5593 case EXPAND_INITIALIZER:
5594 return MEMORY_USE_DONT;
5595 case EXPAND_MEMORY_USE_BAD:
5596 default:
5597 abort ();
5598 }
5599 }
5600 \f
5601 /* Given an rtx VALUE that may contain additions and multiplications, return
5602 an equivalent value that just refers to a register, memory, or constant.
5603 This is done by generating instructions to perform the arithmetic and
5604 returning a pseudo-register containing the value.
5605
5606 The returned value may be a REG, SUBREG, MEM or constant. */
5607
5608 rtx
5609 force_operand (value, target)
5610 rtx value, target;
5611 {
5612 optab binoptab = 0;
5613 /* Use a temporary to force order of execution of calls to
5614 `force_operand'. */
5615 rtx tmp;
5616 rtx op2;
5617 /* Use subtarget as the target for operand 0 of a binary operation. */
5618 rtx subtarget = get_subtarget (target);
5619
5620 /* Check for a PIC address load. */
5621 if (flag_pic
5622 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5623 && XEXP (value, 0) == pic_offset_table_rtx
5624 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5625 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5626 || GET_CODE (XEXP (value, 1)) == CONST))
5627 {
5628 if (!subtarget)
5629 subtarget = gen_reg_rtx (GET_MODE (value));
5630 emit_move_insn (subtarget, value);
5631 return subtarget;
5632 }
5633
5634 if (GET_CODE (value) == PLUS)
5635 binoptab = add_optab;
5636 else if (GET_CODE (value) == MINUS)
5637 binoptab = sub_optab;
5638 else if (GET_CODE (value) == MULT)
5639 {
5640 op2 = XEXP (value, 1);
5641 if (!CONSTANT_P (op2)
5642 && !(GET_CODE (op2) == REG && op2 != subtarget))
5643 subtarget = 0;
5644 tmp = force_operand (XEXP (value, 0), subtarget);
5645 return expand_mult (GET_MODE (value), tmp,
5646 force_operand (op2, NULL_RTX),
5647 target, 1);
5648 }
5649
5650 if (binoptab)
5651 {
5652 op2 = XEXP (value, 1);
5653 if (!CONSTANT_P (op2)
5654 && !(GET_CODE (op2) == REG && op2 != subtarget))
5655 subtarget = 0;
5656 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5657 {
5658 binoptab = add_optab;
5659 op2 = negate_rtx (GET_MODE (value), op2);
5660 }
5661
5662 /* Check for an addition with OP2 a constant integer and our first
5663 operand a PLUS of a virtual register and something else. In that
5664 case, we want to emit the sum of the virtual register and the
5665 constant first and then add the other value. This allows virtual
5666 register instantiation to simply modify the constant rather than
5667 creating another one around this addition. */
5668 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5669 && GET_CODE (XEXP (value, 0)) == PLUS
5670 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5671 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5672 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5673 {
5674 rtx temp = expand_binop (GET_MODE (value), binoptab,
5675 XEXP (XEXP (value, 0), 0), op2,
5676 subtarget, 0, OPTAB_LIB_WIDEN);
5677 return expand_binop (GET_MODE (value), binoptab, temp,
5678 force_operand (XEXP (XEXP (value, 0), 1), 0),
5679 target, 0, OPTAB_LIB_WIDEN);
5680 }
5681
5682 tmp = force_operand (XEXP (value, 0), subtarget);
5683 return expand_binop (GET_MODE (value), binoptab, tmp,
5684 force_operand (op2, NULL_RTX),
5685 target, 0, OPTAB_LIB_WIDEN);
5686 /* We give UNSIGNEDP = 0 to expand_binop
5687 because the only operations we are expanding here are signed ones. */
5688 }
5689 return value;
5690 }
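/* Illustrative sketch, not part of GCC: forcing a composite address
   into an operand.  For a value such as
   (plus (plus (reg virtual-stack-vars) (const_int 8)) (reg N)) the
   special case above adds the constant to the virtual register first,
   so that virtual register instantiation can simply fold the constant,
   and only then adds the remaining term.  SOME_PSEUDO below is a
   hypothetical pseudo register.  */
#if 0
{
  rtx addr = gen_rtx_PLUS (Pmode,
			   plus_constant (virtual_stack_vars_rtx, 8),
			   some_pseudo);
  /* Emits the additions and returns a register (or other operand)
     holding the computed address.  */
  rtx reg = force_operand (addr, NULL_RTX);
}
#endif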
5691 \f
5692 /* Subroutine of expand_expr: return nonzero iff there is no way that
5693 EXP can reference X, which is being modified. TOP_P is nonzero if this
5694 call is going to be used to determine whether we need a temporary
5695 for EXP, as opposed to a recursive call to this function.
5696
5697 It is always safe for this routine to return zero since it merely
5698 searches for optimization opportunities. */
5699
5700 int
5701 safe_from_p (x, exp, top_p)
5702 rtx x;
5703 tree exp;
5704 int top_p;
5705 {
5706 rtx exp_rtl = 0;
5707 int i, nops;
5708 static tree save_expr_list;
5709
5710 if (x == 0
5711 /* If EXP has varying size, we MUST use a target since we currently
5712 have no way of allocating temporaries of variable size
5713 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5714 So we assume here that something at a higher level has prevented a
5715 clash. This is somewhat bogus, but the best we can do. Only
5716 do this when X is BLKmode and when we are at the top level. */
5717 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5718 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5719 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5720 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5721 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5722 != INTEGER_CST)
5723 && GET_MODE (x) == BLKmode)
5724 /* If X is in the outgoing argument area, it is always safe. */
5725 || (GET_CODE (x) == MEM
5726 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5727 || (GET_CODE (XEXP (x, 0)) == PLUS
5728 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5729 return 1;
5730
5731 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5732 find the underlying pseudo. */
5733 if (GET_CODE (x) == SUBREG)
5734 {
5735 x = SUBREG_REG (x);
5736 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5737 return 0;
5738 }
5739
5740 /* A SAVE_EXPR might appear many times in the expression passed to the
5741 top-level safe_from_p call, and if it has a complex subexpression,
5742 examining it multiple times could result in a combinatorial explosion.
5743 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5744 with optimization took about 28 minutes to compile -- even though it was
5745 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5746 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5747 we have processed. Note that the only test of top_p was above. */
5748
5749 if (top_p)
5750 {
5751 int rtn;
5752 tree t;
5753
5754 save_expr_list = 0;
5755
5756 rtn = safe_from_p (x, exp, 0);
5757
5758 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5759 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5760
5761 return rtn;
5762 }
5763
5764 /* Now look at our tree code and possibly recurse. */
5765 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5766 {
5767 case 'd':
5768 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5769 break;
5770
5771 case 'c':
5772 return 1;
5773
5774 case 'x':
5775 if (TREE_CODE (exp) == TREE_LIST)
5776 return ((TREE_VALUE (exp) == 0
5777 || safe_from_p (x, TREE_VALUE (exp), 0))
5778 && (TREE_CHAIN (exp) == 0
5779 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5780 else if (TREE_CODE (exp) == ERROR_MARK)
5781 return 1; /* An already-visited SAVE_EXPR? */
5782 else
5783 return 0;
5784
5785 case '1':
5786 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5787
5788 case '2':
5789 case '<':
5790 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5791 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5792
5793 case 'e':
5794 case 'r':
5795 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5796 the expression. If it is set, we conflict iff we are that rtx or
5797 both are in memory. Otherwise, we check all operands of the
5798 expression recursively. */
5799
5800 switch (TREE_CODE (exp))
5801 {
5802 case ADDR_EXPR:
5803 /* If the operand is static or we are static, we can't conflict.
5804 Likewise if we don't conflict with the operand at all. */
5805 if (staticp (TREE_OPERAND (exp, 0))
5806 || TREE_STATIC (exp)
5807 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5808 return 1;
5809
5810 /* Otherwise, the only way this can conflict is if we are taking
5811 the address of a DECL whose address is part of X, which is
5812 very rare. */
5813 exp = TREE_OPERAND (exp, 0);
5814 if (DECL_P (exp))
5815 {
5816 if (!DECL_RTL_SET_P (exp)
5817 || GET_CODE (DECL_RTL (exp)) != MEM)
5818 return 0;
5819 else
5820 exp_rtl = XEXP (DECL_RTL (exp), 0);
5821 }
5822 break;
5823
5824 case INDIRECT_REF:
5825 if (GET_CODE (x) == MEM
5826 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5827 get_alias_set (exp)))
5828 return 0;
5829 break;
5830
5831 case CALL_EXPR:
5832 /* Assume that the call will clobber all hard registers and
5833 all of memory. */
5834 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5835 || GET_CODE (x) == MEM)
5836 return 0;
5837 break;
5838
5839 case RTL_EXPR:
5840 /* If a sequence exists, we would have to scan every instruction
5841 in the sequence to see if it was safe. This is probably not
5842 worthwhile. */
5843 if (RTL_EXPR_SEQUENCE (exp))
5844 return 0;
5845
5846 exp_rtl = RTL_EXPR_RTL (exp);
5847 break;
5848
5849 case WITH_CLEANUP_EXPR:
5850 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5851 break;
5852
5853 case CLEANUP_POINT_EXPR:
5854 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5855
5856 case SAVE_EXPR:
5857 exp_rtl = SAVE_EXPR_RTL (exp);
5858 if (exp_rtl)
5859 break;
5860
5861 /* If we've already scanned this, don't do it again. Otherwise,
5862 show we've scanned it and record for clearing the flag if we're
5863 going on. */
5864 if (TREE_PRIVATE (exp))
5865 return 1;
5866
5867 TREE_PRIVATE (exp) = 1;
5868 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5869 {
5870 TREE_PRIVATE (exp) = 0;
5871 return 0;
5872 }
5873
5874 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5875 return 1;
5876
5877 case BIND_EXPR:
5878 /* The only operand we look at is operand 1. The rest aren't
5879 part of the expression. */
5880 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5881
5882 case METHOD_CALL_EXPR:
5883 /* This takes an rtx argument, but shouldn't appear here. */
5884 abort ();
5885
5886 default:
5887 break;
5888 }
5889
5890 /* If we have an rtx, we do not need to scan our operands. */
5891 if (exp_rtl)
5892 break;
5893
5894 nops = first_rtl_op (TREE_CODE (exp));
5895 for (i = 0; i < nops; i++)
5896 if (TREE_OPERAND (exp, i) != 0
5897 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5898 return 0;
5899
5900 /* If this is a language-specific tree code, it may require
5901 special handling. */
5902 if ((unsigned int) TREE_CODE (exp)
5903 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5904 && lang_safe_from_p
5905 && !(*lang_safe_from_p) (x, exp))
5906 return 0;
5907 }
5908
5909 /* If we have an rtl, find any enclosed object. Then see if we conflict
5910 with it. */
5911 if (exp_rtl)
5912 {
5913 if (GET_CODE (exp_rtl) == SUBREG)
5914 {
5915 exp_rtl = SUBREG_REG (exp_rtl);
5916 if (GET_CODE (exp_rtl) == REG
5917 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5918 return 0;
5919 }
5920
5921 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5922 are memory and they conflict. */
5923 return ! (rtx_equal_p (x, exp_rtl)
5924 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5925 && true_dependence (exp_rtl, GET_MODE (x), x,
5926 rtx_addr_varies_p)));
5927 }
5928
5929 /* If we reach here, it is safe. */
5930 return 1;
5931 }
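/* Illustrative sketch, not part of GCC: the usual way safe_from_p is
   consulted before reusing TARGET for an intermediate result, modeled
   on the CONSTRUCTOR handling later in this file.  If EXP might refer
   to TARGET, a fresh temporary must be used instead.  */
#if 0
if (target == 0 || ! safe_from_p (target, exp, 1))
  target = gen_reg_rtx (mode);	/* cannot safely reuse TARGET */
#endif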
5932
5933 /* Subroutine of expand_expr: return rtx if EXP is a
5934 variable or parameter; else return 0. */
5935
5936 static rtx
5937 var_rtx (exp)
5938 tree exp;
5939 {
5940 STRIP_NOPS (exp);
5941 switch (TREE_CODE (exp))
5942 {
5943 case PARM_DECL:
5944 case VAR_DECL:
5945 return DECL_RTL (exp);
5946 default:
5947 return 0;
5948 }
5949 }
5950
5951 #ifdef MAX_INTEGER_COMPUTATION_MODE
5952
5953 void
5954 check_max_integer_computation_mode (exp)
5955 tree exp;
5956 {
5957 enum tree_code code;
5958 enum machine_mode mode;
5959
5960 /* Strip any NOPs that don't change the mode. */
5961 STRIP_NOPS (exp);
5962 code = TREE_CODE (exp);
5963
5964 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5965 if (code == NOP_EXPR
5966 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5967 return;
5968
5969 /* First check the type of the overall operation. We need only look at
5970 unary, binary and relational operations. */
5971 if (TREE_CODE_CLASS (code) == '1'
5972 || TREE_CODE_CLASS (code) == '2'
5973 || TREE_CODE_CLASS (code) == '<')
5974 {
5975 mode = TYPE_MODE (TREE_TYPE (exp));
5976 if (GET_MODE_CLASS (mode) == MODE_INT
5977 && mode > MAX_INTEGER_COMPUTATION_MODE)
5978 internal_error ("unsupported wide integer operation");
5979 }
5980
5981 /* Check operand of a unary op. */
5982 if (TREE_CODE_CLASS (code) == '1')
5983 {
5984 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5985 if (GET_MODE_CLASS (mode) == MODE_INT
5986 && mode > MAX_INTEGER_COMPUTATION_MODE)
5987 internal_error ("unsupported wide integer operation");
5988 }
5989
5990 /* Check operands of a binary/comparison op. */
5991 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5992 {
5993 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5994 if (GET_MODE_CLASS (mode) == MODE_INT
5995 && mode > MAX_INTEGER_COMPUTATION_MODE)
5996 internal_error ("unsupported wide integer operation");
5997
5998 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5999 if (GET_MODE_CLASS (mode) == MODE_INT
6000 && mode > MAX_INTEGER_COMPUTATION_MODE)
6001 internal_error ("unsupported wide integer operation");
6002 }
6003 }
6004 #endif
6005 \f
6006 /* Return the highest power of two that EXP is known to be a multiple of.
6007 This is used in updating alignment of MEMs in array references. */
6008
6009 static HOST_WIDE_INT
6010 highest_pow2_factor (exp)
6011 tree exp;
6012 {
6013 HOST_WIDE_INT c0, c1;
6014
6015 switch (TREE_CODE (exp))
6016 {
6017 case INTEGER_CST:
6018 /* If the integer is expressible in a HOST_WIDE_INT, we can find
6019 the lowest bit that's a one. If the result is zero or negative,
6020 pessimize by returning 1. This is overly-conservative, but such
6021 things should not happen in the offset expressions that we are
6022 called with. */
6023 if (host_integerp (exp, 0))
6024 {
6025 c0 = tree_low_cst (exp, 0);
6026 return c0 >= 0 ? c0 & -c0 : 1;
6027 }
6028 break;
6029
6030 case PLUS_EXPR: case MINUS_EXPR:
6031 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6032 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6033 return MIN (c0, c1);
6034
6035 case MULT_EXPR:
6036 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6037 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6038 return c0 * c1;
6039
6040 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6041 case CEIL_DIV_EXPR:
6042 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6043 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6044 return MAX (1, c0 / c1);
6045
6046 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6047 case COMPOUND_EXPR: case SAVE_EXPR:
6048 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6049
6050 case COND_EXPR:
6051 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6052 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6053 return MIN (c0, c1);
6054
6055 default:
6056 break;
6057 }
6058
6059 return 1;
6060 }
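/* Illustrative worked example, not part of GCC: for an offset tree of
   the form `i * 12 + 8', the INTEGER_CST 12 contributes 12 & -12 = 4,
   the variable `i' falls through to the default case and contributes 1,
   so the MULT_EXPR yields 1 * 4 = 4; the constant 8 contributes 8; and
   the PLUS_EXPR returns MIN (4, 8) = 4.  The offset is therefore known
   to be a multiple of 4 when updating the alignment of the MEM.  */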
6061 \f
6062 /* Return an object on the placeholder list that matches EXP, a
6063 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6064 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6065 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6066 is a location which initially points to a starting location in the
6067 placeholder list (zero means start of the list) and where a pointer into
6068 the placeholder list at which the object is found is placed. */
6069
6070 tree
6071 find_placeholder (exp, plist)
6072 tree exp;
6073 tree *plist;
6074 {
6075 tree type = TREE_TYPE (exp);
6076 tree placeholder_expr;
6077
6078 for (placeholder_expr
6079 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6080 placeholder_expr != 0;
6081 placeholder_expr = TREE_CHAIN (placeholder_expr))
6082 {
6083 tree need_type = TYPE_MAIN_VARIANT (type);
6084 tree elt;
6085
6086 /* Find the outermost reference that is of the type we want. If none,
6087 see if any object has a type that is a pointer to the type we
6088 want. */
6089 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6090 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6091 || TREE_CODE (elt) == COND_EXPR)
6092 ? TREE_OPERAND (elt, 1)
6093 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6094 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6095 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6096 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6097 ? TREE_OPERAND (elt, 0) : 0))
6098 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6099 {
6100 if (plist)
6101 *plist = placeholder_expr;
6102 return elt;
6103 }
6104
6105 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6106 elt
6107 = ((TREE_CODE (elt) == COMPOUND_EXPR
6108 || TREE_CODE (elt) == COND_EXPR)
6109 ? TREE_OPERAND (elt, 1)
6110 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6111 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6112 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6113 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6114 ? TREE_OPERAND (elt, 0) : 0))
6115 if (POINTER_TYPE_P (TREE_TYPE (elt))
6116 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6117 == need_type))
6118 {
6119 if (plist)
6120 *plist = placeholder_expr;
6121 return build1 (INDIRECT_REF, need_type, elt);
6122 }
6123 }
6124
6125 return 0;
6126 }
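/* Illustrative sketch, not part of GCC: how find_placeholder is used
   when a PLACEHOLDER_EXPR is reached during expansion.  The
   WITH_RECORD_EXPR case of expand_expr pushes the record object onto
   placeholder_list; when the self-referential size expression is later
   expanded, the matching object is looked up here.  Modeled on the
   PLACEHOLDER_EXPR case below.  */
#if 0
{
  tree placeholder_expr = 0;
  tree object = find_placeholder (exp, &placeholder_expr);

  if (object == 0)
    abort ();	/* no enclosing WITH_RECORD_EXPR supplied an object */
}
#endif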
6127 \f
6128 /* expand_expr: generate code for computing expression EXP.
6129 An rtx for the computed value is returned. The value is never null.
6130 In the case of a void EXP, const0_rtx is returned.
6131
6132 The value may be stored in TARGET if TARGET is nonzero.
6133 TARGET is just a suggestion; callers must assume that
6134 the rtx returned may not be the same as TARGET.
6135
6136 If TARGET is CONST0_RTX, it means that the value will be ignored.
6137
6138 If TMODE is not VOIDmode, it suggests generating the
6139 result in mode TMODE. But this is done only when convenient.
6140 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6141 TMODE is just a suggestion; callers must assume that
6142 the rtx returned may not have mode TMODE.
6143
6144 Note that TARGET may have neither TMODE nor MODE. In that case, it
6145 probably will not be used.
6146
6147 If MODIFIER is EXPAND_SUM then when EXP is an addition
6148 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6149 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6150 products as above, or REG or MEM, or constant.
6151 Ordinarily in such cases we would output mul or add instructions
6152 and then return a pseudo reg containing the sum.
6153
6154 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6155 it also marks a label as absolutely required (it can't be dead).
6156 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6157 This is used for outputting expressions used in initializers.
6158
6159 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6160 with a constant address even if that address is not normally legitimate.
6161 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
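/* Illustrative sketch, not part of GCC: a minimal call as described
   above.  TARGET and TMODE are only hints, so the caller must cope
   with a different rtx (and possibly a MEM) coming back.  */
#if 0
{
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (val) == MEM)
    val = copy_to_reg (val);	/* force the value into a pseudo */
}
#endif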
6162
6163 rtx
6164 expand_expr (exp, target, tmode, modifier)
6165 tree exp;
6166 rtx target;
6167 enum machine_mode tmode;
6168 enum expand_modifier modifier;
6169 {
6170 rtx op0, op1, temp;
6171 tree type = TREE_TYPE (exp);
6172 int unsignedp = TREE_UNSIGNED (type);
6173 enum machine_mode mode;
6174 enum tree_code code = TREE_CODE (exp);
6175 optab this_optab;
6176 rtx subtarget, original_target;
6177 int ignore;
6178 tree context;
6179 /* Used by check-memory-usage to make modifier read only. */
6180 enum expand_modifier ro_modifier;
6181
6182 /* Handle ERROR_MARK before anybody tries to access its type. */
6183 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6184 {
6185 op0 = CONST0_RTX (tmode);
6186 if (op0 != 0)
6187 return op0;
6188 return const0_rtx;
6189 }
6190
6191 mode = TYPE_MODE (type);
6192 /* Use subtarget as the target for operand 0 of a binary operation. */
6193 subtarget = get_subtarget (target);
6194 original_target = target;
6195 ignore = (target == const0_rtx
6196 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6197 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6198 || code == COND_EXPR)
6199 && TREE_CODE (type) == VOID_TYPE));
6200
6201 /* Make a read-only version of the modifier. */
6202 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6203 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6204 ro_modifier = modifier;
6205 else
6206 ro_modifier = EXPAND_NORMAL;
6207
6208 /* If we are going to ignore this result, we need only do something
6209 if there is a side-effect somewhere in the expression. If there
6210 is, short-circuit the most common cases here. Note that we must
6211 not call expand_expr with anything but const0_rtx in case this
6212 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6213
6214 if (ignore)
6215 {
6216 if (! TREE_SIDE_EFFECTS (exp))
6217 return const0_rtx;
6218
6219 /* Ensure we reference a volatile object even if value is ignored, but
6220 don't do this if all we are doing is taking its address. */
6221 if (TREE_THIS_VOLATILE (exp)
6222 && TREE_CODE (exp) != FUNCTION_DECL
6223 && mode != VOIDmode && mode != BLKmode
6224 && modifier != EXPAND_CONST_ADDRESS)
6225 {
6226 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6227 if (GET_CODE (temp) == MEM)
6228 temp = copy_to_reg (temp);
6229 return const0_rtx;
6230 }
6231
6232 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6233 || code == INDIRECT_REF || code == BUFFER_REF)
6234 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6235 VOIDmode, ro_modifier);
6236 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6237 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6238 {
6239 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6240 ro_modifier);
6241 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6242 ro_modifier);
6243 return const0_rtx;
6244 }
6245 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6246 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6247 /* If the second operand has no side effects, just evaluate
6248 the first. */
6249 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6250 VOIDmode, ro_modifier);
6251 else if (code == BIT_FIELD_REF)
6252 {
6253 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6254 ro_modifier);
6255 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6256 ro_modifier);
6257 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6258 ro_modifier);
6259 return const0_rtx;
6260 }
6261 ;
6262 target = 0;
6263 }
6264
6265 #ifdef MAX_INTEGER_COMPUTATION_MODE
6266 /* Only check stuff here if the mode we want is different from the mode
6267 of the expression; if it's the same, check_max_integer_computation_mode
6268 will handle it. Do we really need to check this stuff at all? */
6269
6270 if (target
6271 && GET_MODE (target) != mode
6272 && TREE_CODE (exp) != INTEGER_CST
6273 && TREE_CODE (exp) != PARM_DECL
6274 && TREE_CODE (exp) != ARRAY_REF
6275 && TREE_CODE (exp) != ARRAY_RANGE_REF
6276 && TREE_CODE (exp) != COMPONENT_REF
6277 && TREE_CODE (exp) != BIT_FIELD_REF
6278 && TREE_CODE (exp) != INDIRECT_REF
6279 && TREE_CODE (exp) != CALL_EXPR
6280 && TREE_CODE (exp) != VAR_DECL
6281 && TREE_CODE (exp) != RTL_EXPR)
6282 {
6283 enum machine_mode mode = GET_MODE (target);
6284
6285 if (GET_MODE_CLASS (mode) == MODE_INT
6286 && mode > MAX_INTEGER_COMPUTATION_MODE)
6287 internal_error ("unsupported wide integer operation");
6288 }
6289
6290 if (tmode != mode
6291 && TREE_CODE (exp) != INTEGER_CST
6292 && TREE_CODE (exp) != PARM_DECL
6293 && TREE_CODE (exp) != ARRAY_REF
6294 && TREE_CODE (exp) != ARRAY_RANGE_REF
6295 && TREE_CODE (exp) != COMPONENT_REF
6296 && TREE_CODE (exp) != BIT_FIELD_REF
6297 && TREE_CODE (exp) != INDIRECT_REF
6298 && TREE_CODE (exp) != VAR_DECL
6299 && TREE_CODE (exp) != CALL_EXPR
6300 && TREE_CODE (exp) != RTL_EXPR
6301 && GET_MODE_CLASS (tmode) == MODE_INT
6302 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6303 internal_error ("unsupported wide integer operation");
6304
6305 check_max_integer_computation_mode (exp);
6306 #endif
6307
6308 /* If will do cse, generate all results into pseudo registers
6309 since 1) that allows cse to find more things
6310 and 2) otherwise cse could produce an insn the machine
6311 cannot support. */
6312
6313 if (! cse_not_expected && mode != BLKmode && target
6314 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6315 target = subtarget;
6316
6317 switch (code)
6318 {
6319 case LABEL_DECL:
6320 {
6321 tree function = decl_function_context (exp);
6322 /* Handle using a label in a containing function. */
6323 if (function != current_function_decl
6324 && function != inline_function_decl && function != 0)
6325 {
6326 struct function *p = find_function_data (function);
6327 p->expr->x_forced_labels
6328 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6329 p->expr->x_forced_labels);
6330 }
6331 else
6332 {
6333 if (modifier == EXPAND_INITIALIZER)
6334 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6335 label_rtx (exp),
6336 forced_labels);
6337 }
6338
6339 temp = gen_rtx_MEM (FUNCTION_MODE,
6340 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6341 if (function != current_function_decl
6342 && function != inline_function_decl && function != 0)
6343 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6344 return temp;
6345 }
6346
6347 case PARM_DECL:
6348 if (DECL_RTL (exp) == 0)
6349 {
6350 error_with_decl (exp, "prior parameter's size depends on `%s'");
6351 return CONST0_RTX (mode);
6352 }
6353
6354 /* ... fall through ... */
6355
6356 case VAR_DECL:
6357 /* If a static var's type was incomplete when the decl was written,
6358 but the type is complete now, lay out the decl now. */
6359 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6360 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6361 {
6362 layout_decl (exp, 0);
6363 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6364 }
6365
6366 /* Although static-storage variables start off initialized, according to
6367 ANSI C, a memcpy could overwrite them with uninitialized values. So
6368 we check them too. This also lets us check for read-only variables
6369 accessed via a non-const declaration, in case it won't be detected
6370 any other way (e.g., in an embedded system or OS kernel without
6371 memory protection).
6372
6373 Aggregates are not checked here; they're handled elsewhere. */
6374 if (cfun && current_function_check_memory_usage
6375 && code == VAR_DECL
6376 && GET_CODE (DECL_RTL (exp)) == MEM
6377 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6378 {
6379 enum memory_use_mode memory_usage;
6380 memory_usage = get_memory_usage_from_modifier (modifier);
6381
6382 in_check_memory_usage = 1;
6383 if (memory_usage != MEMORY_USE_DONT)
6384 emit_library_call (chkr_check_addr_libfunc,
6385 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6386 XEXP (DECL_RTL (exp), 0), Pmode,
6387 GEN_INT (int_size_in_bytes (type)),
6388 TYPE_MODE (sizetype),
6389 GEN_INT (memory_usage),
6390 TYPE_MODE (integer_type_node));
6391 in_check_memory_usage = 0;
6392 }
6393
6394 /* ... fall through ... */
6395
6396 case FUNCTION_DECL:
6397 case RESULT_DECL:
6398 if (DECL_RTL (exp) == 0)
6399 abort ();
6400
6401 /* Ensure the variable is marked as used even if it doesn't go through
6402 a parser. If it hasn't been used yet, write out an external
6403 definition. */
6404 if (! TREE_USED (exp))
6405 {
6406 assemble_external (exp);
6407 TREE_USED (exp) = 1;
6408 }
6409
6410 /* Show we haven't gotten RTL for this yet. */
6411 temp = 0;
6412
6413 /* Handle variables inherited from containing functions. */
6414 context = decl_function_context (exp);
6415
6416 /* We treat inline_function_decl as an alias for the current function
6417 because that is the inline function whose vars, types, etc.
6418 are being merged into the current function.
6419 See expand_inline_function. */
6420
6421 if (context != 0 && context != current_function_decl
6422 && context != inline_function_decl
6423 /* If var is static, we don't need a static chain to access it. */
6424 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6425 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6426 {
6427 rtx addr;
6428
6429 /* Mark as non-local and addressable. */
6430 DECL_NONLOCAL (exp) = 1;
6431 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6432 abort ();
6433 mark_addressable (exp);
6434 if (GET_CODE (DECL_RTL (exp)) != MEM)
6435 abort ();
6436 addr = XEXP (DECL_RTL (exp), 0);
6437 if (GET_CODE (addr) == MEM)
6438 addr
6439 = replace_equiv_address (addr,
6440 fix_lexical_addr (XEXP (addr, 0), exp));
6441 else
6442 addr = fix_lexical_addr (addr, exp);
6443
6444 temp = replace_equiv_address (DECL_RTL (exp), addr);
6445 }
6446
6447 /* This is the case of an array whose size is to be determined
6448 from its initializer, while the initializer is still being parsed.
6449 See expand_decl. */
6450
6451 else if (GET_CODE (DECL_RTL (exp)) == MEM
6452 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6453 temp = validize_mem (DECL_RTL (exp));
6454
6455 /* If DECL_RTL is memory, we are in the normal case and either
6456 the address is not valid or it is not a register and -fforce-addr
6457 is specified, get the address into a register. */
6458
6459 else if (GET_CODE (DECL_RTL (exp)) == MEM
6460 && modifier != EXPAND_CONST_ADDRESS
6461 && modifier != EXPAND_SUM
6462 && modifier != EXPAND_INITIALIZER
6463 && (! memory_address_p (DECL_MODE (exp),
6464 XEXP (DECL_RTL (exp), 0))
6465 || (flag_force_addr
6466 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6467 temp = replace_equiv_address (DECL_RTL (exp),
6468 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6469
6470 /* If we got something, return it. But first, set the alignment
6471 if the address is a register. */
6472 if (temp != 0)
6473 {
6474 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6475 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6476
6477 return temp;
6478 }
6479
6480 /* If the mode of DECL_RTL does not match that of the decl, it
6481 must be a promoted value. We return a SUBREG of the wanted mode,
6482 but mark it so that we know that it was already extended. */
6483
6484 if (GET_CODE (DECL_RTL (exp)) == REG
6485 && GET_MODE (DECL_RTL (exp)) != mode)
6486 {
6487 /* Get the signedness used for this variable. Ensure we get the
6488 same mode we got when the variable was declared. */
6489 if (GET_MODE (DECL_RTL (exp))
6490 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6491 abort ();
6492
6493 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6494 SUBREG_PROMOTED_VAR_P (temp) = 1;
6495 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6496 return temp;
6497 }
6498
6499 return DECL_RTL (exp);
6500
6501 case INTEGER_CST:
6502 return immed_double_const (TREE_INT_CST_LOW (exp),
6503 TREE_INT_CST_HIGH (exp), mode);
6504
6505 case CONST_DECL:
6506 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6507 EXPAND_MEMORY_USE_BAD);
6508
6509 case REAL_CST:
6510 /* If optimized, generate immediate CONST_DOUBLE
6511 which will be turned into memory by reload if necessary.
6512
6513 We used to force a register so that loop.c could see it. But
6514 this does not allow gen_* patterns to perform optimizations with
6515 the constants. It also produces two insns in cases like "x = 1.0;".
6516 On most machines, floating-point constants are not permitted in
6517 many insns, so we'd end up copying it to a register in any case.
6518
6519 Now, we do the copying in expand_binop, if appropriate. */
6520 return immed_real_const (exp);
6521
6522 case COMPLEX_CST:
6523 case STRING_CST:
6524 if (! TREE_CST_RTL (exp))
6525 output_constant_def (exp, 1);
6526
6527 /* TREE_CST_RTL probably contains a constant address.
6528 On RISC machines where a constant address isn't valid,
6529 make some insns to get that address into a register. */
6530 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6531 && modifier != EXPAND_CONST_ADDRESS
6532 && modifier != EXPAND_INITIALIZER
6533 && modifier != EXPAND_SUM
6534 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6535 || (flag_force_addr
6536 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6537 return replace_equiv_address (TREE_CST_RTL (exp),
6538 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6539 return TREE_CST_RTL (exp);
6540
6541 case EXPR_WITH_FILE_LOCATION:
6542 {
6543 rtx to_return;
6544 const char *saved_input_filename = input_filename;
6545 int saved_lineno = lineno;
6546 input_filename = EXPR_WFL_FILENAME (exp);
6547 lineno = EXPR_WFL_LINENO (exp);
6548 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6549 emit_line_note (input_filename, lineno);
6550 /* Possibly avoid switching back and forth here. */
6551 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6552 input_filename = saved_input_filename;
6553 lineno = saved_lineno;
6554 return to_return;
6555 }
6556
6557 case SAVE_EXPR:
6558 context = decl_function_context (exp);
6559
6560 /* If this SAVE_EXPR was at global context, assume we are an
6561 initialization function and move it into our context. */
6562 if (context == 0)
6563 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6564
6565 /* We treat inline_function_decl as an alias for the current function
6566 because that is the inline function whose vars, types, etc.
6567 are being merged into the current function.
6568 See expand_inline_function. */
6569 if (context == current_function_decl || context == inline_function_decl)
6570 context = 0;
6571
6572 /* If this is non-local, handle it. */
6573 if (context)
6574 {
6575 /* The following call just exists to abort if the context is
6576 not of a containing function. */
6577 find_function_data (context);
6578
6579 temp = SAVE_EXPR_RTL (exp);
6580 if (temp && GET_CODE (temp) == REG)
6581 {
6582 put_var_into_stack (exp);
6583 temp = SAVE_EXPR_RTL (exp);
6584 }
6585 if (temp == 0 || GET_CODE (temp) != MEM)
6586 abort ();
6587 return
6588 replace_equiv_address (temp,
6589 fix_lexical_addr (XEXP (temp, 0), exp));
6590 }
6591 if (SAVE_EXPR_RTL (exp) == 0)
6592 {
6593 if (mode == VOIDmode)
6594 temp = const0_rtx;
6595 else
6596 temp = assign_temp (build_qualified_type (type,
6597 (TYPE_QUALS (type)
6598 | TYPE_QUAL_CONST)),
6599 3, 0, 0);
6600
6601 SAVE_EXPR_RTL (exp) = temp;
6602 if (!optimize && GET_CODE (temp) == REG)
6603 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6604 save_expr_regs);
6605
6606 /* If the mode of TEMP does not match that of the expression, it
6607 must be a promoted value. We pass store_expr a SUBREG of the
6608 wanted mode but mark it so that we know that it was already
6609 extended. Note that `unsignedp' was modified above in
6610 this case. */
6611
6612 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6613 {
6614 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6615 SUBREG_PROMOTED_VAR_P (temp) = 1;
6616 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6617 }
6618
6619 if (temp == const0_rtx)
6620 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6621 EXPAND_MEMORY_USE_BAD);
6622 else
6623 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6624
6625 TREE_USED (exp) = 1;
6626 }
6627
6628 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6629 must be a promoted value. We return a SUBREG of the wanted mode,
6630 but mark it so that we know that it was already extended. */
6631
6632 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6633 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6634 {
6635 /* Compute the signedness and make the proper SUBREG. */
6636 promote_mode (type, mode, &unsignedp, 0);
6637 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6638 SUBREG_PROMOTED_VAR_P (temp) = 1;
6639 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6640 return temp;
6641 }
6642
6643 return SAVE_EXPR_RTL (exp);
6644
6645 case UNSAVE_EXPR:
6646 {
6647 rtx temp;
6648 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6649 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6650 return temp;
6651 }
6652
6653 case PLACEHOLDER_EXPR:
6654 {
6655 tree old_list = placeholder_list;
6656 tree placeholder_expr = 0;
6657
6658 exp = find_placeholder (exp, &placeholder_expr);
6659 if (exp == 0)
6660 abort ();
6661
6662 placeholder_list = TREE_CHAIN (placeholder_expr);
6663 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6664 placeholder_list = old_list;
6665 return temp;
6666 }
6667
6668 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6669 abort ();
6670
6671 case WITH_RECORD_EXPR:
6672 /* Put the object on the placeholder list, expand our first operand,
6673 and pop the list. */
6674 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6675 placeholder_list);
6676 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6677 tmode, ro_modifier);
6678 placeholder_list = TREE_CHAIN (placeholder_list);
6679 return target;
6680
6681 case GOTO_EXPR:
6682 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6683 expand_goto (TREE_OPERAND (exp, 0));
6684 else
6685 expand_computed_goto (TREE_OPERAND (exp, 0));
6686 return const0_rtx;
6687
6688 case EXIT_EXPR:
6689 expand_exit_loop_if_false (NULL,
6690 invert_truthvalue (TREE_OPERAND (exp, 0)));
6691 return const0_rtx;
6692
6693 case LABELED_BLOCK_EXPR:
6694 if (LABELED_BLOCK_BODY (exp))
6695 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6696 /* Should perhaps use expand_label, but this is simpler and safer. */
6697 do_pending_stack_adjust ();
6698 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6699 return const0_rtx;
6700
6701 case EXIT_BLOCK_EXPR:
6702 if (EXIT_BLOCK_RETURN (exp))
6703 sorry ("returned value in block_exit_expr");
6704 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6705 return const0_rtx;
6706
6707 case LOOP_EXPR:
6708 push_temp_slots ();
6709 expand_start_loop (1);
6710 expand_expr_stmt (TREE_OPERAND (exp, 0));
6711 expand_end_loop ();
6712 pop_temp_slots ();
6713
6714 return const0_rtx;
6715
6716 case BIND_EXPR:
6717 {
6718 tree vars = TREE_OPERAND (exp, 0);
6719 int vars_need_expansion = 0;
6720
6721 /* Need to open a binding contour here because
6722 if there are any cleanups they must be contained here. */
6723 expand_start_bindings (2);
6724
6725 /* Mark the corresponding BLOCK for output in its proper place. */
6726 if (TREE_OPERAND (exp, 2) != 0
6727 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6728 insert_block (TREE_OPERAND (exp, 2));
6729
6730 /* If VARS have not yet been expanded, expand them now. */
6731 while (vars)
6732 {
6733 if (!DECL_RTL_SET_P (vars))
6734 {
6735 vars_need_expansion = 1;
6736 expand_decl (vars);
6737 }
6738 expand_decl_init (vars);
6739 vars = TREE_CHAIN (vars);
6740 }
6741
6742 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6743
6744 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6745
6746 return temp;
6747 }
6748
6749 case RTL_EXPR:
6750 if (RTL_EXPR_SEQUENCE (exp))
6751 {
6752 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6753 abort ();
6754 emit_insns (RTL_EXPR_SEQUENCE (exp));
6755 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6756 }
6757 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6758 free_temps_for_rtl_expr (exp);
6759 return RTL_EXPR_RTL (exp);
6760
6761 case CONSTRUCTOR:
6762 /* If we don't need the result, just ensure we evaluate any
6763 subexpressions. */
6764 if (ignore)
6765 {
6766 tree elt;
6767 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6768 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6769 EXPAND_MEMORY_USE_BAD);
6770 return const0_rtx;
6771 }
6772
6773 /* All elts simple constants => refer to a constant in memory. But
6774 if this is a non-BLKmode mode, let it store a field at a time
6775 since that should make a CONST_INT or CONST_DOUBLE when we
6776 fold. Likewise, if we have a target we can use, it is best to
6777 store directly into the target unless the type is large enough
6778 that memcpy will be used. If we are making an initializer and
6779 all operands are constant, put it in memory as well. */
6780 else if ((TREE_STATIC (exp)
6781 && ((mode == BLKmode
6782 && ! (target != 0 && safe_from_p (target, exp, 1)))
6783 || TREE_ADDRESSABLE (exp)
6784 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6785 && (! MOVE_BY_PIECES_P
6786 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6787 TYPE_ALIGN (type)))
6788 && ! mostly_zeros_p (exp))))
6789 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6790 {
6791 rtx constructor = output_constant_def (exp, 1);
6792
6793 if (modifier != EXPAND_CONST_ADDRESS
6794 && modifier != EXPAND_INITIALIZER
6795 && modifier != EXPAND_SUM)
6796 constructor = validize_mem (constructor);
6797
6798 return constructor;
6799 }
6800 else
6801 {
6802 /* Handle calls that pass values in multiple non-contiguous
6803 locations. The Irix 6 ABI has examples of this. */
6804 if (target == 0 || ! safe_from_p (target, exp, 1)
6805 || GET_CODE (target) == PARALLEL)
6806 target
6807 = assign_temp (build_qualified_type (type,
6808 (TYPE_QUALS (type)
6809 | (TREE_READONLY (exp)
6810 * TYPE_QUAL_CONST))),
6811 TREE_ADDRESSABLE (exp), 1, 1);
6812
6813 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6814 int_size_in_bytes (TREE_TYPE (exp)));
6815 return target;
6816 }
6817
6818 case INDIRECT_REF:
6819 {
6820 tree exp1 = TREE_OPERAND (exp, 0);
6821 tree index;
6822 tree string = string_constant (exp1, &index);
6823
6824 /* Try to optimize reads from const strings. */
6825 if (string
6826 && TREE_CODE (string) == STRING_CST
6827 && TREE_CODE (index) == INTEGER_CST
6828 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6829 && GET_MODE_CLASS (mode) == MODE_INT
6830 && GET_MODE_SIZE (mode) == 1
6831 && modifier != EXPAND_MEMORY_USE_WO)
6832 return
6833 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6834
6835 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6836 op0 = memory_address (mode, op0);
6837
6838 if (cfun && current_function_check_memory_usage
6839 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6840 {
6841 enum memory_use_mode memory_usage;
6842 memory_usage = get_memory_usage_from_modifier (modifier);
6843
6844 if (memory_usage != MEMORY_USE_DONT)
6845 {
6846 in_check_memory_usage = 1;
6847 emit_library_call (chkr_check_addr_libfunc,
6848 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6849 Pmode, GEN_INT (int_size_in_bytes (type)),
6850 TYPE_MODE (sizetype),
6851 GEN_INT (memory_usage),
6852 TYPE_MODE (integer_type_node));
6853 in_check_memory_usage = 0;
6854 }
6855 }
6856
6857 temp = gen_rtx_MEM (mode, op0);
6858 set_mem_attributes (temp, exp, 0);
6859
6860 /* If we are writing to this object and its type is a record with
6861 readonly fields, we must mark it as readonly so it will
6862 conflict with readonly references to those fields. */
6863 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6864 RTX_UNCHANGING_P (temp) = 1;
6865
6866 return temp;
6867 }
6868
6869 case ARRAY_REF:
6870 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6871 abort ();
6872
6873 {
6874 tree array = TREE_OPERAND (exp, 0);
6875 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6876 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6877 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6878 HOST_WIDE_INT i;
6879
6880 /* Optimize the special-case of a zero lower bound.
6881
6882 We convert the low_bound to sizetype to avoid some problems
6883 with constant folding. (E.g. suppose the lower bound is 1,
6884 and its mode is QI. Without the conversion, (ARRAY
6885 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6886 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6887
6888 if (! integer_zerop (low_bound))
6889 index = size_diffop (index, convert (sizetype, low_bound));
6890
6891 /* Fold an expression like: "foo"[2].
6892 This is not done in fold so it won't happen inside &.
6893 Don't fold if this is for wide characters since it's too
6894 difficult to do correctly and this is a very rare case. */
6895
6896 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6897 && TREE_CODE (array) == STRING_CST
6898 && TREE_CODE (index) == INTEGER_CST
6899 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6900 && GET_MODE_CLASS (mode) == MODE_INT
6901 && GET_MODE_SIZE (mode) == 1)
6902 return
6903 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6904
6905 /* If this is a constant index into a constant array,
6906 just get the value from the array. Handle both the cases when
6907 we have an explicit constructor and when our operand is a variable
6908 that was declared const. */
6909
6910 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6911 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6912 && TREE_CODE (index) == INTEGER_CST
6913 && 0 > compare_tree_int (index,
6914 list_length (CONSTRUCTOR_ELTS
6915 (TREE_OPERAND (exp, 0)))))
6916 {
6917 tree elem;
6918
6919 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6920 i = TREE_INT_CST_LOW (index);
6921 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6922 ;
6923
6924 if (elem)
6925 return expand_expr (fold (TREE_VALUE (elem)), target,
6926 tmode, ro_modifier);
6927 }
6928
6929 else if (optimize >= 1
6930 && modifier != EXPAND_CONST_ADDRESS
6931 && modifier != EXPAND_INITIALIZER
6932 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6933 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6934 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6935 {
6936 if (TREE_CODE (index) == INTEGER_CST)
6937 {
6938 tree init = DECL_INITIAL (array);
6939
6940 if (TREE_CODE (init) == CONSTRUCTOR)
6941 {
6942 tree elem;
6943
6944 for (elem = CONSTRUCTOR_ELTS (init);
6945 (elem
6946 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6947 elem = TREE_CHAIN (elem))
6948 ;
6949
6950 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6951 return expand_expr (fold (TREE_VALUE (elem)), target,
6952 tmode, ro_modifier);
6953 }
6954 else if (TREE_CODE (init) == STRING_CST
6955 && 0 > compare_tree_int (index,
6956 TREE_STRING_LENGTH (init)))
6957 {
6958 tree type = TREE_TYPE (TREE_TYPE (init));
6959 enum machine_mode mode = TYPE_MODE (type);
6960
6961 if (GET_MODE_CLASS (mode) == MODE_INT
6962 && GET_MODE_SIZE (mode) == 1)
6963 return (GEN_INT
6964 (TREE_STRING_POINTER
6965 (init)[TREE_INT_CST_LOW (index)]));
6966 }
6967 }
6968 }
6969 }
6970 /* Fall through. */
6971
6972 case COMPONENT_REF:
6973 case BIT_FIELD_REF:
6974 case ARRAY_RANGE_REF:
6975 /* If the operand is a CONSTRUCTOR, we can just extract the
6976 appropriate field if it is present. Don't do this if we have
6977 already written the data since we want to refer to that copy
6978 and varasm.c assumes that's what we'll do. */
6979 if (code == COMPONENT_REF
6980 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6981 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6982 {
6983 tree elt;
6984
6985 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6986 elt = TREE_CHAIN (elt))
6987 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6988 /* We can normally use the value of the field in the
6989 CONSTRUCTOR. However, if this is a bitfield in
6990 an integral mode that we can fit in a HOST_WIDE_INT,
6991 we must mask only the number of bits in the bitfield,
6992 since this is done implicitly by the constructor. If
6993 the bitfield does not meet either of those conditions,
6994 we can't do this optimization. */
6995 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6996 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6997 == MODE_INT)
6998 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6999 <= HOST_BITS_PER_WIDE_INT))))
7000 {
7001 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7002 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7003 {
7004 HOST_WIDE_INT bitsize
7005 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7006
7007 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7008 {
7009 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7010 op0 = expand_and (op0, op1, target);
7011 }
7012 else
7013 {
7014 enum machine_mode imode
7015 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7016 tree count
7017 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7018 0);
7019
7020 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7021 target, 0);
7022 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7023 target, 0);
7024 }
7025 }
7026
7027 return op0;
7028 }
7029 }
7030
7031 {
7032 enum machine_mode mode1;
7033 HOST_WIDE_INT bitsize, bitpos;
7034 tree offset;
7035 int volatilep = 0;
7036 unsigned int alignment;
7037 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7038 &mode1, &unsignedp, &volatilep,
7039 &alignment);
7040 rtx orig_op0;
7041
7042 /* If we got back the original object, something is wrong. Perhaps
7043 we are evaluating an expression too early. In any event, don't
7044 infinitely recurse. */
7045 if (tem == exp)
7046 abort ();
7047
7048 /* If TEM's type is a union of variable size, pass TARGET to the inner
7049 computation, since it will need a temporary and TARGET is known
7050 to be suitable. This occurs in unchecked conversion in Ada. */
7051
7052 orig_op0 = op0
7053 = expand_expr (tem,
7054 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7055 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7056 != INTEGER_CST)
7057 ? target : NULL_RTX),
7058 VOIDmode,
7059 (modifier == EXPAND_INITIALIZER
7060 || modifier == EXPAND_CONST_ADDRESS)
7061 ? modifier : EXPAND_NORMAL);
7062
7063 /* If this is a constant, put it into a register if it is a
 7064 	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
7065 if (CONSTANT_P (op0))
7066 {
7067 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7068 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7069 && offset == 0)
7070 op0 = force_reg (mode, op0);
7071 else
7072 op0 = validize_mem (force_const_mem (mode, op0));
7073 }
7074
7075 if (offset != 0)
7076 {
7077 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7078
7079 /* If this object is in a register, put it into memory.
7080 This case can't occur in C, but can in Ada if we have
7081 unchecked conversion of an expression from a scalar type to
7082 an array or record type. */
7083 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7084 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7085 {
7086 /* If the operand is a SAVE_EXPR, we can deal with this by
7087 forcing the SAVE_EXPR into memory. */
7088 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7089 {
7090 put_var_into_stack (TREE_OPERAND (exp, 0));
7091 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7092 }
7093 else
7094 {
7095 tree nt
7096 = build_qualified_type (TREE_TYPE (tem),
7097 (TYPE_QUALS (TREE_TYPE (tem))
7098 | TYPE_QUAL_CONST));
7099 rtx memloc = assign_temp (nt, 1, 1, 1);
7100
7101 mark_temp_addr_taken (memloc);
7102 emit_move_insn (memloc, op0);
7103 op0 = memloc;
7104 }
7105 }
7106
7107 if (GET_CODE (op0) != MEM)
7108 abort ();
7109
7110 if (GET_MODE (offset_rtx) != ptr_mode)
7111 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7112
7113 #ifdef POINTERS_EXTEND_UNSIGNED
7114 if (GET_MODE (offset_rtx) != Pmode)
7115 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7116 #endif
7117
 7118 	    /* A constant address in OP0 can have VOIDmode; we must not
 7119 	       call force_reg in that case.  */
7120 if (GET_CODE (op0) == MEM
7121 && GET_MODE (op0) == BLKmode
7122 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7123 && bitsize != 0
7124 && (bitpos % bitsize) == 0
7125 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7126 && alignment == GET_MODE_ALIGNMENT (mode1))
7127 {
7128 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7129
7130 if (GET_CODE (XEXP (temp, 0)) == REG)
7131 op0 = temp;
7132 else
7133 op0 = (replace_equiv_address
7134 (op0,
7135 force_reg (GET_MODE (XEXP (temp, 0)),
7136 XEXP (temp, 0))));
7137 bitpos = 0;
7138 }
7139
7140 op0 = offset_address (op0, offset_rtx,
7141 highest_pow2_factor (offset));
7142 }
7143
7144 /* Don't forget about volatility even if this is a bitfield. */
7145 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7146 {
7147 if (op0 == orig_op0)
7148 op0 = copy_rtx (op0);
7149
7150 MEM_VOLATILE_P (op0) = 1;
7151 }
7152
7153 /* Check the access. */
7154 if (cfun != 0 && current_function_check_memory_usage
7155 && GET_CODE (op0) == MEM)
7156 {
7157 enum memory_use_mode memory_usage;
7158 memory_usage = get_memory_usage_from_modifier (modifier);
7159
7160 if (memory_usage != MEMORY_USE_DONT)
7161 {
7162 rtx to;
7163 int size;
7164
7165 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7166 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7167
 7168 	      /* Check the access rights of the pointer.  */
7169 in_check_memory_usage = 1;
7170 if (size > BITS_PER_UNIT)
7171 emit_library_call (chkr_check_addr_libfunc,
7172 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7173 Pmode, GEN_INT (size / BITS_PER_UNIT),
7174 TYPE_MODE (sizetype),
7175 GEN_INT (memory_usage),
7176 TYPE_MODE (integer_type_node));
7177 in_check_memory_usage = 0;
7178 }
7179 }
7180
7181 /* In cases where an aligned union has an unaligned object
7182 as a field, we might be extracting a BLKmode value from
7183 an integer-mode (e.g., SImode) object. Handle this case
7184 by doing the extract into an object as wide as the field
7185 (which we know to be the width of a basic mode), then
7186 storing into memory, and changing the mode to BLKmode. */
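	   For instance (a sketch): a BLKmode field living inside an SImode
	   object is pulled out with extract_bit_field into an SImode value,
	   stored to a stack temporary, and that temporary's MEM is then
	   switched to BLKmode, as the code below does.  */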
7187 if (mode1 == VOIDmode
7188 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7189 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7190 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7191 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7192 && modifier != EXPAND_CONST_ADDRESS
7193 && modifier != EXPAND_INITIALIZER)
7194 /* If the field isn't aligned enough to fetch as a memref,
7195 fetch it as a bit field. */
7196 || (mode1 != BLKmode
7197 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7198 && ((TYPE_ALIGN (TREE_TYPE (tem))
7199 < GET_MODE_ALIGNMENT (mode))
7200 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7201 /* If the type and the field are a constant size and the
 7202 	       size of the type isn't the same as that of the bitfield,
7203 we must use bitfield operations. */
7204 || (bitsize >= 0
7205 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7206 == INTEGER_CST)
7207 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7208 bitsize))
7209 || (mode == BLKmode
7210 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7211 && (TYPE_ALIGN (type) > alignment
7212 || bitpos % TYPE_ALIGN (type) != 0)))
7213 {
7214 enum machine_mode ext_mode = mode;
7215
7216 if (ext_mode == BLKmode
7217 && ! (target != 0 && GET_CODE (op0) == MEM
7218 && GET_CODE (target) == MEM
7219 && bitpos % BITS_PER_UNIT == 0))
7220 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7221
7222 if (ext_mode == BLKmode)
7223 {
7224 /* In this case, BITPOS must start at a byte boundary and
7225 TARGET, if specified, must be a MEM. */
7226 if (GET_CODE (op0) != MEM
7227 || (target != 0 && GET_CODE (target) != MEM)
7228 || bitpos % BITS_PER_UNIT != 0)
7229 abort ();
7230
7231 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7232 if (target == 0)
7233 target = assign_temp (type, 0, 1, 1);
7234
7235 emit_block_move (target, op0,
7236 bitsize == -1 ? expr_size (exp)
7237 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7238 / BITS_PER_UNIT));
7239
7240 return target;
7241 }
7242
7243 op0 = validize_mem (op0);
7244
7245 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7246 mark_reg_pointer (XEXP (op0, 0), alignment);
7247
7248 op0 = extract_bit_field (op0, bitsize, bitpos,
7249 unsignedp, target, ext_mode, ext_mode,
7250 alignment,
7251 int_size_in_bytes (TREE_TYPE (tem)));
7252
7253 /* If the result is a record type and BITSIZE is narrower than
7254 the mode of OP0, an integral mode, and this is a big endian
7255 machine, we must put the field into the high-order bits. */
7256 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7257 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7258 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7259 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7260 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7261 - bitsize),
7262 op0, 1);
7263
7264 if (mode == BLKmode)
7265 {
7266 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7267 TYPE_QUAL_CONST);
7268 rtx new = assign_temp (nt, 0, 1, 1);
7269
7270 emit_move_insn (new, op0);
7271 op0 = copy_rtx (new);
7272 PUT_MODE (op0, BLKmode);
7273 }
7274
7275 return op0;
7276 }
7277
7278 /* If the result is BLKmode, use that to access the object
7279 now as well. */
7280 if (mode == BLKmode)
7281 mode1 = BLKmode;
7282
7283 /* Get a reference to just this component. */
7284 if (modifier == EXPAND_CONST_ADDRESS
7285 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7286 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7287 else
7288 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7289
7290 if (op0 == orig_op0)
7291 op0 = copy_rtx (op0);
7292
7293 set_mem_attributes (op0, exp, 0);
7294 if (GET_CODE (XEXP (op0, 0)) == REG)
7295 mark_reg_pointer (XEXP (op0, 0), alignment);
7296
7297 MEM_VOLATILE_P (op0) |= volatilep;
7298 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7299 || modifier == EXPAND_CONST_ADDRESS
7300 || modifier == EXPAND_INITIALIZER)
7301 return op0;
7302 else if (target == 0)
7303 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7304
7305 convert_move (target, op0, unsignedp);
7306 return target;
7307 }
7308
7309 case VTABLE_REF:
7310 {
7311 rtx insn, before = get_last_insn (), vtbl_ref;
7312
7313 /* Evaluate the interior expression. */
7314 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7315 tmode, modifier);
7316
7317 /* Get or create an instruction off which to hang a note. */
7318 if (REG_P (subtarget))
7319 {
7320 target = subtarget;
7321 insn = get_last_insn ();
7322 if (insn == before)
7323 abort ();
7324 if (! INSN_P (insn))
7325 insn = prev_nonnote_insn (insn);
7326 }
7327 else
7328 {
7329 target = gen_reg_rtx (GET_MODE (subtarget));
7330 insn = emit_move_insn (target, subtarget);
7331 }
7332
7333 /* Collect the data for the note. */
7334 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7335 vtbl_ref = plus_constant (vtbl_ref,
7336 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7337 /* Discard the initial CONST that was added. */
7338 vtbl_ref = XEXP (vtbl_ref, 0);
7339
7340 REG_NOTES (insn)
7341 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7342
7343 return target;
7344 }
7345
7346 /* Intended for a reference to a buffer of a file-object in Pascal.
7347 But it's not certain that a special tree code will really be
7348 necessary for these. INDIRECT_REF might work for them. */
7349 case BUFFER_REF:
7350 abort ();
7351
7352 case IN_EXPR:
7353 {
7354 /* Pascal set IN expression.
7355
7356 Algorithm:
7357 rlo = set_low - (set_low%bits_per_word);
7358 the_word = set [ (index - rlo)/bits_per_word ];
7359 bit_index = index % bits_per_word;
7360 bitmask = 1 << bit_index;
7361 return !!(the_word & bitmask); */
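	/* Descriptive note: the expansion below actually works in units of
	   BITS_PER_UNIT, addressing the set through byte_mode memory
	   references, rather than in whole words as the sketch above says.  */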
7362
7363 tree set = TREE_OPERAND (exp, 0);
7364 tree index = TREE_OPERAND (exp, 1);
7365 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7366 tree set_type = TREE_TYPE (set);
7367 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7368 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7369 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7370 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7371 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7372 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7373 rtx setaddr = XEXP (setval, 0);
7374 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7375 rtx rlow;
7376 rtx diff, quo, rem, addr, bit, result;
7377
7378 /* If domain is empty, answer is no. Likewise if index is constant
7379 and out of bounds. */
7380 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7381 && TREE_CODE (set_low_bound) == INTEGER_CST
7382 && tree_int_cst_lt (set_high_bound, set_low_bound))
7383 || (TREE_CODE (index) == INTEGER_CST
7384 && TREE_CODE (set_low_bound) == INTEGER_CST
7385 && tree_int_cst_lt (index, set_low_bound))
7386 || (TREE_CODE (set_high_bound) == INTEGER_CST
7387 && TREE_CODE (index) == INTEGER_CST
7388 && tree_int_cst_lt (set_high_bound, index))))
7389 return const0_rtx;
7390
7391 if (target == 0)
7392 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7393
7394 /* If we get here, we have to generate the code for both cases
7395 (in range and out of range). */
7396
7397 op0 = gen_label_rtx ();
7398 op1 = gen_label_rtx ();
7399
7400 if (! (GET_CODE (index_val) == CONST_INT
7401 && GET_CODE (lo_r) == CONST_INT))
7402 {
7403 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7404 GET_MODE (index_val), iunsignedp, 0, op1);
7405 }
7406
7407 if (! (GET_CODE (index_val) == CONST_INT
7408 && GET_CODE (hi_r) == CONST_INT))
7409 {
7410 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7411 GET_MODE (index_val), iunsignedp, 0, op1);
7412 }
7413
7414 /* Calculate the element number of bit zero in the first word
7415 of the set. */
7416 if (GET_CODE (lo_r) == CONST_INT)
7417 rlow = GEN_INT (INTVAL (lo_r)
7418 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7419 else
7420 rlow = expand_binop (index_mode, and_optab, lo_r,
7421 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7422 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7423
7424 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7425 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7426
7427 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7428 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7429 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7430 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7431
7432 addr = memory_address (byte_mode,
7433 expand_binop (index_mode, add_optab, diff,
7434 setaddr, NULL_RTX, iunsignedp,
7435 OPTAB_LIB_WIDEN));
7436
7437 /* Extract the bit we want to examine. */
7438 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7439 gen_rtx_MEM (byte_mode, addr),
7440 make_tree (TREE_TYPE (index), rem),
7441 NULL_RTX, 1);
7442 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7443 GET_MODE (target) == byte_mode ? target : 0,
7444 1, OPTAB_LIB_WIDEN);
7445
7446 if (result != target)
7447 convert_move (target, result, 1);
7448
7449 /* Output the code to handle the out-of-range case. */
7450 emit_jump (op0);
7451 emit_label (op1);
7452 emit_move_insn (target, const0_rtx);
7453 emit_label (op0);
7454 return target;
7455 }
7456
7457 case WITH_CLEANUP_EXPR:
7458 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7459 {
7460 WITH_CLEANUP_EXPR_RTL (exp)
7461 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7462 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7463
7464 /* That's it for this cleanup. */
7465 TREE_OPERAND (exp, 1) = 0;
7466 }
7467 return WITH_CLEANUP_EXPR_RTL (exp);
7468
7469 case CLEANUP_POINT_EXPR:
7470 {
7471 /* Start a new binding layer that will keep track of all cleanup
7472 actions to be performed. */
7473 expand_start_bindings (2);
7474
7475 target_temp_slot_level = temp_slot_level;
7476
7477 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7478 /* If we're going to use this value, load it up now. */
7479 if (! ignore)
7480 op0 = force_not_mem (op0);
7481 preserve_temp_slots (op0);
7482 expand_end_bindings (NULL_TREE, 0, 0);
7483 }
7484 return op0;
7485
7486 case CALL_EXPR:
7487 /* Check for a built-in function. */
7488 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7489 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7490 == FUNCTION_DECL)
7491 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7492 {
7493 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7494 == BUILT_IN_FRONTEND)
7495 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7496 else
7497 return expand_builtin (exp, target, subtarget, tmode, ignore);
7498 }
7499
7500 return expand_call (exp, target, ignore);
7501
7502 case NON_LVALUE_EXPR:
7503 case NOP_EXPR:
7504 case CONVERT_EXPR:
7505 case REFERENCE_EXPR:
7506 if (TREE_OPERAND (exp, 0) == error_mark_node)
7507 return const0_rtx;
7508
7509 if (TREE_CODE (type) == UNION_TYPE)
7510 {
7511 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7512
7513 /* If both input and output are BLKmode, this conversion
7514 isn't actually doing anything unless we need to make the
7515 alignment stricter. */
7516 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7517 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7518 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7519 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7520 modifier);
7521
7522 if (target == 0)
7523 target = assign_temp (type, 0, 1, 1);
7524
7525 if (GET_CODE (target) == MEM)
7526 /* Store data into beginning of memory target. */
7527 store_expr (TREE_OPERAND (exp, 0),
7528 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7529
7530 else if (GET_CODE (target) == REG)
7531 /* Store this field into a union of the proper type. */
7532 store_field (target,
7533 MIN ((int_size_in_bytes (TREE_TYPE
7534 (TREE_OPERAND (exp, 0)))
7535 * BITS_PER_UNIT),
7536 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7537 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7538 VOIDmode, 0, BITS_PER_UNIT,
7539 int_size_in_bytes (type), 0);
7540 else
7541 abort ();
7542
7543 /* Return the entire union. */
7544 return target;
7545 }
7546
7547 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7548 {
7549 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7550 ro_modifier);
7551
7552 /* If the signedness of the conversion differs and OP0 is
7553 a promoted SUBREG, clear that indication since we now
7554 have to do the proper extension. */
7555 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7556 && GET_CODE (op0) == SUBREG)
7557 SUBREG_PROMOTED_VAR_P (op0) = 0;
7558
7559 return op0;
7560 }
7561
7562 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7563 if (GET_MODE (op0) == mode)
7564 return op0;
7565
7566 /* If OP0 is a constant, just convert it into the proper mode. */
7567 if (CONSTANT_P (op0))
7568 return
7569 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7570 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7571
7572 if (modifier == EXPAND_INITIALIZER)
7573 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7574
7575 if (target == 0)
7576 return
7577 convert_to_mode (mode, op0,
7578 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7579 else
7580 convert_move (target, op0,
7581 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7582 return target;
7583
7584 case PLUS_EXPR:
7585 /* We come here from MINUS_EXPR when the second operand is a
7586 constant. */
7587 plus_expr:
7588 this_optab = ! unsignedp && flag_trapv
7589 && (GET_MODE_CLASS(mode) == MODE_INT)
7590 ? addv_optab : add_optab;
7591
7592 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7593 something else, make sure we add the register to the constant and
7594 then to the other thing. This case can occur during strength
7595 reduction and doing it this way will produce better code if the
7596 frame pointer or argument pointer is eliminated.
7597
7598 fold-const.c will ensure that the constant is always in the inner
7599 PLUS_EXPR, so the only case we need to do anything about is if
7600 sp, ap, or fp is our second argument, in which case we must swap
7601 the innermost first argument and our second argument. */
7602
7603 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7604 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7605 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7606 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7607 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7608 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7609 {
7610 tree t = TREE_OPERAND (exp, 1);
7611
7612 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7613 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7614 }
7615
7616 /* If the result is to be ptr_mode and we are adding an integer to
7617 something, we might be forming a constant. So try to use
7618 plus_constant. If it produces a sum and we can't accept it,
7619 use force_operand. This allows P = &ARR[const] to generate
7620 efficient code on machines where a SYMBOL_REF is not a valid
7621 address.
7622
7623 If this is an EXPAND_SUM call, always return the sum. */
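      /* Illustrative example (not from the original comment): for
	 P = &ARR[10] the sum is first formed with plus_constant as
	 (plus (symbol_ref ARR) (const_int N)), with N depending on the
	 element size; if that RTL is not acceptable here, force_operand
	 legitimizes it.  */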
7624 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7625 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7626 {
7627 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7628 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7629 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7630 {
7631 rtx constant_part;
7632
7633 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7634 EXPAND_SUM);
7635 /* Use immed_double_const to ensure that the constant is
7636 truncated according to the mode of OP1, then sign extended
7637 to a HOST_WIDE_INT. Using the constant directly can result
7638 in non-canonical RTL in a 64x32 cross compile. */
7639 constant_part
7640 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7641 (HOST_WIDE_INT) 0,
7642 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7643 op1 = plus_constant (op1, INTVAL (constant_part));
7644 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7645 op1 = force_operand (op1, target);
7646 return op1;
7647 }
7648
7649 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7650 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7651 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7652 {
7653 rtx constant_part;
7654
7655 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7656 EXPAND_SUM);
7657 if (! CONSTANT_P (op0))
7658 {
7659 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7660 VOIDmode, modifier);
7661 /* Don't go to both_summands if modifier
7662 says it's not right to return a PLUS. */
7663 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7664 goto binop2;
7665 goto both_summands;
7666 }
7667 /* Use immed_double_const to ensure that the constant is
7668 truncated according to the mode of OP1, then sign extended
7669 to a HOST_WIDE_INT. Using the constant directly can result
7670 in non-canonical RTL in a 64x32 cross compile. */
7671 constant_part
7672 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7673 (HOST_WIDE_INT) 0,
7674 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7675 op0 = plus_constant (op0, INTVAL (constant_part));
7676 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7677 op0 = force_operand (op0, target);
7678 return op0;
7679 }
7680 }
7681
7682 /* No sense saving up arithmetic to be done
7683 if it's all in the wrong mode to form part of an address.
7684 And force_operand won't know whether to sign-extend or
7685 zero-extend. */
7686 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7687 || mode != ptr_mode)
7688 goto binop;
7689
7690 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7691 subtarget = 0;
7692
7693 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7694 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7695
7696 both_summands:
7697 /* Make sure any term that's a sum with a constant comes last. */
7698 if (GET_CODE (op0) == PLUS
7699 && CONSTANT_P (XEXP (op0, 1)))
7700 {
7701 temp = op0;
7702 op0 = op1;
7703 op1 = temp;
7704 }
7705 /* If adding to a sum including a constant,
7706 associate it to put the constant outside. */
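      /* E.g. (illustrative): OP0 + (SYM + 8) is reassociated below into
	 (OP0 + SYM) + 8, so the constant ends up outermost.  */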
7707 if (GET_CODE (op1) == PLUS
7708 && CONSTANT_P (XEXP (op1, 1)))
7709 {
7710 rtx constant_term = const0_rtx;
7711
7712 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7713 if (temp != 0)
7714 op0 = temp;
7715 /* Ensure that MULT comes first if there is one. */
7716 else if (GET_CODE (op0) == MULT)
7717 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7718 else
7719 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7720
7721 /* Let's also eliminate constants from op0 if possible. */
7722 op0 = eliminate_constant_term (op0, &constant_term);
7723
7724 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7725 their sum should be a constant. Form it into OP1, since the
7726 result we want will then be OP0 + OP1. */
7727
7728 temp = simplify_binary_operation (PLUS, mode, constant_term,
7729 XEXP (op1, 1));
7730 if (temp != 0)
7731 op1 = temp;
7732 else
7733 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7734 }
7735
7736 /* Put a constant term last and put a multiplication first. */
7737 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7738 temp = op1, op1 = op0, op0 = temp;
7739
7740 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7741 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7742
7743 case MINUS_EXPR:
7744 /* For initializers, we are allowed to return a MINUS of two
7745 symbolic constants. Here we handle all cases when both operands
7746 are constant. */
7747 /* Handle difference of two symbolic constants,
7748 for the sake of an initializer. */
7749 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7750 && really_constant_p (TREE_OPERAND (exp, 0))
7751 && really_constant_p (TREE_OPERAND (exp, 1)))
7752 {
7753 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7754 VOIDmode, ro_modifier);
7755 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7756 VOIDmode, ro_modifier);
7757
7758 /* If the last operand is a CONST_INT, use plus_constant of
7759 the negated constant. Else make the MINUS. */
7760 if (GET_CODE (op1) == CONST_INT)
7761 return plus_constant (op0, - INTVAL (op1));
7762 else
7763 return gen_rtx_MINUS (mode, op0, op1);
7764 }
7765 /* Convert A - const to A + (-const). */
7766 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7767 {
7768 tree negated = fold (build1 (NEGATE_EXPR, type,
7769 TREE_OPERAND (exp, 1)));
7770
7771 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7772 /* If we can't negate the constant in TYPE, leave it alone and
7773 expand_binop will negate it for us. We used to try to do it
7774 here in the signed version of TYPE, but that doesn't work
7775 on POINTER_TYPEs. */;
7776 else
7777 {
7778 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7779 goto plus_expr;
7780 }
7781 }
7782 this_optab = ! unsignedp && flag_trapv
7783 && (GET_MODE_CLASS(mode) == MODE_INT)
7784 ? subv_optab : sub_optab;
7785 goto binop;
7786
7787 case MULT_EXPR:
7788 /* If first operand is constant, swap them.
7789 Thus the following special case checks need only
7790 check the second operand. */
7791 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7792 {
7793 tree t1 = TREE_OPERAND (exp, 0);
7794 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7795 TREE_OPERAND (exp, 1) = t1;
7796 }
7797
7798 /* Attempt to return something suitable for generating an
7799 indexed address, for machines that support that. */
7800
7801 if (modifier == EXPAND_SUM && mode == ptr_mode
7802 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7803 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7804 {
7805 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7806 EXPAND_SUM);
7807
7808 /* Apply distributive law if OP0 is x+c. */
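	  /* I.e. (x + c) * d is returned as (plus (mult x d) (const_int c*d)),
	     keeping the result in address-arithmetic form (an illustrative
	     restatement of the code below).  */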
7809 if (GET_CODE (op0) == PLUS
7810 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7811 return
7812 gen_rtx_PLUS
7813 (mode,
7814 gen_rtx_MULT
7815 (mode, XEXP (op0, 0),
7816 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7817 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7818 * INTVAL (XEXP (op0, 1))));
7819
7820 if (GET_CODE (op0) != REG)
7821 op0 = force_operand (op0, NULL_RTX);
7822 if (GET_CODE (op0) != REG)
7823 op0 = copy_to_mode_reg (mode, op0);
7824
7825 return
7826 gen_rtx_MULT (mode, op0,
7827 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7828 }
7829
7830 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7831 subtarget = 0;
7832
7833 /* Check for multiplying things that have been extended
7834 from a narrower type. If this machine supports multiplying
7835 in that narrower type with a result in the desired type,
7836 do it that way, and avoid the explicit type-conversion. */
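      /* For example (a sketch): (int) (short) a * (int) (short) b can use a
	 HImode->SImode widening multiply via smul_widen_optab instead of two
	 conversions plus a full SImode multiply, on a typical target where
	 short is HImode, int is SImode, and such an instruction exists.  */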
7837 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7838 && TREE_CODE (type) == INTEGER_TYPE
7839 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7840 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7841 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7842 && int_fits_type_p (TREE_OPERAND (exp, 1),
7843 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7844 /* Don't use a widening multiply if a shift will do. */
7845 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7846 > HOST_BITS_PER_WIDE_INT)
7847 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7848 ||
7849 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7850 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7851 ==
7852 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7853 /* If both operands are extended, they must either both
7854 be zero-extended or both be sign-extended. */
7855 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7856 ==
7857 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7858 {
7859 enum machine_mode innermode
7860 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7861 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7862 ? smul_widen_optab : umul_widen_optab);
7863 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7864 ? umul_widen_optab : smul_widen_optab);
7865 if (mode == GET_MODE_WIDER_MODE (innermode))
7866 {
7867 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7868 {
7869 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7870 NULL_RTX, VOIDmode, 0);
7871 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7872 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7873 VOIDmode, 0);
7874 else
7875 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7876 NULL_RTX, VOIDmode, 0);
7877 goto binop2;
7878 }
7879 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7880 && innermode == word_mode)
7881 {
7882 rtx htem;
7883 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7884 NULL_RTX, VOIDmode, 0);
7885 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7886 op1 = convert_modes (innermode, mode,
7887 expand_expr (TREE_OPERAND (exp, 1),
7888 NULL_RTX, VOIDmode, 0),
7889 unsignedp);
7890 else
7891 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7892 NULL_RTX, VOIDmode, 0);
7893 temp = expand_binop (mode, other_optab, op0, op1, target,
7894 unsignedp, OPTAB_LIB_WIDEN);
7895 htem = expand_mult_highpart_adjust (innermode,
7896 gen_highpart (innermode, temp),
7897 op0, op1,
7898 gen_highpart (innermode, temp),
7899 unsignedp);
7900 emit_move_insn (gen_highpart (innermode, temp), htem);
7901 return temp;
7902 }
7903 }
7904 }
7905 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7906 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7907 return expand_mult (mode, op0, op1, target, unsignedp);
7908
7909 case TRUNC_DIV_EXPR:
7910 case FLOOR_DIV_EXPR:
7911 case CEIL_DIV_EXPR:
7912 case ROUND_DIV_EXPR:
7913 case EXACT_DIV_EXPR:
7914 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7915 subtarget = 0;
 7916 	      /* Possible optimization: compute the dividend with EXPAND_SUM;
 7917 	         then, if the divisor is constant, we can optimize the case
 7918 	         where some terms of the dividend have coefficients divisible by it.  */
7919 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7920 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7921 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7922
7923 case RDIV_EXPR:
 7924 	      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
 7925 	         saving an expensive divide.  If not, combine will rebuild the original
7926 computation. */
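      /* E.g. (illustrative): with several divisions by the same B, each
	 A_i/B becomes A_i * (1/B) and CSE may share a single reciprocal;
	 this rewrite is only done under -funsafe-math-optimizations.  */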
7927 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7928 && !real_onep (TREE_OPERAND (exp, 0)))
7929 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7930 build (RDIV_EXPR, type,
7931 build_real (type, dconst1),
7932 TREE_OPERAND (exp, 1))),
7933 target, tmode, unsignedp);
7934 this_optab = sdiv_optab;
7935 goto binop;
7936
7937 case TRUNC_MOD_EXPR:
7938 case FLOOR_MOD_EXPR:
7939 case CEIL_MOD_EXPR:
7940 case ROUND_MOD_EXPR:
7941 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7942 subtarget = 0;
7943 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7944 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7945 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7946
7947 case FIX_ROUND_EXPR:
7948 case FIX_FLOOR_EXPR:
7949 case FIX_CEIL_EXPR:
7950 abort (); /* Not used for C. */
7951
7952 case FIX_TRUNC_EXPR:
7953 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7954 if (target == 0)
7955 target = gen_reg_rtx (mode);
7956 expand_fix (target, op0, unsignedp);
7957 return target;
7958
7959 case FLOAT_EXPR:
7960 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7961 if (target == 0)
7962 target = gen_reg_rtx (mode);
7963 /* expand_float can't figure out what to do if FROM has VOIDmode.
7964 So give it the correct mode. With -O, cse will optimize this. */
7965 if (GET_MODE (op0) == VOIDmode)
7966 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7967 op0);
7968 expand_float (target, op0,
7969 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7970 return target;
7971
7972 case NEGATE_EXPR:
7973 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7974 temp = expand_unop (mode,
7975 ! unsignedp && flag_trapv
7976 && (GET_MODE_CLASS(mode) == MODE_INT)
7977 ? negv_optab : neg_optab, op0, target, 0);
7978 if (temp == 0)
7979 abort ();
7980 return temp;
7981
7982 case ABS_EXPR:
7983 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7984
7985 /* Handle complex values specially. */
7986 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7987 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7988 return expand_complex_abs (mode, op0, target, unsignedp);
7989
7990 /* Unsigned abs is simply the operand. Testing here means we don't
7991 risk generating incorrect code below. */
7992 if (TREE_UNSIGNED (type))
7993 return op0;
7994
7995 return expand_abs (mode, op0, target, unsignedp,
7996 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7997
7998 case MAX_EXPR:
7999 case MIN_EXPR:
8000 target = original_target;
8001 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8002 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8003 || GET_MODE (target) != mode
8004 || (GET_CODE (target) == REG
8005 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8006 target = gen_reg_rtx (mode);
8007 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8008 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8009
8010 /* First try to do it with a special MIN or MAX instruction.
8011 If that does not win, use a conditional jump to select the proper
8012 value. */
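      /* The fallback below is essentially: target = op0; if (target >= op1)
	 goto done; target = op1; done:  -- with <= instead of >= for
	 MIN_EXPR (a descriptive sketch of the code that follows).  */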
8013 this_optab = (TREE_UNSIGNED (type)
8014 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8015 : (code == MIN_EXPR ? smin_optab : smax_optab));
8016
8017 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8018 OPTAB_WIDEN);
8019 if (temp != 0)
8020 return temp;
8021
8022 /* At this point, a MEM target is no longer useful; we will get better
8023 code without it. */
8024
8025 if (GET_CODE (target) == MEM)
8026 target = gen_reg_rtx (mode);
8027
8028 if (target != op0)
8029 emit_move_insn (target, op0);
8030
8031 op0 = gen_label_rtx ();
8032
8033 /* If this mode is an integer too wide to compare properly,
8034 compare word by word. Rely on cse to optimize constant cases. */
8035 if (GET_MODE_CLASS (mode) == MODE_INT
8036 && ! can_compare_p (GE, mode, ccp_jump))
8037 {
8038 if (code == MAX_EXPR)
8039 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8040 target, op1, NULL_RTX, op0);
8041 else
8042 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8043 op1, target, NULL_RTX, op0);
8044 }
8045 else
8046 {
8047 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8048 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8049 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
8050 op0);
8051 }
8052 emit_move_insn (target, op1);
8053 emit_label (op0);
8054 return target;
8055
8056 case BIT_NOT_EXPR:
8057 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8058 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8059 if (temp == 0)
8060 abort ();
8061 return temp;
8062
8063 case FFS_EXPR:
8064 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8065 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8066 if (temp == 0)
8067 abort ();
8068 return temp;
8069
8070 /* ??? Can optimize bitwise operations with one arg constant.
8071 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8072 and (a bitwise1 b) bitwise2 b (etc)
 8073 	     but that is probably not worthwhile.  */
8074
8075 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8076 boolean values when we want in all cases to compute both of them. In
8077 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8078 as actual zero-or-1 values and then bitwise anding. In cases where
8079 there cannot be any side effects, better code would be made by
8080 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8081 how to recognize those cases. */
8082
8083 case TRUTH_AND_EXPR:
8084 case BIT_AND_EXPR:
8085 this_optab = and_optab;
8086 goto binop;
8087
8088 case TRUTH_OR_EXPR:
8089 case BIT_IOR_EXPR:
8090 this_optab = ior_optab;
8091 goto binop;
8092
8093 case TRUTH_XOR_EXPR:
8094 case BIT_XOR_EXPR:
8095 this_optab = xor_optab;
8096 goto binop;
8097
8098 case LSHIFT_EXPR:
8099 case RSHIFT_EXPR:
8100 case LROTATE_EXPR:
8101 case RROTATE_EXPR:
8102 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8103 subtarget = 0;
8104 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8105 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8106 unsignedp);
8107
8108 /* Could determine the answer when only additive constants differ. Also,
8109 the addition of one can be handled by changing the condition. */
8110 case LT_EXPR:
8111 case LE_EXPR:
8112 case GT_EXPR:
8113 case GE_EXPR:
8114 case EQ_EXPR:
8115 case NE_EXPR:
8116 case UNORDERED_EXPR:
8117 case ORDERED_EXPR:
8118 case UNLT_EXPR:
8119 case UNLE_EXPR:
8120 case UNGT_EXPR:
8121 case UNGE_EXPR:
8122 case UNEQ_EXPR:
8123 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8124 if (temp != 0)
8125 return temp;
8126
8127 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
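      /* The sequence emitted below is roughly: temp = foo; if (temp == 0)
	 goto L; temp = 1; L:  -- so TEMP ends up holding 0 or 1 (a
	 descriptive sketch).  */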
8128 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8129 && original_target
8130 && GET_CODE (original_target) == REG
8131 && (GET_MODE (original_target)
8132 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8133 {
8134 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8135 VOIDmode, 0);
8136
8137 if (temp != original_target)
8138 temp = copy_to_reg (temp);
8139
8140 op1 = gen_label_rtx ();
8141 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8142 GET_MODE (temp), unsignedp, 0, op1);
8143 emit_move_insn (temp, const1_rtx);
8144 emit_label (op1);
8145 return temp;
8146 }
8147
8148 /* If no set-flag instruction, must generate a conditional
8149 store into a temporary variable. Drop through
8150 and handle this like && and ||. */
8151
8152 case TRUTH_ANDIF_EXPR:
8153 case TRUTH_ORIF_EXPR:
8154 if (! ignore
8155 && (target == 0 || ! safe_from_p (target, exp, 1)
8156 /* Make sure we don't have a hard reg (such as function's return
8157 value) live across basic blocks, if not optimizing. */
8158 || (!optimize && GET_CODE (target) == REG
8159 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8160 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8161
8162 if (target)
8163 emit_clr_insn (target);
8164
8165 op1 = gen_label_rtx ();
8166 jumpifnot (exp, op1);
8167
8168 if (target)
8169 emit_0_to_1_insn (target);
8170
8171 emit_label (op1);
8172 return ignore ? const0_rtx : target;
8173
8174 case TRUTH_NOT_EXPR:
8175 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8176 /* The parser is careful to generate TRUTH_NOT_EXPR
8177 only with operands that are always zero or one. */
8178 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8179 target, 1, OPTAB_LIB_WIDEN);
8180 if (temp == 0)
8181 abort ();
8182 return temp;
8183
8184 case COMPOUND_EXPR:
8185 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8186 emit_queue ();
8187 return expand_expr (TREE_OPERAND (exp, 1),
8188 (ignore ? const0_rtx : target),
8189 VOIDmode, 0);
8190
8191 case COND_EXPR:
8192 /* If we would have a "singleton" (see below) were it not for a
8193 conversion in each arm, bring that conversion back out. */
8194 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8195 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8196 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8197 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8198 {
8199 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8200 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8201
8202 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8203 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8204 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8205 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8206 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8207 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8208 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8209 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8210 return expand_expr (build1 (NOP_EXPR, type,
8211 build (COND_EXPR, TREE_TYPE (iftrue),
8212 TREE_OPERAND (exp, 0),
8213 iftrue, iffalse)),
8214 target, tmode, modifier);
8215 }
8216
8217 {
8218 /* Note that COND_EXPRs whose type is a structure or union
8219 are required to be constructed to contain assignments of
8220 a temporary variable, so that we can evaluate them here
8221 for side effect only. If type is void, we must do likewise. */
8222
8223 /* If an arm of the branch requires a cleanup,
8224 only that cleanup is performed. */
8225
8226 tree singleton = 0;
8227 tree binary_op = 0, unary_op = 0;
8228
8229 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8230 convert it to our mode, if necessary. */
8231 if (integer_onep (TREE_OPERAND (exp, 1))
8232 && integer_zerop (TREE_OPERAND (exp, 2))
8233 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8234 {
8235 if (ignore)
8236 {
8237 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8238 ro_modifier);
8239 return const0_rtx;
8240 }
8241
8242 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8243 if (GET_MODE (op0) == mode)
8244 return op0;
8245
8246 if (target == 0)
8247 target = gen_reg_rtx (mode);
8248 convert_move (target, op0, unsignedp);
8249 return target;
8250 }
8251
8252 /* Check for X ? A + B : A. If we have this, we can copy A to the
8253 output and conditionally add B. Similarly for unary operations.
8254 Don't do this if X has side-effects because those side effects
8255 might affect A or B and the "?" operation is a sequence point in
8256 ANSI. (operand_equal_p tests for side effects.) */
8257
8258 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8259 && operand_equal_p (TREE_OPERAND (exp, 2),
8260 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8261 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8262 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8263 && operand_equal_p (TREE_OPERAND (exp, 1),
8264 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8265 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8266 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8267 && operand_equal_p (TREE_OPERAND (exp, 2),
8268 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8269 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8270 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8271 && operand_equal_p (TREE_OPERAND (exp, 1),
8272 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8273 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8274
8275 /* If we are not to produce a result, we have no target. Otherwise,
8276 if a target was specified use it; it will not be used as an
8277 intermediate target unless it is safe. If no target, use a
8278 temporary. */
8279
8280 if (ignore)
8281 temp = 0;
8282 else if (original_target
8283 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8284 || (singleton && GET_CODE (original_target) == REG
8285 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8286 && original_target == var_rtx (singleton)))
8287 && GET_MODE (original_target) == mode
8288 #ifdef HAVE_conditional_move
8289 && (! can_conditionally_move_p (mode)
8290 || GET_CODE (original_target) == REG
8291 || TREE_ADDRESSABLE (type))
8292 #endif
8293 && (GET_CODE (original_target) != MEM
8294 || TREE_ADDRESSABLE (type)))
8295 temp = original_target;
8296 else if (TREE_ADDRESSABLE (type))
8297 abort ();
8298 else
8299 temp = assign_temp (type, 0, 0, 1);
8300
8301 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8302 do the test of X as a store-flag operation, do this as
8303 A + ((X != 0) << log C). Similarly for other simple binary
8304 operators. Only do for C == 1 if BRANCH_COST is low. */
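	/* For instance (illustrative): X ? Y + 4 : Y can become
	   Y + ((X != 0) << 2), replacing the branch with a store-flag and
	   a shift.  */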
8305 if (temp && singleton && binary_op
8306 && (TREE_CODE (binary_op) == PLUS_EXPR
8307 || TREE_CODE (binary_op) == MINUS_EXPR
8308 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8309 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8310 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8311 : integer_onep (TREE_OPERAND (binary_op, 1)))
8312 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8313 {
8314 rtx result;
8315 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8316 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8317 ? addv_optab : add_optab)
8318 : TREE_CODE (binary_op) == MINUS_EXPR
8319 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8320 ? subv_optab : sub_optab)
8321 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8322 : xor_optab);
8323
8324 /* If we had X ? A : A + 1, do this as A + (X == 0).
8325
8326 We have to invert the truth value here and then put it
8327 back later if do_store_flag fails. We cannot simply copy
8328 TREE_OPERAND (exp, 0) to another variable and modify that
8329 because invert_truthvalue can modify the tree pointed to
8330 by its argument. */
8331 if (singleton == TREE_OPERAND (exp, 1))
8332 TREE_OPERAND (exp, 0)
8333 = invert_truthvalue (TREE_OPERAND (exp, 0));
8334
8335 result = do_store_flag (TREE_OPERAND (exp, 0),
8336 (safe_from_p (temp, singleton, 1)
8337 ? temp : NULL_RTX),
8338 mode, BRANCH_COST <= 1);
8339
8340 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8341 result = expand_shift (LSHIFT_EXPR, mode, result,
8342 build_int_2 (tree_log2
8343 (TREE_OPERAND
8344 (binary_op, 1)),
8345 0),
8346 (safe_from_p (temp, singleton, 1)
8347 ? temp : NULL_RTX), 0);
8348
8349 if (result)
8350 {
8351 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8352 return expand_binop (mode, boptab, op1, result, temp,
8353 unsignedp, OPTAB_LIB_WIDEN);
8354 }
8355 else if (singleton == TREE_OPERAND (exp, 1))
8356 TREE_OPERAND (exp, 0)
8357 = invert_truthvalue (TREE_OPERAND (exp, 0));
8358 }
8359
8360 do_pending_stack_adjust ();
8361 NO_DEFER_POP;
8362 op0 = gen_label_rtx ();
8363
8364 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8365 {
8366 if (temp != 0)
8367 {
8368 /* If the target conflicts with the other operand of the
8369 binary op, we can't use it. Also, we can't use the target
8370 if it is a hard register, because evaluating the condition
8371 might clobber it. */
8372 if ((binary_op
8373 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8374 || (GET_CODE (temp) == REG
8375 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8376 temp = gen_reg_rtx (mode);
8377 store_expr (singleton, temp, 0);
8378 }
8379 else
8380 expand_expr (singleton,
8381 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8382 if (singleton == TREE_OPERAND (exp, 1))
8383 jumpif (TREE_OPERAND (exp, 0), op0);
8384 else
8385 jumpifnot (TREE_OPERAND (exp, 0), op0);
8386
8387 start_cleanup_deferral ();
8388 if (binary_op && temp == 0)
8389 /* Just touch the other operand. */
8390 expand_expr (TREE_OPERAND (binary_op, 1),
8391 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8392 else if (binary_op)
8393 store_expr (build (TREE_CODE (binary_op), type,
8394 make_tree (type, temp),
8395 TREE_OPERAND (binary_op, 1)),
8396 temp, 0);
8397 else
8398 store_expr (build1 (TREE_CODE (unary_op), type,
8399 make_tree (type, temp)),
8400 temp, 0);
8401 op1 = op0;
8402 }
8403 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8404 comparison operator. If we have one of these cases, set the
8405 output to A, branch on A (cse will merge these two references),
8406 then set the output to FOO. */
8407 else if (temp
8408 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8409 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8410 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8411 TREE_OPERAND (exp, 1), 0)
8412 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8413 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8414 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8415 {
8416 if (GET_CODE (temp) == REG
8417 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8418 temp = gen_reg_rtx (mode);
8419 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8420 jumpif (TREE_OPERAND (exp, 0), op0);
8421
8422 start_cleanup_deferral ();
8423 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8424 op1 = op0;
8425 }
8426 else if (temp
8427 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8428 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8429 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8430 TREE_OPERAND (exp, 2), 0)
8431 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8432 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8433 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8434 {
8435 if (GET_CODE (temp) == REG
8436 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8437 temp = gen_reg_rtx (mode);
8438 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8439 jumpifnot (TREE_OPERAND (exp, 0), op0);
8440
8441 start_cleanup_deferral ();
8442 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8443 op1 = op0;
8444 }
8445 else
8446 {
8447 op1 = gen_label_rtx ();
8448 jumpifnot (TREE_OPERAND (exp, 0), op0);
8449
8450 start_cleanup_deferral ();
8451
8452 /* One branch of the cond can be void, if it never returns. For
 8453 	       example, A ? throw : E.  */
8454 if (temp != 0
8455 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8456 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8457 else
8458 expand_expr (TREE_OPERAND (exp, 1),
8459 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8460 end_cleanup_deferral ();
8461 emit_queue ();
8462 emit_jump_insn (gen_jump (op1));
8463 emit_barrier ();
8464 emit_label (op0);
8465 start_cleanup_deferral ();
8466 if (temp != 0
8467 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8468 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8469 else
8470 expand_expr (TREE_OPERAND (exp, 2),
8471 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8472 }
8473
8474 end_cleanup_deferral ();
8475
8476 emit_queue ();
8477 emit_label (op1);
8478 OK_DEFER_POP;
8479
8480 return temp;
8481 }
8482
8483 case TARGET_EXPR:
8484 {
8485 /* Something needs to be initialized, but we didn't know
8486 where that thing was when building the tree. For example,
8487 it could be the return value of a function, or a parameter
 8488 	   to a function which is laid out on the stack, or a temporary
8489 variable which must be passed by reference.
8490
8491 We guarantee that the expression will either be constructed
8492 or copied into our original target. */
8493
8494 tree slot = TREE_OPERAND (exp, 0);
8495 tree cleanups = NULL_TREE;
8496 tree exp1;
8497
8498 if (TREE_CODE (slot) != VAR_DECL)
8499 abort ();
8500
8501 if (! ignore)
8502 target = original_target;
8503
8504 /* Set this here so that if we get a target that refers to a
8505 register variable that's already been used, put_reg_into_stack
8506 knows that it should fix up those uses. */
8507 TREE_USED (slot) = 1;
8508
8509 if (target == 0)
8510 {
8511 if (DECL_RTL_SET_P (slot))
8512 {
8513 target = DECL_RTL (slot);
 8514 	        /* If we have already expanded the slot, don't do
 8515 	           it again.  (mrs)  */
8516 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8517 return target;
8518 }
8519 else
8520 {
8521 target = assign_temp (type, 2, 0, 1);
8522 /* All temp slots at this level must not conflict. */
8523 preserve_temp_slots (target);
8524 SET_DECL_RTL (slot, target);
8525 if (TREE_ADDRESSABLE (slot))
8526 put_var_into_stack (slot);
8527
8528 /* Since SLOT is not known to the called function
8529 to belong to its stack frame, we must build an explicit
8530 cleanup. This case occurs when we must build up a reference
 8531 	           in order to pass it as an argument.  In this case,
8532 it is very likely that such a reference need not be
8533 built here. */
8534
8535 if (TREE_OPERAND (exp, 2) == 0)
8536 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8537 cleanups = TREE_OPERAND (exp, 2);
8538 }
8539 }
8540 else
8541 {
 8542 	    /* This case does occur when expanding a parameter which
8543 needs to be constructed on the stack. The target
8544 is the actual stack address that we want to initialize.
8545 The function we call will perform the cleanup in this case. */
8546
8547 /* If we have already assigned it space, use that space,
 8548 	       not the target that we were passed in, as our target
8549 parameter is only a hint. */
8550 if (DECL_RTL_SET_P (slot))
8551 {
8552 target = DECL_RTL (slot);
 8553 	        /* If we have already expanded the slot, don't do
 8554 	           it again.  (mrs)  */
8555 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8556 return target;
8557 }
8558 else
8559 {
8560 SET_DECL_RTL (slot, target);
8561 /* If we must have an addressable slot, then make sure that
8562 the RTL that we just stored in slot is OK. */
8563 if (TREE_ADDRESSABLE (slot))
8564 put_var_into_stack (slot);
8565 }
8566 }
8567
8568 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8569 /* Mark it as expanded. */
8570 TREE_OPERAND (exp, 1) = NULL_TREE;
8571
8572 store_expr (exp1, target, 0);
8573
8574 expand_decl_cleanup (NULL_TREE, cleanups);
8575
8576 return target;
8577 }
8578
8579 case INIT_EXPR:
8580 {
8581 tree lhs = TREE_OPERAND (exp, 0);
8582 tree rhs = TREE_OPERAND (exp, 1);
8583
8584 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8585 return temp;
8586 }
8587
8588 case MODIFY_EXPR:
8589 {
8590 /* If lhs is complex, expand calls in rhs before computing it.
8591 That's so we don't compute a pointer and save it over a
8592 call. If lhs is simple, compute it first so we can give it
8593 as a target if the rhs is just a call. This avoids an
 8594 	   extra temp and copy, and prevents a partial subsumption
8595 which makes bad code. Actually we could treat
8596 component_ref's of vars like vars. */
8597
8598 tree lhs = TREE_OPERAND (exp, 0);
8599 tree rhs = TREE_OPERAND (exp, 1);
8600
8601 temp = 0;
8602
8603 /* Check for |= or &= of a bitfield of size one into another bitfield
8604 of size 1. In this case, (unless we need the result of the
8605 assignment) we can do this more efficiently with a
8606 test followed by an assignment, if necessary.
8607
8608 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8609 things change so we do, this code should be enhanced to
8610 support it. */
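      /* Concretely (a sketch): for one-bit fields, s.a |= s.b is emitted as
	 `if (s.b) s.a = 1;' and s.a &= s.b as `if (! s.b) s.a = 0;', which
	 is what the code below does.  */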
8611 if (ignore
8612 && TREE_CODE (lhs) == COMPONENT_REF
8613 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8614 || TREE_CODE (rhs) == BIT_AND_EXPR)
8615 && TREE_OPERAND (rhs, 0) == lhs
8616 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8617 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8618 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8619 {
8620 rtx label = gen_label_rtx ();
8621
8622 do_jump (TREE_OPERAND (rhs, 1),
8623 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8624 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8625 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8626 (TREE_CODE (rhs) == BIT_IOR_EXPR
8627 ? integer_one_node
8628 : integer_zero_node)),
8629 0, 0);
8630 do_pending_stack_adjust ();
8631 emit_label (label);
8632 return const0_rtx;
8633 }
8634
8635 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8636
8637 return temp;
8638 }
8639
8640 case RETURN_EXPR:
8641 if (!TREE_OPERAND (exp, 0))
8642 expand_null_return ();
8643 else
8644 expand_return (TREE_OPERAND (exp, 0));
8645 return const0_rtx;
8646
8647 case PREINCREMENT_EXPR:
8648 case PREDECREMENT_EXPR:
8649 return expand_increment (exp, 0, ignore);
8650
8651 case POSTINCREMENT_EXPR:
8652 case POSTDECREMENT_EXPR:
8653 /* Faster to treat as pre-increment if result is not used. */
8654 return expand_increment (exp, ! ignore, ignore);
8655
8656 case ADDR_EXPR:
8657 /* If nonzero, TEMP will be set to the address of something that might
8658 be a MEM corresponding to a stack slot. */
8659 temp = 0;
8660
8661 /* Are we taking the address of a nested function? */
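      /* If so, the address returned is that of a trampoline: a small stub
	 built at run time which loads the static chain and jumps to the
	 nested function, so the pointer can be called normally (descriptive
	 note).  */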
8662 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8663 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8664 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8665 && ! TREE_STATIC (exp))
8666 {
8667 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8668 op0 = force_operand (op0, target);
8669 }
8670 /* If we are taking the address of something erroneous, just
8671 return a zero. */
8672 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8673 return const0_rtx;
8674 /* If we are taking the address of a constant and are at the
8675 top level, we have to use output_constant_def since we can't
8676 call force_const_mem at top level. */
8677 else if (cfun == 0
8678 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8679 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8680 == 'c')))
8681 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8682 else
8683 {
8684 /* We make sure to pass const0_rtx down if we came in with
8685 ignore set, to avoid doing the cleanups twice. */
8686 op0 = expand_expr (TREE_OPERAND (exp, 0),
8687 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8688 (modifier == EXPAND_INITIALIZER
8689 ? modifier : EXPAND_CONST_ADDRESS));
8690
8691 /* If we are going to ignore the result, OP0 will have been set
8692 to const0_rtx, so just return it. Don't get confused and
8693 think we are taking the address of the constant. */
8694 if (ignore)
8695 return op0;
8696
8697 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8698 clever and returns a REG when given a MEM. */
8699 op0 = protect_from_queue (op0, 1);
8700
8701 /* We would like the object in memory. If it is a constant, we can
8702 have it be statically allocated into memory. For a non-constant,
8703 we need to allocate some memory and store the value into it. */
8704
8705 if (CONSTANT_P (op0))
8706 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8707 op0);
8708 else if (GET_CODE (op0) == MEM)
8709 {
8710 mark_temp_addr_taken (op0);
8711 temp = XEXP (op0, 0);
8712 }
8713
8714 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8715 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8716 || GET_CODE (op0) == PARALLEL)
8717 {
8718 /* If this object is in a register, it must not
8719 be BLKmode. */
8720 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8721 tree nt = build_qualified_type (inner_type,
8722 (TYPE_QUALS (inner_type)
8723 | TYPE_QUAL_CONST));
8724 rtx memloc = assign_temp (nt, 1, 1, 1);
8725
8726 mark_temp_addr_taken (memloc);
8727 if (GET_CODE (op0) == PARALLEL)
8728 /* Handle calls that pass values in multiple non-contiguous
8729 locations. The Irix 6 ABI has examples of this. */
8730 emit_group_store (memloc, op0,
8731 int_size_in_bytes (inner_type),
8732 TYPE_ALIGN (inner_type));
8733 else
8734 emit_move_insn (memloc, op0);
8735 op0 = memloc;
8736 }
8737
8738 if (GET_CODE (op0) != MEM)
8739 abort ();
8740
8741 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8742 {
8743 temp = XEXP (op0, 0);
8744 #ifdef POINTERS_EXTEND_UNSIGNED
8745 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8746 && mode == ptr_mode)
8747 temp = convert_memory_address (ptr_mode, temp);
8748 #endif
8749 return temp;
8750 }
8751
8752 op0 = force_operand (XEXP (op0, 0), target);
8753 }
8754
8755 if (flag_force_addr && GET_CODE (op0) != REG)
8756 op0 = force_reg (Pmode, op0);
8757
8758 if (GET_CODE (op0) == REG
8759 && ! REG_USERVAR_P (op0))
8760 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8761
8762 /* If we might have had a temp slot, add an equivalent address
8763 for it. */
8764 if (temp != 0)
8765 update_temp_slot_address (temp, op0);
8766
8767 #ifdef POINTERS_EXTEND_UNSIGNED
8768 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8769 && mode == ptr_mode)
8770 op0 = convert_memory_address (ptr_mode, op0);
8771 #endif
8772
8773 return op0;
8774
8775 case ENTRY_VALUE_EXPR:
8776 abort ();
8777
8778 /* COMPLEX type for Extended Pascal & Fortran */
8779 case COMPLEX_EXPR:
8780 {
8781 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8782 rtx insns;
8783
8784 /* Get the rtx code of the operands. */
8785 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8786 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8787
8788 if (! target)
8789 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8790
8791 start_sequence ();
8792
8793 /* Move the real (op0) and imaginary (op1) parts to their location. */
8794 emit_move_insn (gen_realpart (mode, target), op0);
8795 emit_move_insn (gen_imagpart (mode, target), op1);
8796
8797 insns = get_insns ();
8798 end_sequence ();
8799
8800 /* Complex construction should appear as a single unit. */
8801 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8802 each with a separate pseudo as destination.
8803 It's not correct for flow to treat them as a unit. */
8804 if (GET_CODE (target) != CONCAT)
8805 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8806 else
8807 emit_insns (insns);
8808
8809 return target;
8810 }
8811
8812 case REALPART_EXPR:
8813 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8814 return gen_realpart (mode, op0);
8815
8816 case IMAGPART_EXPR:
8817 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8818 return gen_imagpart (mode, op0);
8819
8820 case CONJ_EXPR:
8821 {
8822 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8823 rtx imag_t;
8824 rtx insns;
8825
8826 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8827
8828 if (! target)
8829 target = gen_reg_rtx (mode);
8830
8831 start_sequence ();
8832
8833 /* Store the realpart and the negated imagpart to target. */
8834 emit_move_insn (gen_realpart (partmode, target),
8835 gen_realpart (partmode, op0));
8836
8837 imag_t = gen_imagpart (partmode, target);
8838 temp = expand_unop (partmode,
8839 ! unsignedp && flag_trapv
8840 && (GET_MODE_CLASS(partmode) == MODE_INT)
8841 ? negv_optab : neg_optab,
8842 gen_imagpart (partmode, op0), imag_t, 0);
8843 if (temp != imag_t)
8844 emit_move_insn (imag_t, temp);
8845
8846 insns = get_insns ();
8847 end_sequence ();
8848
8849 /* Conjugate should appear as a single unit.
8850 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8851 each with a separate pseudo as destination.
8852 It's not correct for flow to treat them as a unit. */
8853 if (GET_CODE (target) != CONCAT)
8854 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8855 else
8856 emit_insns (insns);
8857
8858 return target;
8859 }
8860
8861 case TRY_CATCH_EXPR:
8862 {
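/* Expand operand 0 inside an exception region whose cleanup is the
handler in operand 1; the value of the body is the value of the
whole expression. */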
8863 tree handler = TREE_OPERAND (exp, 1);
8864
8865 expand_eh_region_start ();
8866
8867 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8868
8869 expand_eh_region_end_cleanup (handler);
8870
8871 return op0;
8872 }
8873
8874 case TRY_FINALLY_EXPR:
8875 {
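/* The finally block is expanded only once, at FINALLY_LABEL, and is
entered like a subroutine: the cleanup registered below loads a
return address into RETURN_LINK and jumps to FINALLY_LABEL, and the
finally code returns through an indirect jump on RETURN_LINK.
DONE_LABEL lets the normal fall-through path skip over that code. */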
8876 tree try_block = TREE_OPERAND (exp, 0);
8877 tree finally_block = TREE_OPERAND (exp, 1);
8878 rtx finally_label = gen_label_rtx ();
8879 rtx done_label = gen_label_rtx ();
8880 rtx return_link = gen_reg_rtx (Pmode);
8881 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8882 (tree) finally_label, (tree) return_link);
8883 TREE_SIDE_EFFECTS (cleanup) = 1;
8884
8885 /* Start a new binding layer that will keep track of all cleanup
8886 actions to be performed. */
8887 expand_start_bindings (2);
8888
8889 target_temp_slot_level = temp_slot_level;
8890
8891 expand_decl_cleanup (NULL_TREE, cleanup);
8892 op0 = expand_expr (try_block, target, tmode, modifier);
8893
8894 preserve_temp_slots (op0);
8895 expand_end_bindings (NULL_TREE, 0, 0);
8896 emit_jump (done_label);
8897 emit_label (finally_label);
8898 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8899 emit_indirect_jump (return_link);
8900 emit_label (done_label);
8901 return op0;
8902 }
8903
8904 case GOTO_SUBROUTINE_EXPR:
8905 {
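/* Operand 0 is the label of the subroutine to enter and operand 1
is the register that receives the return address. Load the address
of a fresh label into that register, jump to the subroutine, and
emit the label here so the subroutine's indirect jump returns to
this point. */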
8906 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8907 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8908 rtx return_address = gen_label_rtx ();
8909 emit_move_insn (return_link,
8910 gen_rtx_LABEL_REF (Pmode, return_address));
8911 emit_jump (subr);
8912 emit_label (return_address);
8913 return const0_rtx;
8914 }
8915
8916 case VA_ARG_EXPR:
8917 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8918
8919 case EXC_PTR_EXPR:
8920 return get_exception_pointer (cfun);
8921
8922 case FDESC_EXPR:
8923 /* Function descriptors are not valid except as
8924 initialization constants, and should not be expanded. */
8925 abort ();
8926
8927 default:
8928 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8929 }
8930
8931 /* Here to do an ordinary binary operator, generating an instruction
8932 from the optab already placed in `this_optab'. */
8933 binop:
8934 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8935 subtarget = 0;
8936 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8937 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8938 binop2:
8939 temp = expand_binop (mode, this_optab, op0, op1, target,
8940 unsignedp, OPTAB_LIB_WIDEN);
8941 if (temp == 0)
8942 abort ();
8943 return temp;
8944 }
8945 \f
8946 /* Similar to expand_expr, except that we don't specify a target, target
8947 mode, or modifier and we return the alignment of the inner type. This is
8948 used in cases where it is not necessary to align the result to the
8949 alignment of its type as long as we know the alignment of the result, for
8950 example for comparisons of BLKmode values. */
8951
8952 static rtx
8953 expand_expr_unaligned (exp, palign)
8954 tree exp;
8955 unsigned int *palign;
8956 {
8957 rtx op0;
8958 tree type = TREE_TYPE (exp);
8959 enum machine_mode mode = TYPE_MODE (type);
8960
8961 /* Default the alignment we return to that of the type. */
8962 *palign = TYPE_ALIGN (type);
8963
8964 /* The only case in which we do anything special is when the resulting mode
8965 is BLKmode. */
8966 if (mode != BLKmode)
8967 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8968
8969 switch (TREE_CODE (exp))
8970 {
8971 case CONVERT_EXPR:
8972 case NOP_EXPR:
8973 case NON_LVALUE_EXPR:
8974 /* Conversions between BLKmode values don't change the underlying
8975 alignment or value. */
8976 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8977 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8978 break;
8979
8980 case ARRAY_REF:
8981 /* Much of the code for this case is copied directly from expand_expr.
8982 We need to duplicate it here because we will do something different
8983 in the fall-through case, so we need to handle the same exceptions
8984 it does. */
8985 {
8986 tree array = TREE_OPERAND (exp, 0);
8987 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8988 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8989 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8990 HOST_WIDE_INT i;
8991
8992 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8993 abort ();
8994
8995 /* Optimize the special case of a zero lower bound.
8996
8997 We convert the low_bound to sizetype to avoid some problems
8998 with constant folding. (E.g. suppose the lower bound is 1,
8999 and its mode is QI. Without the conversion, (ARRAY
9000 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9001 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
9002
9003 if (! integer_zerop (low_bound))
9004 index = size_diffop (index, convert (sizetype, low_bound));
9005
9006 /* If this is a constant index into a constant array,
9007 just get the value from the array. Handle both the cases when
9008 we have an explicit constructor and when our operand is a variable
9009 that was declared const. */
9010
9011 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
9012 && host_integerp (index, 0)
9013 && 0 > compare_tree_int (index,
9014 list_length (CONSTRUCTOR_ELTS
9015 (TREE_OPERAND (exp, 0)))))
9016 {
9017 tree elem;
9018
9019 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
9020 i = tree_low_cst (index, 0);
9021 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
9022 ;
9023
9024 if (elem)
9025 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
9026 }
9027
9028 else if (optimize >= 1
9029 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9030 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9031 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
9032 {
9033 if (TREE_CODE (index) == INTEGER_CST)
9034 {
9035 tree init = DECL_INITIAL (array);
9036
9037 if (TREE_CODE (init) == CONSTRUCTOR)
9038 {
9039 tree elem;
9040
9041 for (elem = CONSTRUCTOR_ELTS (init);
9042 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
9043 elem = TREE_CHAIN (elem))
9044 ;
9045
9046 if (elem)
9047 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
9048 palign);
9049 }
9050 }
9051 }
9052 }
9053 /* Fall through. */
9054
9055 case COMPONENT_REF:
9056 case BIT_FIELD_REF:
9057 case ARRAY_RANGE_REF:
9058 /* If the operand is a CONSTRUCTOR, we can just extract the
9059 appropriate field if it is present. Don't do this if we have
9060 already written the data since we want to refer to that copy
9061 and varasm.c assumes that's what we'll do. */
9062 if (TREE_CODE (exp) == COMPONENT_REF
9063 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9064 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9065 {
9066 tree elt;
9067
9068 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9069 elt = TREE_CHAIN (elt))
9070 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9071 /* Note that unlike the case in expand_expr, we know this is
9072 BLKmode and hence not an integer. */
9073 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9074 }
9075
9076 {
9077 enum machine_mode mode1;
9078 HOST_WIDE_INT bitsize, bitpos;
9079 tree offset;
9080 int volatilep = 0;
9081 unsigned int alignment;
9082 int unsignedp;
9083 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9084 &mode1, &unsignedp, &volatilep,
9085 &alignment);
9086
9087 /* If we got back the original object, something is wrong. Perhaps
9088 we are evaluating an expression too early. In any event, don't
9089 infinitely recurse. */
9090 if (tem == exp)
9091 abort ();
9092
9093 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9094
9095 /* If this is a constant, put it into a register if it is a
9096 legitimate constant and OFFSET is 0 and memory if it isn't. */
9097 if (CONSTANT_P (op0))
9098 {
9099 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9100
9101 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9102 && offset == 0)
9103 op0 = force_reg (inner_mode, op0);
9104 else
9105 op0 = validize_mem (force_const_mem (inner_mode, op0));
9106 }
9107
9108 if (offset != 0)
9109 {
9110 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9111
9112 /* If this object is in a register, put it into memory.
9113 This case can't occur in C, but can in Ada if we have
9114 unchecked conversion of an expression from a scalar type to
9115 an array or record type. */
9116 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9117 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9118 {
9119 tree nt = build_qualified_type (TREE_TYPE (tem),
9120 (TYPE_QUALS (TREE_TYPE (tem))
9121 | TYPE_QUAL_CONST));
9122 rtx memloc = assign_temp (nt, 1, 1, 1);
9123
9124 mark_temp_addr_taken (memloc);
9125 emit_move_insn (memloc, op0);
9126 op0 = memloc;
9127 }
9128
9129 if (GET_CODE (op0) != MEM)
9130 abort ();
9131
9132 if (GET_MODE (offset_rtx) != ptr_mode)
9133 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9134
9135 #ifdef POINTERS_EXTEND_UNSIGNED
9136 if (GET_MODE (offset_rtx) != Pmode)
9137 offset_rtx = convert_memory_address (Pmode, offset_rtx);
9138 #endif
9139
9140 op0 = offset_address (op0, offset_rtx,
9141 highest_pow2_factor (offset));
9142 }
9143
9144 /* Don't forget about volatility even if this is a bitfield. */
9145 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9146 {
9147 op0 = copy_rtx (op0);
9148 MEM_VOLATILE_P (op0) = 1;
9149 }
9150
9151 /* Check the access. */
9152 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9153 {
9154 rtx to;
9155 int size;
9156
9157 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9158 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9159
9160 /* Check the access right of the pointer. */
9161 in_check_memory_usage = 1;
9162 if (size > BITS_PER_UNIT)
9163 emit_library_call (chkr_check_addr_libfunc,
9164 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9165 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9166 TYPE_MODE (sizetype),
9167 GEN_INT (MEMORY_USE_RO),
9168 TYPE_MODE (integer_type_node));
9169 in_check_memory_usage = 0;
9170 }
9171
9172 /* In cases where an aligned union has an unaligned object
9173 as a field, we might be extracting a BLKmode value from
9174 an integer-mode (e.g., SImode) object. Handle this case
9175 by doing the extract into an object as wide as the field
9176 (which we know to be the width of a basic mode), then
9177 storing into memory, and changing the mode to BLKmode.
9178 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9179 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9180 if (mode1 == VOIDmode
9181 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9182 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9183 && (TYPE_ALIGN (type) > alignment
9184 || bitpos % TYPE_ALIGN (type) != 0)))
9185 {
9186 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9187
9188 if (ext_mode == BLKmode)
9189 {
9190 /* In this case, BITPOS must start at a byte boundary. */
9191 if (GET_CODE (op0) != MEM
9192 || bitpos % BITS_PER_UNIT != 0)
9193 abort ();
9194
9195 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9196 }
9197 else
9198 {
9199 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9200 TYPE_QUAL_CONST);
9201 rtx new = assign_temp (nt, 0, 1, 1);
9202
9203 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9204 unsignedp, NULL_RTX, ext_mode,
9205 ext_mode, alignment,
9206 int_size_in_bytes (TREE_TYPE (tem)));
9207
9208 /* If the result is a record type and BITSIZE is narrower than
9209 the mode of OP0, an integral mode, and this is a big endian
9210 machine, we must put the field into the high-order bits. */
9211 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9212 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9213 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9214 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9215 size_int (GET_MODE_BITSIZE
9216 (GET_MODE (op0))
9217 - bitsize),
9218 op0, 1);
9219
9220 emit_move_insn (new, op0);
9221 op0 = copy_rtx (new);
9222 PUT_MODE (op0, BLKmode);
9223 }
9224 }
9225 else
9226 /* Get a reference to just this component. */
9227 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9228
9229 set_mem_attributes (op0, exp, 0);
9230
9231 /* Adjust the alignment in case the bit position is not
9232 a multiple of the alignment of the inner object. */
9233 while (bitpos % alignment != 0)
9234 alignment >>= 1;
9235
9236 if (GET_CODE (XEXP (op0, 0)) == REG)
9237 mark_reg_pointer (XEXP (op0, 0), alignment);
9238
9239 MEM_IN_STRUCT_P (op0) = 1;
9240 MEM_VOLATILE_P (op0) |= volatilep;
9241
9242 *palign = alignment;
9243 return op0;
9244 }
9245
9246 default:
9247 break;
9248
9249 }
9250
9251 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9252 }
9253 \f
9254 /* Return the tree node if ARG corresponds to a string constant, or zero
9255 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9256 in bytes within the string that ARG is accessing. The type of the
9257 offset will be `sizetype'. */
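/* For example, if ARG is the address of a STRING_CST plus a constant,
roughly what a C expression like "abc" + 2 produces, the STRING_CST
is returned and *PTR_OFFSET is set to that constant converted to
sizetype. */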
9258
9259 tree
9260 string_constant (arg, ptr_offset)
9261 tree arg;
9262 tree *ptr_offset;
9263 {
9264 STRIP_NOPS (arg);
9265
9266 if (TREE_CODE (arg) == ADDR_EXPR
9267 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9268 {
9269 *ptr_offset = size_zero_node;
9270 return TREE_OPERAND (arg, 0);
9271 }
9272 else if (TREE_CODE (arg) == PLUS_EXPR)
9273 {
9274 tree arg0 = TREE_OPERAND (arg, 0);
9275 tree arg1 = TREE_OPERAND (arg, 1);
9276
9277 STRIP_NOPS (arg0);
9278 STRIP_NOPS (arg1);
9279
9280 if (TREE_CODE (arg0) == ADDR_EXPR
9281 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9282 {
9283 *ptr_offset = convert (sizetype, arg1);
9284 return TREE_OPERAND (arg0, 0);
9285 }
9286 else if (TREE_CODE (arg1) == ADDR_EXPR
9287 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9288 {
9289 *ptr_offset = convert (sizetype, arg0);
9290 return TREE_OPERAND (arg1, 0);
9291 }
9292 }
9293
9294 return 0;
9295 }
9296 \f
9297 /* Expand code for a post- or pre- increment or decrement
9298 and return the RTX for the result.
9299 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9300
9301 static rtx
9302 expand_increment (exp, post, ignore)
9303 tree exp;
9304 int post, ignore;
9305 {
9306 rtx op0, op1;
9307 rtx temp, value;
9308 tree incremented = TREE_OPERAND (exp, 0);
9309 optab this_optab = add_optab;
9310 int icode;
9311 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9312 int op0_is_copy = 0;
9313 int single_insn = 0;
9314 /* 1 means we can't store into OP0 directly,
9315 because it is a subreg narrower than a word,
9316 and we don't dare clobber the rest of the word. */
9317 int bad_subreg = 0;
9318
9319 /* Stabilize any component ref that might need to be
9320 evaluated more than once below. */
9321 if (!post
9322 || TREE_CODE (incremented) == BIT_FIELD_REF
9323 || (TREE_CODE (incremented) == COMPONENT_REF
9324 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9325 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9326 incremented = stabilize_reference (incremented);
9327 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9328 ones into save exprs so that they don't accidentally get evaluated
9329 more than once by the code below. */
9330 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9331 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9332 incremented = save_expr (incremented);
9333
9334 /* Compute the operands as RTX.
9335 Note whether OP0 is the actual lvalue or a copy of it:
9336 I believe it is a copy iff it is a register or subreg
9337 and insns were generated in computing it. */
9338
9339 temp = get_last_insn ();
9340 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9341
9342 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9343 in place but instead must do sign- or zero-extension during assignment,
9344 so we copy it into a new register and let the code below use it as
9345 a copy.
9346
9347 Note that we can safely modify this SUBREG since it is known not to be
9348 shared (it was made by the expand_expr call above). */
9349
9350 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9351 {
9352 if (post)
9353 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9354 else
9355 bad_subreg = 1;
9356 }
9357 else if (GET_CODE (op0) == SUBREG
9358 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9359 {
9360 /* We cannot increment this SUBREG in place. If we are
9361 post-incrementing, get a copy of the old value. Otherwise,
9362 just mark that we cannot increment in place. */
9363 if (post)
9364 op0 = copy_to_reg (op0);
9365 else
9366 bad_subreg = 1;
9367 }
9368
9369 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9370 && temp != get_last_insn ());
9371 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9372 EXPAND_MEMORY_USE_BAD);
9373
9374 /* Decide whether incrementing or decrementing. */
9375 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9376 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9377 this_optab = sub_optab;
9378
9379 /* Convert decrement by a constant into a negative increment. */
9380 if (this_optab == sub_optab
9381 && GET_CODE (op1) == CONST_INT)
9382 {
9383 op1 = GEN_INT (-INTVAL (op1));
9384 this_optab = add_optab;
9385 }
9386
9387 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9388 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9389
9390 /* For a preincrement, see if we can do this with a single instruction. */
9391 if (!post)
9392 {
9393 icode = (int) this_optab->handlers[(int) mode].insn_code;
9394 if (icode != (int) CODE_FOR_nothing
9395 /* Make sure that OP0 is valid for operands 0 and 1
9396 of the insn we want to queue. */
9397 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9398 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9399 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9400 single_insn = 1;
9401 }
9402
9403 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9404 then we cannot just increment OP0. We must therefore contrive to
9405 increment the original value. Then, for postincrement, we can return
9406 OP0 since it is a copy of the old value. For preincrement, expand here
9407 unless we can do it with a single insn.
9408
9409 Likewise if storing directly into OP0 would clobber high bits
9410 we need to preserve (bad_subreg). */
9411 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9412 {
9413 /* This is the easiest way to increment the value wherever it is.
9414 Problems with multiple evaluation of INCREMENTED are prevented
9415 because either (1) it is a component_ref or preincrement,
9416 in which case it was stabilized above, or (2) it is an array_ref
9417 with constant index in an array in a register, which is
9418 safe to reevaluate. */
9419 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9420 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9421 ? MINUS_EXPR : PLUS_EXPR),
9422 TREE_TYPE (exp),
9423 incremented,
9424 TREE_OPERAND (exp, 1));
9425
9426 while (TREE_CODE (incremented) == NOP_EXPR
9427 || TREE_CODE (incremented) == CONVERT_EXPR)
9428 {
9429 newexp = convert (TREE_TYPE (incremented), newexp);
9430 incremented = TREE_OPERAND (incremented, 0);
9431 }
9432
9433 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9434 return post ? op0 : temp;
9435 }
9436
9437 if (post)
9438 {
9439 /* We have a true reference to the value in OP0.
9440 If there is an insn to add or subtract in this mode, queue it.
9441 Queueing the increment insn avoids the register shuffling
9442 that often results if we must increment now and first save
9443 the old value for subsequent use. */
9444
9445 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9446 op0 = stabilize (op0);
9447 #endif
9448
9449 icode = (int) this_optab->handlers[(int) mode].insn_code;
9450 if (icode != (int) CODE_FOR_nothing
9451 /* Make sure that OP0 is valid for operands 0 and 1
9452 of the insn we want to queue. */
9453 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9454 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9455 {
9456 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9457 op1 = force_reg (mode, op1);
9458
9459 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9460 }
9461 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9462 {
9463 rtx addr = (general_operand (XEXP (op0, 0), mode)
9464 ? force_reg (Pmode, XEXP (op0, 0))
9465 : copy_to_reg (XEXP (op0, 0)));
9466 rtx temp, result;
9467
9468 op0 = replace_equiv_address (op0, addr);
9469 temp = force_reg (GET_MODE (op0), op0);
9470 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9471 op1 = force_reg (mode, op1);
9472
9473 /* The increment queue is LIFO, thus we have to `queue'
9474 the instructions in reverse order. */
9475 enqueue_insn (op0, gen_move_insn (op0, temp));
9476 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9477 return result;
9478 }
9479 }
9480
9481 /* Preincrement, or we can't increment with one simple insn. */
9482 if (post)
9483 /* Save a copy of the value before inc or dec, to return it later. */
9484 temp = value = copy_to_reg (op0);
9485 else
9486 /* Arrange to return the incremented value. */
9487 /* Copy the rtx because expand_binop will protect from the queue,
9488 and the results of that would be invalid for us to return
9489 if our caller does emit_queue before using our result. */
9490 temp = copy_rtx (value = op0);
9491
9492 /* Increment however we can. */
9493 op1 = expand_binop (mode, this_optab, value, op1,
9494 current_function_check_memory_usage ? NULL_RTX : op0,
9495 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9496 /* Make sure the value is stored into OP0. */
9497 if (op1 != op0)
9498 emit_move_insn (op0, op1);
9499
9500 return temp;
9501 }
9502 \f
9503 /* At the start of a function, record that we have no previously-pushed
9504 arguments waiting to be popped. */
9505
9506 void
9507 init_pending_stack_adjust ()
9508 {
9509 pending_stack_adjust = 0;
9510 }
9511
9512 /* When exiting from function, if safe, clear out any pending stack adjust
9513 so the adjustment won't get done.
9514
9515 Note, if the current function calls alloca, then it must have a
9516 frame pointer regardless of the value of flag_omit_frame_pointer. */
9517
9518 void
9519 clear_pending_stack_adjust ()
9520 {
9521 #ifdef EXIT_IGNORE_STACK
9522 if (optimize > 0
9523 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9524 && EXIT_IGNORE_STACK
9525 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9526 && ! flag_inline_functions)
9527 {
9528 stack_pointer_delta -= pending_stack_adjust;
9529 pending_stack_adjust = 0;
9530 }
9531 #endif
9532 }
9533
9534 /* Pop any previously-pushed arguments that have not been popped yet. */
9535
9536 void
9537 do_pending_stack_adjust ()
9538 {
9539 if (inhibit_defer_pop == 0)
9540 {
9541 if (pending_stack_adjust != 0)
9542 adjust_stack (GEN_INT (pending_stack_adjust));
9543 pending_stack_adjust = 0;
9544 }
9545 }
9546 \f
9547 /* Expand conditional expressions. */
9548
9549 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9550 LABEL is an rtx of code CODE_LABEL, in this function and all the
9551 functions here. */
9552
9553 void
9554 jumpifnot (exp, label)
9555 tree exp;
9556 rtx label;
9557 {
9558 do_jump (exp, label, NULL_RTX);
9559 }
9560
9561 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9562
9563 void
9564 jumpif (exp, label)
9565 tree exp;
9566 rtx label;
9567 {
9568 do_jump (exp, NULL_RTX, label);
9569 }
9570
9571 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9572 the result is zero, or IF_TRUE_LABEL if the result is one.
9573 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9574 meaning fall through in that case.
9575
9576 do_jump always does any pending stack adjust except when it does not
9577 actually perform a jump. An example where there is no jump
9578 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9579
9580 This function is responsible for optimizing cases such as
9581 &&, || and comparison operators in EXP. */
9582
9583 void
9584 do_jump (exp, if_false_label, if_true_label)
9585 tree exp;
9586 rtx if_false_label, if_true_label;
9587 {
9588 enum tree_code code = TREE_CODE (exp);
9589 /* Some cases need to create a label to jump to
9590 in order to properly fall through.
9591 These cases set DROP_THROUGH_LABEL nonzero. */
9592 rtx drop_through_label = 0;
9593 rtx temp;
9594 int i;
9595 tree type;
9596 enum machine_mode mode;
9597
9598 #ifdef MAX_INTEGER_COMPUTATION_MODE
9599 check_max_integer_computation_mode (exp);
9600 #endif
9601
9602 emit_queue ();
9603
9604 switch (code)
9605 {
9606 case ERROR_MARK:
9607 break;
9608
9609 case INTEGER_CST:
9610 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9611 if (temp)
9612 emit_jump (temp);
9613 break;
9614
9615 #if 0
9616 /* This is not true with #pragma weak */
9617 case ADDR_EXPR:
9618 /* The address of something can never be zero. */
9619 if (if_true_label)
9620 emit_jump (if_true_label);
9621 break;
9622 #endif
9623
9624 case NOP_EXPR:
9625 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9626 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9627 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9628 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9629 goto normal;
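/* Fall through. */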
9630 case CONVERT_EXPR:
9631 /* If we are narrowing the operand, we have to do the compare in the
9632 narrower mode. */
9633 if ((TYPE_PRECISION (TREE_TYPE (exp))
9634 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9635 goto normal;
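/* Fall through. */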
9636 case NON_LVALUE_EXPR:
9637 case REFERENCE_EXPR:
9638 case ABS_EXPR:
9639 case NEGATE_EXPR:
9640 case LROTATE_EXPR:
9641 case RROTATE_EXPR:
9642 /* These cannot change zero->non-zero or vice versa. */
9643 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9644 break;
9645
9646 case WITH_RECORD_EXPR:
9647 /* Put the object on the placeholder list, recurse through our first
9648 operand, and pop the list. */
9649 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9650 placeholder_list);
9651 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9652 placeholder_list = TREE_CHAIN (placeholder_list);
9653 break;
9654
9655 #if 0
9656 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9657 a test and can be longer if the test is eliminated. */
9658 case PLUS_EXPR:
9659 /* Reduce to minus. */
9660 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9661 TREE_OPERAND (exp, 0),
9662 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9663 TREE_OPERAND (exp, 1))));
9664 /* Process as MINUS. */
9665 #endif
9666
9667 case MINUS_EXPR:
9668 /* Non-zero iff operands of minus differ. */
9669 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9670 TREE_OPERAND (exp, 0),
9671 TREE_OPERAND (exp, 1)),
9672 NE, NE, if_false_label, if_true_label);
9673 break;
9674
9675 case BIT_AND_EXPR:
9676 /* If we are AND'ing with a small constant, do this comparison in the
9677 smallest type that fits. If the machine doesn't have comparisons
9678 that small, it will be converted back to the wider comparison.
9679 This helps if we are testing the sign bit of a narrower object.
9680 combine can't do this for us because it can't know whether a
9681 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
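/* For instance, a test of `x & 0x80' on a 32-bit X can be done as a
QImode comparison against zero. */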
9682
9683 if (! SLOW_BYTE_ACCESS
9684 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9685 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9686 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9687 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9688 && (type = type_for_mode (mode, 1)) != 0
9689 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9690 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9691 != CODE_FOR_nothing))
9692 {
9693 do_jump (convert (type, exp), if_false_label, if_true_label);
9694 break;
9695 }
9696 goto normal;
9697
9698 case TRUTH_NOT_EXPR:
9699 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9700 break;
9701
9702 case TRUTH_ANDIF_EXPR:
9703 if (if_false_label == 0)
9704 if_false_label = drop_through_label = gen_label_rtx ();
9705 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9706 start_cleanup_deferral ();
9707 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9708 end_cleanup_deferral ();
9709 break;
9710
9711 case TRUTH_ORIF_EXPR:
9712 if (if_true_label == 0)
9713 if_true_label = drop_through_label = gen_label_rtx ();
9714 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9715 start_cleanup_deferral ();
9716 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9717 end_cleanup_deferral ();
9718 break;
9719
9720 case COMPOUND_EXPR:
9721 push_temp_slots ();
9722 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9723 preserve_temp_slots (NULL_RTX);
9724 free_temp_slots ();
9725 pop_temp_slots ();
9726 emit_queue ();
9727 do_pending_stack_adjust ();
9728 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9729 break;
9730
9731 case COMPONENT_REF:
9732 case BIT_FIELD_REF:
9733 case ARRAY_REF:
9734 case ARRAY_RANGE_REF:
9735 {
9736 HOST_WIDE_INT bitsize, bitpos;
9737 int unsignedp;
9738 enum machine_mode mode;
9739 tree type;
9740 tree offset;
9741 int volatilep = 0;
9742 unsigned int alignment;
9743
9744 /* Get description of this reference. We don't actually care
9745 about the underlying object here. */
9746 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9747 &unsignedp, &volatilep, &alignment);
9748
9749 type = type_for_size (bitsize, unsignedp);
9750 if (! SLOW_BYTE_ACCESS
9751 && type != 0 && bitsize >= 0
9752 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9753 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9754 != CODE_FOR_nothing))
9755 {
9756 do_jump (convert (type, exp), if_false_label, if_true_label);
9757 break;
9758 }
9759 goto normal;
9760 }
9761
9762 case COND_EXPR:
9763 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9764 if (integer_onep (TREE_OPERAND (exp, 1))
9765 && integer_zerop (TREE_OPERAND (exp, 2)))
9766 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9767
9768 else if (integer_zerop (TREE_OPERAND (exp, 1))
9769 && integer_onep (TREE_OPERAND (exp, 2)))
9770 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9771
9772 else
9773 {
9774 rtx label1 = gen_label_rtx ();
9775 drop_through_label = gen_label_rtx ();
9776
9777 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9778
9779 start_cleanup_deferral ();
9780 /* Now the THEN-expression. */
9781 do_jump (TREE_OPERAND (exp, 1),
9782 if_false_label ? if_false_label : drop_through_label,
9783 if_true_label ? if_true_label : drop_through_label);
9784 /* In case the do_jump just above never jumps. */
9785 do_pending_stack_adjust ();
9786 emit_label (label1);
9787
9788 /* Now the ELSE-expression. */
9789 do_jump (TREE_OPERAND (exp, 2),
9790 if_false_label ? if_false_label : drop_through_label,
9791 if_true_label ? if_true_label : drop_through_label);
9792 end_cleanup_deferral ();
9793 }
9794 break;
9795
9796 case EQ_EXPR:
9797 {
9798 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9799
9800 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9801 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9802 {
9803 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9804 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9805 do_jump
9806 (fold
9807 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9808 fold (build (EQ_EXPR, TREE_TYPE (exp),
9809 fold (build1 (REALPART_EXPR,
9810 TREE_TYPE (inner_type),
9811 exp0)),
9812 fold (build1 (REALPART_EXPR,
9813 TREE_TYPE (inner_type),
9814 exp1)))),
9815 fold (build (EQ_EXPR, TREE_TYPE (exp),
9816 fold (build1 (IMAGPART_EXPR,
9817 TREE_TYPE (inner_type),
9818 exp0)),
9819 fold (build1 (IMAGPART_EXPR,
9820 TREE_TYPE (inner_type),
9821 exp1)))))),
9822 if_false_label, if_true_label);
9823 }
9824
9825 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9826 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9827
9828 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9829 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9830 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9831 else
9832 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9833 break;
9834 }
9835
9836 case NE_EXPR:
9837 {
9838 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9839
9840 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9841 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9842 {
9843 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9844 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9845 do_jump
9846 (fold
9847 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9848 fold (build (NE_EXPR, TREE_TYPE (exp),
9849 fold (build1 (REALPART_EXPR,
9850 TREE_TYPE (inner_type),
9851 exp0)),
9852 fold (build1 (REALPART_EXPR,
9853 TREE_TYPE (inner_type),
9854 exp1)))),
9855 fold (build (NE_EXPR, TREE_TYPE (exp),
9856 fold (build1 (IMAGPART_EXPR,
9857 TREE_TYPE (inner_type),
9858 exp0)),
9859 fold (build1 (IMAGPART_EXPR,
9860 TREE_TYPE (inner_type),
9861 exp1)))))),
9862 if_false_label, if_true_label);
9863 }
9864
9865 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9866 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9867
9868 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9869 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9870 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9871 else
9872 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9873 break;
9874 }
9875
9876 case LT_EXPR:
9877 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9878 if (GET_MODE_CLASS (mode) == MODE_INT
9879 && ! can_compare_p (LT, mode, ccp_jump))
9880 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9881 else
9882 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9883 break;
9884
9885 case LE_EXPR:
9886 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9887 if (GET_MODE_CLASS (mode) == MODE_INT
9888 && ! can_compare_p (LE, mode, ccp_jump))
9889 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9890 else
9891 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9892 break;
9893
9894 case GT_EXPR:
9895 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9896 if (GET_MODE_CLASS (mode) == MODE_INT
9897 && ! can_compare_p (GT, mode, ccp_jump))
9898 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9899 else
9900 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9901 break;
9902
9903 case GE_EXPR:
9904 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9905 if (GET_MODE_CLASS (mode) == MODE_INT
9906 && ! can_compare_p (GE, mode, ccp_jump))
9907 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9908 else
9909 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9910 break;
9911
9912 case UNORDERED_EXPR:
9913 case ORDERED_EXPR:
9914 {
9915 enum rtx_code cmp, rcmp;
9916 int do_rev;
9917
9918 if (code == UNORDERED_EXPR)
9919 cmp = UNORDERED, rcmp = ORDERED;
9920 else
9921 cmp = ORDERED, rcmp = UNORDERED;
9922 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9923
9924 do_rev = 0;
9925 if (! can_compare_p (cmp, mode, ccp_jump)
9926 && (can_compare_p (rcmp, mode, ccp_jump)
9927 /* If the target doesn't provide either UNORDERED or ORDERED
9928 comparisons, canonicalize on UNORDERED for the library. */
9929 || rcmp == UNORDERED))
9930 do_rev = 1;
9931
9932 if (! do_rev)
9933 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9934 else
9935 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9936 }
9937 break;
9938
9939 {
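/* The following five codes share the code at unordered_bcc below:
RCODE1 is the rtx comparison used when the target supports the
combined unordered comparison directly, and TCODE2 is the tree
comparison used when it must be decomposed into an UNORDERED test
plus an ordinary comparison. */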
9940 enum rtx_code rcode1;
9941 enum tree_code tcode2;
9942
9943 case UNLT_EXPR:
9944 rcode1 = UNLT;
9945 tcode2 = LT_EXPR;
9946 goto unordered_bcc;
9947 case UNLE_EXPR:
9948 rcode1 = UNLE;
9949 tcode2 = LE_EXPR;
9950 goto unordered_bcc;
9951 case UNGT_EXPR:
9952 rcode1 = UNGT;
9953 tcode2 = GT_EXPR;
9954 goto unordered_bcc;
9955 case UNGE_EXPR:
9956 rcode1 = UNGE;
9957 tcode2 = GE_EXPR;
9958 goto unordered_bcc;
9959 case UNEQ_EXPR:
9960 rcode1 = UNEQ;
9961 tcode2 = EQ_EXPR;
9962 goto unordered_bcc;
9963
9964 unordered_bcc:
9965 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9966 if (can_compare_p (rcode1, mode, ccp_jump))
9967 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9968 if_true_label);
9969 else
9970 {
9971 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9972 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9973 tree cmp0, cmp1;
9974
9975 /* If the target doesn't support combined unordered
9976 compares, decompose into UNORDERED + comparison. */
9977 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9978 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9979 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9980 do_jump (exp, if_false_label, if_true_label);
9981 }
9982 }
9983 break;
9984
9985 /* Special case:
9986 __builtin_expect (<test>, 0) and
9987 __builtin_expect (<test>, 1)
9988
9989 We need to do this here, so that <test> is not converted to a SCC
9990 operation on machines that use condition code registers and COMPARE
9991 like the PowerPC, and then the jump is done based on whether the SCC
9992 operation produced a 1 or 0. */
9993 case CALL_EXPR:
9994 /* Check for a built-in function. */
9995 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9996 {
9997 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9998 tree arglist = TREE_OPERAND (exp, 1);
9999
10000 if (TREE_CODE (fndecl) == FUNCTION_DECL
10001 && DECL_BUILT_IN (fndecl)
10002 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10003 && arglist != NULL_TREE
10004 && TREE_CHAIN (arglist) != NULL_TREE)
10005 {
10006 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10007 if_true_label);
10008
10009 if (seq != NULL_RTX)
10010 {
10011 emit_insn (seq);
10012 return;
10013 }
10014 }
10015 }
10016 /* Fall through and generate the normal code. */
10017
10018 default:
10019 normal:
10020 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10021 #if 0
10022 /* This is not needed any more and causes poor code since it causes
10023 comparisons and tests from non-SI objects to have different code
10024 sequences. */
10025 /* Copy to register to avoid generating bad insns by cse
10026 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10027 if (!cse_not_expected && GET_CODE (temp) == MEM)
10028 temp = copy_to_reg (temp);
10029 #endif
10030 do_pending_stack_adjust ();
10031 /* Do any postincrements in the expression that was tested. */
10032 emit_queue ();
10033
10034 if (GET_CODE (temp) == CONST_INT
10035 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10036 || GET_CODE (temp) == LABEL_REF)
10037 {
10038 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10039 if (target)
10040 emit_jump (target);
10041 }
10042 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10043 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10044 /* Note swapping the labels gives us not-equal. */
10045 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10046 else if (GET_MODE (temp) != VOIDmode)
10047 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10048 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10049 GET_MODE (temp), NULL_RTX, 0,
10050 if_false_label, if_true_label);
10051 else
10052 abort ();
10053 }
10054
10055 if (drop_through_label)
10056 {
10057 /* If do_jump produces code that might be jumped around,
10058 do any stack adjusts from that code, before the place
10059 where control merges in. */
10060 do_pending_stack_adjust ();
10061 emit_label (drop_through_label);
10062 }
10063 }
10064 \f
10065 /* Given a comparison expression EXP for values too wide to be compared
10066 with one insn, test the comparison and jump to the appropriate label.
10067 The code of EXP is ignored; we always test GT if SWAP is 0,
10068 and LT if SWAP is 1. */
10069
10070 static void
10071 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10072 tree exp;
10073 int swap;
10074 rtx if_false_label, if_true_label;
10075 {
10076 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10077 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10078 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10079 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10080
10081 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10082 }
10083
10084 /* Compare OP0 with OP1, word at a time, in mode MODE.
10085 UNSIGNEDP says to do unsigned comparison.
10086 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10087
10088 void
10089 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10090 enum machine_mode mode;
10091 int unsignedp;
10092 rtx op0, op1;
10093 rtx if_false_label, if_true_label;
10094 {
10095 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10096 rtx drop_through_label = 0;
10097 int i;
10098
10099 if (! if_true_label || ! if_false_label)
10100 drop_through_label = gen_label_rtx ();
10101 if (! if_true_label)
10102 if_true_label = drop_through_label;
10103 if (! if_false_label)
10104 if_false_label = drop_through_label;
10105
10106 /* Compare a word at a time, high order first. */
10107 for (i = 0; i < nwords; i++)
10108 {
10109 rtx op0_word, op1_word;
10110
10111 if (WORDS_BIG_ENDIAN)
10112 {
10113 op0_word = operand_subword_force (op0, i, mode);
10114 op1_word = operand_subword_force (op1, i, mode);
10115 }
10116 else
10117 {
10118 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10119 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10120 }
10121
10122 /* All but the high-order word must be compared as unsigned. */
10123 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10124 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10125 NULL_RTX, if_true_label);
10126
10127 /* Consider lower words only if these are equal. */
10128 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10129 NULL_RTX, 0, NULL_RTX, if_false_label);
10130 }
10131
10132 if (if_false_label)
10133 emit_jump (if_false_label);
10134 if (drop_through_label)
10135 emit_label (drop_through_label);
10136 }
10137
10138 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10139 with one insn, test the comparison and jump to the appropriate label. */
10140
10141 static void
10142 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10143 tree exp;
10144 rtx if_false_label, if_true_label;
10145 {
10146 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10147 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10148 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10149 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10150 int i;
10151 rtx drop_through_label = 0;
10152
10153 if (! if_false_label)
10154 drop_through_label = if_false_label = gen_label_rtx ();
10155
10156 for (i = 0; i < nwords; i++)
10157 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10158 operand_subword_force (op1, i, mode),
10159 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10160 word_mode, NULL_RTX, 0, if_false_label,
10161 NULL_RTX);
10162
10163 if (if_true_label)
10164 emit_jump (if_true_label);
10165 if (drop_through_label)
10166 emit_label (drop_through_label);
10167 }
10168 \f
10169 /* Jump to IF_TRUE_LABEL if OP0 is 0, to IF_FALSE_LABEL otherwise.
10170 We assume that OP0 has an integer mode that is too wide
10171 for the available compare insns. */
10172
10173 void
10174 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10175 rtx op0;
10176 rtx if_false_label, if_true_label;
10177 {
10178 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10179 rtx part;
10180 int i;
10181 rtx drop_through_label = 0;
10182
10183 /* The fastest way of doing this comparison on almost any machine is to
10184 "or" all the words and compare the result. If all have to be loaded
10185 from memory and this is a very wide item, it's possible this may
10186 be slower, but that's highly unlikely. */
10187
10188 part = gen_reg_rtx (word_mode);
10189 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10190 for (i = 1; i < nwords && part != 0; i++)
10191 part = expand_binop (word_mode, ior_optab, part,
10192 operand_subword_force (op0, i, GET_MODE (op0)),
10193 part, 1, OPTAB_WIDEN);
10194
10195 if (part != 0)
10196 {
10197 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10198 NULL_RTX, 0, if_false_label, if_true_label);
10199
10200 return;
10201 }
10202
10203 /* If we couldn't do the "or" simply, do this with a series of compares. */
10204 if (! if_false_label)
10205 drop_through_label = if_false_label = gen_label_rtx ();
10206
10207 for (i = 0; i < nwords; i++)
10208 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10209 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10210 if_false_label, NULL_RTX);
10211
10212 if (if_true_label)
10213 emit_jump (if_true_label);
10214
10215 if (drop_through_label)
10216 emit_label (drop_through_label);
10217 }
10218 \f
10219 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10220 (including code to compute the values to be compared)
10221 and set (CC0) according to the result.
10222 The decision as to signed or unsigned comparison must be made by the caller.
10223
10224 We force a stack adjustment unless there are currently
10225 things pushed on the stack that aren't yet used.
10226
10227 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10228 compared.
10229
10230 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10231 size of MODE should be used. */
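/* The value returned is a comparison of CC0 against const0_rtx using
CODE, unless the operands fold to a constant, in which case the
folded result is returned. */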
10232
10233 rtx
10234 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10235 rtx op0, op1;
10236 enum rtx_code code;
10237 int unsignedp;
10238 enum machine_mode mode;
10239 rtx size;
10240 unsigned int align;
10241 {
10242 rtx tem;
10243
10244 /* If one operand is constant, make it the second one. Only do this
10245 if the other operand is not constant as well. */
10246
10247 if (swap_commutative_operands_p (op0, op1))
10248 {
10249 tem = op0;
10250 op0 = op1;
10251 op1 = tem;
10252 code = swap_condition (code);
10253 }
10254
10255 if (flag_force_mem)
10256 {
10257 op0 = force_not_mem (op0);
10258 op1 = force_not_mem (op1);
10259 }
10260
10261 do_pending_stack_adjust ();
10262
10263 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10264 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10265 return tem;
10266
10267 #if 0
10268 /* There's no need to do this now that combine.c can eliminate lots of
10269 sign extensions. This can be less efficient in certain cases on other
10270 machines. */
10271
10272 /* If this is a signed equality comparison, we can do it as an
10273 unsigned comparison since zero-extension is cheaper than sign
10274 extension and comparisons with zero are done as unsigned. This is
10275 the case even on machines that can do fast sign extension, since
10276 zero-extension is easier to combine with other operations than
10277 sign-extension is. If we are comparing against a constant, we must
10278 convert it to what it would look like unsigned. */
10279 if ((code == EQ || code == NE) && ! unsignedp
10280 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10281 {
10282 if (GET_CODE (op1) == CONST_INT
10283 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10284 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10285 unsignedp = 1;
10286 }
10287 #endif
10288
10289 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10290
10291 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10292 }
10293
10294 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10295 The decision as to signed or unsigned comparison must be made by the caller.
10296
10297 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10298 compared.
10299
10300 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10301 size of MODE should be used. */
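/* One of the labels may be NULL_RTX, in which case the generated code
will drop through. */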
10302
10303 void
10304 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10305 if_false_label, if_true_label)
10306 rtx op0, op1;
10307 enum rtx_code code;
10308 int unsignedp;
10309 enum machine_mode mode;
10310 rtx size;
10311 unsigned int align;
10312 rtx if_false_label, if_true_label;
10313 {
10314 rtx tem;
10315 int dummy_true_label = 0;
10316
10317 /* Reverse the comparison if that is safe and we want to jump if it is
10318 false. */
10319 if (! if_true_label && ! FLOAT_MODE_P (mode))
10320 {
10321 if_true_label = if_false_label;
10322 if_false_label = 0;
10323 code = reverse_condition (code);
10324 }
10325
10326 /* If one operand is constant, make it the second one. Only do this
10327 if the other operand is not constant as well. */
10328
10329 if (swap_commutative_operands_p (op0, op1))
10330 {
10331 tem = op0;
10332 op0 = op1;
10333 op1 = tem;
10334 code = swap_condition (code);
10335 }
10336
10337 if (flag_force_mem)
10338 {
10339 op0 = force_not_mem (op0);
10340 op1 = force_not_mem (op1);
10341 }
10342
10343 do_pending_stack_adjust ();
10344
10345 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10346 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10347 {
10348 if (tem == const_true_rtx)
10349 {
10350 if (if_true_label)
10351 emit_jump (if_true_label);
10352 }
10353 else
10354 {
10355 if (if_false_label)
10356 emit_jump (if_false_label);
10357 }
10358 return;
10359 }
10360
10361 #if 0
10362 /* There's no need to do this now that combine.c can eliminate lots of
10363 sign extensions. This can be less efficient in certain cases on other
10364 machines. */
10365
10366 /* If this is a signed equality comparison, we can do it as an
10367 unsigned comparison since zero-extension is cheaper than sign
10368 extension and comparisons with zero are done as unsigned. This is
10369 the case even on machines that can do fast sign extension, since
10370 zero-extension is easier to combine with other operations than
10371 sign-extension is. If we are comparing against a constant, we must
10372 convert it to what it would look like unsigned. */
10373 if ((code == EQ || code == NE) && ! unsignedp
10374 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10375 {
10376 if (GET_CODE (op1) == CONST_INT
10377 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10378 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10379 unsignedp = 1;
10380 }
10381 #endif
10382
10383 if (! if_true_label)
10384 {
10385 dummy_true_label = 1;
10386 if_true_label = gen_label_rtx ();
10387 }
10388
10389 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10390 if_true_label);
10391
10392 if (if_false_label)
10393 emit_jump (if_false_label);
10394 if (dummy_true_label)
10395 emit_label (if_true_label);
10396 }
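/* A hypothetical use of do_compare_rtx_and_jump (the operands and label
   are assumptions, not taken from a caller in this file): branch to LAB
   when OP0 >= OP1, both SImode and unsigned, and fall through otherwise.
   SIZE is NULL_RTX because the mode is not BLKmode, and an ALIGN of zero
   means the size of SImode is used:

       do_compare_rtx_and_jump (op0, op1, GEU, 1, SImode, NULL_RTX, 0,
                                NULL_RTX, lab);

   Passing NULL_RTX for IF_FALSE_LABEL makes the false case drop through.  */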
10397
10398 /* Generate code for a comparison expression EXP (including code to compute
10399 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10400 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10401 generated code will drop through.
10402 SIGNED_CODE should be the rtx comparison code to use if the data is
10403 signed; UNSIGNED_CODE, likewise, if the data is unsigned.
10404
10405 We force a stack adjustment unless there are currently
10406 things pushed on the stack that aren't yet used. */
10407
10408 static void
10409 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10410 if_true_label)
10411 tree exp;
10412 enum rtx_code signed_code, unsigned_code;
10413 rtx if_false_label, if_true_label;
10414 {
10415 unsigned int align0, align1;
10416 rtx op0, op1;
10417 tree type;
10418 enum machine_mode mode;
10419 int unsignedp;
10420 enum rtx_code code;
10421
10422 /* Don't crash if the comparison was erroneous. */
10423 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10424 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10425 return;
10426
10427 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10428 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10429 return;
10430
10431 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10432 mode = TYPE_MODE (type);
10433 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10434 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10435 || (GET_MODE_BITSIZE (mode)
10436 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10437 1)))))))
10438 {
10439 /* OP0 might have been replaced by a promoted constant, in which
10440 case the type of the second operand should be used.  */
10441 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10442 mode = TYPE_MODE (type);
10443 }
10444 unsignedp = TREE_UNSIGNED (type);
10445 code = unsignedp ? unsigned_code : signed_code;
10446
10447 #ifdef HAVE_canonicalize_funcptr_for_compare
10448 /* If function pointers need to be "canonicalized" before they can
10449 be reliably compared, then canonicalize them. */
10450 if (HAVE_canonicalize_funcptr_for_compare
10451 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10452 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10453 == FUNCTION_TYPE))
10454 {
10455 rtx new_op0 = gen_reg_rtx (mode);
10456
10457 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10458 op0 = new_op0;
10459 }
10460
10461 if (HAVE_canonicalize_funcptr_for_compare
10462 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10463 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10464 == FUNCTION_TYPE))
10465 {
10466 rtx new_op1 = gen_reg_rtx (mode);
10467
10468 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10469 op1 = new_op1;
10470 }
10471 #endif
10472
10473 /* Do any postincrements in the expression that was tested. */
10474 emit_queue ();
10475
10476 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10477 ((mode == BLKmode)
10478 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10479 MIN (align0, align1),
10480 if_false_label, if_true_label);
10481 }
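/* For illustration (a source-level view, not a transcript of this file's
   callers): when the compared operands are unsigned, the function above
   selects UNSIGNED_CODE, so

       unsigned int a, b;  ...  if (a < b) ...

   is expanded with LTU, while the same test on signed ints uses LT.  EQ
   and NE have no signed/unsigned distinction, so equality tests are
   unaffected.  */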
10482 \f
10483 /* Generate code to calculate EXP using a store-flag instruction
10484 and return an rtx for the result. EXP is either a comparison
10485 or a TRUTH_NOT_EXPR whose operand is a comparison.
10486
10487 If TARGET is nonzero, store the result there if convenient.
10488
10489 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10490 cheap.
10491
10492 Return zero if there is no suitable set-flag instruction
10493 available on this machine.
10494
10495 Once expand_expr has been called on the arguments of the comparison,
10496 we are committed to doing the store flag, since it is not safe to
10497 re-evaluate the expression. We emit the store-flag insn by calling
10498 emit_store_flag, but only expand the arguments if we have a reason
10499 to believe that emit_store_flag will be successful. If we think that
10500 it will, but it isn't, we have to simulate the store-flag with a
10501 set/jump/set sequence. */
10502
10503 static rtx
10504 do_store_flag (exp, target, mode, only_cheap)
10505 tree exp;
10506 rtx target;
10507 enum machine_mode mode;
10508 int only_cheap;
10509 {
10510 enum rtx_code code;
10511 tree arg0, arg1, type;
10512 tree tem;
10513 enum machine_mode operand_mode;
10514 int invert = 0;
10515 int unsignedp;
10516 rtx op0, op1;
10517 enum insn_code icode;
10518 rtx subtarget = target;
10519 rtx result, label;
10520
10521 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10522 result at the end. We can't simply invert the test since it would
10523 have already been inverted if it were valid. This case occurs for
10524 some floating-point comparisons. */
10525
10526 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10527 invert = 1, exp = TREE_OPERAND (exp, 0);
10528
10529 arg0 = TREE_OPERAND (exp, 0);
10530 arg1 = TREE_OPERAND (exp, 1);
10531
10532 /* Don't crash if the comparison was erroneous. */
10533 if (arg0 == error_mark_node || arg1 == error_mark_node)
10534 return const0_rtx;
10535
10536 type = TREE_TYPE (arg0);
10537 operand_mode = TYPE_MODE (type);
10538 unsignedp = TREE_UNSIGNED (type);
10539
10540 /* We won't bother with BLKmode store-flag operations because it would mean
10541 passing a lot of information to emit_store_flag. */
10542 if (operand_mode == BLKmode)
10543 return 0;
10544
10545 /* We won't bother with store-flag operations involving function pointers
10546 when function pointers must be canonicalized before comparisons. */
10547 #ifdef HAVE_canonicalize_funcptr_for_compare
10548 if (HAVE_canonicalize_funcptr_for_compare
10549 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10550 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10551 == FUNCTION_TYPE))
10552 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10553 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10554 == FUNCTION_TYPE))))
10555 return 0;
10556 #endif
10557
10558 STRIP_NOPS (arg0);
10559 STRIP_NOPS (arg1);
10560
10561 /* Get the rtx comparison code to use. We know that EXP is a comparison
10562 operation of some type. Some comparisons against 1 and -1 can be
10563 converted to comparisons with zero. Do so here so that the tests
10564 below will be aware that we have a comparison with zero. These
10565 tests will not catch constants in the first operand, but constants
10566 are rarely passed as the first operand. */
10567
10568 switch (TREE_CODE (exp))
10569 {
10570 case EQ_EXPR:
10571 code = EQ;
10572 break;
10573 case NE_EXPR:
10574 code = NE;
10575 break;
10576 case LT_EXPR:
10577 if (integer_onep (arg1))
10578 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10579 else
10580 code = unsignedp ? LTU : LT;
10581 break;
10582 case LE_EXPR:
10583 if (! unsignedp && integer_all_onesp (arg1))
10584 arg1 = integer_zero_node, code = LT;
10585 else
10586 code = unsignedp ? LEU : LE;
10587 break;
10588 case GT_EXPR:
10589 if (! unsignedp && integer_all_onesp (arg1))
10590 arg1 = integer_zero_node, code = GE;
10591 else
10592 code = unsignedp ? GTU : GT;
10593 break;
10594 case GE_EXPR:
10595 if (integer_onep (arg1))
10596 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10597 else
10598 code = unsignedp ? GEU : GE;
10599 break;
10600
10601 case UNORDERED_EXPR:
10602 code = UNORDERED;
10603 break;
10604 case ORDERED_EXPR:
10605 code = ORDERED;
10606 break;
10607 case UNLT_EXPR:
10608 code = UNLT;
10609 break;
10610 case UNLE_EXPR:
10611 code = UNLE;
10612 break;
10613 case UNGT_EXPR:
10614 code = UNGT;
10615 break;
10616 case UNGE_EXPR:
10617 code = UNGE;
10618 break;
10619 case UNEQ_EXPR:
10620 code = UNEQ;
10621 break;
10622
10623 default:
10624 abort ();
10625 }
10626
10627 /* Put a constant second. */
10628 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10629 {
10630 tem = arg0; arg0 = arg1; arg1 = tem;
10631 code = swap_condition (code);
10632 }
10633
10634 /* If this is an equality or inequality test of a single bit, we can
10635 do this by shifting the bit being tested to the low-order bit and
10636 masking the result with the constant 1. If the condition was EQ,
10637 we xor it with 1. This does not require an scc insn and is faster
10638 than an scc insn even if we have it. */
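/* Worked example of the rewrite described above (the source expression is
   hypothetical): testing bit 4 of X,

       (X & 0x10) != 0     becomes     (X >> 4) & 1
       (X & 0x10) == 0     becomes     ((X >> 4) ^ 1) & 1

   The xor with 1 handles the EQ case, and the trailing AND is the one the
   code below omits when the bit tested is the most significant bit of the
   type.  */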
10639
10640 if ((code == NE || code == EQ)
10641 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10642 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10643 {
10644 tree inner = TREE_OPERAND (arg0, 0);
10645 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10646 int ops_unsignedp;
10647
10648 /* If INNER is a right shift of a constant and it plus BITNUM does
10649 not overflow, adjust BITNUM and INNER. */
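/* For instance (hypothetical expression), in

       ((X >> 3) & 2) != 0

   INNER starts out as X >> 3 with BITNUM 1; since 1 + 3 still fits in the
   precision of the type, this becomes INNER = X with BITNUM 4.  */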
10650
10651 if (TREE_CODE (inner) == RSHIFT_EXPR
10652 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10653 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10654 && bitnum < TYPE_PRECISION (type)
10655 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10656 bitnum - TYPE_PRECISION (type)))
10657 {
10658 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10659 inner = TREE_OPERAND (inner, 0);
10660 }
10661
10662 /* If we are going to be able to omit the AND below, we must do our
10663 operations as unsigned.  If we must use the AND, we have a choice.
10664 Normally unsigned is faster, but on some machines signed is faster.  */
10665 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10666 #ifdef LOAD_EXTEND_OP
10667 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10668 #else
10669 : 1
10670 #endif
10671 );
10672
10673 if (! get_subtarget (subtarget)
10674 || GET_MODE (subtarget) != operand_mode
10675 || ! safe_from_p (subtarget, inner, 1))
10676 subtarget = 0;
10677
10678 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10679
10680 if (bitnum != 0)
10681 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10682 size_int (bitnum), subtarget, ops_unsignedp);
10683
10684 if (GET_MODE (op0) != mode)
10685 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10686
10687 if ((code == EQ && ! invert) || (code == NE && invert))
10688 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10689 ops_unsignedp, OPTAB_LIB_WIDEN);
10690
10691 /* Put the AND last so it can combine with more things. */
10692 if (bitnum != TYPE_PRECISION (type) - 1)
10693 op0 = expand_and (op0, const1_rtx, subtarget);
10694
10695 return op0;
10696 }
10697
10698 /* Now see if we are likely to be able to do this. Return if not. */
10699 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10700 return 0;
10701
10702 icode = setcc_gen_code[(int) code];
10703 if (icode == CODE_FOR_nothing
10704 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10705 {
10706 /* We can only do this if it is one of the special cases that
10707 can be handled without an scc insn. */
10708 if ((code == LT && integer_zerop (arg1))
10709 || (! only_cheap && code == GE && integer_zerop (arg1)))
10710 ;
10711 else if (BRANCH_COST >= 0
10712 && ! only_cheap && (code == NE || code == EQ)
10713 && TREE_CODE (type) != REAL_TYPE
10714 && ((abs_optab->handlers[(int) operand_mode].insn_code
10715 != CODE_FOR_nothing)
10716 || (ffs_optab->handlers[(int) operand_mode].insn_code
10717 != CODE_FOR_nothing)))
10718 ;
10719 else
10720 return 0;
10721 }
10722
10723 if (! get_subtarget (target)
10724 || GET_MODE (subtarget) != operand_mode
10725 || ! safe_from_p (subtarget, arg1, 1))
10726 subtarget = 0;
10727
10728 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10729 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10730
10731 if (target == 0)
10732 target = gen_reg_rtx (mode);
10733
10734 /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
10735 because, if emit_store_flag does anything, it will succeed and OP0 and OP1
10736 will not be used subsequently.  */
10737
10738 result = emit_store_flag (target, code,
10739 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10740 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10741 operand_mode, unsignedp, 1);
10742
10743 if (result)
10744 {
10745 if (invert)
10746 result = expand_binop (mode, xor_optab, result, const1_rtx,
10747 result, 0, OPTAB_LIB_WIDEN);
10748 return result;
10749 }
10750
10751 /* If this failed, we have to do the comparison with set/compare/jump/set code.  */
10752 if (GET_CODE (target) != REG
10753 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10754 target = gen_reg_rtx (GET_MODE (target));
10755
10756 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10757 result = compare_from_rtx (op0, op1, code, unsignedp,
10758 operand_mode, NULL_RTX, 0);
10759 if (GET_CODE (result) == CONST_INT)
10760 return (((result == const0_rtx && ! invert)
10761 || (result != const0_rtx && invert))
10762 ? const0_rtx : const1_rtx);
10763
10764 label = gen_label_rtx ();
10765 if (bcc_gen_fctn[(int) code] == 0)
10766 abort ();
10767
10768 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10769 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10770 emit_label (label);
10771
10772 return target;
10773 }
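/* Sketch of the set/compare/jump/set fallback above as source-level
   pseudo-code (the label name is hypothetical).  For TARGET = (OP0 < OP1)
   with no usable scc insn and INVERT clear, the emitted sequence behaves
   like

       target = 1;
       if (op0 < op1)        (the conditional jump from bcc_gen_fctn)
         goto L;
       target = 0;
     L:

   With INVERT set, the two constants are exchanged.  */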
10774 \f
10775
10776 /* Stubs in case we haven't got a casesi insn. */
10777 #ifndef HAVE_casesi
10778 # define HAVE_casesi 0
10779 # define gen_casesi(a, b, c, d, e) (0)
10780 # define CODE_FOR_casesi CODE_FOR_nothing
10781 #endif
10782
10783 /* If the machine does not have a case insn that compares the bounds,
10784 this means extra overhead for dispatch tables, which raises the
10785 threshold for using them. */
10786 #ifndef CASE_VALUES_THRESHOLD
10787 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10788 #endif /* CASE_VALUES_THRESHOLD */
10789
10790 unsigned int
10791 case_values_threshold ()
10792 {
10793 return CASE_VALUES_THRESHOLD;
10794 }
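/* Hedged sketch of how a switch expander might consult this threshold
   (the variable names are hypothetical, not the actual caller):

       if ((unsigned int) n_case_values >= case_values_threshold ())
         try a dispatch table (try_casesi or try_tablejump below);
       else
         emit an explicit compare-and-branch sequence;
*/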
10795
10796 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10797 0 otherwise (i.e. if there is no casesi instruction). */
10798 int
10799 try_casesi (index_type, index_expr, minval, range,
10800 table_label, default_label)
10801 tree index_type, index_expr, minval, range;
10802 rtx table_label ATTRIBUTE_UNUSED;
10803 rtx default_label;
10804 {
10805 enum machine_mode index_mode = SImode;
10806 int index_bits = GET_MODE_BITSIZE (index_mode);
10807 rtx op1, op2, index;
10808 enum machine_mode op_mode;
10809
10810 if (! HAVE_casesi)
10811 return 0;
10812
10813 /* Convert the index to SImode. */
10814 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10815 {
10816 enum machine_mode omode = TYPE_MODE (index_type);
10817 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10818
10819 /* We must handle the endpoints in the original mode. */
10820 index_expr = build (MINUS_EXPR, index_type,
10821 index_expr, minval);
10822 minval = integer_zero_node;
10823 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10824 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10825 omode, 1, 0, default_label);
10826 /* Now we can safely truncate. */
10827 index = convert_to_mode (index_mode, index, 0);
10828 }
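/* Concrete illustration of the branch above (the case range is made up):
   with a DImode index type on a target whose casesi pattern takes an
   SImode index, and a case range of 3 .. 10,

       index = x - 3;                          computed in DImode
       if ((unsigned) 7 < (unsigned) index)    jump to default_label
       index = (int) index;                    now safe to truncate

   The subtraction and the range check stay in the wider mode, so values
   that would not survive truncation are still routed to DEFAULT_LABEL.  */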
10829 else
10830 {
10831 if (TYPE_MODE (index_type) != index_mode)
10832 {
10833 index_expr = convert (type_for_size (index_bits, 0),
10834 index_expr);
10835 index_type = TREE_TYPE (index_expr);
10836 }
10837
10838 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10839 }
10840 emit_queue ();
10841 index = protect_from_queue (index, 0);
10842 do_pending_stack_adjust ();
10843
10844 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10845 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10846 (index, op_mode))
10847 index = copy_to_mode_reg (op_mode, index);
10848
10849 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10850
10851 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10852 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10853 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10854 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10855 (op1, op_mode))
10856 op1 = copy_to_mode_reg (op_mode, op1);
10857
10858 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10859
10860 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10861 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10862 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10863 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10864 (op2, op_mode))
10865 op2 = copy_to_mode_reg (op_mode, op2);
10866
10867 emit_jump_insn (gen_casesi (index, op1, op2,
10868 table_label, default_label));
10869 return 1;
10870 }
10871
10872 /* Attempt to generate a tablejump instruction; same concept. */
10873 #ifndef HAVE_tablejump
10874 #define HAVE_tablejump 0
10875 #define gen_tablejump(x, y) (0)
10876 #endif
10877
10878 /* Subroutine of the next function.
10879
10880 INDEX is the value being switched on, with the lowest value
10881 in the table already subtracted.
10882 MODE is its expected mode (needed if INDEX is constant).
10883 RANGE is the length of the jump table.
10884 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10885
10886 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10887 index value is out of range. */
10888
10889 static void
10890 do_tablejump (index, mode, range, table_label, default_label)
10891 rtx index, range, table_label, default_label;
10892 enum machine_mode mode;
10893 {
10894 rtx temp, vector;
10895
10896 /* Do an unsigned comparison (in the proper mode) between the index
10897 expression and the value which represents the length of the range.
10898 Since we just finished subtracting the lower bound of the range
10899 from the index expression, this comparison allows us to simultaneously
10900 check that the original index expression value is both greater than
10901 or equal to the minimum value of the range and less than or equal to
10902 the maximum value of the range. */
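/* Numeric example (hypothetical case range): for case values 3 .. 10 the
   caller has already computed INDEX = x - 3 and RANGE = 7, so the single
   unsigned test

       (unsigned) (x - 3) > 7

   rejects x < 3, because the subtraction wraps around to a very large
   unsigned value, as well as x > 10.  */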
10903
10904 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10905 0, default_label);
10906
10907 /* If index is in range, it must fit in Pmode.
10908 Convert to Pmode so we can index with it. */
10909 if (mode != Pmode)
10910 index = convert_to_mode (Pmode, index, 1);
10911
10912 /* Don't let a MEM slip through, because then the INDEX that comes
10913 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10914 and break_out_memory_refs will go to work on it and mess it up.  */
10915 #ifdef PIC_CASE_VECTOR_ADDRESS
10916 if (flag_pic && GET_CODE (index) != REG)
10917 index = copy_to_mode_reg (Pmode, index);
10918 #endif
10919
10920 /* If flag_force_addr were to affect this address
10921 it could interfere with the tricky assumptions made
10922 about addresses that contain label-refs,
10923 which may be valid only very near the tablejump itself. */
10924 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10925 GET_MODE_SIZE, because this indicates how large insns are. The other
10926 uses should all be Pmode, because they are addresses. This code
10927 could fail if addresses and insns are not the same size. */
10928 index = gen_rtx_PLUS (Pmode,
10929 gen_rtx_MULT (Pmode, index,
10930 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10931 gen_rtx_LABEL_REF (Pmode, table_label));
10932 #ifdef PIC_CASE_VECTOR_ADDRESS
10933 if (flag_pic)
10934 index = PIC_CASE_VECTOR_ADDRESS (index);
10935 else
10936 #endif
10937 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10938 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10939 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10940 RTX_UNCHANGING_P (vector) = 1;
10941 convert_move (temp, vector, 0);
10942
10943 emit_jump_insn (gen_tablejump (temp, table_label));
10944
10945 /* If we are generating PIC code or if the table is PC-relative, the
10946 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10947 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10948 emit_barrier ();
10949 }
10950
10951 int
10952 try_tablejump (index_type, index_expr, minval, range,
10953 table_label, default_label)
10954 tree index_type, index_expr, minval, range;
10955 rtx table_label, default_label;
10956 {
10957 rtx index;
10958
10959 if (! HAVE_tablejump)
10960 return 0;
10961
10962 index_expr = fold (build (MINUS_EXPR, index_type,
10963 convert (index_type, index_expr),
10964 convert (index_type, minval)));
10965 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10966 emit_queue ();
10967 index = protect_from_queue (index, 0);
10968 do_pending_stack_adjust ();
10969
10970 do_tablejump (index, TYPE_MODE (index_type),
10971 convert_modes (TYPE_MODE (index_type),
10972 TYPE_MODE (TREE_TYPE (range)),
10973 expand_expr (range, NULL_RTX,
10974 VOIDmode, 0),
10975 TREE_UNSIGNED (TREE_TYPE (range))),
10976 table_label, default_label);
10977 return 1;
10978 }
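/* Putting the pieces together, a hedged source-level sketch of what
   try_tablejump arranges for a dense switch (case values and label names
   are hypothetical):

       switch (x) { case 3: ... case 10: ... default: ... }

   roughly becomes

       index = x - 3;
       if ((unsigned) index > 7)
         goto default_label;
       goto *table_label[index];

   where the last line stands for the tablejump insn indexing the
   CASE_VECTOR_MODE jump table emitted at TABLE_LABEL.  */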