1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #endif
60
61 #endif
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
74 #endif
75
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
83
84 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
85 static tree placeholder_list = 0;
86
87 /* This structure is used by move_by_pieces to describe the move to
88 be performed. */
89 struct move_by_pieces
90 {
91 rtx to;
92 rtx to_addr;
93 int autinc_to;
94 int explicit_inc_to;
95 rtx from;
96 rtx from_addr;
97 int autinc_from;
98 int explicit_inc_from;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
101 int reverse;
102 };
103
104 /* This structure is used by store_by_pieces to describe the clear to
105 be performed. */
106
107 struct store_by_pieces
108 {
109 rtx to;
110 rtx to_addr;
111 int autinc_to;
112 int explicit_inc_to;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
116 PTR constfundata;
117 int reverse;
118 };
119
120 extern struct obstack permanent_obstack;
121
122 static rtx enqueue_insn PARAMS ((rtx, rtx));
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124 PARAMS ((unsigned HOST_WIDE_INT,
125 unsigned int));
126 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *));
128 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
129 enum machine_mode));
130 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
131 unsigned int));
132 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
133 unsigned int));
134 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
135 enum machine_mode,
136 struct store_by_pieces *));
137 static rtx get_subtarget PARAMS ((rtx));
138 static int is_zeros_p PARAMS ((tree));
139 static int mostly_zeros_p PARAMS ((tree));
140 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int));
143 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
145 HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree,
147 int));
148 static rtx var_rtx PARAMS ((tree));
149 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150 static rtx expand_increment PARAMS ((tree, int, int));
151 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
152 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
153 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
154 rtx, rtx));
155 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
158 #endif
159 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
160
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
164
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
167
168 /* If a memory-to-memory move would take MOVE_RATIO or more simple
169 move-instruction sequences, we will do a movstr or libcall instead. */
170
171 #ifndef MOVE_RATIO
172 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
173 #define MOVE_RATIO 2
174 #else
175 /* If we are optimizing for space (-Os), cut down the default move ratio. */
176 #define MOVE_RATIO (optimize_size ? 3 : 15)
177 #endif
178 #endif
179
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
185 #endif
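/* An illustrative worked example (the target parameters are
   hypothetical, not taken from this file): on a 32-bit machine with
   word-aligned operands and no movstr patterns, copying 16 bytes takes
   four SImode moves, so MOVE_BY_PIECES_P (16, 32) compares 4 against
   MOVE_RATIO (15 by default, 3 under -Os) and picks move_by_pieces
   when optimizing for speed, while under -Os the same copy falls back
   to a library call instead.  */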
186
187 /* This array records the insn_code of insns to perform block moves. */
188 enum insn_code movstr_optab[NUM_MACHINE_MODES];
189
190 /* This array records the insn_code of insns to perform block clears. */
191 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
192
193 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
194
195 #ifndef SLOW_UNALIGNED_ACCESS
196 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
197 #endif
198 \f
199 /* This is run once per compilation to set up which modes can be used
200 directly in memory and to initialize the block move optab. */
201
202 void
203 init_expr_once ()
204 {
205 rtx insn, pat;
206 enum machine_mode mode;
207 int num_clobbers;
208 rtx mem, mem1;
209
210 start_sequence ();
211
212 /* Try indexing by frame ptr and try by stack ptr.
213 It is known that on the Convex the stack ptr isn't a valid index.
214 With luck, one or the other is valid on any machine. */
215 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
217
218 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
219 pat = PATTERN (insn);
220
221 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
222 mode = (enum machine_mode) ((int) mode + 1))
223 {
224 int regno;
225 rtx reg;
226
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
230
231 /* See if there is some register that can be used in this mode and
232 directly loaded or stored from memory. */
233
234 if (mode != VOIDmode && mode != BLKmode)
235 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
236 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
237 regno++)
238 {
239 if (! HARD_REGNO_MODE_OK (regno, mode))
240 continue;
241
242 reg = gen_rtx_REG (mode, regno);
243
244 SET_SRC (pat) = mem;
245 SET_DEST (pat) = reg;
246 if (recog (pat, insn, &num_clobbers) >= 0)
247 direct_load[(int) mode] = 1;
248
249 SET_SRC (pat) = mem1;
250 SET_DEST (pat) = reg;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_load[(int) mode] = 1;
253
254 SET_SRC (pat) = reg;
255 SET_DEST (pat) = mem;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_store[(int) mode] = 1;
258
259 SET_SRC (pat) = reg;
260 SET_DEST (pat) = mem1;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_store[(int) mode] = 1;
263 }
264 }
265
266 end_sequence ();
267 }
268
269 /* This is run at the start of compiling a function. */
270
271 void
272 init_expr ()
273 {
274 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
275
276 pending_chain = 0;
277 pending_stack_adjust = 0;
278 stack_pointer_delta = 0;
279 inhibit_defer_pop = 0;
280 saveregs_value = 0;
281 apply_args_value = 0;
282 forced_labels = 0;
283 }
284
285 void
286 mark_expr_status (p)
287 struct expr_status *p;
288 {
289 if (p == NULL)
290 return;
291
292 ggc_mark_rtx (p->x_saveregs_value);
293 ggc_mark_rtx (p->x_apply_args_value);
294 ggc_mark_rtx (p->x_forced_labels);
295 }
296
297 void
298 free_expr_status (f)
299 struct function *f;
300 {
301 free (f->expr);
302 f->expr = NULL;
303 }
304
305 /* Small sanity check that the queue is empty at the end of a function. */
306
307 void
308 finish_expr_for_function ()
309 {
310 if (pending_chain)
311 abort ();
312 }
313 \f
314 /* Manage the queue of increment instructions to be output
315 for POSTINCREMENT_EXPR expressions, etc. */
316
317 /* Queue up to increment (or change) VAR later. BODY says how:
318 BODY should be the same thing you would pass to emit_insn
319 to increment right away. It will go to emit_insn later on.
320
321 The value is a QUEUED expression to be used in place of VAR
322 where you want to guarantee the pre-incrementation value of VAR. */
323
324 static rtx
325 enqueue_insn (var, body)
326 rtx var, body;
327 {
328 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
329 body, pending_chain);
330 return pending_chain;
331 }
332
333 /* Use protect_from_queue to convert a QUEUED expression
334 into something that you can put immediately into an instruction.
335 If the queued incrementation has not happened yet,
336 protect_from_queue returns the variable itself.
337 If the incrementation has happened, protect_from_queue returns a temp
338 that contains a copy of the old value of the variable.
339
340 Any time an rtx which might possibly be a QUEUED is to be put
341 into an instruction, it must be passed through protect_from_queue first.
342 QUEUED expressions are not meaningful in instructions.
343
344 Do not pass a value through protect_from_queue and then hold
345 on to it for a while before putting it in an instruction!
346 If the queue is flushed in between, incorrect code will result. */
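/* A sketch of the typical calling pattern, for illustration only (VAR
   and the surrounding code are hypothetical, not taken from this
   file).  To expand a post-increment whose pre-increment value is
   still needed, the expander does roughly:

       rtx queued = enqueue_insn (var, gen_move_insn (var,
                                                      plus_constant (var, 1)));
       rtx preinc = protect_from_queue (queued, 0);
       ...emit the insns that use PREINC...
       emit_queue ();

   PREINC is safe to use only until the next emit_queue, as warned
   above.  */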
347
348 rtx
349 protect_from_queue (x, modify)
350 rtx x;
351 int modify;
352 {
353 RTX_CODE code = GET_CODE (x);
354
355 #if 0 /* A QUEUED can hang around after the queue is forced out. */
356 /* Shortcut for most common case. */
357 if (pending_chain == 0)
358 return x;
359 #endif
360
361 if (code != QUEUED)
362 {
363 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
364 use of autoincrement. Make a copy of the contents of the memory
365 location rather than a copy of the address, but not if the value is
366 of mode BLKmode. Don't modify X in place since it might be
367 shared. */
368 if (code == MEM && GET_MODE (x) != BLKmode
369 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
370 {
371 rtx y = XEXP (x, 0);
372 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
373
374 if (QUEUED_INSN (y))
375 {
376 rtx temp = gen_reg_rtx (GET_MODE (x));
377
378 emit_insn_before (gen_move_insn (temp, new),
379 QUEUED_INSN (y));
380 return temp;
381 }
382
383 /* Copy the address into a pseudo, so that the returned value
384 remains correct across calls to emit_queue. */
385 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
386 }
387
388 /* Otherwise, recursively protect the subexpressions of all
389 the kinds of rtx's that can contain a QUEUED. */
390 if (code == MEM)
391 {
392 rtx tem = protect_from_queue (XEXP (x, 0), 0);
393 if (tem != XEXP (x, 0))
394 {
395 x = copy_rtx (x);
396 XEXP (x, 0) = tem;
397 }
398 }
399 else if (code == PLUS || code == MULT)
400 {
401 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
402 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
403 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
404 {
405 x = copy_rtx (x);
406 XEXP (x, 0) = new0;
407 XEXP (x, 1) = new1;
408 }
409 }
410 return x;
411 }
412 /* If the increment has not happened, use the variable itself. Copy it
413 into a new pseudo so that the value remains correct across calls to
414 emit_queue. */
415 if (QUEUED_INSN (x) == 0)
416 return copy_to_reg (QUEUED_VAR (x));
417 /* If the increment has happened and a pre-increment copy exists,
418 use that copy. */
419 if (QUEUED_COPY (x) != 0)
420 return QUEUED_COPY (x);
421 /* The increment has happened but we haven't set up a pre-increment copy.
422 Set one up now, and use it. */
423 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
424 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
425 QUEUED_INSN (x));
426 return QUEUED_COPY (x);
427 }
428
429 /* Return nonzero if X contains a QUEUED expression:
430 if it contains anything that will be altered by a queued increment.
431 We handle only combinations of MEM, PLUS, MINUS and MULT operators
432 since memory addresses generally contain only those. */
433
434 int
435 queued_subexp_p (x)
436 rtx x;
437 {
438 enum rtx_code code = GET_CODE (x);
439 switch (code)
440 {
441 case QUEUED:
442 return 1;
443 case MEM:
444 return queued_subexp_p (XEXP (x, 0));
445 case MULT:
446 case PLUS:
447 case MINUS:
448 return (queued_subexp_p (XEXP (x, 0))
449 || queued_subexp_p (XEXP (x, 1)));
450 default:
451 return 0;
452 }
453 }
454
455 /* Perform all the pending incrementations. */
456
457 void
458 emit_queue ()
459 {
460 rtx p;
461 while ((p = pending_chain))
462 {
463 rtx body = QUEUED_BODY (p);
464
465 if (GET_CODE (body) == SEQUENCE)
466 {
467 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
468 emit_insn (QUEUED_BODY (p));
469 }
470 else
471 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
472 pending_chain = QUEUED_NEXT (p);
473 }
474 }
475 \f
476 /* Copy data from FROM to TO, where the machine modes are not the same.
477 Both modes may be integer, or both may be floating.
478 UNSIGNEDP should be nonzero if FROM is an unsigned type.
479 This causes zero-extension instead of sign-extension. */
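/* For example (an illustrative sketch; DEST and SRC are hypothetical
   pseudos): widening an unsigned QImode pseudo SRC into an SImode
   pseudo DEST is simply

       convert_move (dest, src, 1);

   which emits a zero_extendqisi2 insn when the target provides one,
   and otherwise falls back to a wider intermediate mode or a pair of
   shifts, as handled below.  */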
480
481 void
482 convert_move (to, from, unsignedp)
483 rtx to, from;
484 int unsignedp;
485 {
486 enum machine_mode to_mode = GET_MODE (to);
487 enum machine_mode from_mode = GET_MODE (from);
488 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
489 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
490 enum insn_code code;
491 rtx libcall;
492
493 /* rtx code for making an equivalent value. */
494 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
495
496 to = protect_from_queue (to, 1);
497 from = protect_from_queue (from, 0);
498
499 if (to_real != from_real)
500 abort ();
501
502 /* If FROM is a SUBREG that indicates that we have already done at least
503 the required extension, strip it. We don't handle such SUBREGs as
504 TO here. */
505
506 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
507 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
508 >= GET_MODE_SIZE (to_mode))
509 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
510 from = gen_lowpart (to_mode, from), from_mode = to_mode;
511
512 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
513 abort ();
514
515 if (to_mode == from_mode
516 || (from_mode == VOIDmode && CONSTANT_P (from)))
517 {
518 emit_move_insn (to, from);
519 return;
520 }
521
522 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
523 {
524 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
525 abort ();
526
527 if (VECTOR_MODE_P (to_mode))
528 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
529 else
530 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
531
532 emit_move_insn (to, from);
533 return;
534 }
535
536 if (to_real != from_real)
537 abort ();
538
539 if (to_real)
540 {
541 rtx value, insns;
542
543 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
544 {
545 /* Try converting directly if the insn is supported. */
546 if ((code = can_extend_p (to_mode, from_mode, 0))
547 != CODE_FOR_nothing)
548 {
549 emit_unop_insn (code, to, from, UNKNOWN);
550 return;
551 }
552 }
553
554 #ifdef HAVE_trunchfqf2
555 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
556 {
557 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
558 return;
559 }
560 #endif
561 #ifdef HAVE_trunctqfqf2
562 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
563 {
564 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
565 return;
566 }
567 #endif
568 #ifdef HAVE_truncsfqf2
569 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
570 {
571 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
572 return;
573 }
574 #endif
575 #ifdef HAVE_truncdfqf2
576 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
577 {
578 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
579 return;
580 }
581 #endif
582 #ifdef HAVE_truncxfqf2
583 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
584 {
585 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
586 return;
587 }
588 #endif
589 #ifdef HAVE_trunctfqf2
590 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
591 {
592 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
593 return;
594 }
595 #endif
596
597 #ifdef HAVE_trunctqfhf2
598 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
599 {
600 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
601 return;
602 }
603 #endif
604 #ifdef HAVE_truncsfhf2
605 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
606 {
607 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
608 return;
609 }
610 #endif
611 #ifdef HAVE_truncdfhf2
612 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
613 {
614 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
615 return;
616 }
617 #endif
618 #ifdef HAVE_truncxfhf2
619 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
620 {
621 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
622 return;
623 }
624 #endif
625 #ifdef HAVE_trunctfhf2
626 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
627 {
628 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
629 return;
630 }
631 #endif
632
633 #ifdef HAVE_truncsftqf2
634 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncdftqf2
641 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_truncxftqf2
648 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654 #ifdef HAVE_trunctftqf2
655 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661
662 #ifdef HAVE_truncdfsf2
663 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
664 {
665 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_truncxfsf2
670 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_trunctfsf2
677 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
678 {
679 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_truncxfdf2
684 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
685 {
686 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690 #ifdef HAVE_trunctfdf2
691 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
692 {
693 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
694 return;
695 }
696 #endif
697
698 libcall = (rtx) 0;
699 switch (from_mode)
700 {
701 case SFmode:
702 switch (to_mode)
703 {
704 case DFmode:
705 libcall = extendsfdf2_libfunc;
706 break;
707
708 case XFmode:
709 libcall = extendsfxf2_libfunc;
710 break;
711
712 case TFmode:
713 libcall = extendsftf2_libfunc;
714 break;
715
716 default:
717 break;
718 }
719 break;
720
721 case DFmode:
722 switch (to_mode)
723 {
724 case SFmode:
725 libcall = truncdfsf2_libfunc;
726 break;
727
728 case XFmode:
729 libcall = extenddfxf2_libfunc;
730 break;
731
732 case TFmode:
733 libcall = extenddftf2_libfunc;
734 break;
735
736 default:
737 break;
738 }
739 break;
740
741 case XFmode:
742 switch (to_mode)
743 {
744 case SFmode:
745 libcall = truncxfsf2_libfunc;
746 break;
747
748 case DFmode:
749 libcall = truncxfdf2_libfunc;
750 break;
751
752 default:
753 break;
754 }
755 break;
756
757 case TFmode:
758 switch (to_mode)
759 {
760 case SFmode:
761 libcall = trunctfsf2_libfunc;
762 break;
763
764 case DFmode:
765 libcall = trunctfdf2_libfunc;
766 break;
767
768 default:
769 break;
770 }
771 break;
772
773 default:
774 break;
775 }
776
777 if (libcall == (rtx) 0)
778 /* This conversion is not implemented yet. */
779 abort ();
780
781 start_sequence ();
782 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
783 1, from, from_mode);
784 insns = get_insns ();
785 end_sequence ();
786 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
787 from));
788 return;
789 }
790
791 /* Now both modes are integers. */
792
793 /* Handle expanding beyond a word. */
794 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
795 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
796 {
797 rtx insns;
798 rtx lowpart;
799 rtx fill_value;
800 rtx lowfrom;
801 int i;
802 enum machine_mode lowpart_mode;
803 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
804
805 /* Try converting directly if the insn is supported. */
806 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
807 != CODE_FOR_nothing)
808 {
809 /* If FROM is a SUBREG, put it into a register. Do this
810 so that we always generate the same set of insns for
811 better cse'ing; if an intermediate assignment occurred,
812 we won't be doing the operation directly on the SUBREG. */
813 if (optimize > 0 && GET_CODE (from) == SUBREG)
814 from = force_reg (from_mode, from);
815 emit_unop_insn (code, to, from, equiv_code);
816 return;
817 }
818 /* Next, try converting via full word. */
819 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
820 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
821 != CODE_FOR_nothing))
822 {
823 if (GET_CODE (to) == REG)
824 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
825 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826 emit_unop_insn (code, to,
827 gen_lowpart (word_mode, to), equiv_code);
828 return;
829 }
830
831 /* No special multiword conversion insn; do it by hand. */
832 start_sequence ();
833
834 /* Since we will turn this into a no conflict block, we must ensure
835 that the source does not overlap the target. */
836
837 if (reg_overlap_mentioned_p (to, from))
838 from = force_reg (from_mode, from);
839
840 /* Get a copy of FROM widened to a word, if necessary. */
841 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
842 lowpart_mode = word_mode;
843 else
844 lowpart_mode = from_mode;
845
846 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
847
848 lowpart = gen_lowpart (lowpart_mode, to);
849 emit_move_insn (lowpart, lowfrom);
850
851 /* Compute the value to put in each remaining word. */
852 if (unsignedp)
853 fill_value = const0_rtx;
854 else
855 {
856 #ifdef HAVE_slt
857 if (HAVE_slt
858 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
859 && STORE_FLAG_VALUE == -1)
860 {
861 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
862 lowpart_mode, 0);
863 fill_value = gen_reg_rtx (word_mode);
864 emit_insn (gen_slt (fill_value));
865 }
866 else
867 #endif
868 {
869 fill_value
870 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
871 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
872 NULL_RTX, 0);
873 fill_value = convert_to_mode (word_mode, fill_value, 1);
874 }
875 }
876
877 /* Fill the remaining words. */
878 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
879 {
880 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
881 rtx subword = operand_subword (to, index, 1, to_mode);
882
883 if (subword == 0)
884 abort ();
885
886 if (fill_value != subword)
887 emit_move_insn (subword, fill_value);
888 }
889
890 insns = get_insns ();
891 end_sequence ();
892
893 emit_no_conflict_block (insns, to, from, NULL_RTX,
894 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
895 return;
896 }
897
898 /* Truncating multi-word to a word or less. */
899 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
900 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
901 {
902 if (!((GET_CODE (from) == MEM
903 && ! MEM_VOLATILE_P (from)
904 && direct_load[(int) to_mode]
905 && ! mode_dependent_address_p (XEXP (from, 0)))
906 || GET_CODE (from) == REG
907 || GET_CODE (from) == SUBREG))
908 from = force_reg (from_mode, from);
909 convert_move (to, gen_lowpart (word_mode, from), 0);
910 return;
911 }
912
913 /* Handle pointer conversion. */ /* SPEE 900220. */
914 if (to_mode == PQImode)
915 {
916 if (from_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
918
919 #ifdef HAVE_truncqipqi2
920 if (HAVE_truncqipqi2)
921 {
922 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
923 return;
924 }
925 #endif /* HAVE_truncqipqi2 */
926 abort ();
927 }
928
929 if (from_mode == PQImode)
930 {
931 if (to_mode != QImode)
932 {
933 from = convert_to_mode (QImode, from, unsignedp);
934 from_mode = QImode;
935 }
936 else
937 {
938 #ifdef HAVE_extendpqiqi2
939 if (HAVE_extendpqiqi2)
940 {
941 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
942 return;
943 }
944 #endif /* HAVE_extendpqiqi2 */
945 abort ();
946 }
947 }
948
949 if (to_mode == PSImode)
950 {
951 if (from_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
953
954 #ifdef HAVE_truncsipsi2
955 if (HAVE_truncsipsi2)
956 {
957 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
958 return;
959 }
960 #endif /* HAVE_truncsipsi2 */
961 abort ();
962 }
963
964 if (from_mode == PSImode)
965 {
966 if (to_mode != SImode)
967 {
968 from = convert_to_mode (SImode, from, unsignedp);
969 from_mode = SImode;
970 }
971 else
972 {
973 #ifdef HAVE_extendpsisi2
974 if (! unsignedp && HAVE_extendpsisi2)
975 {
976 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
977 return;
978 }
979 #endif /* HAVE_extendpsisi2 */
980 #ifdef HAVE_zero_extendpsisi2
981 if (unsignedp && HAVE_zero_extendpsisi2)
982 {
983 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
984 return;
985 }
986 #endif /* HAVE_zero_extendpsisi2 */
987 abort ();
988 }
989 }
990
991 if (to_mode == PDImode)
992 {
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
995
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
998 {
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_truncdipdi2 */
1003 abort ();
1004 }
1005
1006 if (from_mode == PDImode)
1007 {
1008 if (to_mode != DImode)
1009 {
1010 from = convert_to_mode (DImode, from, unsignedp);
1011 from_mode = DImode;
1012 }
1013 else
1014 {
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1017 {
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019 return;
1020 }
1021 #endif /* HAVE_extendpdidi2 */
1022 abort ();
1023 }
1024 }
1025
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1028
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1033 {
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1045 return;
1046 }
1047
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1050 {
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1054 {
1055 if (flag_force_mem)
1056 from = force_not_mem (from);
1057
1058 emit_unop_insn (code, to, from, equiv_code);
1059 return;
1060 }
1061 else
1062 {
1063 enum machine_mode intermediate;
1064 rtx tmp;
1065 tree shift_amount;
1066
1067 /* Search for a mode to convert via. */
1068 for (intermediate = from_mode; intermediate != VOIDmode;
1069 intermediate = GET_MODE_WIDER_MODE (intermediate))
1070 if (((can_extend_p (to_mode, intermediate, unsignedp)
1071 != CODE_FOR_nothing)
1072 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1073 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1074 GET_MODE_BITSIZE (intermediate))))
1075 && (can_extend_p (intermediate, from_mode, unsignedp)
1076 != CODE_FOR_nothing))
1077 {
1078 convert_move (to, convert_to_mode (intermediate, from,
1079 unsignedp), unsignedp);
1080 return;
1081 }
1082
1083 /* No suitable intermediate mode.
1084 Generate what we need with shifts. */
1085 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1086 - GET_MODE_BITSIZE (from_mode), 0);
1087 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1088 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1089 to, unsignedp);
1090 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1091 to, unsignedp);
1092 if (tmp != to)
1093 emit_move_insn (to, tmp);
1094 return;
1095 }
1096 }
1097
1098 /* Support special truncate insns for certain modes. */
1099
1100 if (from_mode == DImode && to_mode == SImode)
1101 {
1102 #ifdef HAVE_truncdisi2
1103 if (HAVE_truncdisi2)
1104 {
1105 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1106 return;
1107 }
1108 #endif
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 return;
1111 }
1112
1113 if (from_mode == DImode && to_mode == HImode)
1114 {
1115 #ifdef HAVE_truncdihi2
1116 if (HAVE_truncdihi2)
1117 {
1118 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1119 return;
1120 }
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1124 }
1125
1126 if (from_mode == DImode && to_mode == QImode)
1127 {
1128 #ifdef HAVE_truncdiqi2
1129 if (HAVE_truncdiqi2)
1130 {
1131 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1132 return;
1133 }
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1137 }
1138
1139 if (from_mode == SImode && to_mode == HImode)
1140 {
1141 #ifdef HAVE_truncsihi2
1142 if (HAVE_truncsihi2)
1143 {
1144 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1145 return;
1146 }
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1150 }
1151
1152 if (from_mode == SImode && to_mode == QImode)
1153 {
1154 #ifdef HAVE_truncsiqi2
1155 if (HAVE_truncsiqi2)
1156 {
1157 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1158 return;
1159 }
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1163 }
1164
1165 if (from_mode == HImode && to_mode == QImode)
1166 {
1167 #ifdef HAVE_trunchiqi2
1168 if (HAVE_trunchiqi2)
1169 {
1170 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1171 return;
1172 }
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1176 }
1177
1178 if (from_mode == TImode && to_mode == DImode)
1179 {
1180 #ifdef HAVE_trunctidi2
1181 if (HAVE_trunctidi2)
1182 {
1183 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1184 return;
1185 }
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1189 }
1190
1191 if (from_mode == TImode && to_mode == SImode)
1192 {
1193 #ifdef HAVE_trunctisi2
1194 if (HAVE_trunctisi2)
1195 {
1196 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1197 return;
1198 }
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1202 }
1203
1204 if (from_mode == TImode && to_mode == HImode)
1205 {
1206 #ifdef HAVE_trunctihi2
1207 if (HAVE_trunctihi2)
1208 {
1209 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1210 return;
1211 }
1212 #endif
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 return;
1215 }
1216
1217 if (from_mode == TImode && to_mode == QImode)
1218 {
1219 #ifdef HAVE_trunctiqi2
1220 if (HAVE_trunctiqi2)
1221 {
1222 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1223 return;
1224 }
1225 #endif
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 return;
1228 }
1229
1230 /* Handle truncation of volatile memrefs, and so on;
1231 the things that couldn't be truncated directly,
1232 and for which there was no special instruction. */
1233 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1234 {
1235 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1236 emit_move_insn (to, temp);
1237 return;
1238 }
1239
1240 /* Mode combination is not recognized. */
1241 abort ();
1242 }
1243
1244 /* Return an rtx for a value that would result
1245 from converting X to mode MODE.
1246 Both X and MODE may be floating, or both integer.
1247 UNSIGNEDP is nonzero if X is an unsigned value.
1248 This can be done by referring to a part of X in place
1249 or by copying to a new temporary with conversion.
1250
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
1253
1254 rtx
1255 convert_to_mode (mode, x, unsignedp)
1256 enum machine_mode mode;
1257 rtx x;
1258 int unsignedp;
1259 {
1260 return convert_modes (mode, VOIDmode, x, unsignedp);
1261 }
1262
1263 /* Return an rtx for a value that would result
1264 from converting X from mode OLDMODE to mode MODE.
1265 Both modes may be floating, or both integer.
1266 UNSIGNEDP is nonzero if X is an unsigned value.
1267
1268 This can be done by referring to a part of X in place
1269 or by copying to a new temporary with conversion.
1270
1271 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1272
1273 This function *must not* call protect_from_queue
1274 except when putting X into an insn (in which case convert_move does it). */
1275
1276 rtx
1277 convert_modes (mode, oldmode, x, unsignedp)
1278 enum machine_mode mode, oldmode;
1279 rtx x;
1280 int unsignedp;
1281 {
1282 rtx temp;
1283
1284 /* If FROM is a SUBREG that indicates that we have already done at least
1285 the required extension, strip it. */
1286
1287 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1288 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1289 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1290 x = gen_lowpart (mode, x);
1291
1292 if (GET_MODE (x) != VOIDmode)
1293 oldmode = GET_MODE (x);
1294
1295 if (mode == oldmode)
1296 return x;
1297
1298 /* There is one case that we must handle specially: If we are converting
1299 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1300 we are to interpret the constant as unsigned, gen_lowpart will do
1301 the wrong thing if the constant appears negative. What we want to do is
1302 make the high-order word of the constant zero, not all ones. */
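  /* A concrete instance (illustrative, not from the original comment):
     an unsigned QImode value of 255 can arrive here as (const_int -1);
     converting it to such a double-width mode must yield the constant
     255 with a zero high-order word, not a constant with every bit
     set.  */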
1303
1304 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1305 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1306 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1307 {
1308 HOST_WIDE_INT val = INTVAL (x);
1309
1310 if (oldmode != VOIDmode
1311 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1312 {
1313 int width = GET_MODE_BITSIZE (oldmode);
1314
1315 /* We need to zero extend VAL. */
1316 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1317 }
1318
1319 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1320 }
1321
1322 /* We can do this with a gen_lowpart if both desired and current modes
1323 are integer, and this is either a constant integer, a register, or a
1324 non-volatile MEM. Except for the constant case where MODE is no
1325 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1326
1327 if ((GET_CODE (x) == CONST_INT
1328 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1329 || (GET_MODE_CLASS (mode) == MODE_INT
1330 && GET_MODE_CLASS (oldmode) == MODE_INT
1331 && (GET_CODE (x) == CONST_DOUBLE
1332 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1333 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1334 && direct_load[(int) mode])
1335 || (GET_CODE (x) == REG
1336 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1337 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1338 {
1339 /* ?? If we don't know OLDMODE, we have to assume here that
1340 X does not need sign- or zero-extension. This may not be
1341 the case, but it's the best we can do. */
1342 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1343 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1344 {
1345 HOST_WIDE_INT val = INTVAL (x);
1346 int width = GET_MODE_BITSIZE (oldmode);
1347
1348 /* We must sign or zero-extend in this case. Start by
1349 zero-extending, then sign extend if we need to. */
1350 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1351 if (! unsignedp
1352 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1353 val |= (HOST_WIDE_INT) (-1) << width;
1354
1355 return GEN_INT (trunc_int_for_mode (val, mode));
1356 }
1357
1358 return gen_lowpart (mode, x);
1359 }
1360
1361 temp = gen_reg_rtx (mode);
1362 convert_move (temp, x, unsignedp);
1363 return temp;
1364 }
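/* Illustrative use of the two entry points above (a sketch; X is a
   hypothetical operand): to obtain an SImode form of a value X known
   to be a valid HImode quantity, whether X is a REG, a MEM or a
   constant, one writes

       rtx wide = convert_modes (SImode, HImode, x, 1);

   and gets back a suitably extended constant or a freshly converted
   pseudo, as the cases above dictate.  */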
1365 \f
1366 /* This macro is used to determine what the largest unit size that
1367 move_by_pieces can use is. */
1368
1369 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1370 move efficiently, as opposed to MOVE_MAX which is the maximum
1371 number of bytes we can move with a single instruction. */
1372
1373 #ifndef MOVE_MAX_PIECES
1374 #define MOVE_MAX_PIECES MOVE_MAX
1375 #endif
1376
1377 /* Generate several move instructions to copy LEN bytes from block FROM to
1378 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1379 and TO through protect_from_queue before calling.
1380
1381 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1382 used to push FROM to the stack.
1383
1384 ALIGN is maximum alignment we can assume. */
1385
1386 void
1387 move_by_pieces (to, from, len, align)
1388 rtx to, from;
1389 unsigned HOST_WIDE_INT len;
1390 unsigned int align;
1391 {
1392 struct move_by_pieces data;
1393 rtx to_addr, from_addr = XEXP (from, 0);
1394 unsigned int max_size = MOVE_MAX_PIECES + 1;
1395 enum machine_mode mode = VOIDmode, tmode;
1396 enum insn_code icode;
1397
1398 data.offset = 0;
1399 data.from_addr = from_addr;
1400 if (to)
1401 {
1402 to_addr = XEXP (to, 0);
1403 data.to = to;
1404 data.autinc_to
1405 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1406 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1407 data.reverse
1408 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1409 }
1410 else
1411 {
1412 to_addr = NULL_RTX;
1413 data.to = NULL_RTX;
1414 data.autinc_to = 1;
1415 #ifdef STACK_GROWS_DOWNWARD
1416 data.reverse = 1;
1417 #else
1418 data.reverse = 0;
1419 #endif
1420 }
1421 data.to_addr = to_addr;
1422 data.from = from;
1423 data.autinc_from
1424 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1425 || GET_CODE (from_addr) == POST_INC
1426 || GET_CODE (from_addr) == POST_DEC);
1427
1428 data.explicit_inc_from = 0;
1429 data.explicit_inc_to = 0;
1430 if (data.reverse) data.offset = len;
1431 data.len = len;
1432
1433 /* If copying requires more than two move insns,
1434 copy addresses to registers (to make displacements shorter)
1435 and use post-increment if available. */
1436 if (!(data.autinc_from && data.autinc_to)
1437 && move_by_pieces_ninsns (len, align) > 2)
1438 {
1439 /* Find the mode of the largest move... */
1440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1442 if (GET_MODE_SIZE (tmode) < max_size)
1443 mode = tmode;
1444
1445 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1446 {
1447 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = -1;
1450 }
1451 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1452 {
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = 1;
1456 }
1457 if (!data.autinc_from && CONSTANT_P (from_addr))
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1460 {
1461 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1462 data.autinc_to = 1;
1463 data.explicit_inc_to = -1;
1464 }
1465 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1466 {
1467 data.to_addr = copy_addr_to_reg (to_addr);
1468 data.autinc_to = 1;
1469 data.explicit_inc_to = 1;
1470 }
1471 if (!data.autinc_to && CONSTANT_P (to_addr))
1472 data.to_addr = copy_addr_to_reg (to_addr);
1473 }
1474
1475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1477 align = MOVE_MAX * BITS_PER_UNIT;
1478
1479 /* First move what we can in the largest integer mode, then go to
1480 successively smaller modes. */
1481
1482 while (max_size > 1)
1483 {
1484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1486 if (GET_MODE_SIZE (tmode) < max_size)
1487 mode = tmode;
1488
1489 if (mode == VOIDmode)
1490 break;
1491
1492 icode = mov_optab->handlers[(int) mode].insn_code;
1493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1494 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1495
1496 max_size = GET_MODE_SIZE (mode);
1497 }
1498
1499 /* The code above should have handled everything. */
1500 if (data.len > 0)
1501 abort ();
1502 }
1503
1504 /* Return number of insns required to move L bytes by pieces.
1505 ALIGN (in bits) is maximum alignment we can assume. */
1506
1507 static unsigned HOST_WIDE_INT
1508 move_by_pieces_ninsns (l, align)
1509 unsigned HOST_WIDE_INT l;
1510 unsigned int align;
1511 {
1512 unsigned HOST_WIDE_INT n_insns = 0;
1513 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1514
1515 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1516 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1517 align = MOVE_MAX * BITS_PER_UNIT;
1518
1519 while (max_size > 1)
1520 {
1521 enum machine_mode mode = VOIDmode, tmode;
1522 enum insn_code icode;
1523
1524 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526 if (GET_MODE_SIZE (tmode) < max_size)
1527 mode = tmode;
1528
1529 if (mode == VOIDmode)
1530 break;
1531
1532 icode = mov_optab->handlers[(int) mode].insn_code;
1533 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1534 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1535
1536 max_size = GET_MODE_SIZE (mode);
1537 }
1538
1539 if (l)
1540 abort ();
1541 return n_insns;
1542 }
1543
1544 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1545 with move instructions for mode MODE. GENFUN is the gen_... function
1546 to make a move insn for that mode. DATA has all the other info. */
1547
1548 static void
1549 move_by_pieces_1 (genfun, mode, data)
1550 rtx (*genfun) PARAMS ((rtx, ...));
1551 enum machine_mode mode;
1552 struct move_by_pieces *data;
1553 {
1554 unsigned int size = GET_MODE_SIZE (mode);
1555 rtx to1 = NULL_RTX, from1;
1556
1557 while (data->len >= size)
1558 {
1559 if (data->reverse)
1560 data->offset -= size;
1561
1562 if (data->to)
1563 {
1564 if (data->autinc_to)
1565 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1566 data->offset);
1567 else
1568 to1 = adjust_address (data->to, mode, data->offset);
1569 }
1570
1571 if (data->autinc_from)
1572 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1573 data->offset);
1574 else
1575 from1 = adjust_address (data->from, mode, data->offset);
1576
1577 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1578 emit_insn (gen_add2_insn (data->to_addr,
1579 GEN_INT (-(HOST_WIDE_INT)size)));
1580 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1581 emit_insn (gen_add2_insn (data->from_addr,
1582 GEN_INT (-(HOST_WIDE_INT)size)));
1583
1584 if (data->to)
1585 emit_insn ((*genfun) (to1, from1));
1586 else
1587 {
1588 #ifdef PUSH_ROUNDING
1589 emit_single_push_insn (mode, from1, NULL);
1590 #else
1591 abort ();
1592 #endif
1593 }
1594
1595 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1596 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1597 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1598 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1599
1600 if (! data->reverse)
1601 data->offset += size;
1602
1603 data->len -= size;
1604 }
1605 }
1606 \f
1607 /* Emit code to move a block Y to a block X.
1608 This may be done with string-move instructions,
1609 with multiple scalar move instructions, or with a library call.
1610
1611 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1612 with mode BLKmode.
1613 SIZE is an rtx that says how long they are.
1614 ALIGN is the maximum alignment we can assume they have.
1615
1616 Return the address of the new block, if memcpy is called and returns it,
1617 0 otherwise. */
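/* An illustrative caller (a sketch; DST_ADDR and SRC_ADDR are
   hypothetical Pmode pseudos holding the two addresses): to copy a
   32-byte BLKmode object one might write

       rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
       rtx src = gen_rtx_MEM (BLKmode, src_addr);
       emit_block_move (dst, src, GEN_INT (32));

   For a small constant SIZE this expands into move_by_pieces;
   otherwise a movstrM pattern is tried and, failing that, a call to
   memcpy (or bcopy) is emitted as described below.  */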
1618
1619 rtx
1620 emit_block_move (x, y, size)
1621 rtx x, y;
1622 rtx size;
1623 {
1624 rtx retval = 0;
1625 #ifdef TARGET_MEM_FUNCTIONS
1626 static tree fn;
1627 tree call_expr, arg_list;
1628 #endif
1629 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1630
1631 if (GET_MODE (x) != BLKmode)
1632 abort ();
1633
1634 if (GET_MODE (y) != BLKmode)
1635 abort ();
1636
1637 x = protect_from_queue (x, 1);
1638 y = protect_from_queue (y, 0);
1639 size = protect_from_queue (size, 0);
1640
1641 if (GET_CODE (x) != MEM)
1642 abort ();
1643 if (GET_CODE (y) != MEM)
1644 abort ();
1645 if (size == 0)
1646 abort ();
1647
1648 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1649 move_by_pieces (x, y, INTVAL (size), align);
1650 else
1651 {
1652 /* Try the most limited insn first, because there's no point
1653 including more than one in the machine description unless
1654 the more limited one has some advantage. */
1655
1656 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1657 enum machine_mode mode;
1658
1659 /* Since this is a move insn, we don't care about volatility. */
1660 volatile_ok = 1;
1661
1662 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1663 mode = GET_MODE_WIDER_MODE (mode))
1664 {
1665 enum insn_code code = movstr_optab[(int) mode];
1666 insn_operand_predicate_fn pred;
1667
1668 if (code != CODE_FOR_nothing
1669 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1670 here because if SIZE is less than the mode mask, as it is
1671 returned by the macro, it will definitely be less than the
1672 actual mode mask. */
1673 && ((GET_CODE (size) == CONST_INT
1674 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1675 <= (GET_MODE_MASK (mode) >> 1)))
1676 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1677 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1678 || (*pred) (x, BLKmode))
1679 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1680 || (*pred) (y, BLKmode))
1681 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1682 || (*pred) (opalign, VOIDmode)))
1683 {
1684 rtx op2;
1685 rtx last = get_last_insn ();
1686 rtx pat;
1687
1688 op2 = convert_to_mode (mode, size, 1);
1689 pred = insn_data[(int) code].operand[2].predicate;
1690 if (pred != 0 && ! (*pred) (op2, mode))
1691 op2 = copy_to_mode_reg (mode, op2);
1692
1693 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1694 if (pat)
1695 {
1696 emit_insn (pat);
1697 volatile_ok = 0;
1698 return 0;
1699 }
1700 else
1701 delete_insns_since (last);
1702 }
1703 }
1704
1705 volatile_ok = 0;
1706
1707 /* X, Y, or SIZE may have been passed through protect_from_queue.
1708
1709 It is unsafe to save the value generated by protect_from_queue
1710 and reuse it later. Consider what happens if emit_queue is
1711 called before the return value from protect_from_queue is used.
1712
1713 Expansion of the CALL_EXPR below will call emit_queue before
1714 we are finished emitting RTL for argument setup. So if we are
1715 not careful we could get the wrong value for an argument.
1716
1717 To avoid this problem we go ahead and emit code to copy X, Y &
1718 SIZE into new pseudos. We can then place those new pseudos
1719 into an RTL_EXPR and use them later, even after a call to
1720 emit_queue.
1721
1722 Note this is not strictly needed for library calls since they
1723 do not call emit_queue before loading their arguments. However,
1724 we may need to have library calls call emit_queue in the future
1725 since failing to do so could cause problems for targets which
1726 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1727 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1728 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1729
1730 #ifdef TARGET_MEM_FUNCTIONS
1731 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1732 #else
1733 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1734 TREE_UNSIGNED (integer_type_node));
1735 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1736 #endif
1737
1738 #ifdef TARGET_MEM_FUNCTIONS
1739 /* It is incorrect to use the libcall calling conventions to call
1740 memcpy in this context.
1741
1742 This could be a user call to memcpy and the user may wish to
1743 examine the return value from memcpy.
1744
1745 For targets where libcalls and normal calls have different conventions
1746 for returning pointers, we could end up generating incorrect code.
1747
1748 So instead of using a libcall sequence we build up a suitable
1749 CALL_EXPR and expand the call in the normal fashion. */
1750 if (fn == NULL_TREE)
1751 {
1752 tree fntype;
1753
1754 /* This was copied from except.c; I don't know whether all of it is
1755 necessary in this context. */
1756 fn = get_identifier ("memcpy");
1757 fntype = build_pointer_type (void_type_node);
1758 fntype = build_function_type (fntype, NULL_TREE);
1759 fn = build_decl (FUNCTION_DECL, fn, fntype);
1760 ggc_add_tree_root (&fn, 1);
1761 DECL_EXTERNAL (fn) = 1;
1762 TREE_PUBLIC (fn) = 1;
1763 DECL_ARTIFICIAL (fn) = 1;
1764 TREE_NOTHROW (fn) = 1;
1765 make_decl_rtl (fn, NULL);
1766 assemble_external (fn);
1767 }
1768
1769 /* We need to make an argument list for the function call.
1770
1771 memcpy has three arguments, the first two are void * addresses and
1772 the last is a size_t byte count for the copy. */
1773 arg_list
1774 = build_tree_list (NULL_TREE,
1775 make_tree (build_pointer_type (void_type_node), x));
1776 TREE_CHAIN (arg_list)
1777 = build_tree_list (NULL_TREE,
1778 make_tree (build_pointer_type (void_type_node), y));
1779 TREE_CHAIN (TREE_CHAIN (arg_list))
1780 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1781 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1782
1783 /* Now we have to build up the CALL_EXPR itself. */
1784 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1785 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1786 call_expr, arg_list, NULL_TREE);
1787 TREE_SIDE_EFFECTS (call_expr) = 1;
1788
1789 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1790 #else
1791 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1792 VOIDmode, 3, y, Pmode, x, Pmode,
1793 convert_to_mode (TYPE_MODE (integer_type_node), size,
1794 TREE_UNSIGNED (integer_type_node)),
1795 TYPE_MODE (integer_type_node));
1796 #endif
1797
1798 /* If we are initializing a readonly value, show the above call
1799 clobbered it. Otherwise, a load from it may erroneously be hoisted
1800 from a loop. */
1801 if (RTX_UNCHANGING_P (x))
1802 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1803 }
1804
1805 return retval;
1806 }
1807 \f
1808 /* Copy all or part of a value X into registers starting at REGNO.
1809 The number of registers to be filled is NREGS. */
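/* For example (illustrative; the register number is hypothetical): on
   a 32-bit target, copying a DImode value X into two consecutive hard
   registers starting at register 4 is

       move_block_to_reg (4, x, 2, DImode);

   which uses a load_multiple pattern when the target has one and
   otherwise emits one word-sized move per register.  */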
1810
1811 void
1812 move_block_to_reg (regno, x, nregs, mode)
1813 int regno;
1814 rtx x;
1815 int nregs;
1816 enum machine_mode mode;
1817 {
1818 int i;
1819 #ifdef HAVE_load_multiple
1820 rtx pat;
1821 rtx last;
1822 #endif
1823
1824 if (nregs == 0)
1825 return;
1826
1827 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1828 x = validize_mem (force_const_mem (mode, x));
1829
1830 /* See if the machine can do this with a load multiple insn. */
1831 #ifdef HAVE_load_multiple
1832 if (HAVE_load_multiple)
1833 {
1834 last = get_last_insn ();
1835 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1836 GEN_INT (nregs));
1837 if (pat)
1838 {
1839 emit_insn (pat);
1840 return;
1841 }
1842 else
1843 delete_insns_since (last);
1844 }
1845 #endif
1846
1847 for (i = 0; i < nregs; i++)
1848 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1849 operand_subword_force (x, i, mode));
1850 }
1851
1852 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1853 The number of registers to be filled is NREGS. SIZE indicates the number
1854 of bytes in the object X. */
1855
1856 void
1857 move_block_from_reg (regno, x, nregs, size)
1858 int regno;
1859 rtx x;
1860 int nregs;
1861 int size;
1862 {
1863 int i;
1864 #ifdef HAVE_store_multiple
1865 rtx pat;
1866 rtx last;
1867 #endif
1868 enum machine_mode mode;
1869
1870 if (nregs == 0)
1871 return;
1872
1873 /* If SIZE is that of a mode no bigger than a word, just use that
1874 mode's store operation. */
1875 if (size <= UNITS_PER_WORD
1876 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1877 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1878 {
1879 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1880 return;
1881 }
1882
1883 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884 to the left before storing to memory. Note that the previous test
1885 doesn't handle all cases (e.g. SIZE == 3). */
1886 if (size < UNITS_PER_WORD
1887 && BYTES_BIG_ENDIAN
1888 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1889 {
1890 rtx tem = operand_subword (x, 0, 1, BLKmode);
1891 rtx shift;
1892
1893 if (tem == 0)
1894 abort ();
1895
1896 shift = expand_shift (LSHIFT_EXPR, word_mode,
1897 gen_rtx_REG (word_mode, regno),
1898 build_int_2 ((UNITS_PER_WORD - size)
1899 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1900 emit_move_insn (tem, shift);
1901 return;
1902 }
1903
1904 /* See if the machine can do this with a store multiple insn. */
1905 #ifdef HAVE_store_multiple
1906 if (HAVE_store_multiple)
1907 {
1908 last = get_last_insn ();
1909 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1910 GEN_INT (nregs));
1911 if (pat)
1912 {
1913 emit_insn (pat);
1914 return;
1915 }
1916 else
1917 delete_insns_since (last);
1918 }
1919 #endif
1920
1921 for (i = 0; i < nregs; i++)
1922 {
1923 rtx tem = operand_subword (x, i, 1, BLKmode);
1924
1925 if (tem == 0)
1926 abort ();
1927
1928 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1929 }
1930 }
1931
1932 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1933 registers represented by a PARALLEL. SSIZE represents the total size of
1934 block SRC in bytes, or -1 if not known. */
1935 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1936 the balance will be in what would be the low-order memory addresses, i.e.
1937 left justified for big endian, right justified for little endian. This
1938 happens to be true for the targets currently using this support. If this
1939 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1940 would be needed. */
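/* Illustrative sketch, not part of the original source: if REGS is a PARALLEL
   describing which registers hold a 12-byte structure and VAL is the MEM (or
   other rtx) containing that structure, a caller could write
       emit_group_load (regs, val, 12);
   to copy the pieces into the registers named by REGS.  */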
1941
1942 void
1943 emit_group_load (dst, orig_src, ssize)
1944 rtx dst, orig_src;
1945 int ssize;
1946 {
1947 rtx *tmps, src;
1948 int start, i;
1949
1950 if (GET_CODE (dst) != PARALLEL)
1951 abort ();
1952
1953 /* Check for a NULL entry, used to indicate that the parameter goes
1954 both on the stack and in registers. */
1955 if (XEXP (XVECEXP (dst, 0, 0), 0))
1956 start = 0;
1957 else
1958 start = 1;
1959
1960 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1961
1962 /* Process the pieces. */
1963 for (i = start; i < XVECLEN (dst, 0); i++)
1964 {
1965 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1966 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1967 unsigned int bytelen = GET_MODE_SIZE (mode);
1968 int shift = 0;
1969
1970 /* Handle trailing fragments that run over the size of the struct. */
1971 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1972 {
1973 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1974 bytelen = ssize - bytepos;
1975 if (bytelen <= 0)
1976 abort ();
1977 }
1978
1979 /* If we won't be loading directly from memory, protect the real source
1980 from strange tricks we might play; but make sure that the source can
1981 be loaded directly into the destination. */
1982 src = orig_src;
1983 if (GET_CODE (orig_src) != MEM
1984 && (!CONSTANT_P (orig_src)
1985 || (GET_MODE (orig_src) != mode
1986 && GET_MODE (orig_src) != VOIDmode)))
1987 {
1988 if (GET_MODE (orig_src) == VOIDmode)
1989 src = gen_reg_rtx (mode);
1990 else
1991 src = gen_reg_rtx (GET_MODE (orig_src));
1992
1993 emit_move_insn (src, orig_src);
1994 }
1995
1996 /* Optimize the access just a bit. */
1997 if (GET_CODE (src) == MEM
1998 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
1999 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2000 && bytelen == GET_MODE_SIZE (mode))
2001 {
2002 tmps[i] = gen_reg_rtx (mode);
2003 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2004 }
2005 else if (GET_CODE (src) == CONCAT)
2006 {
2007 if (bytepos == 0
2008 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2009 tmps[i] = XEXP (src, 0);
2010 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2011 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2012 tmps[i] = XEXP (src, 1);
2013 else if (bytepos == 0)
2014 {
2015 rtx mem = assign_stack_temp (GET_MODE (src),
2016 GET_MODE_SIZE (GET_MODE (src)), 0);
2017 emit_move_insn (mem, src);
2018 tmps[i] = adjust_address (mem, mode, 0);
2019 }
2020 else
2021 abort ();
2022 }
2023 else if (CONSTANT_P (src)
2024 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2025 tmps[i] = src;
2026 else
2027 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2028 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2029 mode, mode, ssize);
2030
2031 if (BYTES_BIG_ENDIAN && shift)
2032 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2033 tmps[i], 0, OPTAB_WIDEN);
2034 }
2035
2036 emit_queue ();
2037
2038 /* Copy the extracted pieces into the proper (probable) hard regs. */
2039 for (i = start; i < XVECLEN (dst, 0); i++)
2040 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2041 }
2042
2043 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2044 registers represented by a PARALLEL. SSIZE represents the total size of
2045 block DST, or -1 if not known. */
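/* Illustrative sketch, not part of the original source: the mirror image of
   emit_group_load; with the same hypothetical PARALLEL REGS, a caller could
   write
       emit_group_store (dest_mem, regs, 12);
   to copy the register pieces back into the 12-byte destination DEST_MEM.  */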
2046
2047 void
2048 emit_group_store (orig_dst, src, ssize)
2049 rtx orig_dst, src;
2050 int ssize;
2051 {
2052 rtx *tmps, dst;
2053 int start, i;
2054
2055 if (GET_CODE (src) != PARALLEL)
2056 abort ();
2057
2058 /* Check for a NULL entry, used to indicate that the parameter goes
2059 both on the stack and in registers. */
2060 if (XEXP (XVECEXP (src, 0, 0), 0))
2061 start = 0;
2062 else
2063 start = 1;
2064
2065 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2066
2067 /* Copy the (probable) hard regs into pseudos. */
2068 for (i = start; i < XVECLEN (src, 0); i++)
2069 {
2070 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2071 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2072 emit_move_insn (tmps[i], reg);
2073 }
2074 emit_queue ();
2075
2076 /* If we won't be storing directly into memory, protect the real destination
2077 from strange tricks we might play. */
2078 dst = orig_dst;
2079 if (GET_CODE (dst) == PARALLEL)
2080 {
2081 rtx temp;
2082
2083 /* We can get a PARALLEL dst if there is a conditional expression in
2084 a return statement. In that case, the dst and src are the same,
2085 so no action is necessary. */
2086 if (rtx_equal_p (dst, src))
2087 return;
2088
2089 /* It is unclear if we can ever reach here, but we may as well handle
2090 it. Allocate a temporary, and split this into a store/load to/from
2091 the temporary. */
2092
2093 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2094 emit_group_store (temp, src, ssize);
2095 emit_group_load (dst, temp, ssize);
2096 return;
2097 }
2098 else if (GET_CODE (dst) != MEM)
2099 {
2100 dst = gen_reg_rtx (GET_MODE (orig_dst));
2101 /* Make life a bit easier for combine. */
2102 emit_move_insn (dst, const0_rtx);
2103 }
2104
2105 /* Process the pieces. */
2106 for (i = start; i < XVECLEN (src, 0); i++)
2107 {
2108 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2109 enum machine_mode mode = GET_MODE (tmps[i]);
2110 unsigned int bytelen = GET_MODE_SIZE (mode);
2111
2112 /* Handle trailing fragments that run over the size of the struct. */
2113 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2114 {
2115 if (BYTES_BIG_ENDIAN)
2116 {
2117 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2118 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2119 tmps[i], 0, OPTAB_WIDEN);
2120 }
2121 bytelen = ssize - bytepos;
2122 }
2123
2124 /* Optimize the access just a bit. */
2125 if (GET_CODE (dst) == MEM
2126 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2127 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2128 && bytelen == GET_MODE_SIZE (mode))
2129 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2130 else
2131 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2132 mode, tmps[i], ssize);
2133 }
2134
2135 emit_queue ();
2136
2137 /* Copy from the pseudo into the (probable) hard reg. */
2138 if (GET_CODE (dst) == REG)
2139 emit_move_insn (orig_dst, dst);
2140 }
2141
2142 /* Generate code to copy a BLKmode object of TYPE out of a
2143 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2144 is null, a stack temporary is created. TGTBLK is returned.
2145
2146 The primary purpose of this routine is to handle functions
2147 that return BLKmode structures in registers. Some machines
2148 (the PA for example) want to return all small structures
2149 in registers regardless of the structure's alignment. */
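/* Illustrative sketch, not part of the original source: after expanding a
   call whose BLKmode value comes back in registers, the caller side might do
       target = copy_blkmode_from_reg (NULL_RTX, valreg, TREE_TYPE (exp));
   where VALREG is a hypothetical rtx naming the return-value registers and
   EXP is the CALL_EXPR; passing NULL_RTX lets this routine allocate the
   stack temporary itself.  */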
2150
2151 rtx
2152 copy_blkmode_from_reg (tgtblk, srcreg, type)
2153 rtx tgtblk;
2154 rtx srcreg;
2155 tree type;
2156 {
2157 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2158 rtx src = NULL, dst = NULL;
2159 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2160 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2161
2162 if (tgtblk == 0)
2163 {
2164 tgtblk = assign_temp (build_qualified_type (type,
2165 (TYPE_QUALS (type)
2166 | TYPE_QUAL_CONST)),
2167 0, 1, 1);
2168 preserve_temp_slots (tgtblk);
2169 }
2170
2171 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2172 into a new pseudo which is a full word.
2173
2174 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2175 the wrong part of the register gets copied so we fake a type conversion
2176 in place. */
2177 if (GET_MODE (srcreg) != BLKmode
2178 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2179 {
2180 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2181 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2182 else
2183 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2184 }
2185
2186 /* Structures whose size is not a multiple of a word are aligned
2187 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2188 machine, this means we must skip the empty high order bytes when
2189 calculating the bit offset. */
2190 if (BYTES_BIG_ENDIAN
2191 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2192 && bytes % UNITS_PER_WORD)
2193 big_endian_correction
2194 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2195
2196 /* Copy the structure BITSIZE bits at a time.
2197
2198 We could probably emit more efficient code for machines which do not use
2199 strict alignment, but it doesn't seem worth the effort at the current
2200 time. */
2201 for (bitpos = 0, xbitpos = big_endian_correction;
2202 bitpos < bytes * BITS_PER_UNIT;
2203 bitpos += bitsize, xbitpos += bitsize)
2204 {
2205 /* We need a new source operand each time xbitpos is on a
2206 word boundary and when xbitpos == big_endian_correction
2207 (the first time through). */
2208 if (xbitpos % BITS_PER_WORD == 0
2209 || xbitpos == big_endian_correction)
2210 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2211 GET_MODE (srcreg));
2212
2213 /* We need a new destination operand each time bitpos is on
2214 a word boundary. */
2215 if (bitpos % BITS_PER_WORD == 0)
2216 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2217
2218 /* Use xbitpos for the source extraction (right justified) and
2219 bitpos for the destination store (left justified). */
2220 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2221 extract_bit_field (src, bitsize,
2222 xbitpos % BITS_PER_WORD, 1,
2223 NULL_RTX, word_mode, word_mode,
2224 BITS_PER_WORD),
2225 BITS_PER_WORD);
2226 }
2227
2228 return tgtblk;
2229 }
2230
2231 /* Add a USE expression for REG to the (possibly empty) list pointed
2232 to by CALL_FUSAGE. REG must denote a hard register. */
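/* Illustrative sketch, not part of the original source: call expansion
   typically collects these notes in a local list and then attaches the list
   to the call insn, e.g.
       rtx call_fusage = NULL_RTX;
       use_reg (&call_fusage, reg);
       ...
       CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
   where REG is some hard register used by the call and CALL_INSN is the
   emitted call.  */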
2233
2234 void
2235 use_reg (call_fusage, reg)
2236 rtx *call_fusage, reg;
2237 {
2238 if (GET_CODE (reg) != REG
2239 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2240 abort ();
2241
2242 *call_fusage
2243 = gen_rtx_EXPR_LIST (VOIDmode,
2244 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2245 }
2246
2247 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2248 starting at REGNO. All of these registers must be hard registers. */
2249
2250 void
2251 use_regs (call_fusage, regno, nregs)
2252 rtx *call_fusage;
2253 int regno;
2254 int nregs;
2255 {
2256 int i;
2257
2258 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2259 abort ();
2260
2261 for (i = 0; i < nregs; i++)
2262 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2263 }
2264
2265 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2266 PARALLEL REGS. This is for calls that pass values in multiple
2267 non-contiguous locations. The Irix 6 ABI has examples of this. */
2268
2269 void
2270 use_group_regs (call_fusage, regs)
2271 rtx *call_fusage;
2272 rtx regs;
2273 {
2274 int i;
2275
2276 for (i = 0; i < XVECLEN (regs, 0); i++)
2277 {
2278 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2279
2280 /* A NULL entry means the parameter goes both on the stack and in
2281 registers. This can also be a MEM for targets that pass values
2282 partially on the stack and partially in registers. */
2283 if (reg != 0 && GET_CODE (reg) == REG)
2284 use_reg (call_fusage, reg);
2285 }
2286 }
2287 \f
2288
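/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */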
2289 int
2290 can_store_by_pieces (len, constfun, constfundata, align)
2291 unsigned HOST_WIDE_INT len;
2292 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2293 PTR constfundata;
2294 unsigned int align;
2295 {
2296 unsigned HOST_WIDE_INT max_size, l;
2297 HOST_WIDE_INT offset = 0;
2298 enum machine_mode mode, tmode;
2299 enum insn_code icode;
2300 int reverse;
2301 rtx cst;
2302
2303 if (! MOVE_BY_PIECES_P (len, align))
2304 return 0;
2305
2306 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2307 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2308 align = MOVE_MAX * BITS_PER_UNIT;
2309
2310 /* We would first store what we can in the largest integer mode, then go to
2311 successively smaller modes. */
2312
2313 for (reverse = 0;
2314 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2315 reverse++)
2316 {
2317 l = len;
2318 mode = VOIDmode;
2319 max_size = MOVE_MAX_PIECES + 1;
2320 while (max_size > 1)
2321 {
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2323 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2324 if (GET_MODE_SIZE (tmode) < max_size)
2325 mode = tmode;
2326
2327 if (mode == VOIDmode)
2328 break;
2329
2330 icode = mov_optab->handlers[(int) mode].insn_code;
2331 if (icode != CODE_FOR_nothing
2332 && align >= GET_MODE_ALIGNMENT (mode))
2333 {
2334 unsigned int size = GET_MODE_SIZE (mode);
2335
2336 while (l >= size)
2337 {
2338 if (reverse)
2339 offset -= size;
2340
2341 cst = (*constfun) (constfundata, offset, mode);
2342 if (!LEGITIMATE_CONSTANT_P (cst))
2343 return 0;
2344
2345 if (!reverse)
2346 offset += size;
2347
2348 l -= size;
2349 }
2350 }
2351
2352 max_size = GET_MODE_SIZE (mode);
2353 }
2354
2355 /* The code above should have handled everything. */
2356 if (l != 0)
2357 abort ();
2358 }
2359
2360 return 1;
2361 }
2362
2363 /* Generate several move instructions to store LEN bytes generated by
2364 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2365 pointer which will be passed as argument in every CONSTFUN call.
2366 ALIGN is maximum alignment we can assume. */
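/* Illustrative sketch, not part of the original source: a caller with a known
   byte pattern could supply a callback in the style of
       static rtx my_read_piece PARAMS ((PTR, HOST_WIDE_INT,
                                         enum machine_mode));
   (a hypothetical function returning a constant rtx for each piece) and then
       if (can_store_by_pieces (n, my_read_piece, data, align))
         store_by_pieces (to_mem, n, my_read_piece, data, align);
   where TO_MEM, N, DATA and ALIGN are the destination MEM, length, callback
   data and alignment.  */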
2367
2368 void
2369 store_by_pieces (to, len, constfun, constfundata, align)
2370 rtx to;
2371 unsigned HOST_WIDE_INT len;
2372 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2373 PTR constfundata;
2374 unsigned int align;
2375 {
2376 struct store_by_pieces data;
2377
2378 if (! MOVE_BY_PIECES_P (len, align))
2379 abort ();
2380 to = protect_from_queue (to, 1);
2381 data.constfun = constfun;
2382 data.constfundata = constfundata;
2383 data.len = len;
2384 data.to = to;
2385 store_by_pieces_1 (&data, align);
2386 }
2387
2388 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2389 rtx with BLKmode). The caller must pass TO through protect_from_queue
2390 before calling. ALIGN is maximum alignment we can assume. */
2391
2392 static void
2393 clear_by_pieces (to, len, align)
2394 rtx to;
2395 unsigned HOST_WIDE_INT len;
2396 unsigned int align;
2397 {
2398 struct store_by_pieces data;
2399
2400 data.constfun = clear_by_pieces_1;
2401 data.constfundata = NULL;
2402 data.len = len;
2403 data.to = to;
2404 store_by_pieces_1 (&data, align);
2405 }
2406
2407 /* Callback routine for clear_by_pieces.
2408 Return const0_rtx unconditionally. */
2409
2410 static rtx
2411 clear_by_pieces_1 (data, offset, mode)
2412 PTR data ATTRIBUTE_UNUSED;
2413 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2414 enum machine_mode mode ATTRIBUTE_UNUSED;
2415 {
2416 return const0_rtx;
2417 }
2418
2419 /* Subroutine of clear_by_pieces and store_by_pieces.
2420 Generate several move instructions to store LEN bytes of block TO. (A MEM
2421 rtx with BLKmode). The caller must pass TO through protect_from_queue
2422 before calling. ALIGN is maximum alignment we can assume. */
2423
2424 static void
2425 store_by_pieces_1 (data, align)
2426 struct store_by_pieces *data;
2427 unsigned int align;
2428 {
2429 rtx to_addr = XEXP (data->to, 0);
2430 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2431 enum machine_mode mode = VOIDmode, tmode;
2432 enum insn_code icode;
2433
2434 data->offset = 0;
2435 data->to_addr = to_addr;
2436 data->autinc_to
2437 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2438 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2439
2440 data->explicit_inc_to = 0;
2441 data->reverse
2442 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2443 if (data->reverse)
2444 data->offset = data->len;
2445
2446 /* If storing requires more than two move insns,
2447 copy addresses to registers (to make displacements shorter)
2448 and use post-increment if available. */
2449 if (!data->autinc_to
2450 && move_by_pieces_ninsns (data->len, align) > 2)
2451 {
2452 /* Determine the main mode we'll be using. */
2453 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2454 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2455 if (GET_MODE_SIZE (tmode) < max_size)
2456 mode = tmode;
2457
2458 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2459 {
2460 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = -1;
2463 }
2464
2465 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2466 && ! data->autinc_to)
2467 {
2468 data->to_addr = copy_addr_to_reg (to_addr);
2469 data->autinc_to = 1;
2470 data->explicit_inc_to = 1;
2471 }
2472
2473 if ( !data->autinc_to && CONSTANT_P (to_addr))
2474 data->to_addr = copy_addr_to_reg (to_addr);
2475 }
2476
2477 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2478 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2479 align = MOVE_MAX * BITS_PER_UNIT;
2480
2481 /* First store what we can in the largest integer mode, then go to
2482 successively smaller modes. */
2483
2484 while (max_size > 1)
2485 {
2486 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2487 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2488 if (GET_MODE_SIZE (tmode) < max_size)
2489 mode = tmode;
2490
2491 if (mode == VOIDmode)
2492 break;
2493
2494 icode = mov_optab->handlers[(int) mode].insn_code;
2495 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2496 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2497
2498 max_size = GET_MODE_SIZE (mode);
2499 }
2500
2501 /* The code above should have handled everything. */
2502 if (data->len != 0)
2503 abort ();
2504 }
2505
2506 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2507 with move instructions for mode MODE. GENFUN is the gen_... function
2508 to make a move insn for that mode. DATA has all the other info. */
2509
2510 static void
2511 store_by_pieces_2 (genfun, mode, data)
2512 rtx (*genfun) PARAMS ((rtx, ...));
2513 enum machine_mode mode;
2514 struct store_by_pieces *data;
2515 {
2516 unsigned int size = GET_MODE_SIZE (mode);
2517 rtx to1, cst;
2518
2519 while (data->len >= size)
2520 {
2521 if (data->reverse)
2522 data->offset -= size;
2523
2524 if (data->autinc_to)
2525 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2526 data->offset);
2527 else
2528 to1 = adjust_address (data->to, mode, data->offset);
2529
2530 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2531 emit_insn (gen_add2_insn (data->to_addr,
2532 GEN_INT (-(HOST_WIDE_INT) size)));
2533
2534 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2535 emit_insn ((*genfun) (to1, cst));
2536
2537 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2538 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2539
2540 if (! data->reverse)
2541 data->offset += size;
2542
2543 data->len -= size;
2544 }
2545 }
2546 \f
2547 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2548 its length in bytes. */
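/* Illustrative sketch, not part of the original source: zeroing a 16-byte
   BLKmode object could be written as
       clear_storage (to_mem, GEN_INT (16));
   where TO_MEM is the object's MEM rtx; if the size is not a suitable
   constant, the code below falls back to a clrstr pattern or a call to
   memset/bzero.  */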
2549
2550 rtx
2551 clear_storage (object, size)
2552 rtx object;
2553 rtx size;
2554 {
2555 #ifdef TARGET_MEM_FUNCTIONS
2556 static tree fn;
2557 tree call_expr, arg_list;
2558 #endif
2559 rtx retval = 0;
2560 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2561 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2562
2563 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2564 just move a zero. Otherwise, do this a piece at a time. */
2565 if (GET_MODE (object) != BLKmode
2566 && GET_CODE (size) == CONST_INT
2567 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2568 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2569 else
2570 {
2571 object = protect_from_queue (object, 1);
2572 size = protect_from_queue (size, 0);
2573
2574 if (GET_CODE (size) == CONST_INT
2575 && MOVE_BY_PIECES_P (INTVAL (size), align))
2576 clear_by_pieces (object, INTVAL (size), align);
2577 else
2578 {
2579 /* Try the most limited insn first, because there's no point
2580 including more than one in the machine description unless
2581 the more limited one has some advantage. */
2582
2583 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2584 enum machine_mode mode;
2585
2586 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2587 mode = GET_MODE_WIDER_MODE (mode))
2588 {
2589 enum insn_code code = clrstr_optab[(int) mode];
2590 insn_operand_predicate_fn pred;
2591
2592 if (code != CODE_FOR_nothing
2593 /* We don't need MODE to be narrower than
2594 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2595 the mode mask, as it is returned by the macro, it will
2596 definitely be less than the actual mode mask. */
2597 && ((GET_CODE (size) == CONST_INT
2598 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2599 <= (GET_MODE_MASK (mode) >> 1)))
2600 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2601 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2602 || (*pred) (object, BLKmode))
2603 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2604 || (*pred) (opalign, VOIDmode)))
2605 {
2606 rtx op1;
2607 rtx last = get_last_insn ();
2608 rtx pat;
2609
2610 op1 = convert_to_mode (mode, size, 1);
2611 pred = insn_data[(int) code].operand[1].predicate;
2612 if (pred != 0 && ! (*pred) (op1, mode))
2613 op1 = copy_to_mode_reg (mode, op1);
2614
2615 pat = GEN_FCN ((int) code) (object, op1, opalign);
2616 if (pat)
2617 {
2618 emit_insn (pat);
2619 return 0;
2620 }
2621 else
2622 delete_insns_since (last);
2623 }
2624 }
2625
2626 /* OBJECT or SIZE may have been passed through protect_from_queue.
2627
2628 It is unsafe to save the value generated by protect_from_queue
2629 and reuse it later. Consider what happens if emit_queue is
2630 called before the return value from protect_from_queue is used.
2631
2632 Expansion of the CALL_EXPR below will call emit_queue before
2633 we are finished emitting RTL for argument setup. So if we are
2634 not careful we could get the wrong value for an argument.
2635
2636 To avoid this problem we go ahead and emit code to copy OBJECT
2637 and SIZE into new pseudos. We can then place those new pseudos
2638 into an RTL_EXPR and use them later, even after a call to
2639 emit_queue.
2640
2641 Note this is not strictly needed for library calls since they
2642 do not call emit_queue before loading their arguments. However,
2643 we may need to have library calls call emit_queue in the future
2644 since failing to do so could cause problems for targets which
2645 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2646 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2647
2648 #ifdef TARGET_MEM_FUNCTIONS
2649 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2650 #else
2651 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2652 TREE_UNSIGNED (integer_type_node));
2653 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2654 #endif
2655
2656 #ifdef TARGET_MEM_FUNCTIONS
2657 /* It is incorrect to use the libcall calling conventions to call
2658 memset in this context.
2659
2660 This could be a user call to memset and the user may wish to
2661 examine the return value from memset.
2662
2663 For targets where libcalls and normal calls have different
2664 conventions for returning pointers, we could end up generating
2665 incorrect code.
2666
2667 So instead of using a libcall sequence we build up a suitable
2668 CALL_EXPR and expand the call in the normal fashion. */
2669 if (fn == NULL_TREE)
2670 {
2671 tree fntype;
2672
2673 /* This was copied from except.c; I don't know whether all of this is
2674 necessary in this context or not. */
2675 fn = get_identifier ("memset");
2676 fntype = build_pointer_type (void_type_node);
2677 fntype = build_function_type (fntype, NULL_TREE);
2678 fn = build_decl (FUNCTION_DECL, fn, fntype);
2679 ggc_add_tree_root (&fn, 1);
2680 DECL_EXTERNAL (fn) = 1;
2681 TREE_PUBLIC (fn) = 1;
2682 DECL_ARTIFICIAL (fn) = 1;
2683 TREE_NOTHROW (fn) = 1;
2684 make_decl_rtl (fn, NULL);
2685 assemble_external (fn);
2686 }
2687
2688 /* We need to make an argument list for the function call.
2689
2690 memset has three arguments: the first is a void * address, the
2691 second is an integer with the initialization value, and the last is
2692 a size_t byte count. */
2693 arg_list
2694 = build_tree_list (NULL_TREE,
2695 make_tree (build_pointer_type (void_type_node),
2696 object));
2697 TREE_CHAIN (arg_list)
2698 = build_tree_list (NULL_TREE,
2699 make_tree (integer_type_node, const0_rtx));
2700 TREE_CHAIN (TREE_CHAIN (arg_list))
2701 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2702 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2703
2704 /* Now we have to build up the CALL_EXPR itself. */
2705 call_expr = build1 (ADDR_EXPR,
2706 build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
2709 TREE_SIDE_EFFECTS (call_expr) = 1;
2710
2711 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2712 #else
2713 emit_library_call (bzero_libfunc, LCT_NORMAL,
2714 VOIDmode, 2, object, Pmode, size,
2715 TYPE_MODE (integer_type_node));
2716 #endif
2717
2718 /* If we are initializing a readonly value, show the above call
2719 clobbered it. Otherwise, a load from it may erroneously be
2720 hoisted from a loop. */
2721 if (RTX_UNCHANGING_P (object))
2722 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2723 }
2724 }
2725
2726 return retval;
2727 }
2728
2729 /* Generate code to copy Y into X.
2730 Both Y and X must have the same mode, except that
2731 Y can be a constant with VOIDmode.
2732 This mode cannot be BLKmode; use emit_block_move for that.
2733
2734 Return the last instruction emitted. */
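/* Illustrative sketch, not part of the original source: a typical use is
       emit_move_insn (gen_rtx_REG (SImode, regno), const0_rtx);
   to clear a register; constants that the target cannot move directly are
   forced into the constant pool first.  */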
2735
2736 rtx
2737 emit_move_insn (x, y)
2738 rtx x, y;
2739 {
2740 enum machine_mode mode = GET_MODE (x);
2741 rtx y_cst = NULL_RTX;
2742 rtx last_insn;
2743
2744 x = protect_from_queue (x, 1);
2745 y = protect_from_queue (y, 0);
2746
2747 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2748 abort ();
2749
2750 /* Never force constant_p_rtx to memory. */
2751 if (GET_CODE (y) == CONSTANT_P_RTX)
2752 ;
2753 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2754 {
2755 y_cst = y;
2756 y = force_const_mem (mode, y);
2757 }
2758
2759 /* If X or Y are memory references, verify that their addresses are valid
2760 for the machine. */
2761 if (GET_CODE (x) == MEM
2762 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2763 && ! push_operand (x, GET_MODE (x)))
2764 || (flag_force_addr
2765 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2766 x = validize_mem (x);
2767
2768 if (GET_CODE (y) == MEM
2769 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2770 || (flag_force_addr
2771 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2772 y = validize_mem (y);
2773
2774 if (mode == BLKmode)
2775 abort ();
2776
2777 last_insn = emit_move_insn_1 (x, y);
2778
2779 if (y_cst && GET_CODE (x) == REG)
2780 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2781
2782 return last_insn;
2783 }
2784
2785 /* Low level part of emit_move_insn.
2786 Called just like emit_move_insn, but assumes X and Y
2787 are basically valid. */
2788
2789 rtx
2790 emit_move_insn_1 (x, y)
2791 rtx x, y;
2792 {
2793 enum machine_mode mode = GET_MODE (x);
2794 enum machine_mode submode;
2795 enum mode_class class = GET_MODE_CLASS (mode);
2796
2797 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2798 abort ();
2799
2800 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2801 return
2802 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2803
2804 /* Expand complex moves by moving real part and imag part, if possible. */
2805 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2806 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2807 * BITS_PER_UNIT),
2808 (class == MODE_COMPLEX_INT
2809 ? MODE_INT : MODE_FLOAT),
2810 0))
2811 && (mov_optab->handlers[(int) submode].insn_code
2812 != CODE_FOR_nothing))
2813 {
2814 /* Don't split destination if it is a stack push. */
2815 int stack = push_operand (x, GET_MODE (x));
2816
2817 #ifdef PUSH_ROUNDING
2818 /* In case we output to the stack, but the size is smaller than what the
2819 machine can push exactly, we need to use move instructions. */
2820 if (stack
2821 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2822 != GET_MODE_SIZE (submode)))
2823 {
2824 rtx temp;
2825 HOST_WIDE_INT offset1, offset2;
2826
2827 /* Do not use anti_adjust_stack, since we don't want to update
2828 stack_pointer_delta. */
2829 temp = expand_binop (Pmode,
2830 #ifdef STACK_GROWS_DOWNWARD
2831 sub_optab,
2832 #else
2833 add_optab,
2834 #endif
2835 stack_pointer_rtx,
2836 GEN_INT
2837 (PUSH_ROUNDING
2838 (GET_MODE_SIZE (GET_MODE (x)))),
2839 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2840
2841 if (temp != stack_pointer_rtx)
2842 emit_move_insn (stack_pointer_rtx, temp);
2843
2844 #ifdef STACK_GROWS_DOWNWARD
2845 offset1 = 0;
2846 offset2 = GET_MODE_SIZE (submode);
2847 #else
2848 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2849 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2850 + GET_MODE_SIZE (submode));
2851 #endif
2852
2853 emit_move_insn (change_address (x, submode,
2854 gen_rtx_PLUS (Pmode,
2855 stack_pointer_rtx,
2856 GEN_INT (offset1))),
2857 gen_realpart (submode, y));
2858 emit_move_insn (change_address (x, submode,
2859 gen_rtx_PLUS (Pmode,
2860 stack_pointer_rtx,
2861 GEN_INT (offset2))),
2862 gen_imagpart (submode, y));
2863 }
2864 else
2865 #endif
2866 /* If this is a stack push, push the highpart first, so it
2867 will be in the argument order.
2868
2869 In that case, change_address is used only to convert
2870 the mode, not to change the address. */
2871 if (stack)
2872 {
2873 /* Note that the real part always precedes the imag part in memory
2874 regardless of the machine's endianness. */
2875 #ifdef STACK_GROWS_DOWNWARD
2876 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2877 (gen_rtx_MEM (submode, XEXP (x, 0)),
2878 gen_imagpart (submode, y)));
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (gen_rtx_MEM (submode, XEXP (x, 0)),
2881 gen_realpart (submode, y)));
2882 #else
2883 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2884 (gen_rtx_MEM (submode, XEXP (x, 0)),
2885 gen_realpart (submode, y)));
2886 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2887 (gen_rtx_MEM (submode, XEXP (x, 0)),
2888 gen_imagpart (submode, y)));
2889 #endif
2890 }
2891 else
2892 {
2893 rtx realpart_x, realpart_y;
2894 rtx imagpart_x, imagpart_y;
2895
2896 /* If this is a complex value with each part being smaller than a
2897 word, the usual calling sequence will likely pack the pieces into
2898 a single register. Unfortunately, SUBREG of hard registers only
2899 deals in terms of words, so we have a problem converting input
2900 arguments to the CONCAT of two registers that is used elsewhere
2901 for complex values. If this is before reload, we can copy it into
2902 memory and reload. FIXME, we should see about using extract and
2903 insert on integer registers, but complex short and complex char
2904 variables should be rarely used. */
2905 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2906 && (reload_in_progress | reload_completed) == 0)
2907 {
2908 int packed_dest_p
2909 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2910 int packed_src_p
2911 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2912
2913 if (packed_dest_p || packed_src_p)
2914 {
2915 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2916 ? MODE_FLOAT : MODE_INT);
2917
2918 enum machine_mode reg_mode
2919 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2920
2921 if (reg_mode != BLKmode)
2922 {
2923 rtx mem = assign_stack_temp (reg_mode,
2924 GET_MODE_SIZE (mode), 0);
2925 rtx cmem = adjust_address (mem, mode, 0);
2926
2927 cfun->cannot_inline
2928 = N_("function using short complex types cannot be inline");
2929
2930 if (packed_dest_p)
2931 {
2932 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2933
2934 emit_move_insn_1 (cmem, y);
2935 return emit_move_insn_1 (sreg, mem);
2936 }
2937 else
2938 {
2939 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2940
2941 emit_move_insn_1 (mem, sreg);
2942 return emit_move_insn_1 (x, cmem);
2943 }
2944 }
2945 }
2946 }
2947
2948 realpart_x = gen_realpart (submode, x);
2949 realpart_y = gen_realpart (submode, y);
2950 imagpart_x = gen_imagpart (submode, x);
2951 imagpart_y = gen_imagpart (submode, y);
2952
2953 /* Show the output dies here. This is necessary for SUBREGs
2954 of pseudos since we cannot track their lifetimes correctly;
2955 hard regs shouldn't appear here except as return values.
2956 We never want to emit such a clobber after reload. */
2957 if (x != y
2958 && ! (reload_in_progress || reload_completed)
2959 && (GET_CODE (realpart_x) == SUBREG
2960 || GET_CODE (imagpart_x) == SUBREG))
2961 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2962
2963 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2964 (realpart_x, realpart_y));
2965 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2966 (imagpart_x, imagpart_y));
2967 }
2968
2969 return get_last_insn ();
2970 }
2971
2972 /* This will handle any multi-word mode that lacks a move_insn pattern.
2973 However, you will get better code if you define such patterns,
2974 even if they must turn into multiple assembler instructions. */
2975 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2976 {
2977 rtx last_insn = 0;
2978 rtx seq, inner;
2979 int need_clobber;
2980 int i;
2981
2982 #ifdef PUSH_ROUNDING
2983
2984 /* If X is a push on the stack, do the push now and replace
2985 X with a reference to the stack pointer. */
2986 if (push_operand (x, GET_MODE (x)))
2987 {
2988 rtx temp;
2989 enum rtx_code code;
2990
2991 /* Do not use anti_adjust_stack, since we don't want to update
2992 stack_pointer_delta. */
2993 temp = expand_binop (Pmode,
2994 #ifdef STACK_GROWS_DOWNWARD
2995 sub_optab,
2996 #else
2997 add_optab,
2998 #endif
2999 stack_pointer_rtx,
3000 GEN_INT
3001 (PUSH_ROUNDING
3002 (GET_MODE_SIZE (GET_MODE (x)))),
3003 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3004
3005 if (temp != stack_pointer_rtx)
3006 emit_move_insn (stack_pointer_rtx, temp);
3007
3008 code = GET_CODE (XEXP (x, 0));
3009
3010 /* Just hope that small offsets off SP are OK. */
3011 if (code == POST_INC)
3012 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3013 GEN_INT (-((HOST_WIDE_INT)
3014 GET_MODE_SIZE (GET_MODE (x)))));
3015 else if (code == POST_DEC)
3016 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3017 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3018 else
3019 temp = stack_pointer_rtx;
3020
3021 x = change_address (x, VOIDmode, temp);
3022 }
3023 #endif
3024
3025 /* If we are in reload, see if either operand is a MEM whose address
3026 is scheduled for replacement. */
3027 if (reload_in_progress && GET_CODE (x) == MEM
3028 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3029 x = replace_equiv_address_nv (x, inner);
3030 if (reload_in_progress && GET_CODE (y) == MEM
3031 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3032 y = replace_equiv_address_nv (y, inner);
3033
3034 start_sequence ();
3035
3036 need_clobber = 0;
3037 for (i = 0;
3038 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3039 i++)
3040 {
3041 rtx xpart = operand_subword (x, i, 1, mode);
3042 rtx ypart = operand_subword (y, i, 1, mode);
3043
3044 /* If we can't get a part of Y, put Y into memory if it is a
3045 constant. Otherwise, force it into a register. If we still
3046 can't get a part of Y, abort. */
3047 if (ypart == 0 && CONSTANT_P (y))
3048 {
3049 y = force_const_mem (mode, y);
3050 ypart = operand_subword (y, i, 1, mode);
3051 }
3052 else if (ypart == 0)
3053 ypart = operand_subword_force (y, i, mode);
3054
3055 if (xpart == 0 || ypart == 0)
3056 abort ();
3057
3058 need_clobber |= (GET_CODE (xpart) == SUBREG);
3059
3060 last_insn = emit_move_insn (xpart, ypart);
3061 }
3062
3063 seq = gen_sequence ();
3064 end_sequence ();
3065
3066 /* Show the output dies here. This is necessary for SUBREGs
3067 of pseudos since we cannot track their lifetimes correctly;
3068 hard regs shouldn't appear here except as return values.
3069 We never want to emit such a clobber after reload. */
3070 if (x != y
3071 && ! (reload_in_progress || reload_completed)
3072 && need_clobber != 0)
3073 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3074
3075 emit_insn (seq);
3076
3077 return last_insn;
3078 }
3079 else
3080 abort ();
3081 }
3082 \f
3083 /* Pushing data onto the stack. */
3084
3085 /* Push a block of length SIZE (perhaps variable)
3086 and return an rtx to address the beginning of the block.
3087 Note that it is not possible for the value returned to be a QUEUED.
3088 The value may be virtual_outgoing_args_rtx.
3089
3090 EXTRA is the number of bytes of padding to push in addition to SIZE.
3091 BELOW nonzero means this padding comes at low addresses;
3092 otherwise, the padding comes at high addresses. */
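/* Illustrative sketch, not part of the original source: reserving 32 bytes of
   argument space plus 4 bytes of padding at low addresses could be written as
       rtx addr = push_block (GEN_INT (32), 4, 1);
   after which the caller stores the argument data through ADDR.  */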
3093
3094 rtx
3095 push_block (size, extra, below)
3096 rtx size;
3097 int extra, below;
3098 {
3099 rtx temp;
3100
3101 size = convert_modes (Pmode, ptr_mode, size, 1);
3102 if (CONSTANT_P (size))
3103 anti_adjust_stack (plus_constant (size, extra));
3104 else if (GET_CODE (size) == REG && extra == 0)
3105 anti_adjust_stack (size);
3106 else
3107 {
3108 temp = copy_to_mode_reg (Pmode, size);
3109 if (extra != 0)
3110 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3111 temp, 0, OPTAB_LIB_WIDEN);
3112 anti_adjust_stack (temp);
3113 }
3114
3115 #ifndef STACK_GROWS_DOWNWARD
3116 if (0)
3117 #else
3118 if (1)
3119 #endif
3120 {
3121 temp = virtual_outgoing_args_rtx;
3122 if (extra != 0 && below)
3123 temp = plus_constant (temp, extra);
3124 }
3125 else
3126 {
3127 if (GET_CODE (size) == CONST_INT)
3128 temp = plus_constant (virtual_outgoing_args_rtx,
3129 -INTVAL (size) - (below ? 0 : extra));
3130 else if (extra != 0 && !below)
3131 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3132 negate_rtx (Pmode, plus_constant (size, extra)));
3133 else
3134 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3135 negate_rtx (Pmode, size));
3136 }
3137
3138 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3139 }
3140
3141 #ifdef PUSH_ROUNDING
3142
3143 /* Emit single push insn. */
3144
3145 static void
3146 emit_single_push_insn (mode, x, type)
3147 rtx x;
3148 enum machine_mode mode;
3149 tree type;
3150 {
3151 rtx dest_addr;
3152 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3153 rtx dest;
3154 enum insn_code icode;
3155 insn_operand_predicate_fn pred;
3156
3157 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3158 /* If there is a push pattern, use it. Otherwise try the old way of
3159 throwing a MEM representing the push operation to the move expander. */
3160 icode = push_optab->handlers[(int) mode].insn_code;
3161 if (icode != CODE_FOR_nothing)
3162 {
3163 if (((pred = insn_data[(int) icode].operand[0].predicate)
3164 && !((*pred) (x, mode))))
3165 x = force_reg (mode, x);
3166 emit_insn (GEN_FCN (icode) (x));
3167 return;
3168 }
3169 if (GET_MODE_SIZE (mode) == rounded_size)
3170 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3171 else
3172 {
3173 #ifdef STACK_GROWS_DOWNWARD
3174 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3175 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3176 #else
3177 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3178 GEN_INT (rounded_size));
3179 #endif
3180 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3181 }
3182
3183 dest = gen_rtx_MEM (mode, dest_addr);
3184
3185 if (type != 0)
3186 {
3187 set_mem_attributes (dest, type, 1);
3188
3189 if (flag_optimize_sibling_calls)
3190 /* Function incoming arguments may overlap with sibling call
3191 outgoing arguments and we cannot allow reordering of reads
3192 from function arguments with stores to outgoing arguments
3193 of sibling calls. */
3194 set_mem_alias_set (dest, 0);
3195 }
3196 emit_move_insn (dest, x);
3197 }
3198 #endif
3199
3200 /* Generate code to push X onto the stack, assuming it has mode MODE and
3201 type TYPE.
3202 MODE is redundant except when X is a CONST_INT (since they don't
3203 carry mode info).
3204 SIZE is an rtx for the size of data to be copied (in bytes),
3205 needed only if X is BLKmode.
3206
3207 ALIGN (in bits) is maximum alignment we can assume.
3208
3209 If PARTIAL and REG are both nonzero, then copy that many of the first
3210 words of X into registers starting with REG, and push the rest of X.
3211 The amount of space pushed is decreased by PARTIAL words,
3212 rounded *down* to a multiple of PARM_BOUNDARY.
3213 REG must be a hard register in this case.
3214 If REG is zero but PARTIAL is not, take all other actions for an
3215 argument partially in registers, but do not actually load any
3216 registers.
3217
3218 EXTRA is the amount in bytes of extra space to leave next to this arg.
3219 This is ignored if an argument block has already been allocated.
3220
3221 On a machine that lacks real push insns, ARGS_ADDR is the address of
3222 the bottom of the argument block for this call. We use indexing off there
3223 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3224 argument block has not been preallocated.
3225
3226 ARGS_SO_FAR is the size of args previously pushed for this call.
3227
3228 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3229 for arguments passed in registers. If nonzero, it will be the number
3230 of bytes required. */
3231
3232 void
3233 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3234 args_addr, args_so_far, reg_parm_stack_space,
3235 alignment_pad)
3236 rtx x;
3237 enum machine_mode mode;
3238 tree type;
3239 rtx size;
3240 unsigned int align;
3241 int partial;
3242 rtx reg;
3243 int extra;
3244 rtx args_addr;
3245 rtx args_so_far;
3246 int reg_parm_stack_space;
3247 rtx alignment_pad;
3248 {
3249 rtx xinner;
3250 enum direction stack_direction
3251 #ifdef STACK_GROWS_DOWNWARD
3252 = downward;
3253 #else
3254 = upward;
3255 #endif
3256
3257 /* Decide where to pad the argument: `downward' for below,
3258 `upward' for above, or `none' for don't pad it.
3259 Default is below for small data on big-endian machines; else above. */
3260 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3261
3262 /* Invert direction if stack is post-decrement.
3263 FIXME: why? */
3264 if (STACK_PUSH_CODE == POST_DEC)
3265 if (where_pad != none)
3266 where_pad = (where_pad == downward ? upward : downward);
3267
3268 xinner = x = protect_from_queue (x, 0);
3269
3270 if (mode == BLKmode)
3271 {
3272 /* Copy a block into the stack, entirely or partially. */
3273
3274 rtx temp;
3275 int used = partial * UNITS_PER_WORD;
3276 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3277 int skip;
3278
3279 if (size == 0)
3280 abort ();
3281
3282 used -= offset;
3283
3284 /* USED is now the # of bytes we need not copy to the stack
3285 because registers will take care of them. */
3286
3287 if (partial != 0)
3288 xinner = adjust_address (xinner, BLKmode, used);
3289
3290 /* If the partial register-part of the arg counts in its stack size,
3291 skip the part of stack space corresponding to the registers.
3292 Otherwise, start copying to the beginning of the stack space,
3293 by setting SKIP to 0. */
3294 skip = (reg_parm_stack_space == 0) ? 0 : used;
3295
3296 #ifdef PUSH_ROUNDING
3297 /* Do it with several push insns if that doesn't take lots of insns
3298 and if there is no difficulty with push insns that skip bytes
3299 on the stack for alignment purposes. */
3300 if (args_addr == 0
3301 && PUSH_ARGS
3302 && GET_CODE (size) == CONST_INT
3303 && skip == 0
3304 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3305 /* Here we avoid the case of a structure whose weak alignment
3306 forces many pushes of a small amount of data,
3307 and such small pushes do rounding that causes trouble. */
3308 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3309 || align >= BIGGEST_ALIGNMENT
3310 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3311 == (align / BITS_PER_UNIT)))
3312 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3313 {
3314 /* Push padding now if padding above and stack grows down,
3315 or if padding below and stack grows up.
3316 But if space already allocated, this has already been done. */
3317 if (extra && args_addr == 0
3318 && where_pad != none && where_pad != stack_direction)
3319 anti_adjust_stack (GEN_INT (extra));
3320
3321 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3322 }
3323 else
3324 #endif /* PUSH_ROUNDING */
3325 {
3326 rtx target;
3327
3328 /* Otherwise make space on the stack and copy the data
3329 to the address of that space. */
3330
3331 /* Deduct words put into registers from the size we must copy. */
3332 if (partial != 0)
3333 {
3334 if (GET_CODE (size) == CONST_INT)
3335 size = GEN_INT (INTVAL (size) - used);
3336 else
3337 size = expand_binop (GET_MODE (size), sub_optab, size,
3338 GEN_INT (used), NULL_RTX, 0,
3339 OPTAB_LIB_WIDEN);
3340 }
3341
3342 /* Get the address of the stack space.
3343 In this case, we do not deal with EXTRA separately.
3344 A single stack adjust will do. */
3345 if (! args_addr)
3346 {
3347 temp = push_block (size, extra, where_pad == downward);
3348 extra = 0;
3349 }
3350 else if (GET_CODE (args_so_far) == CONST_INT)
3351 temp = memory_address (BLKmode,
3352 plus_constant (args_addr,
3353 skip + INTVAL (args_so_far)));
3354 else
3355 temp = memory_address (BLKmode,
3356 plus_constant (gen_rtx_PLUS (Pmode,
3357 args_addr,
3358 args_so_far),
3359 skip));
3360 target = gen_rtx_MEM (BLKmode, temp);
3361
3362 if (type != 0)
3363 {
3364 set_mem_attributes (target, type, 1);
3365 /* Function incoming arguments may overlap with sibling call
3366 outgoing arguments and we cannot allow reordering of reads
3367 from function arguments with stores to outgoing arguments
3368 of sibling calls. */
3369 set_mem_alias_set (target, 0);
3370 }
3371 else
3372 set_mem_align (target, align);
3373
3374 /* TEMP is the address of the block. Copy the data there. */
3375 if (GET_CODE (size) == CONST_INT
3376 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3377 {
3378 move_by_pieces (target, xinner, INTVAL (size), align);
3379 goto ret;
3380 }
3381 else
3382 {
3383 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3384 enum machine_mode mode;
3385
3386 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3387 mode != VOIDmode;
3388 mode = GET_MODE_WIDER_MODE (mode))
3389 {
3390 enum insn_code code = movstr_optab[(int) mode];
3391 insn_operand_predicate_fn pred;
3392
3393 if (code != CODE_FOR_nothing
3394 && ((GET_CODE (size) == CONST_INT
3395 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3396 <= (GET_MODE_MASK (mode) >> 1)))
3397 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3398 && (!(pred = insn_data[(int) code].operand[0].predicate)
3399 || ((*pred) (target, BLKmode)))
3400 && (!(pred = insn_data[(int) code].operand[1].predicate)
3401 || ((*pred) (xinner, BLKmode)))
3402 && (!(pred = insn_data[(int) code].operand[3].predicate)
3403 || ((*pred) (opalign, VOIDmode))))
3404 {
3405 rtx op2 = convert_to_mode (mode, size, 1);
3406 rtx last = get_last_insn ();
3407 rtx pat;
3408
3409 pred = insn_data[(int) code].operand[2].predicate;
3410 if (pred != 0 && ! (*pred) (op2, mode))
3411 op2 = copy_to_mode_reg (mode, op2);
3412
3413 pat = GEN_FCN ((int) code) (target, xinner,
3414 op2, opalign);
3415 if (pat)
3416 {
3417 emit_insn (pat);
3418 goto ret;
3419 }
3420 else
3421 delete_insns_since (last);
3422 }
3423 }
3424 }
3425
3426 if (!ACCUMULATE_OUTGOING_ARGS)
3427 {
3428 /* If the source is referenced relative to the stack pointer,
3429 copy it to another register to stabilize it. We do not need
3430 to do this if we know that we won't be changing sp. */
3431
3432 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3433 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3434 temp = copy_to_reg (temp);
3435 }
3436
3437 /* Make inhibit_defer_pop nonzero around the library call
3438 to force it to pop the bcopy-arguments right away. */
3439 NO_DEFER_POP;
3440 #ifdef TARGET_MEM_FUNCTIONS
3441 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3442 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3443 convert_to_mode (TYPE_MODE (sizetype),
3444 size, TREE_UNSIGNED (sizetype)),
3445 TYPE_MODE (sizetype));
3446 #else
3447 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3448 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3449 convert_to_mode (TYPE_MODE (integer_type_node),
3450 size,
3451 TREE_UNSIGNED (integer_type_node)),
3452 TYPE_MODE (integer_type_node));
3453 #endif
3454 OK_DEFER_POP;
3455 }
3456 }
3457 else if (partial > 0)
3458 {
3459 /* Scalar partly in registers. */
3460
3461 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3462 int i;
3463 int not_stack;
3464 /* # words of start of argument
3465 that we must make space for but need not store. */
3466 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3467 int args_offset = INTVAL (args_so_far);
3468 int skip;
3469
3470 /* Push padding now if padding above and stack grows down,
3471 or if padding below and stack grows up.
3472 But if space already allocated, this has already been done. */
3473 if (extra && args_addr == 0
3474 && where_pad != none && where_pad != stack_direction)
3475 anti_adjust_stack (GEN_INT (extra));
3476
3477 /* If we make space by pushing it, we might as well push
3478 the real data. Otherwise, we can leave OFFSET nonzero
3479 and leave the space uninitialized. */
3480 if (args_addr == 0)
3481 offset = 0;
3482
3483 /* Now NOT_STACK gets the number of words that we don't need to
3484 allocate on the stack. */
3485 not_stack = partial - offset;
3486
3487 /* If the partial register-part of the arg counts in its stack size,
3488 skip the part of stack space corresponding to the registers.
3489 Otherwise, start copying to the beginning of the stack space,
3490 by setting SKIP to 0. */
3491 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3492
3493 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3494 x = validize_mem (force_const_mem (mode, x));
3495
3496 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3497 SUBREGs of such registers are not allowed. */
3498 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3499 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3500 x = copy_to_reg (x);
3501
3502 /* Loop over all the words allocated on the stack for this arg. */
3503 /* We can do it by words, because any scalar bigger than a word
3504 has a size a multiple of a word. */
3505 #ifndef PUSH_ARGS_REVERSED
3506 for (i = not_stack; i < size; i++)
3507 #else
3508 for (i = size - 1; i >= not_stack; i--)
3509 #endif
3510 if (i >= not_stack + offset)
3511 emit_push_insn (operand_subword_force (x, i, mode),
3512 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3513 0, args_addr,
3514 GEN_INT (args_offset + ((i - not_stack + skip)
3515 * UNITS_PER_WORD)),
3516 reg_parm_stack_space, alignment_pad);
3517 }
3518 else
3519 {
3520 rtx addr;
3521 rtx target = NULL_RTX;
3522 rtx dest;
3523
3524 /* Push padding now if padding above and stack grows down,
3525 or if padding below and stack grows up.
3526 But if space already allocated, this has already been done. */
3527 if (extra && args_addr == 0
3528 && where_pad != none && where_pad != stack_direction)
3529 anti_adjust_stack (GEN_INT (extra));
3530
3531 #ifdef PUSH_ROUNDING
3532 if (args_addr == 0 && PUSH_ARGS)
3533 emit_single_push_insn (mode, x, type);
3534 else
3535 #endif
3536 {
3537 if (GET_CODE (args_so_far) == CONST_INT)
3538 addr
3539 = memory_address (mode,
3540 plus_constant (args_addr,
3541 INTVAL (args_so_far)));
3542 else
3543 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3544 args_so_far));
3545 target = addr;
3546 dest = gen_rtx_MEM (mode, addr);
3547 if (type != 0)
3548 {
3549 set_mem_attributes (dest, type, 1);
3550 /* Function incoming arguments may overlap with sibling call
3551 outgoing arguments and we cannot allow reordering of reads
3552 from function arguments with stores to outgoing arguments
3553 of sibling calls. */
3554 set_mem_alias_set (dest, 0);
3555 }
3556
3557 emit_move_insn (dest, x);
3558 }
3559
3560 }
3561
3562 ret:
3563 /* If part should go in registers, copy that part
3564 into the appropriate registers. Do this now, at the end,
3565 since mem-to-mem copies above may do function calls. */
3566 if (partial > 0 && reg != 0)
3567 {
3568 /* Handle calls that pass values in multiple non-contiguous locations.
3569 The Irix 6 ABI has examples of this. */
3570 if (GET_CODE (reg) == PARALLEL)
3571 emit_group_load (reg, x, -1); /* ??? size? */
3572 else
3573 move_block_to_reg (REGNO (reg), x, partial, mode);
3574 }
3575
3576 if (extra && args_addr == 0 && where_pad == stack_direction)
3577 anti_adjust_stack (GEN_INT (extra));
3578
3579 if (alignment_pad && args_addr == 0)
3580 anti_adjust_stack (alignment_pad);
3581 }
3582 \f
3583 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3584 operations. */
3585
3586 static rtx
3587 get_subtarget (x)
3588 rtx x;
3589 {
3590 return ((x == 0
3591 /* Only registers can be subtargets. */
3592 || GET_CODE (x) != REG
3593 /* If the register is readonly, it can't be set more than once. */
3594 || RTX_UNCHANGING_P (x)
3595 /* Don't use hard regs to avoid extending their life. */
3596 || REGNO (x) < FIRST_PSEUDO_REGISTER
3597 /* Avoid subtargets inside loops,
3598 since they hide some invariant expressions. */
3599 || preserve_subexpressions_p ())
3600 ? 0 : x);
3601 }
3602
3603 /* Expand an assignment that stores the value of FROM into TO.
3604 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3605 (This may contain a QUEUED rtx;
3606 if the value is constant, this rtx is a constant.)
3607 Otherwise, the returned value is NULL_RTX.
3608
3609 SUGGEST_REG is no longer actually used.
3610 It used to mean: copy the value through a register
3611 and return that register, if that is possible.
3612 We now use WANT_VALUE to decide whether to do this. */
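/* Illustrative sketch, not part of the original source: statement expansion
   of an ordinary assignment whose value is unused reaches this routine as
       expand_assignment (lhs, rhs, 0, 0);
   where LHS and RHS are the trees for the two sides and WANT_VALUE is zero,
   so NULL_RTX is returned.  */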
3613
3614 rtx
3615 expand_assignment (to, from, want_value, suggest_reg)
3616 tree to, from;
3617 int want_value;
3618 int suggest_reg ATTRIBUTE_UNUSED;
3619 {
3620 rtx to_rtx = 0;
3621 rtx result;
3622
3623 /* Don't crash if the lhs of the assignment was erroneous. */
3624
3625 if (TREE_CODE (to) == ERROR_MARK)
3626 {
3627 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3628 return want_value ? result : NULL_RTX;
3629 }
3630
3631 /* Assignment of a structure component needs special treatment
3632 if the structure component's rtx is not simply a MEM.
3633 Assignment of an array element at a constant index, and assignment of
3634 an array element in an unaligned packed structure field, have the same
3635 problem. */
3636
3637 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3638 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3639 {
3640 enum machine_mode mode1;
3641 HOST_WIDE_INT bitsize, bitpos;
3642 rtx orig_to_rtx;
3643 tree offset;
3644 int unsignedp;
3645 int volatilep = 0;
3646 tree tem;
3647
3648 push_temp_slots ();
3649 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3650 &unsignedp, &volatilep);
3651
3652 /* If we are going to use store_bit_field and extract_bit_field,
3653 make sure to_rtx will be safe for multiple use. */
3654
3655 if (mode1 == VOIDmode && want_value)
3656 tem = stabilize_reference (tem);
3657
3658 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3659
3660 if (offset != 0)
3661 {
3662 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3663
3664 if (GET_CODE (to_rtx) != MEM)
3665 abort ();
3666
3667 if (GET_MODE (offset_rtx) != ptr_mode)
3668 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3669
3670 #ifdef POINTERS_EXTEND_UNSIGNED
3671 if (GET_MODE (offset_rtx) != Pmode)
3672 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3673 #endif
3674
3675 /* A constant address in TO_RTX can have VOIDmode; we must not try
3676 to call force_reg in that case, so avoid it. */
3677 if (GET_CODE (to_rtx) == MEM
3678 && GET_MODE (to_rtx) == BLKmode
3679 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3680 && bitsize > 0
3681 && (bitpos % bitsize) == 0
3682 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3683 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3684 {
3685 rtx temp
3686 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3687
3688 if (GET_CODE (XEXP (temp, 0)) == REG)
3689 to_rtx = temp;
3690 else
3691 to_rtx = (replace_equiv_address
3692 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3693 XEXP (temp, 0))));
3694 bitpos = 0;
3695 }
3696
3697 to_rtx = offset_address (to_rtx, offset_rtx,
3698 highest_pow2_factor (offset));
3699 }
3700
3701 if (GET_CODE (to_rtx) == MEM)
3702 {
3703 tree old_expr = MEM_EXPR (to_rtx);
3704
3705 /* If the field is at offset zero, we could have been given the
3706 DECL_RTX of the parent struct. Don't munge it. */
3707 to_rtx = shallow_copy_rtx (to_rtx);
3708
3709 set_mem_attributes (to_rtx, to, 0);
3710
3711 /* If we changed MEM_EXPR, that means we're now referencing
3712 the COMPONENT_REF, which means that MEM_OFFSET must be
3713 relative to that field. But we've not yet reflected BITPOS
3714 in TO_RTX. This will be done in store_field. Adjust for
3715 that by biasing MEM_OFFSET by -bitpos. */
3716 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3717 && (bitpos / BITS_PER_UNIT) != 0)
3718 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3719 - (bitpos / BITS_PER_UNIT)));
3720 }
3721
3722 /* Deal with volatile and readonly fields. The former is only done
3723 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3724 if (volatilep && GET_CODE (to_rtx) == MEM)
3725 {
3726 if (to_rtx == orig_to_rtx)
3727 to_rtx = copy_rtx (to_rtx);
3728 MEM_VOLATILE_P (to_rtx) = 1;
3729 }
3730
3731 if (TREE_CODE (to) == COMPONENT_REF
3732 && TREE_READONLY (TREE_OPERAND (to, 1)))
3733 {
3734 if (to_rtx == orig_to_rtx)
3735 to_rtx = copy_rtx (to_rtx);
3736 RTX_UNCHANGING_P (to_rtx) = 1;
3737 }
3738
3739 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3740 {
3741 if (to_rtx == orig_to_rtx)
3742 to_rtx = copy_rtx (to_rtx);
3743 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3744 }
3745
3746 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3747 (want_value
3748 /* Spurious cast for HPUX compiler. */
3749 ? ((enum machine_mode)
3750 TYPE_MODE (TREE_TYPE (to)))
3751 : VOIDmode),
3752 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3753
3754 preserve_temp_slots (result);
3755 free_temp_slots ();
3756 pop_temp_slots ();
3757
3758 /* If the value is meaningful, convert RESULT to the proper mode.
3759 Otherwise, return nothing. */
3760 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3761 TYPE_MODE (TREE_TYPE (from)),
3762 result,
3763 TREE_UNSIGNED (TREE_TYPE (to)))
3764 : NULL_RTX);
3765 }
3766
3767 /* If the rhs is a function call and its value is not an aggregate,
3768 call the function before we start to compute the lhs.
3769 This is needed for correct code for cases such as
3770 val = setjmp (buf) on machines where reference to val
3771 requires loading up part of an address in a separate insn.
3772
3773 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3774 since it might be a promoted variable where the zero- or sign-extension
3775 needs to be done. Handling this in the normal way is safe because no
3776 computation is done before the call. */
3777 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3778 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3779 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3780 && GET_CODE (DECL_RTL (to)) == REG))
3781 {
3782 rtx value;
3783
3784 push_temp_slots ();
3785 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3786 if (to_rtx == 0)
3787 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3788
3789 /* Handle calls that return values in multiple non-contiguous locations.
3790 The Irix 6 ABI has examples of this. */
3791 if (GET_CODE (to_rtx) == PARALLEL)
3792 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3793 else if (GET_MODE (to_rtx) == BLKmode)
3794 emit_block_move (to_rtx, value, expr_size (from));
3795 else
3796 {
3797 #ifdef POINTERS_EXTEND_UNSIGNED
3798 if (POINTER_TYPE_P (TREE_TYPE (to))
3799 && GET_MODE (to_rtx) != GET_MODE (value))
3800 value = convert_memory_address (GET_MODE (to_rtx), value);
3801 #endif
3802 emit_move_insn (to_rtx, value);
3803 }
3804 preserve_temp_slots (to_rtx);
3805 free_temp_slots ();
3806 pop_temp_slots ();
3807 return want_value ? to_rtx : NULL_RTX;
3808 }
3809
3810 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3811 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3812
3813 if (to_rtx == 0)
3814 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3815
3816 /* Don't move directly into a return register. */
3817 if (TREE_CODE (to) == RESULT_DECL
3818 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3819 {
3820 rtx temp;
3821
3822 push_temp_slots ();
3823 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3824
3825 if (GET_CODE (to_rtx) == PARALLEL)
3826 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3827 else
3828 emit_move_insn (to_rtx, temp);
3829
3830 preserve_temp_slots (to_rtx);
3831 free_temp_slots ();
3832 pop_temp_slots ();
3833 return want_value ? to_rtx : NULL_RTX;
3834 }
3835
3836 /* In case we are returning the contents of an object which overlaps
3837 the place the value is being stored, use a safe function when copying
3838 a value through a pointer into a structure value return block. */
3839 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3840 && current_function_returns_struct
3841 && !current_function_returns_pcc_struct)
3842 {
3843 rtx from_rtx, size;
3844
3845 push_temp_slots ();
3846 size = expr_size (from);
3847 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3848
3849 #ifdef TARGET_MEM_FUNCTIONS
3850 emit_library_call (memmove_libfunc, LCT_NORMAL,
3851 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3852 XEXP (from_rtx, 0), Pmode,
3853 convert_to_mode (TYPE_MODE (sizetype),
3854 size, TREE_UNSIGNED (sizetype)),
3855 TYPE_MODE (sizetype));
3856 #else
3857 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3858 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3859 XEXP (to_rtx, 0), Pmode,
3860 convert_to_mode (TYPE_MODE (integer_type_node),
3861 size, TREE_UNSIGNED (integer_type_node)),
3862 TYPE_MODE (integer_type_node));
3863 #endif
3864
3865 preserve_temp_slots (to_rtx);
3866 free_temp_slots ();
3867 pop_temp_slots ();
3868 return want_value ? to_rtx : NULL_RTX;
3869 }
3870
3871 /* Compute FROM and store the value in the rtx we got. */
3872
3873 push_temp_slots ();
3874 result = store_expr (from, to_rtx, want_value);
3875 preserve_temp_slots (result);
3876 free_temp_slots ();
3877 pop_temp_slots ();
3878 return want_value ? result : NULL_RTX;
3879 }
3880
3881 /* Generate code for computing expression EXP,
3882 and storing the value into TARGET.
3883 TARGET may contain a QUEUED rtx.
3884
3885 If WANT_VALUE is nonzero, return a copy of the value
3886 not in TARGET, so that we can be sure to use the proper
3887 value in a containing expression even if TARGET has something
3888 else stored in it. If possible, we copy the value through a pseudo
3889 and return that pseudo. Or, if the value is constant, we try to
3890 return the constant. In some cases, we return a pseudo
3891 copied *from* TARGET.
3892
3893 If the mode is BLKmode then we may return TARGET itself.
3894 It turns out that in BLKmode it doesn't cause a problem,
3895 because C has no operators that could combine two different
3896 assignments into the same BLKmode object with different values
3897 with no sequence point. Will other languages need this to
3898 be more thorough?
3899
3900 If WANT_VALUE is 0, we return NULL, to make sure
3901 to catch quickly any cases where the caller uses the value
3902 and fails to set WANT_VALUE. */
3903
3904 rtx
3905 store_expr (exp, target, want_value)
3906 tree exp;
3907 rtx target;
3908 int want_value;
3909 {
3910 rtx temp;
3911 int dont_return_target = 0;
3912 int dont_store_target = 0;
3913
3914 if (TREE_CODE (exp) == COMPOUND_EXPR)
3915 {
3916 /* Perform first part of compound expression, then assign from second
3917 part. */
3918 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3919 emit_queue ();
3920 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3921 }
3922 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3923 {
3924 /* For conditional expression, get safe form of the target. Then
3925 test the condition, doing the appropriate assignment on either
3926 side. This avoids the creation of unnecessary temporaries.
3927 For non-BLKmode, it is more efficient not to do this. */
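      /* Illustrative example (an added expository sketch): for a BLKmode
	 struct assignment such as "s = flag ? a : b;", the code below tests
	 FLAG and performs either "s = a" or "s = b" directly into S, rather
	 than first building the selected value in a temporary and then
	 copying that temporary into S.  */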
3928
3929 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3930
3931 emit_queue ();
3932 target = protect_from_queue (target, 1);
3933
3934 do_pending_stack_adjust ();
3935 NO_DEFER_POP;
3936 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3937 start_cleanup_deferral ();
3938 store_expr (TREE_OPERAND (exp, 1), target, 0);
3939 end_cleanup_deferral ();
3940 emit_queue ();
3941 emit_jump_insn (gen_jump (lab2));
3942 emit_barrier ();
3943 emit_label (lab1);
3944 start_cleanup_deferral ();
3945 store_expr (TREE_OPERAND (exp, 2), target, 0);
3946 end_cleanup_deferral ();
3947 emit_queue ();
3948 emit_label (lab2);
3949 OK_DEFER_POP;
3950
3951 return want_value ? target : NULL_RTX;
3952 }
3953 else if (queued_subexp_p (target))
3954 /* If target contains a postincrement, let's not risk
3955 using it as the place to generate the rhs. */
3956 {
3957 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3958 {
3959 /* Expand EXP into a new pseudo. */
3960 temp = gen_reg_rtx (GET_MODE (target));
3961 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3962 }
3963 else
3964 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3965
3966 /* If target is volatile, ANSI requires accessing the value
3967 *from* the target, if it is accessed. So make that happen.
3968 In no case return the target itself. */
3969 if (! MEM_VOLATILE_P (target) && want_value)
3970 dont_return_target = 1;
3971 }
3972 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3973 && GET_MODE (target) != BLKmode)
3974 /* If target is in memory and caller wants value in a register instead,
3975 arrange that. Pass TARGET as target for expand_expr so that,
3976 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3977 We know expand_expr will not use the target in that case.
3978 Don't do this if TARGET is volatile because we are supposed
3979 to write it and then read it. */
3980 {
3981 temp = expand_expr (exp, target, GET_MODE (target), 0);
3982 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3983 {
3984 /* If TEMP is already in the desired TARGET, only copy it from
3985 memory and don't store it there again. */
3986 if (temp == target
3987 || (rtx_equal_p (temp, target)
3988 && ! side_effects_p (temp) && ! side_effects_p (target)))
3989 dont_store_target = 1;
3990 temp = copy_to_reg (temp);
3991 }
3992 dont_return_target = 1;
3993 }
3994 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3995 /* If this is a scalar in a register that is stored in a wider mode
3996 than the declared mode, compute the result into its declared mode
3997 and then convert to the wider mode. Our value is the computed
3998 expression. */
3999 {
4000 /* If we don't want a value, we can do the conversion inside EXP,
4001 which will often result in some optimizations. Do the conversion
4002 in two steps: first change the signedness, if needed, then
4003 do the extension. But don't do this if the type of EXP is a subtype
4004 of something else since then the conversion might involve
4005 more than just converting modes. */
4006 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4007 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4008 {
4009 if (TREE_UNSIGNED (TREE_TYPE (exp))
4010 != SUBREG_PROMOTED_UNSIGNED_P (target))
4011 exp
4012 = convert
4013 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4014 TREE_TYPE (exp)),
4015 exp);
4016
4017 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4018 SUBREG_PROMOTED_UNSIGNED_P (target)),
4019 exp);
4020 }
4021
4022 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4023
4024 /* If TEMP is a volatile MEM and we want a result value, make
4025 the access now so it gets done only once. Likewise if
4026 it contains TARGET. */
4027 if (GET_CODE (temp) == MEM && want_value
4028 && (MEM_VOLATILE_P (temp)
4029 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4030 temp = copy_to_reg (temp);
4031
4032 /* If TEMP is a VOIDmode constant, use convert_modes to make
4033 sure that we properly convert it. */
4034 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4035 {
4036 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4037 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4038 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4039 GET_MODE (target), temp,
4040 SUBREG_PROMOTED_UNSIGNED_P (target));
4041 }
4042
4043 convert_move (SUBREG_REG (target), temp,
4044 SUBREG_PROMOTED_UNSIGNED_P (target));
4045
4046 /* If we promoted a constant, change the mode back down to match
4047 target. Otherwise, the caller might get confused by a result whose
4048 mode is larger than expected. */
4049
4050 if (want_value && GET_MODE (temp) != GET_MODE (target))
4051 {
4052 if (GET_MODE (temp) != VOIDmode)
4053 {
4054 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4055 SUBREG_PROMOTED_VAR_P (temp) = 1;
4056 SUBREG_PROMOTED_UNSIGNED_P (temp)
4057 = SUBREG_PROMOTED_UNSIGNED_P (target);
4058 }
4059 else
4060 temp = convert_modes (GET_MODE (target),
4061 GET_MODE (SUBREG_REG (target)),
4062 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4063 }
4064
4065 return want_value ? temp : NULL_RTX;
4066 }
4067 else
4068 {
4069 temp = expand_expr (exp, target, GET_MODE (target), 0);
4070 /* Return TARGET if it's a specified hardware register.
4071 If TARGET is a volatile mem ref, either return TARGET
4072 or return a reg copied *from* TARGET; ANSI requires this.
4073
4074 Otherwise, if TEMP is not TARGET, return TEMP
4075 if it is constant (for efficiency),
4076 or if we really want the correct value. */
4077 if (!(target && GET_CODE (target) == REG
4078 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4079 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4080 && ! rtx_equal_p (temp, target)
4081 && (CONSTANT_P (temp) || want_value))
4082 dont_return_target = 1;
4083 }
4084
4085 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4086 the same as that of TARGET, adjust the constant. This is needed, for
4087 example, in case it is a CONST_DOUBLE and we want only a word-sized
4088 value. */
4089 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4090 && TREE_CODE (exp) != ERROR_MARK
4091 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4092 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4093 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4094
4095 /* If value was not generated in the target, store it there.
4096 Convert the value to TARGET's type first if necessary.
4097 If TEMP and TARGET compare equal according to rtx_equal_p, but
4098 one or both of them are volatile memory refs, we have to distinguish
4099 two cases:
4100 - expand_expr has used TARGET. In this case, we must not generate
4101 another copy. This can be detected by TARGET being equal according
4102 to == .
4103 - expand_expr has not used TARGET - that means that the source just
4104 happens to have the same RTX form. Since temp will have been created
4105 by expand_expr, it will compare unequal according to == .
4106 We must generate a copy in this case, to reach the correct number
4107 of volatile memory references. */
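  /* Illustrative example (an added expository sketch): for "*p = *p" where
     P points to a volatile int, the source expands to a fresh MEM that is
     rtx_equal_p to TARGET but not == to it; since both are volatile, the
     move below is still emitted so that the read and the write each appear
     in the RTL.  */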
4108
4109 if ((! rtx_equal_p (temp, target)
4110 || (temp != target && (side_effects_p (temp)
4111 || side_effects_p (target))))
4112 && TREE_CODE (exp) != ERROR_MARK
4113 && ! dont_store_target)
4114 {
4115 target = protect_from_queue (target, 1);
4116 if (GET_MODE (temp) != GET_MODE (target)
4117 && GET_MODE (temp) != VOIDmode)
4118 {
4119 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4120 if (dont_return_target)
4121 {
4122 /* In this case, we will return TEMP,
4123 so make sure it has the proper mode.
4124 But don't forget to store the value into TARGET. */
4125 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4126 emit_move_insn (target, temp);
4127 }
4128 else
4129 convert_move (target, temp, unsignedp);
4130 }
4131
4132 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4133 {
4134 /* Handle copying a string constant into an array. The string
4135 constant may be shorter than the array. So copy just the string's
4136 actual length, and clear the rest. First get the size of the data
4137 type of the string, which is actually the size of the target. */
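	  /* Illustrative example (an added expository sketch): for
	     char buf[8] = "abc", the STRING_CST occupies 4 bytes (including
	     the terminating null) while the target is 8 bytes, so 4 bytes
	     are block-copied and the remaining 4 bytes are cleared by the
	     code below.  */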
4138 rtx size = expr_size (exp);
4139
4140 if (GET_CODE (size) == CONST_INT
4141 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4142 emit_block_move (target, temp, size);
4143 else
4144 {
4145 /* Compute the size of the data to copy from the string. */
4146 tree copy_size
4147 = size_binop (MIN_EXPR,
4148 make_tree (sizetype, size),
4149 size_int (TREE_STRING_LENGTH (exp)));
4150 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4151 VOIDmode, 0);
4152 rtx label = 0;
4153
4154 /* Copy that much. */
4155 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4156 emit_block_move (target, temp, copy_size_rtx);
4157
4158 /* Figure out how much is left in TARGET that we have to clear.
4159 Do all calculations in ptr_mode. */
4160 if (GET_CODE (copy_size_rtx) == CONST_INT)
4161 {
4162 size = plus_constant (size, -INTVAL (copy_size_rtx));
4163 target = adjust_address (target, BLKmode,
4164 INTVAL (copy_size_rtx));
4165 }
4166 else
4167 {
4168 size = expand_binop (ptr_mode, sub_optab, size,
4169 copy_size_rtx, NULL_RTX, 0,
4170 OPTAB_LIB_WIDEN);
4171
4172 #ifdef POINTERS_EXTEND_UNSIGNED
4173 if (GET_MODE (copy_size_rtx) != Pmode)
4174 copy_size_rtx = convert_memory_address (Pmode,
4175 copy_size_rtx);
4176 #endif
4177
4178 target = offset_address (target, copy_size_rtx,
4179 highest_pow2_factor (copy_size));
4180 label = gen_label_rtx ();
4181 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4182 GET_MODE (size), 0, label);
4183 }
4184
4185 if (size != const0_rtx)
4186 clear_storage (target, size);
4187
4188 if (label)
4189 emit_label (label);
4190 }
4191 }
4192 /* Handle calls that return values in multiple non-contiguous locations.
4193 The Irix 6 ABI has examples of this. */
4194 else if (GET_CODE (target) == PARALLEL)
4195 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4196 else if (GET_MODE (temp) == BLKmode)
4197 emit_block_move (target, temp, expr_size (exp));
4198 else
4199 emit_move_insn (target, temp);
4200 }
4201
4202 /* If we don't want a value, return NULL_RTX. */
4203 if (! want_value)
4204 return NULL_RTX;
4205
4206 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4207 ??? The latter test doesn't seem to make sense. */
4208 else if (dont_return_target && GET_CODE (temp) != MEM)
4209 return temp;
4210
4211 /* Return TARGET itself if it is a hard register. */
4212 else if (want_value && GET_MODE (target) != BLKmode
4213 && ! (GET_CODE (target) == REG
4214 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4215 return copy_to_reg (target);
4216
4217 else
4218 return target;
4219 }
4220 \f
4221 /* Return 1 if EXP just contains zeros. */
4222
4223 static int
4224 is_zeros_p (exp)
4225 tree exp;
4226 {
4227 tree elt;
4228
4229 switch (TREE_CODE (exp))
4230 {
4231 case CONVERT_EXPR:
4232 case NOP_EXPR:
4233 case NON_LVALUE_EXPR:
4234 case VIEW_CONVERT_EXPR:
4235 return is_zeros_p (TREE_OPERAND (exp, 0));
4236
4237 case INTEGER_CST:
4238 return integer_zerop (exp);
4239
4240 case COMPLEX_CST:
4241 return
4242 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4243
4244 case REAL_CST:
4245 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4246
4247 case CONSTRUCTOR:
4248 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4249 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4250 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4251 if (! is_zeros_p (TREE_VALUE (elt)))
4252 return 0;
4253
4254 return 1;
4255
4256 default:
4257 return 0;
4258 }
4259 }
4260
4261 /* Return 1 if EXP contains mostly (3/4) zeros. */
4262
4263 static int
4264 mostly_zeros_p (exp)
4265 tree exp;
4266 {
4267 if (TREE_CODE (exp) == CONSTRUCTOR)
4268 {
4269 int elts = 0, zeros = 0;
4270 tree elt = CONSTRUCTOR_ELTS (exp);
4271 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4272 {
4273 /* If there are no ranges of true bits, it is all zero. */
4274 return elt == NULL_TREE;
4275 }
4276 for (; elt; elt = TREE_CHAIN (elt))
4277 {
4278 /* We do not handle the case where the index is a RANGE_EXPR,
4279 so the statistic will be somewhat inaccurate.
4280 We do make a more accurate count in store_constructor itself,
4281 and since this function is only used for nested array elements,
4282 this should be close enough. */
4283 if (mostly_zeros_p (TREE_VALUE (elt)))
4284 zeros++;
4285 elts++;
4286 }
4287
4288 return 4 * zeros >= 3 * elts;
4289 }
4290
4291 return is_zeros_p (exp);
4292 }
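/* Illustrative example (an added expository sketch): for a CONSTRUCTOR
   corresponding to { 0, 0, 0, 5 }, ELTS is 4 and ZEROS is 3, so the test
   4 * zeros >= 3 * elts becomes 12 >= 12 and mostly_zeros_p returns 1;
   for { 0, 0, 5, 5 } the test is 8 >= 12 and it returns 0.  */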
4293 \f
4294 /* Helper function for store_constructor.
4295 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4296 TYPE is the type of the CONSTRUCTOR, not the element type.
4297 CLEARED is as for store_constructor.
4298 ALIAS_SET is the alias set to use for any stores.
4299
4300 This provides a recursive shortcut back to store_constructor when it isn't
4301 necessary to go through store_field. This is so that we can pass through
4302 the cleared field to let store_constructor know that we may not have to
4303 clear a substructure if the outer structure has already been cleared. */
4304
4305 static void
4306 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4307 alias_set)
4308 rtx target;
4309 unsigned HOST_WIDE_INT bitsize;
4310 HOST_WIDE_INT bitpos;
4311 enum machine_mode mode;
4312 tree exp, type;
4313 int cleared;
4314 int alias_set;
4315 {
4316 if (TREE_CODE (exp) == CONSTRUCTOR
4317 && bitpos % BITS_PER_UNIT == 0
4318 /* If we have a non-zero bitpos for a register target, then we just
4319 let store_field do the bitfield handling. This is unlikely to
4320 generate unnecessary clear instructions anyway. */
4321 && (bitpos == 0 || GET_CODE (target) == MEM))
4322 {
4323 if (GET_CODE (target) == MEM)
4324 target
4325 = adjust_address (target,
4326 GET_MODE (target) == BLKmode
4327 || 0 != (bitpos
4328 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4329 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4330
4331
4332 /* Update the alias set, if required. */
4333 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4334 && MEM_ALIAS_SET (target) != 0)
4335 {
4336 target = copy_rtx (target);
4337 set_mem_alias_set (target, alias_set);
4338 }
4339
4340 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4341 }
4342 else
4343 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4344 alias_set);
4345 }
4346
4347 /* Store the value of constructor EXP into the rtx TARGET.
4348 TARGET is either a REG or a MEM; we know it cannot conflict, since
4349 safe_from_p has been called.
4350 CLEARED is true if TARGET is known to have been zeroed.
4351 SIZE is the number of bytes of TARGET we are allowed to modify: this
4352 may not be the same as the size of EXP if we are assigning to a field
4353 which has been packed to exclude padding bits. */
4354
4355 static void
4356 store_constructor (exp, target, cleared, size)
4357 tree exp;
4358 rtx target;
4359 int cleared;
4360 HOST_WIDE_INT size;
4361 {
4362 tree type = TREE_TYPE (exp);
4363 #ifdef WORD_REGISTER_OPERATIONS
4364 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4365 #endif
4366
4367 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4368 || TREE_CODE (type) == QUAL_UNION_TYPE)
4369 {
4370 tree elt;
4371
4372 /* We either clear the aggregate or indicate the value is dead. */
4373 if ((TREE_CODE (type) == UNION_TYPE
4374 || TREE_CODE (type) == QUAL_UNION_TYPE)
4375 && ! cleared
4376 && ! CONSTRUCTOR_ELTS (exp))
4377 /* If the constructor is empty, clear the union. */
4378 {
4379 clear_storage (target, expr_size (exp));
4380 cleared = 1;
4381 }
4382
4383 /* If we are building a static constructor into a register,
4384 set the initial value as zero so we can fold the value into
4385 a constant. But if more than one register is involved,
4386 this probably loses. */
4387 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4388 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4389 {
4390 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4391 cleared = 1;
4392 }
4393
4394 /* If the constructor has fewer fields than the structure
4395 or if we are initializing the structure to mostly zeros,
4396 clear the whole structure first. Don't do this if TARGET is a
4397 register whose mode size isn't equal to SIZE since clear_storage
4398 can't handle this case. */
4399 else if (! cleared && size > 0
4400 && ((list_length (CONSTRUCTOR_ELTS (exp))
4401 != fields_length (type))
4402 || mostly_zeros_p (exp))
4403 && (GET_CODE (target) != REG
4404 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4405 == size)))
4406 {
4407 clear_storage (target, GEN_INT (size));
4408 cleared = 1;
4409 }
4410
4411 if (! cleared)
4412 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4413
4414 /* Store each element of the constructor into
4415 the corresponding field of TARGET. */
4416
4417 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4418 {
4419 tree field = TREE_PURPOSE (elt);
4420 tree value = TREE_VALUE (elt);
4421 enum machine_mode mode;
4422 HOST_WIDE_INT bitsize;
4423 HOST_WIDE_INT bitpos = 0;
4424 int unsignedp;
4425 tree offset;
4426 rtx to_rtx = target;
4427
4428 /* Just ignore missing fields.
4429 We cleared the whole structure, above,
4430 if any fields are missing. */
4431 if (field == 0)
4432 continue;
4433
4434 if (cleared && is_zeros_p (value))
4435 continue;
4436
4437 if (host_integerp (DECL_SIZE (field), 1))
4438 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4439 else
4440 bitsize = -1;
4441
4442 unsignedp = TREE_UNSIGNED (field);
4443 mode = DECL_MODE (field);
4444 if (DECL_BIT_FIELD (field))
4445 mode = VOIDmode;
4446
4447 offset = DECL_FIELD_OFFSET (field);
4448 if (host_integerp (offset, 0)
4449 && host_integerp (bit_position (field), 0))
4450 {
4451 bitpos = int_bit_position (field);
4452 offset = 0;
4453 }
4454 else
4455 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4456
4457 if (offset)
4458 {
4459 rtx offset_rtx;
4460
4461 if (contains_placeholder_p (offset))
4462 offset = build (WITH_RECORD_EXPR, sizetype,
4463 offset, make_tree (TREE_TYPE (exp), target));
4464
4465 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4466 if (GET_CODE (to_rtx) != MEM)
4467 abort ();
4468
4469 if (GET_MODE (offset_rtx) != ptr_mode)
4470 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4471
4472 #ifdef POINTERS_EXTEND_UNSIGNED
4473 if (GET_MODE (offset_rtx) != Pmode)
4474 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4475 #endif
4476
4477 to_rtx = offset_address (to_rtx, offset_rtx,
4478 highest_pow2_factor (offset));
4479 }
4480
4481 if (TREE_READONLY (field))
4482 {
4483 if (GET_CODE (to_rtx) == MEM)
4484 to_rtx = copy_rtx (to_rtx);
4485
4486 RTX_UNCHANGING_P (to_rtx) = 1;
4487 }
4488
4489 #ifdef WORD_REGISTER_OPERATIONS
4490 /* If this initializes a field that is smaller than a word, at the
4491 start of a word, try to widen it to a full word.
4492 This special case allows us to output C++ member function
4493 initializations in a form that the optimizers can understand. */
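	  /* Illustrative example (an added expository sketch, assuming
	     32-bit words): storing the INTEGER_CST 3 into a 16-bit field at
	     bit position 0 of a REG target is rewritten below as a
	     word_mode store; on a big-endian target the value is first
	     shifted left by 32 - 16 = 16 bits so that it lands in the
	     correct part of the word.  */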
4494 if (GET_CODE (target) == REG
4495 && bitsize < BITS_PER_WORD
4496 && bitpos % BITS_PER_WORD == 0
4497 && GET_MODE_CLASS (mode) == MODE_INT
4498 && TREE_CODE (value) == INTEGER_CST
4499 && exp_size >= 0
4500 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4501 {
4502 tree type = TREE_TYPE (value);
4503
4504 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4505 {
4506 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4507 value = convert (type, value);
4508 }
4509
4510 if (BYTES_BIG_ENDIAN)
4511 value
4512 = fold (build (LSHIFT_EXPR, type, value,
4513 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4514 bitsize = BITS_PER_WORD;
4515 mode = word_mode;
4516 }
4517 #endif
4518
4519 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4520 && DECL_NONADDRESSABLE_P (field))
4521 {
4522 to_rtx = copy_rtx (to_rtx);
4523 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4524 }
4525
4526 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4527 value, type, cleared,
4528 get_alias_set (TREE_TYPE (field)));
4529 }
4530 }
4531 else if (TREE_CODE (type) == ARRAY_TYPE)
4532 {
4533 tree elt;
4534 int i;
4535 int need_to_clear;
4536 tree domain = TYPE_DOMAIN (type);
4537 tree elttype = TREE_TYPE (type);
4538 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4539 && TYPE_MAX_VALUE (domain)
4540 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4541 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4542 HOST_WIDE_INT minelt = 0;
4543 HOST_WIDE_INT maxelt = 0;
4544
4545 /* If we have constant bounds for the range of the type, get them. */
4546 if (const_bounds_p)
4547 {
4548 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4549 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4550 }
4551
4552 /* If the constructor has fewer elements than the array,
4553 clear the whole array first. Similarly if this is
4554 a static constructor of a non-BLKmode object. */
4555 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4556 need_to_clear = 1;
4557 else
4558 {
4559 HOST_WIDE_INT count = 0, zero_count = 0;
4560 need_to_clear = ! const_bounds_p;
4561
4562 /* This loop is a more accurate version of the loop in
4563 mostly_zeros_p (it handles RANGE_EXPR in an index).
4564 It is also needed to check for missing elements. */
4565 for (elt = CONSTRUCTOR_ELTS (exp);
4566 elt != NULL_TREE && ! need_to_clear;
4567 elt = TREE_CHAIN (elt))
4568 {
4569 tree index = TREE_PURPOSE (elt);
4570 HOST_WIDE_INT this_node_count;
4571
4572 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4573 {
4574 tree lo_index = TREE_OPERAND (index, 0);
4575 tree hi_index = TREE_OPERAND (index, 1);
4576
4577 if (! host_integerp (lo_index, 1)
4578 || ! host_integerp (hi_index, 1))
4579 {
4580 need_to_clear = 1;
4581 break;
4582 }
4583
4584 this_node_count = (tree_low_cst (hi_index, 1)
4585 - tree_low_cst (lo_index, 1) + 1);
4586 }
4587 else
4588 this_node_count = 1;
4589
4590 count += this_node_count;
4591 if (mostly_zeros_p (TREE_VALUE (elt)))
4592 zero_count += this_node_count;
4593 }
4594
4595 /* Clear the entire array first if there are any missing elements,
4596 or if the incidence of zero elements is >= 75%. */
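	  /* Illustrative example (an added expository sketch): for
	     "int a[8] = { 1, 2 };" only 2 of the 8 elements appear in the
	     constructor, so COUNT is 2, the whole array is cleared first,
	     and only the two nonzero elements are stored individually
	     below.  */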
4597 if (! need_to_clear
4598 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4599 need_to_clear = 1;
4600 }
4601
4602 if (need_to_clear && size > 0)
4603 {
4604 if (! cleared)
4605 clear_storage (target, GEN_INT (size));
4606 cleared = 1;
4607 }
4608 else if (REG_P (target))
4609 /* Inform later passes that the old value is dead. */
4610 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4611
4612 /* Store each element of the constructor into
4613 the corresponding element of TARGET, determined
4614 by counting the elements. */
4615 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4616 elt;
4617 elt = TREE_CHAIN (elt), i++)
4618 {
4619 enum machine_mode mode;
4620 HOST_WIDE_INT bitsize;
4621 HOST_WIDE_INT bitpos;
4622 int unsignedp;
4623 tree value = TREE_VALUE (elt);
4624 tree index = TREE_PURPOSE (elt);
4625 rtx xtarget = target;
4626
4627 if (cleared && is_zeros_p (value))
4628 continue;
4629
4630 unsignedp = TREE_UNSIGNED (elttype);
4631 mode = TYPE_MODE (elttype);
4632 if (mode == BLKmode)
4633 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4634 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4635 : -1);
4636 else
4637 bitsize = GET_MODE_BITSIZE (mode);
4638
4639 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4640 {
4641 tree lo_index = TREE_OPERAND (index, 0);
4642 tree hi_index = TREE_OPERAND (index, 1);
4643 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4644 struct nesting *loop;
4645 HOST_WIDE_INT lo, hi, count;
4646 tree position;
4647
4648 /* If the range is constant and "small", unroll the loop. */
4649 if (const_bounds_p
4650 && host_integerp (lo_index, 0)
4651 && host_integerp (hi_index, 0)
4652 && (lo = tree_low_cst (lo_index, 0),
4653 hi = tree_low_cst (hi_index, 0),
4654 count = hi - lo + 1,
4655 (GET_CODE (target) != MEM
4656 || count <= 2
4657 || (host_integerp (TYPE_SIZE (elttype), 1)
4658 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4659 <= 40 * 8)))))
4660 {
4661 lo -= minelt; hi -= minelt;
4662 for (; lo <= hi; lo++)
4663 {
4664 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4665
4666 if (GET_CODE (target) == MEM
4667 && !MEM_KEEP_ALIAS_SET_P (target)
4668 && TYPE_NONALIASED_COMPONENT (type))
4669 {
4670 target = copy_rtx (target);
4671 MEM_KEEP_ALIAS_SET_P (target) = 1;
4672 }
4673
4674 store_constructor_field
4675 (target, bitsize, bitpos, mode, value, type, cleared,
4676 get_alias_set (elttype));
4677 }
4678 }
4679 else
4680 {
4681 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4682 loop_top = gen_label_rtx ();
4683 loop_end = gen_label_rtx ();
4684
4685 unsignedp = TREE_UNSIGNED (domain);
4686
4687 index = build_decl (VAR_DECL, NULL_TREE, domain);
4688
4689 index_r
4690 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4691 &unsignedp, 0));
4692 SET_DECL_RTL (index, index_r);
4693 if (TREE_CODE (value) == SAVE_EXPR
4694 && SAVE_EXPR_RTL (value) == 0)
4695 {
4696 /* Make sure value gets expanded once before the
4697 loop. */
4698 expand_expr (value, const0_rtx, VOIDmode, 0);
4699 emit_queue ();
4700 }
4701 store_expr (lo_index, index_r, 0);
4702 loop = expand_start_loop (0);
4703
4704 /* Assign value to element index. */
4705 position
4706 = convert (ssizetype,
4707 fold (build (MINUS_EXPR, TREE_TYPE (index),
4708 index, TYPE_MIN_VALUE (domain))));
4709 position = size_binop (MULT_EXPR, position,
4710 convert (ssizetype,
4711 TYPE_SIZE_UNIT (elttype)));
4712
4713 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4714 xtarget = offset_address (target, pos_rtx,
4715 highest_pow2_factor (position));
4716 xtarget = adjust_address (xtarget, mode, 0);
4717 if (TREE_CODE (value) == CONSTRUCTOR)
4718 store_constructor (value, xtarget, cleared,
4719 bitsize / BITS_PER_UNIT);
4720 else
4721 store_expr (value, xtarget, 0);
4722
4723 expand_exit_loop_if_false (loop,
4724 build (LT_EXPR, integer_type_node,
4725 index, hi_index));
4726
4727 expand_increment (build (PREINCREMENT_EXPR,
4728 TREE_TYPE (index),
4729 index, integer_one_node), 0, 0);
4730 expand_end_loop ();
4731 emit_label (loop_end);
4732 }
4733 }
4734 else if ((index != 0 && ! host_integerp (index, 0))
4735 || ! host_integerp (TYPE_SIZE (elttype), 1))
4736 {
4737 tree position;
4738
4739 if (index == 0)
4740 index = ssize_int (1);
4741
4742 if (minelt)
4743 index = convert (ssizetype,
4744 fold (build (MINUS_EXPR, TREE_TYPE (index),
4745 index, TYPE_MIN_VALUE (domain))));
4746
4747 position = size_binop (MULT_EXPR, index,
4748 convert (ssizetype,
4749 TYPE_SIZE_UNIT (elttype)));
4750 xtarget = offset_address (target,
4751 expand_expr (position, 0, VOIDmode, 0),
4752 highest_pow2_factor (position));
4753 xtarget = adjust_address (xtarget, mode, 0);
4754 store_expr (value, xtarget, 0);
4755 }
4756 else
4757 {
4758 if (index != 0)
4759 bitpos = ((tree_low_cst (index, 0) - minelt)
4760 * tree_low_cst (TYPE_SIZE (elttype), 1));
4761 else
4762 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4763
4764 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4765 && TYPE_NONALIASED_COMPONENT (type))
4766 {
4767 target = copy_rtx (target);
4768 MEM_KEEP_ALIAS_SET_P (target) = 1;
4769 }
4770
4771 store_constructor_field (target, bitsize, bitpos, mode, value,
4772 type, cleared, get_alias_set (elttype));
4773
4774 }
4775 }
4776 }
4777
4778 /* Set constructor assignments. */
4779 else if (TREE_CODE (type) == SET_TYPE)
4780 {
4781 tree elt = CONSTRUCTOR_ELTS (exp);
4782 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4783 tree domain = TYPE_DOMAIN (type);
4784 tree domain_min, domain_max, bitlength;
4785
4786 /* The default implementation strategy is to extract the constant
4787 parts of the constructor, use that to initialize the target,
4788 and then "or" in whatever non-constant ranges we need in addition.
4789
4790 If a large set is all zero or all ones, it is
4791 probably better to set it using memset (if available) or bzero.
4792 Also, if a large set has just a single range, it may also be
4793 better to first clear the whole set (using bzero/memset) and
4794 then set the bits we want. */
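      /* Illustrative example (an added expository sketch, assuming
	 little-endian bit numbering): a constant range [2..4] in a 16-bit
	 set contributes the word 0x001c (bits 2, 3 and 4), which the
	 constant-initialization path below stores directly; a non-constant
	 range is instead handled by the __setbits library call further
	 down.  */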
4795
4796 /* Check for all zeros. */
4797 if (elt == NULL_TREE && size > 0)
4798 {
4799 if (!cleared)
4800 clear_storage (target, GEN_INT (size));
4801 return;
4802 }
4803
4804 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4805 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4806 bitlength = size_binop (PLUS_EXPR,
4807 size_diffop (domain_max, domain_min),
4808 ssize_int (1));
4809
4810 nbits = tree_low_cst (bitlength, 1);
4811
4812 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4813 are "complicated" (more than one range), initialize (the
4814 constant parts) by copying from a constant. */
4815 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4816 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4817 {
4818 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4819 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4820 char *bit_buffer = (char *) alloca (nbits);
4821 HOST_WIDE_INT word = 0;
4822 unsigned int bit_pos = 0;
4823 unsigned int ibit = 0;
4824 unsigned int offset = 0; /* In bytes from beginning of set. */
4825
4826 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4827 for (;;)
4828 {
4829 if (bit_buffer[ibit])
4830 {
4831 if (BYTES_BIG_ENDIAN)
4832 word |= (1 << (set_word_size - 1 - bit_pos));
4833 else
4834 word |= 1 << bit_pos;
4835 }
4836
4837 bit_pos++; ibit++;
4838 if (bit_pos >= set_word_size || ibit == nbits)
4839 {
4840 if (word != 0 || ! cleared)
4841 {
4842 rtx datum = GEN_INT (word);
4843 rtx to_rtx;
4844
4845 /* The assumption here is that it is safe to use
4846 XEXP if the set is multi-word, but not if
4847 it's single-word. */
4848 if (GET_CODE (target) == MEM)
4849 to_rtx = adjust_address (target, mode, offset);
4850 else if (offset == 0)
4851 to_rtx = target;
4852 else
4853 abort ();
4854 emit_move_insn (to_rtx, datum);
4855 }
4856
4857 if (ibit == nbits)
4858 break;
4859 word = 0;
4860 bit_pos = 0;
4861 offset += set_word_size / BITS_PER_UNIT;
4862 }
4863 }
4864 }
4865 else if (!cleared)
4866 /* Don't bother clearing storage if the set is all ones. */
4867 if (TREE_CHAIN (elt) != NULL_TREE
4868 || (TREE_PURPOSE (elt) == NULL_TREE
4869 ? nbits != 1
4870 : ( ! host_integerp (TREE_VALUE (elt), 0)
4871 || ! host_integerp (TREE_PURPOSE (elt), 0)
4872 || (tree_low_cst (TREE_VALUE (elt), 0)
4873 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4874 != (HOST_WIDE_INT) nbits))))
4875 clear_storage (target, expr_size (exp));
4876
4877 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4878 {
4879 /* Start of range of element or NULL. */
4880 tree startbit = TREE_PURPOSE (elt);
4881 /* End of range of element, or element value. */
4882 tree endbit = TREE_VALUE (elt);
4883 #ifdef TARGET_MEM_FUNCTIONS
4884 HOST_WIDE_INT startb, endb;
4885 #endif
4886 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4887
4888 bitlength_rtx = expand_expr (bitlength,
4889 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4890
4891 /* Handle non-range tuple element like [ expr ]. */
4892 if (startbit == NULL_TREE)
4893 {
4894 startbit = save_expr (endbit);
4895 endbit = startbit;
4896 }
4897
4898 startbit = convert (sizetype, startbit);
4899 endbit = convert (sizetype, endbit);
4900 if (! integer_zerop (domain_min))
4901 {
4902 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4903 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4904 }
4905 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4906 EXPAND_CONST_ADDRESS);
4907 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4908 EXPAND_CONST_ADDRESS);
4909
4910 if (REG_P (target))
4911 {
4912 targetx
4913 = assign_temp
4914 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4915 TYPE_QUAL_CONST)),
4916 0, 1, 1);
4917 emit_move_insn (targetx, target);
4918 }
4919
4920 else if (GET_CODE (target) == MEM)
4921 targetx = target;
4922 else
4923 abort ();
4924
4925 #ifdef TARGET_MEM_FUNCTIONS
4926 /* Optimization: If startbit and endbit are
4927 constants divisible by BITS_PER_UNIT,
4928 call memset instead. */
4929 if (TREE_CODE (startbit) == INTEGER_CST
4930 && TREE_CODE (endbit) == INTEGER_CST
4931 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4932 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4933 {
4934 emit_library_call (memset_libfunc, LCT_NORMAL,
4935 VOIDmode, 3,
4936 plus_constant (XEXP (targetx, 0),
4937 startb / BITS_PER_UNIT),
4938 Pmode,
4939 constm1_rtx, TYPE_MODE (integer_type_node),
4940 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4941 TYPE_MODE (sizetype));
4942 }
4943 else
4944 #endif
4945 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4946 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4947 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4948 startbit_rtx, TYPE_MODE (sizetype),
4949 endbit_rtx, TYPE_MODE (sizetype));
4950
4951 if (REG_P (target))
4952 emit_move_insn (target, targetx);
4953 }
4954 }
4955
4956 else
4957 abort ();
4958 }
4959
4960 /* Store the value of EXP (an expression tree)
4961 into a subfield of TARGET which has mode MODE and occupies
4962 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4963 If MODE is VOIDmode, it means that we are storing into a bit-field.
4964
4965 If VALUE_MODE is VOIDmode, return nothing in particular.
4966 UNSIGNEDP is not used in this case.
4967
4968 Otherwise, return an rtx for the value stored. This rtx
4969 has mode VALUE_MODE if that is convenient to do.
4970 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4971
4972 TYPE is the type of the underlying object,
4973
4974 ALIAS_SET is the alias set for the destination. This value will
4975 (in general) be different from that for TARGET, since TARGET is a
4976 reference to the containing structure. */
4977
4978 static rtx
4979 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
4980 alias_set)
4981 rtx target;
4982 HOST_WIDE_INT bitsize;
4983 HOST_WIDE_INT bitpos;
4984 enum machine_mode mode;
4985 tree exp;
4986 enum machine_mode value_mode;
4987 int unsignedp;
4988 tree type;
4989 int alias_set;
4990 {
4991 HOST_WIDE_INT width_mask = 0;
4992
4993 if (TREE_CODE (exp) == ERROR_MARK)
4994 return const0_rtx;
4995
4996 /* If we have nothing to store, do nothing unless the expression has
4997 side-effects. */
4998 if (bitsize == 0)
4999 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5000 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5001 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5002
5003 /* If we are storing into an unaligned field of an aligned union that is
5004 in a register, we may have the mode of TARGET being an integer mode but
5005 MODE == BLKmode. In that case, get an aligned object whose size and
5006 alignment are the same as TARGET and store TARGET into it (we can avoid
5007 the store if the field being stored is the entire width of TARGET). Then
5008 call ourselves recursively to store the field into a BLKmode version of
5009 that object. Finally, load from the object into TARGET. This is not
5010 very efficient in general, but should only be slightly more expensive
5011 than the otherwise-required unaligned accesses. Perhaps this can be
5012 cleaned up later. */
5013
5014 if (mode == BLKmode
5015 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5016 {
5017 rtx object
5018 = assign_temp
5019 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5020 0, 1, 1);
5021 rtx blk_object = adjust_address (object, BLKmode, 0);
5022
5023 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5024 emit_move_insn (object, target);
5025
5026 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5027 alias_set);
5028
5029 emit_move_insn (target, object);
5030
5031 /* We want to return the BLKmode version of the data. */
5032 return blk_object;
5033 }
5034
5035 if (GET_CODE (target) == CONCAT)
5036 {
5037 /* We're storing into a struct containing a single __complex. */
5038
5039 if (bitpos != 0)
5040 abort ();
5041 return store_expr (exp, target, 0);
5042 }
5043
5044 /* If the structure is in a register or if the component
5045 is a bit field, we cannot use addressing to access it.
5046 Use bit-field techniques or SUBREG to store in it. */
5047
5048 if (mode == VOIDmode
5049 || (mode != BLKmode && ! direct_store[(int) mode]
5050 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5051 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5052 || GET_CODE (target) == REG
5053 || GET_CODE (target) == SUBREG
5054 /* If the field isn't aligned enough to store as an ordinary memref,
5055 store it as a bit field. */
5056 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5057 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5058 || bitpos % GET_MODE_ALIGNMENT (mode)))
5059 /* If the RHS and field are a constant size and the size of the
5060 RHS isn't the same size as the bitfield, we must use bitfield
5061 operations. */
5062 || (bitsize >= 0
5063 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5064 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5065 {
5066 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5067
5068 /* If BITSIZE is narrower than the size of the type of EXP
5069 we will be narrowing TEMP. Normally, what's wanted are the
5070 low-order bits. However, if EXP's type is a record and this is a
5071 big-endian machine, we want the upper BITSIZE bits. */
5072 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5073 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5074 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5075 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5076 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5077 - bitsize),
5078 temp, 1);
5079
5080 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5081 MODE. */
5082 if (mode != VOIDmode && mode != BLKmode
5083 && mode != TYPE_MODE (TREE_TYPE (exp)))
5084 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5085
5086 /* If the modes of TARGET and TEMP are both BLKmode, both
5087 must be in memory and BITPOS must be aligned on a byte
5088 boundary. If so, we simply do a block copy. */
5089 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5090 {
5091 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5092 || bitpos % BITS_PER_UNIT != 0)
5093 abort ();
5094
5095 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5096 emit_block_move (target, temp,
5097 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5098 / BITS_PER_UNIT));
5099
5100 return value_mode == VOIDmode ? const0_rtx : target;
5101 }
5102
5103 /* Store the value in the bitfield. */
5104 store_bit_field (target, bitsize, bitpos, mode, temp,
5105 int_size_in_bytes (type));
5106
5107 if (value_mode != VOIDmode)
5108 {
5109 /* The caller wants an rtx for the value.
5110 If possible, avoid refetching from the bitfield itself. */
5111 if (width_mask != 0
5112 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5113 {
5114 tree count;
5115 enum machine_mode tmode;
5116
5117 if (unsignedp)
5118 return expand_and (temp,
5119 GEN_INT
5120 (trunc_int_for_mode
5121 (width_mask,
5122 GET_MODE (temp) == VOIDmode
5123 ? value_mode
5124 : GET_MODE (temp))), NULL_RTX);
5125
5126 tmode = GET_MODE (temp);
5127 if (tmode == VOIDmode)
5128 tmode = value_mode;
5129 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5130 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5131 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5132 }
5133
5134 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5135 NULL_RTX, value_mode, VOIDmode,
5136 int_size_in_bytes (type));
5137 }
5138 return const0_rtx;
5139 }
5140 else
5141 {
5142 rtx addr = XEXP (target, 0);
5143 rtx to_rtx = target;
5144
5145 /* If a value is wanted, it must be the lhs;
5146 so make the address stable for multiple use. */
5147
5148 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5149 && ! CONSTANT_ADDRESS_P (addr)
5150 /* A frame-pointer reference is already stable. */
5151 && ! (GET_CODE (addr) == PLUS
5152 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5153 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5154 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5155 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5156
5157 /* Now build a reference to just the desired component. */
5158
5159 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5160
5161 if (to_rtx == target)
5162 to_rtx = copy_rtx (to_rtx);
5163
5164 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5165 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5166 set_mem_alias_set (to_rtx, alias_set);
5167
5168 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5169 }
5170 }
5171 \f
5172 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5173 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5174 codes and find the ultimate containing object, which we return.
5175
5176 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5177 bit position, and *PUNSIGNEDP to the signedness of the field.
5178 If the position of the field is variable, we store a tree
5179 giving the variable offset (in units) in *POFFSET.
5180 This offset is in addition to the bit position.
5181 If the position is not variable, we store 0 in *POFFSET.
5182
5183 If any of the extraction expressions is volatile,
5184 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5185
5186 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5187 is a mode that can be used to access the field. In that case, *PBITSIZE
5188 is redundant.
5189
5190 If the field describes a variable-sized object, *PMODE is set to
5191 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5192 this case, but the address of the object can be found. */
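/* Illustrative example (an added expository sketch): for the reference
   "s.a[3].f", where F is a 5-bit bit-field, this function peels off the
   COMPONENT_REFs and the ARRAY_REF, returns the decl for S, sets *PBITSIZE
   to 5, folds the constant array index and field positions into *PBITPOS,
   stores 0 in *POFFSET, and sets *PMODE to VOIDmode because F is a
   bit-field.  */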
5193
5194 tree
5195 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5196 punsignedp, pvolatilep)
5197 tree exp;
5198 HOST_WIDE_INT *pbitsize;
5199 HOST_WIDE_INT *pbitpos;
5200 tree *poffset;
5201 enum machine_mode *pmode;
5202 int *punsignedp;
5203 int *pvolatilep;
5204 {
5205 tree size_tree = 0;
5206 enum machine_mode mode = VOIDmode;
5207 tree offset = size_zero_node;
5208 tree bit_offset = bitsize_zero_node;
5209 tree placeholder_ptr = 0;
5210 tree tem;
5211
5212 /* First get the mode, signedness, and size. We do this from just the
5213 outermost expression. */
5214 if (TREE_CODE (exp) == COMPONENT_REF)
5215 {
5216 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5217 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5218 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5219
5220 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5221 }
5222 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5223 {
5224 size_tree = TREE_OPERAND (exp, 1);
5225 *punsignedp = TREE_UNSIGNED (exp);
5226 }
5227 else
5228 {
5229 mode = TYPE_MODE (TREE_TYPE (exp));
5230 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5231
5232 if (mode == BLKmode)
5233 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5234 else
5235 *pbitsize = GET_MODE_BITSIZE (mode);
5236 }
5237
5238 if (size_tree != 0)
5239 {
5240 if (! host_integerp (size_tree, 1))
5241 mode = BLKmode, *pbitsize = -1;
5242 else
5243 *pbitsize = tree_low_cst (size_tree, 1);
5244 }
5245
5246 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5247 and find the ultimate containing object. */
5248 while (1)
5249 {
5250 if (TREE_CODE (exp) == BIT_FIELD_REF)
5251 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5252 else if (TREE_CODE (exp) == COMPONENT_REF)
5253 {
5254 tree field = TREE_OPERAND (exp, 1);
5255 tree this_offset = DECL_FIELD_OFFSET (field);
5256
5257 /* If this field hasn't been filled in yet, don't go
5258 past it. This should only happen when folding expressions
5259 made during type construction. */
5260 if (this_offset == 0)
5261 break;
5262 else if (! TREE_CONSTANT (this_offset)
5263 && contains_placeholder_p (this_offset))
5264 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5265
5266 offset = size_binop (PLUS_EXPR, offset, this_offset);
5267 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5268 DECL_FIELD_BIT_OFFSET (field));
5269
5270 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5271 }
5272
5273 else if (TREE_CODE (exp) == ARRAY_REF
5274 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5275 {
5276 tree index = TREE_OPERAND (exp, 1);
5277 tree array = TREE_OPERAND (exp, 0);
5278 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5279 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5280 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5281
5282 /* We assume all arrays have sizes that are a multiple of a byte.
5283 First subtract the lower bound, if any, in the type of the
5284 index, then convert to sizetype and multiply by the size of the
5285 array element. */
5286 if (low_bound != 0 && ! integer_zerop (low_bound))
5287 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5288 index, low_bound));
5289
5290 /* If the index has a self-referential type, pass it to a
5291 WITH_RECORD_EXPR; if the component size does too, pass our
5292 component to one. */
5293 if (! TREE_CONSTANT (index)
5294 && contains_placeholder_p (index))
5295 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5296 if (! TREE_CONSTANT (unit_size)
5297 && contains_placeholder_p (unit_size))
5298 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5299
5300 offset = size_binop (PLUS_EXPR, offset,
5301 size_binop (MULT_EXPR,
5302 convert (sizetype, index),
5303 unit_size));
5304 }
5305
5306 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5307 {
5308 tree new = find_placeholder (exp, &placeholder_ptr);
5309
5310 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5311 We might have been called from tree optimization where we
5312 haven't set up an object yet. */
5313 if (new == 0)
5314 break;
5315 else
5316 exp = new;
5317
5318 continue;
5319 }
5320 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5321 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5322 && ! ((TREE_CODE (exp) == NOP_EXPR
5323 || TREE_CODE (exp) == CONVERT_EXPR)
5324 && (TYPE_MODE (TREE_TYPE (exp))
5325 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5326 break;
5327
5328 /* If any reference in the chain is volatile, the effect is volatile. */
5329 if (TREE_THIS_VOLATILE (exp))
5330 *pvolatilep = 1;
5331
5332 exp = TREE_OPERAND (exp, 0);
5333 }
5334
5335 /* If OFFSET is constant, see if we can return the whole thing as a
5336 constant bit position. Otherwise, split it up. */
5337 if (host_integerp (offset, 0)
5338 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5339 bitsize_unit_node))
5340 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5341 && host_integerp (tem, 0))
5342 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5343 else
5344 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5345
5346 *pmode = mode;
5347 return exp;
5348 }
5349
5350 /* Return 1 if T is an expression that get_inner_reference handles. */
5351
5352 int
5353 handled_component_p (t)
5354 tree t;
5355 {
5356 switch (TREE_CODE (t))
5357 {
5358 case BIT_FIELD_REF:
5359 case COMPONENT_REF:
5360 case ARRAY_REF:
5361 case ARRAY_RANGE_REF:
5362 case NON_LVALUE_EXPR:
5363 case VIEW_CONVERT_EXPR:
5364 return 1;
5365
5366 case NOP_EXPR:
5367 case CONVERT_EXPR:
5368 return (TYPE_MODE (TREE_TYPE (t))
5369 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5370
5371 default:
5372 return 0;
5373 }
5374 }
5375 \f
5376 /* Given an rtx VALUE that may contain additions and multiplications, return
5377 an equivalent value that just refers to a register, memory, or constant.
5378 This is done by generating instructions to perform the arithmetic and
5379 returning a pseudo-register containing the value.
5380
5381 The returned value may be a REG, SUBREG, MEM or constant. */
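/* For example, if VALUE is (plus:SI (reg:SI N) (const_int 4)), where N is
some pseudo register, and TARGET is zero, the code below emits an add insn
via expand_binop and returns a pseudo register holding the sum; a VALUE that
is already a REG, MEM or constant is returned unchanged.  */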
5382
5383 rtx
5384 force_operand (value, target)
5385 rtx value, target;
5386 {
5387 optab binoptab = 0;
5388 /* Use a temporary to force order of execution of calls to
5389 `force_operand'. */
5390 rtx tmp;
5391 rtx op2;
5392 /* Use subtarget as the target for operand 0 of a binary operation. */
5393 rtx subtarget = get_subtarget (target);
5394
5395 /* Check for a PIC address load. */
5396 if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5397 && XEXP (value, 0) == pic_offset_table_rtx
5398 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5399 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5400 || GET_CODE (XEXP (value, 1)) == CONST))
5401 {
5402 if (!subtarget)
5403 subtarget = gen_reg_rtx (GET_MODE (value));
5404 emit_move_insn (subtarget, value);
5405 return subtarget;
5406 }
5407
5408 if (GET_CODE (value) == PLUS)
5409 binoptab = add_optab;
5410 else if (GET_CODE (value) == MINUS)
5411 binoptab = sub_optab;
5412 else if (GET_CODE (value) == MULT)
5413 {
5414 op2 = XEXP (value, 1);
5415 if (!CONSTANT_P (op2)
5416 && !(GET_CODE (op2) == REG && op2 != subtarget))
5417 subtarget = 0;
5418 tmp = force_operand (XEXP (value, 0), subtarget);
5419 return expand_mult (GET_MODE (value), tmp,
5420 force_operand (op2, NULL_RTX),
5421 target, 1);
5422 }
5423
5424 if (binoptab)
5425 {
5426 op2 = XEXP (value, 1);
5427 if (!CONSTANT_P (op2)
5428 && !(GET_CODE (op2) == REG && op2 != subtarget))
5429 subtarget = 0;
5430 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5431 {
5432 binoptab = add_optab;
5433 op2 = negate_rtx (GET_MODE (value), op2);
5434 }
5435
5436 /* Check for an addition with OP2 a constant integer and our first
5437 operand a PLUS of a virtual register and something else. In that
5438 case, we want to emit the sum of the virtual register and the
5439 constant first and then add the other value. This allows virtual
5440 register instantiation to simply modify the constant rather than
5441 creating another one around this addition. */
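/* E.g. for (plus (plus VR (reg X)) (const_int 8)), where VR is one of the
virtual registers, we emit VR + 8 first and then add (reg X), instead of
materializing the inner PLUS as written.  */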
5442 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5443 && GET_CODE (XEXP (value, 0)) == PLUS
5444 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5445 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5446 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5447 {
5448 rtx temp = expand_binop (GET_MODE (value), binoptab,
5449 XEXP (XEXP (value, 0), 0), op2,
5450 subtarget, 0, OPTAB_LIB_WIDEN);
5451 return expand_binop (GET_MODE (value), binoptab, temp,
5452 force_operand (XEXP (XEXP (value, 0), 1), 0),
5453 target, 0, OPTAB_LIB_WIDEN);
5454 }
5455
5456 tmp = force_operand (XEXP (value, 0), subtarget);
5457 return expand_binop (GET_MODE (value), binoptab, tmp,
5458 force_operand (op2, NULL_RTX),
5459 target, 0, OPTAB_LIB_WIDEN);
5460 /* We give UNSIGNEDP = 0 to expand_binop
5461 because the only operations we are expanding here are signed ones. */
5462 }
5463
5464 #ifdef INSN_SCHEDULING
5465 /* On machines that have insn scheduling, we want all memory references to be
5466 explicit, so we need to deal with such paradoxical SUBREGs. */
5467 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5468 && (GET_MODE_SIZE (GET_MODE (value))
5469 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5470 value
5471 = simplify_gen_subreg (GET_MODE (value),
5472 force_reg (GET_MODE (SUBREG_REG (value)),
5473 force_operand (SUBREG_REG (value),
5474 NULL_RTX)),
5475 GET_MODE (SUBREG_REG (value)),
5476 SUBREG_BYTE (value));
5477 #endif
5478
5479 return value;
5480 }
5481 \f
5482 /* Subroutine of expand_expr: return nonzero iff there is no way that
5483 EXP can reference X, which is being modified. TOP_P is nonzero if this
5484 call is going to be used to determine whether we need a temporary
5485 for EXP, as opposed to a recursive call to this function.
5486
5487 It is always safe for this routine to return zero since it merely
5488 searches for optimization opportunities. */
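/* For instance, if X is a pseudo register and EXP is a VAR_DECL whose
DECL_RTL is a different register, the two cannot overlap and we return 1;
a CALL_EXPR, which may clobber memory and hard registers, makes us return
0 whenever X is a MEM or a hard register.  */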
5489
5490 int
5491 safe_from_p (x, exp, top_p)
5492 rtx x;
5493 tree exp;
5494 int top_p;
5495 {
5496 rtx exp_rtl = 0;
5497 int i, nops;
5498 static tree save_expr_list;
5499
5500 if (x == 0
5501 /* If EXP has varying size, we MUST use a target since we currently
5502 have no way of allocating temporaries of variable size
5503 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5504 So we assume here that something at a higher level has prevented a
5505 clash. This is somewhat bogus, but the best we can do. Only
5506 do this when X is BLKmode and when we are at the top level. */
5507 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5508 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5509 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5510 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5511 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5512 != INTEGER_CST)
5513 && GET_MODE (x) == BLKmode)
5514 /* If X is in the outgoing argument area, it is always safe. */
5515 || (GET_CODE (x) == MEM
5516 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5517 || (GET_CODE (XEXP (x, 0)) == PLUS
5518 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5519 return 1;
5520
5521 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5522 find the underlying pseudo. */
5523 if (GET_CODE (x) == SUBREG)
5524 {
5525 x = SUBREG_REG (x);
5526 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5527 return 0;
5528 }
5529
5530 /* A SAVE_EXPR might appear many times in the expression passed to the
5531 top-level safe_from_p call, and if it has a complex subexpression,
5532 examining it multiple times could result in a combinatorial explosion.
5533 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5534 with optimization took about 28 minutes to compile -- even though it was
5535 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5536 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5537 we have processed. Note that the only test of top_p was above. */
5538
5539 if (top_p)
5540 {
5541 int rtn;
5542 tree t;
5543
5544 save_expr_list = 0;
5545
5546 rtn = safe_from_p (x, exp, 0);
5547
5548 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5549 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5550
5551 return rtn;
5552 }
5553
5554 /* Now look at our tree code and possibly recurse. */
5555 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5556 {
5557 case 'd':
5558 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5559 break;
5560
5561 case 'c':
5562 return 1;
5563
5564 case 'x':
5565 if (TREE_CODE (exp) == TREE_LIST)
5566 return ((TREE_VALUE (exp) == 0
5567 || safe_from_p (x, TREE_VALUE (exp), 0))
5568 && (TREE_CHAIN (exp) == 0
5569 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5570 else if (TREE_CODE (exp) == ERROR_MARK)
5571 return 1; /* An already-visited SAVE_EXPR? */
5572 else
5573 return 0;
5574
5575 case '1':
5576 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5577
5578 case '2':
5579 case '<':
5580 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5581 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5582
5583 case 'e':
5584 case 'r':
5585 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5586 the expression. If it is set, we conflict iff we are that rtx or
5587 both are in memory. Otherwise, we check all operands of the
5588 expression recursively. */
5589
5590 switch (TREE_CODE (exp))
5591 {
5592 case ADDR_EXPR:
5593 /* If the operand is static or we are static, we can't conflict.
5594 Likewise if we don't conflict with the operand at all. */
5595 if (staticp (TREE_OPERAND (exp, 0))
5596 || TREE_STATIC (exp)
5597 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5598 return 1;
5599
5600 /* Otherwise, the only way this can conflict is if we are taking
5601 the address of a DECL whose address is part of X, which is
5602 very rare. */
5603 exp = TREE_OPERAND (exp, 0);
5604 if (DECL_P (exp))
5605 {
5606 if (!DECL_RTL_SET_P (exp)
5607 || GET_CODE (DECL_RTL (exp)) != MEM)
5608 return 0;
5609 else
5610 exp_rtl = XEXP (DECL_RTL (exp), 0);
5611 }
5612 break;
5613
5614 case INDIRECT_REF:
5615 if (GET_CODE (x) == MEM
5616 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5617 get_alias_set (exp)))
5618 return 0;
5619 break;
5620
5621 case CALL_EXPR:
5622 /* Assume that the call will clobber all hard registers and
5623 all of memory. */
5624 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5625 || GET_CODE (x) == MEM)
5626 return 0;
5627 break;
5628
5629 case RTL_EXPR:
5630 /* If a sequence exists, we would have to scan every instruction
5631 in the sequence to see if it was safe. This is probably not
5632 worthwhile. */
5633 if (RTL_EXPR_SEQUENCE (exp))
5634 return 0;
5635
5636 exp_rtl = RTL_EXPR_RTL (exp);
5637 break;
5638
5639 case WITH_CLEANUP_EXPR:
5640 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5641 break;
5642
5643 case CLEANUP_POINT_EXPR:
5644 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5645
5646 case SAVE_EXPR:
5647 exp_rtl = SAVE_EXPR_RTL (exp);
5648 if (exp_rtl)
5649 break;
5650
5651 /* If we've already scanned this, don't do it again. Otherwise,
5652 mark it as scanned and record it so that the flag can be cleared
5653 once we're done. */
5654 if (TREE_PRIVATE (exp))
5655 return 1;
5656
5657 TREE_PRIVATE (exp) = 1;
5658 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5659 {
5660 TREE_PRIVATE (exp) = 0;
5661 return 0;
5662 }
5663
5664 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5665 return 1;
5666
5667 case BIND_EXPR:
5668 /* The only operand we look at is operand 1. The rest aren't
5669 part of the expression. */
5670 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5671
5672 case METHOD_CALL_EXPR:
5673 /* This takes an rtx argument, but shouldn't appear here. */
5674 abort ();
5675
5676 default:
5677 break;
5678 }
5679
5680 /* If we have an rtx, we do not need to scan our operands. */
5681 if (exp_rtl)
5682 break;
5683
5684 nops = first_rtl_op (TREE_CODE (exp));
5685 for (i = 0; i < nops; i++)
5686 if (TREE_OPERAND (exp, i) != 0
5687 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5688 return 0;
5689
5690 /* If this is a language-specific tree code, it may require
5691 special handling. */
5692 if ((unsigned int) TREE_CODE (exp)
5693 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5694 && !(*lang_hooks.safe_from_p) (x, exp))
5695 return 0;
5696 }
5697
5698 /* If we have an rtl, find any enclosed object. Then see if we conflict
5699 with it. */
5700 if (exp_rtl)
5701 {
5702 if (GET_CODE (exp_rtl) == SUBREG)
5703 {
5704 exp_rtl = SUBREG_REG (exp_rtl);
5705 if (GET_CODE (exp_rtl) == REG
5706 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5707 return 0;
5708 }
5709
5710 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5711 are memory and they conflict. */
5712 return ! (rtx_equal_p (x, exp_rtl)
5713 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5714 && true_dependence (exp_rtl, GET_MODE (x), x,
5715 rtx_addr_varies_p)));
5716 }
5717
5718 /* If we reach here, it is safe. */
5719 return 1;
5720 }
5721
5722 /* Subroutine of expand_expr: return rtx if EXP is a
5723 variable or parameter; else return 0. */
5724
5725 static rtx
5726 var_rtx (exp)
5727 tree exp;
5728 {
5729 STRIP_NOPS (exp);
5730 switch (TREE_CODE (exp))
5731 {
5732 case PARM_DECL:
5733 case VAR_DECL:
5734 return DECL_RTL (exp);
5735 default:
5736 return 0;
5737 }
5738 }
5739
5740 #ifdef MAX_INTEGER_COMPUTATION_MODE
5741
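/* Verify that EXP does not involve integer computations wider than
MAX_INTEGER_COMPUTATION_MODE; issue an internal error if it does.  */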
5742 void
5743 check_max_integer_computation_mode (exp)
5744 tree exp;
5745 {
5746 enum tree_code code;
5747 enum machine_mode mode;
5748
5749 /* Strip any NOPs that don't change the mode. */
5750 STRIP_NOPS (exp);
5751 code = TREE_CODE (exp);
5752
5753 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5754 if (code == NOP_EXPR
5755 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5756 return;
5757
5758 /* First check the type of the overall operation. We need only look at
5759 unary, binary and relational operations. */
5760 if (TREE_CODE_CLASS (code) == '1'
5761 || TREE_CODE_CLASS (code) == '2'
5762 || TREE_CODE_CLASS (code) == '<')
5763 {
5764 mode = TYPE_MODE (TREE_TYPE (exp));
5765 if (GET_MODE_CLASS (mode) == MODE_INT
5766 && mode > MAX_INTEGER_COMPUTATION_MODE)
5767 internal_error ("unsupported wide integer operation");
5768 }
5769
5770 /* Check operand of a unary op. */
5771 if (TREE_CODE_CLASS (code) == '1')
5772 {
5773 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5774 if (GET_MODE_CLASS (mode) == MODE_INT
5775 && mode > MAX_INTEGER_COMPUTATION_MODE)
5776 internal_error ("unsupported wide integer operation");
5777 }
5778
5779 /* Check operands of a binary/comparison op. */
5780 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5781 {
5782 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5783 if (GET_MODE_CLASS (mode) == MODE_INT
5784 && mode > MAX_INTEGER_COMPUTATION_MODE)
5785 internal_error ("unsupported wide integer operation");
5786
5787 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5788 if (GET_MODE_CLASS (mode) == MODE_INT
5789 && mode > MAX_INTEGER_COMPUTATION_MODE)
5790 internal_error ("unsupported wide integer operation");
5791 }
5792 }
5793 #endif
5794 \f
5795 /* Return the highest power of two that EXP is known to be a multiple of.
5796 This is used in updating alignment of MEMs in array references. */
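/* For example, for an EXP of the form I * 4 + 16, where nothing is known
about I, this returns 4: the unknown factor I contributes 1, the MULT_EXPR
multiplies that by 4, the constant 16 contributes 16, and the PLUS_EXPR
takes the minimum of 4 and 16.  */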
5797
5798 static HOST_WIDE_INT
5799 highest_pow2_factor (exp)
5800 tree exp;
5801 {
5802 HOST_WIDE_INT c0, c1;
5803
5804 switch (TREE_CODE (exp))
5805 {
5806 case INTEGER_CST:
5807 /* If the integer is expressible in a HOST_WIDE_INT, we can find the
5808 lowest bit that's a one. If the result is zero, return
5809 BIGGEST_ALIGNMENT. We need to handle this case since we can find it
5810 in a COND_EXPR, a MIN_EXPR, or a MAX_EXPR. If the constant overflows,
5811 we have an erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5812 later ICE. */
5813 if (TREE_CONSTANT_OVERFLOW (exp)
5814 || integer_zerop (exp))
5815 return BIGGEST_ALIGNMENT;
5816 else if (host_integerp (exp, 0))
5817 {
5818 c0 = tree_low_cst (exp, 0);
5819 c0 = c0 < 0 ? - c0 : c0;
5820 return c0 & -c0;
5821 }
5822 break;
5823
5824 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5825 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5826 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5827 return MIN (c0, c1);
5828
5829 case MULT_EXPR:
5830 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5831 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5832 return c0 * c1;
5833
5834 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5835 case CEIL_DIV_EXPR:
5836 if (integer_pow2p (TREE_OPERAND (exp, 1))
5837 && host_integerp (TREE_OPERAND (exp, 1), 1))
5838 {
5839 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5840 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5841 return MAX (1, c0 / c1);
5842 }
5843 break;
5844
5845 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5846 case SAVE_EXPR: case WITH_RECORD_EXPR:
5847 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5848
5849 case COMPOUND_EXPR:
5850 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5851
5852 case COND_EXPR:
5853 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5854 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5855 return MIN (c0, c1);
5856
5857 default:
5858 break;
5859 }
5860
5861 return 1;
5862 }
5863 \f
5864 /* Return an object on the placeholder list that matches EXP, a
5865 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5866 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5867 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5868 points to a location giving the place in the placeholder list at which
5869 to start searching (zero means the start of the list); on success, a
5870 pointer to the list entry where the object was found is stored there. */
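/* So for a PLACEHOLDER_EXPR of type T, an object of type T on the list is
returned directly, while an object whose type is a pointer to T is returned
wrapped in a newly built INDIRECT_REF.  */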
5871
5872 tree
5873 find_placeholder (exp, plist)
5874 tree exp;
5875 tree *plist;
5876 {
5877 tree type = TREE_TYPE (exp);
5878 tree placeholder_expr;
5879
5880 for (placeholder_expr
5881 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5882 placeholder_expr != 0;
5883 placeholder_expr = TREE_CHAIN (placeholder_expr))
5884 {
5885 tree need_type = TYPE_MAIN_VARIANT (type);
5886 tree elt;
5887
5888 /* Find the outermost reference that is of the type we want. If none,
5889 see if any object has a type that is a pointer to the type we
5890 want. */
5891 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5892 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5893 || TREE_CODE (elt) == COND_EXPR)
5894 ? TREE_OPERAND (elt, 1)
5895 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5896 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5897 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5898 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5899 ? TREE_OPERAND (elt, 0) : 0))
5900 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5901 {
5902 if (plist)
5903 *plist = placeholder_expr;
5904 return elt;
5905 }
5906
5907 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5908 elt
5909 = ((TREE_CODE (elt) == COMPOUND_EXPR
5910 || TREE_CODE (elt) == COND_EXPR)
5911 ? TREE_OPERAND (elt, 1)
5912 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5913 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5914 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5915 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5916 ? TREE_OPERAND (elt, 0) : 0))
5917 if (POINTER_TYPE_P (TREE_TYPE (elt))
5918 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5919 == need_type))
5920 {
5921 if (plist)
5922 *plist = placeholder_expr;
5923 return build1 (INDIRECT_REF, need_type, elt);
5924 }
5925 }
5926
5927 return 0;
5928 }
5929 \f
5930 /* expand_expr: generate code for computing expression EXP.
5931 An rtx for the computed value is returned. The value is never null.
5932 In the case of a void EXP, const0_rtx is returned.
5933
5934 The value may be stored in TARGET if TARGET is nonzero.
5935 TARGET is just a suggestion; callers must assume that
5936 the rtx returned may not be the same as TARGET.
5937
5938 If TARGET is CONST0_RTX, it means that the value will be ignored.
5939
5940 If TMODE is not VOIDmode, it suggests generating the
5941 result in mode TMODE. But this is done only when convenient.
5942 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5943 TMODE is just a suggestion; callers must assume that
5944 the rtx returned may not have mode TMODE.
5945
5946 Note that TARGET may have neither TMODE nor MODE. In that case, it
5947 probably will not be used.
5948
5949 If MODIFIER is EXPAND_SUM then when EXP is an addition
5950 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5951 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5952 products as above, or REG or MEM, or constant.
5953 Ordinarily in such cases we would output mul or add instructions
5954 and then return a pseudo reg containing the sum.
5955
5956 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5957 it also marks a label as absolutely required (it can't be dead).
5958 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5959 This is used for outputting expressions used in initializers.
5960
5961 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5962 with a constant address even if that address is not normally legitimate.
5963 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
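/* A typical call is expand_expr (exp, NULL_RTX, VOIDmode, 0), which computes
EXP into whatever rtx is most convenient, in EXP's natural mode; passing
const0_rtx as TARGET asks only for EXP's side effects.  */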
5964
5965 rtx
5966 expand_expr (exp, target, tmode, modifier)
5967 tree exp;
5968 rtx target;
5969 enum machine_mode tmode;
5970 enum expand_modifier modifier;
5971 {
5972 rtx op0, op1, temp;
5973 tree type = TREE_TYPE (exp);
5974 int unsignedp = TREE_UNSIGNED (type);
5975 enum machine_mode mode;
5976 enum tree_code code = TREE_CODE (exp);
5977 optab this_optab;
5978 rtx subtarget, original_target;
5979 int ignore;
5980 tree context;
5981
5982 /* Handle ERROR_MARK before anybody tries to access its type. */
5983 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5984 {
5985 op0 = CONST0_RTX (tmode);
5986 if (op0 != 0)
5987 return op0;
5988 return const0_rtx;
5989 }
5990
5991 mode = TYPE_MODE (type);
5992 /* Use subtarget as the target for operand 0 of a binary operation. */
5993 subtarget = get_subtarget (target);
5994 original_target = target;
5995 ignore = (target == const0_rtx
5996 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5997 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5998 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
5999 && TREE_CODE (type) == VOID_TYPE));
6000
6001 /* If we are going to ignore this result, we need only do something
6002 if there is a side-effect somewhere in the expression. If there
6003 is, short-circuit the most common cases here. Note that we must
6004 not call expand_expr with anything but const0_rtx in case this
6005 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6006
6007 if (ignore)
6008 {
6009 if (! TREE_SIDE_EFFECTS (exp))
6010 return const0_rtx;
6011
6012 /* Ensure we reference a volatile object even if value is ignored, but
6013 don't do this if all we are doing is taking its address. */
6014 if (TREE_THIS_VOLATILE (exp)
6015 && TREE_CODE (exp) != FUNCTION_DECL
6016 && mode != VOIDmode && mode != BLKmode
6017 && modifier != EXPAND_CONST_ADDRESS)
6018 {
6019 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6020 if (GET_CODE (temp) == MEM)
6021 temp = copy_to_reg (temp);
6022 return const0_rtx;
6023 }
6024
6025 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6026 || code == INDIRECT_REF || code == BUFFER_REF)
6027 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6028 modifier);
6029
6030 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6031 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6032 {
6033 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6034 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6035 return const0_rtx;
6036 }
6037 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6038 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6039 /* If the second operand has no side effects, just evaluate
6040 the first. */
6041 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6042 modifier);
6043 else if (code == BIT_FIELD_REF)
6044 {
6045 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6046 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6047 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6048 return const0_rtx;
6049 }
6050
6051 target = 0;
6052 }
6053
6054 #ifdef MAX_INTEGER_COMPUTATION_MODE
6055 /* Only check stuff here if the mode we want is different from the mode
6056 of the expression; if it's the same, check_max_integer_computation_mode
6057 will handle it. Do we really need to check this stuff at all? */
6058
6059 if (target
6060 && GET_MODE (target) != mode
6061 && TREE_CODE (exp) != INTEGER_CST
6062 && TREE_CODE (exp) != PARM_DECL
6063 && TREE_CODE (exp) != ARRAY_REF
6064 && TREE_CODE (exp) != ARRAY_RANGE_REF
6065 && TREE_CODE (exp) != COMPONENT_REF
6066 && TREE_CODE (exp) != BIT_FIELD_REF
6067 && TREE_CODE (exp) != INDIRECT_REF
6068 && TREE_CODE (exp) != CALL_EXPR
6069 && TREE_CODE (exp) != VAR_DECL
6070 && TREE_CODE (exp) != RTL_EXPR)
6071 {
6072 enum machine_mode mode = GET_MODE (target);
6073
6074 if (GET_MODE_CLASS (mode) == MODE_INT
6075 && mode > MAX_INTEGER_COMPUTATION_MODE)
6076 internal_error ("unsupported wide integer operation");
6077 }
6078
6079 if (tmode != mode
6080 && TREE_CODE (exp) != INTEGER_CST
6081 && TREE_CODE (exp) != PARM_DECL
6082 && TREE_CODE (exp) != ARRAY_REF
6083 && TREE_CODE (exp) != ARRAY_RANGE_REF
6084 && TREE_CODE (exp) != COMPONENT_REF
6085 && TREE_CODE (exp) != BIT_FIELD_REF
6086 && TREE_CODE (exp) != INDIRECT_REF
6087 && TREE_CODE (exp) != VAR_DECL
6088 && TREE_CODE (exp) != CALL_EXPR
6089 && TREE_CODE (exp) != RTL_EXPR
6090 && GET_MODE_CLASS (tmode) == MODE_INT
6091 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6092 internal_error ("unsupported wide integer operation");
6093
6094 check_max_integer_computation_mode (exp);
6095 #endif
6096
6097 /* If we will do cse, generate all results into pseudo registers
6098 since 1) that allows cse to find more things
6099 and 2) otherwise cse could produce an insn the machine
6100 cannot support. An exception is a CONSTRUCTOR into a multi-word
6101 MEM: that's much more likely to be most efficient into the MEM. */
6102
6103 if (! cse_not_expected && mode != BLKmode && target
6104 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6105 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6106 target = subtarget;
6107
6108 switch (code)
6109 {
6110 case LABEL_DECL:
6111 {
6112 tree function = decl_function_context (exp);
6113 /* Handle using a label in a containing function. */
6114 if (function != current_function_decl
6115 && function != inline_function_decl && function != 0)
6116 {
6117 struct function *p = find_function_data (function);
6118 p->expr->x_forced_labels
6119 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6120 p->expr->x_forced_labels);
6121 }
6122 else
6123 {
6124 if (modifier == EXPAND_INITIALIZER)
6125 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6126 label_rtx (exp),
6127 forced_labels);
6128 }
6129
6130 temp = gen_rtx_MEM (FUNCTION_MODE,
6131 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6132 if (function != current_function_decl
6133 && function != inline_function_decl && function != 0)
6134 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6135 return temp;
6136 }
6137
6138 case PARM_DECL:
6139 if (DECL_RTL (exp) == 0)
6140 {
6141 error_with_decl (exp, "prior parameter's size depends on `%s'");
6142 return CONST0_RTX (mode);
6143 }
6144
6145 /* ... fall through ... */
6146
6147 case VAR_DECL:
6148 /* If a static var's type was incomplete when the decl was written,
6149 but the type is complete now, lay out the decl now. */
6150 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6151 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6152 {
6153 rtx value = DECL_RTL_IF_SET (exp);
6154
6155 layout_decl (exp, 0);
6156
6157 /* If the RTL was already set, update its mode and memory
6158 attributes. */
6159 if (value != 0)
6160 {
6161 PUT_MODE (value, DECL_MODE (exp));
6162 SET_DECL_RTL (exp, 0);
6163 set_mem_attributes (value, exp, 1);
6164 SET_DECL_RTL (exp, value);
6165 }
6166 }
6167
6168 /* ... fall through ... */
6169
6170 case FUNCTION_DECL:
6171 case RESULT_DECL:
6172 if (DECL_RTL (exp) == 0)
6173 abort ();
6174
6175 /* Ensure the variable is marked as used even if it doesn't go through
6176 a parser. If it hasn't been used yet, write out an external
6177 definition. */
6178 if (! TREE_USED (exp))
6179 {
6180 assemble_external (exp);
6181 TREE_USED (exp) = 1;
6182 }
6183
6184 /* Show we haven't gotten RTL for this yet. */
6185 temp = 0;
6186
6187 /* Handle variables inherited from containing functions. */
6188 context = decl_function_context (exp);
6189
6190 /* We treat inline_function_decl as an alias for the current function
6191 because that is the inline function whose vars, types, etc.
6192 are being merged into the current function.
6193 See expand_inline_function. */
6194
6195 if (context != 0 && context != current_function_decl
6196 && context != inline_function_decl
6197 /* If var is static, we don't need a static chain to access it. */
6198 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6199 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6200 {
6201 rtx addr;
6202
6203 /* Mark as non-local and addressable. */
6204 DECL_NONLOCAL (exp) = 1;
6205 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6206 abort ();
6207 mark_addressable (exp);
6208 if (GET_CODE (DECL_RTL (exp)) != MEM)
6209 abort ();
6210 addr = XEXP (DECL_RTL (exp), 0);
6211 if (GET_CODE (addr) == MEM)
6212 addr
6213 = replace_equiv_address (addr,
6214 fix_lexical_addr (XEXP (addr, 0), exp));
6215 else
6216 addr = fix_lexical_addr (addr, exp);
6217
6218 temp = replace_equiv_address (DECL_RTL (exp), addr);
6219 }
6220
6221 /* This is the case of an array whose size is to be determined
6222 from its initializer, while the initializer is still being parsed.
6223 See expand_decl. */
6224
6225 else if (GET_CODE (DECL_RTL (exp)) == MEM
6226 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6227 temp = validize_mem (DECL_RTL (exp));
6228
6229 /* If DECL_RTL is memory, we are in the normal case: if either the
6230 address is not valid, or it is not a register and -fforce-addr
6231 is specified, get the address into a register. */
6232
6233 else if (GET_CODE (DECL_RTL (exp)) == MEM
6234 && modifier != EXPAND_CONST_ADDRESS
6235 && modifier != EXPAND_SUM
6236 && modifier != EXPAND_INITIALIZER
6237 && (! memory_address_p (DECL_MODE (exp),
6238 XEXP (DECL_RTL (exp), 0))
6239 || (flag_force_addr
6240 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6241 temp = replace_equiv_address (DECL_RTL (exp),
6242 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6243
6244 /* If we got something, return it. But first, set the alignment
6245 if the address is a register. */
6246 if (temp != 0)
6247 {
6248 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6249 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6250
6251 return temp;
6252 }
6253
6254 /* If the mode of DECL_RTL does not match that of the decl, it
6255 must be a promoted value. We return a SUBREG of the wanted mode,
6256 but mark it so that we know that it was already extended. */
6257
6258 if (GET_CODE (DECL_RTL (exp)) == REG
6259 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6260 {
6261 /* Get the signedness used for this variable. Ensure we get the
6262 same mode we got when the variable was declared. */
6263 if (GET_MODE (DECL_RTL (exp))
6264 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6265 abort ();
6266
6267 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6268 SUBREG_PROMOTED_VAR_P (temp) = 1;
6269 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6270 return temp;
6271 }
6272
6273 return DECL_RTL (exp);
6274
6275 case INTEGER_CST:
6276 return immed_double_const (TREE_INT_CST_LOW (exp),
6277 TREE_INT_CST_HIGH (exp), mode);
6278
6279 case CONST_DECL:
6280 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6281
6282 case REAL_CST:
6283 /* If optimized, generate immediate CONST_DOUBLE
6284 which will be turned into memory by reload if necessary.
6285
6286 We used to force a register so that loop.c could see it. But
6287 this does not allow gen_* patterns to perform optimizations with
6288 the constants. It also produces two insns in cases like "x = 1.0;".
6289 On most machines, floating-point constants are not permitted in
6290 many insns, so we'd end up copying it to a register in any case.
6291
6292 Now, we do the copying in expand_binop, if appropriate. */
6293 return immed_real_const (exp);
6294
6295 case COMPLEX_CST:
6296 case STRING_CST:
6297 if (! TREE_CST_RTL (exp))
6298 output_constant_def (exp, 1);
6299
6300 /* TREE_CST_RTL probably contains a constant address.
6301 On RISC machines where a constant address isn't valid,
6302 make some insns to get that address into a register. */
6303 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6304 && modifier != EXPAND_CONST_ADDRESS
6305 && modifier != EXPAND_INITIALIZER
6306 && modifier != EXPAND_SUM
6307 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6308 || (flag_force_addr
6309 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6310 return replace_equiv_address (TREE_CST_RTL (exp),
6311 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6312 return TREE_CST_RTL (exp);
6313
6314 case EXPR_WITH_FILE_LOCATION:
6315 {
6316 rtx to_return;
6317 const char *saved_input_filename = input_filename;
6318 int saved_lineno = lineno;
6319 input_filename = EXPR_WFL_FILENAME (exp);
6320 lineno = EXPR_WFL_LINENO (exp);
6321 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6322 emit_line_note (input_filename, lineno);
6323 /* Possibly avoid switching back and forth here. */
6324 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6325 input_filename = saved_input_filename;
6326 lineno = saved_lineno;
6327 return to_return;
6328 }
6329
6330 case SAVE_EXPR:
6331 context = decl_function_context (exp);
6332
6333 /* If this SAVE_EXPR was at global context, assume we are an
6334 initialization function and move it into our context. */
6335 if (context == 0)
6336 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6337
6338 /* We treat inline_function_decl as an alias for the current function
6339 because that is the inline function whose vars, types, etc.
6340 are being merged into the current function.
6341 See expand_inline_function. */
6342 if (context == current_function_decl || context == inline_function_decl)
6343 context = 0;
6344
6345 /* If this is non-local, handle it. */
6346 if (context)
6347 {
6348 /* The following call just exists to abort if the context is
6349 not of a containing function. */
6350 find_function_data (context);
6351
6352 temp = SAVE_EXPR_RTL (exp);
6353 if (temp && GET_CODE (temp) == REG)
6354 {
6355 put_var_into_stack (exp);
6356 temp = SAVE_EXPR_RTL (exp);
6357 }
6358 if (temp == 0 || GET_CODE (temp) != MEM)
6359 abort ();
6360 return
6361 replace_equiv_address (temp,
6362 fix_lexical_addr (XEXP (temp, 0), exp));
6363 }
6364 if (SAVE_EXPR_RTL (exp) == 0)
6365 {
6366 if (mode == VOIDmode)
6367 temp = const0_rtx;
6368 else
6369 temp = assign_temp (build_qualified_type (type,
6370 (TYPE_QUALS (type)
6371 | TYPE_QUAL_CONST)),
6372 3, 0, 0);
6373
6374 SAVE_EXPR_RTL (exp) = temp;
6375 if (!optimize && GET_CODE (temp) == REG)
6376 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6377 save_expr_regs);
6378
6379 /* If the mode of TEMP does not match that of the expression, it
6380 must be a promoted value. We pass store_expr a SUBREG of the
6381 wanted mode but mark it so that we know that it was already
6382 extended. Note that `unsignedp' was modified above in
6383 this case. */
6384
6385 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6386 {
6387 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6388 SUBREG_PROMOTED_VAR_P (temp) = 1;
6389 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6390 }
6391
6392 if (temp == const0_rtx)
6393 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6394 else
6395 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6396
6397 TREE_USED (exp) = 1;
6398 }
6399
6400 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6401 must be a promoted value. We return a SUBREG of the wanted mode,
6402 but mark it so that we know that it was already extended. */
6403
6404 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6405 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6406 {
6407 /* Compute the signedness and make the proper SUBREG. */
6408 promote_mode (type, mode, &unsignedp, 0);
6409 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6410 SUBREG_PROMOTED_VAR_P (temp) = 1;
6411 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6412 return temp;
6413 }
6414
6415 return SAVE_EXPR_RTL (exp);
6416
6417 case UNSAVE_EXPR:
6418 {
6419 rtx temp;
6420 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6421 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6422 return temp;
6423 }
6424
6425 case PLACEHOLDER_EXPR:
6426 {
6427 tree old_list = placeholder_list;
6428 tree placeholder_expr = 0;
6429
6430 exp = find_placeholder (exp, &placeholder_expr);
6431 if (exp == 0)
6432 abort ();
6433
6434 placeholder_list = TREE_CHAIN (placeholder_expr);
6435 temp = expand_expr (exp, original_target, tmode, modifier);
6436 placeholder_list = old_list;
6437 return temp;
6438 }
6439
6440 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6441 abort ();
6442
6443 case WITH_RECORD_EXPR:
6444 /* Put the object on the placeholder list, expand our first operand,
6445 and pop the list. */
6446 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6447 placeholder_list);
6448 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6449 modifier);
6450 placeholder_list = TREE_CHAIN (placeholder_list);
6451 return target;
6452
6453 case GOTO_EXPR:
6454 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6455 expand_goto (TREE_OPERAND (exp, 0));
6456 else
6457 expand_computed_goto (TREE_OPERAND (exp, 0));
6458 return const0_rtx;
6459
6460 case EXIT_EXPR:
6461 expand_exit_loop_if_false (NULL,
6462 invert_truthvalue (TREE_OPERAND (exp, 0)));
6463 return const0_rtx;
6464
6465 case LABELED_BLOCK_EXPR:
6466 if (LABELED_BLOCK_BODY (exp))
6467 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6468 /* Should perhaps use expand_label, but this is simpler and safer. */
6469 do_pending_stack_adjust ();
6470 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6471 return const0_rtx;
6472
6473 case EXIT_BLOCK_EXPR:
6474 if (EXIT_BLOCK_RETURN (exp))
6475 sorry ("returned value in block_exit_expr");
6476 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6477 return const0_rtx;
6478
6479 case LOOP_EXPR:
6480 push_temp_slots ();
6481 expand_start_loop (1);
6482 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6483 expand_end_loop ();
6484 pop_temp_slots ();
6485
6486 return const0_rtx;
6487
6488 case BIND_EXPR:
6489 {
6490 tree vars = TREE_OPERAND (exp, 0);
6491 int vars_need_expansion = 0;
6492
6493 /* Need to open a binding contour here because
6494 if there are any cleanups they must be contained here. */
6495 expand_start_bindings (2);
6496
6497 /* Mark the corresponding BLOCK for output in its proper place. */
6498 if (TREE_OPERAND (exp, 2) != 0
6499 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6500 insert_block (TREE_OPERAND (exp, 2));
6501
6502 /* If VARS have not yet been expanded, expand them now. */
6503 while (vars)
6504 {
6505 if (!DECL_RTL_SET_P (vars))
6506 {
6507 vars_need_expansion = 1;
6508 expand_decl (vars);
6509 }
6510 expand_decl_init (vars);
6511 vars = TREE_CHAIN (vars);
6512 }
6513
6514 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6515
6516 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6517
6518 return temp;
6519 }
6520
6521 case RTL_EXPR:
6522 if (RTL_EXPR_SEQUENCE (exp))
6523 {
6524 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6525 abort ();
6526 emit_insns (RTL_EXPR_SEQUENCE (exp));
6527 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6528 }
6529 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6530 free_temps_for_rtl_expr (exp);
6531 return RTL_EXPR_RTL (exp);
6532
6533 case CONSTRUCTOR:
6534 /* If we don't need the result, just ensure we evaluate any
6535 subexpressions. */
6536 if (ignore)
6537 {
6538 tree elt;
6539
6540 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6541 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6542
6543 return const0_rtx;
6544 }
6545
6546 /* All elts simple constants => refer to a constant in memory. But
6547 if this is a non-BLKmode mode, let it store a field at a time
6548 since that should make a CONST_INT or CONST_DOUBLE when we
6549 fold. Likewise, if we have a target we can use, it is best to
6550 store directly into the target unless the type is large enough
6551 that memcpy will be used. If we are making an initializer and
6552 all operands are constant, put it in memory as well. */
6553 else if ((TREE_STATIC (exp)
6554 && ((mode == BLKmode
6555 && ! (target != 0 && safe_from_p (target, exp, 1)))
6556 || TREE_ADDRESSABLE (exp)
6557 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6558 && (! MOVE_BY_PIECES_P
6559 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6560 TYPE_ALIGN (type)))
6561 && ! mostly_zeros_p (exp))))
6562 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6563 {
6564 rtx constructor = output_constant_def (exp, 1);
6565
6566 if (modifier != EXPAND_CONST_ADDRESS
6567 && modifier != EXPAND_INITIALIZER
6568 && modifier != EXPAND_SUM)
6569 constructor = validize_mem (constructor);
6570
6571 return constructor;
6572 }
6573 else
6574 {
6575 /* Handle calls that pass values in multiple non-contiguous
6576 locations. The Irix 6 ABI has examples of this. */
6577 if (target == 0 || ! safe_from_p (target, exp, 1)
6578 || GET_CODE (target) == PARALLEL)
6579 target
6580 = assign_temp (build_qualified_type (type,
6581 (TYPE_QUALS (type)
6582 | (TREE_READONLY (exp)
6583 * TYPE_QUAL_CONST))),
6584 0, TREE_ADDRESSABLE (exp), 1);
6585
6586 store_constructor (exp, target, 0,
6587 int_size_in_bytes (TREE_TYPE (exp)));
6588 return target;
6589 }
6590
6591 case INDIRECT_REF:
6592 {
6593 tree exp1 = TREE_OPERAND (exp, 0);
6594 tree index;
6595 tree string = string_constant (exp1, &index);
6596
6597 /* Try to optimize reads from const strings. */
6598 if (string
6599 && TREE_CODE (string) == STRING_CST
6600 && TREE_CODE (index) == INTEGER_CST
6601 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6602 && GET_MODE_CLASS (mode) == MODE_INT
6603 && GET_MODE_SIZE (mode) == 1
6604 && modifier != EXPAND_WRITE)
6605 return
6606 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6607
6608 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6609 op0 = memory_address (mode, op0);
6610 temp = gen_rtx_MEM (mode, op0);
6611 set_mem_attributes (temp, exp, 0);
6612
6613 /* If we are writing to this object and its type is a record with
6614 readonly fields, we must mark it as readonly so it will
6615 conflict with readonly references to those fields. */
6616 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6617 RTX_UNCHANGING_P (temp) = 1;
6618
6619 return temp;
6620 }
6621
6622 case ARRAY_REF:
6623 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6624 abort ();
6625
6626 {
6627 tree array = TREE_OPERAND (exp, 0);
6628 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6629 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6630 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6631 HOST_WIDE_INT i;
6632
6633 /* Optimize the special-case of a zero lower bound.
6634
6635 We convert the low_bound to sizetype to avoid some problems
6636 with constant folding. (E.g. suppose the lower bound is 1,
6637 and its mode is QI. Without the conversion, (ARRAY
6638 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6639 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6640
6641 if (! integer_zerop (low_bound))
6642 index = size_diffop (index, convert (sizetype, low_bound));
6643
6644 /* Fold an expression like: "foo"[2].
6645 This is not done in fold so it won't happen inside &.
6646 Don't fold if this is for wide characters since it's too
6647 difficult to do correctly and this is a very rare case. */
6648
6649 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6650 && TREE_CODE (array) == STRING_CST
6651 && TREE_CODE (index) == INTEGER_CST
6652 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6653 && GET_MODE_CLASS (mode) == MODE_INT
6654 && GET_MODE_SIZE (mode) == 1)
6655 return
6656 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6657
6658 /* If this is a constant index into a constant array,
6659 just get the value from the array. Handle both the cases when
6660 we have an explicit constructor and when our operand is a variable
6661 that was declared const. */
6662
6663 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6664 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6665 && TREE_CODE (index) == INTEGER_CST
6666 && 0 > compare_tree_int (index,
6667 list_length (CONSTRUCTOR_ELTS
6668 (TREE_OPERAND (exp, 0)))))
6669 {
6670 tree elem;
6671
6672 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6673 i = TREE_INT_CST_LOW (index);
6674 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6675 ;
6676
6677 if (elem)
6678 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6679 modifier);
6680 }
6681
6682 else if (optimize >= 1
6683 && modifier != EXPAND_CONST_ADDRESS
6684 && modifier != EXPAND_INITIALIZER
6685 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6686 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6687 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6688 {
6689 if (TREE_CODE (index) == INTEGER_CST)
6690 {
6691 tree init = DECL_INITIAL (array);
6692
6693 if (TREE_CODE (init) == CONSTRUCTOR)
6694 {
6695 tree elem;
6696
6697 for (elem = CONSTRUCTOR_ELTS (init);
6698 (elem
6699 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6700 elem = TREE_CHAIN (elem))
6701 ;
6702
6703 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6704 return expand_expr (fold (TREE_VALUE (elem)), target,
6705 tmode, modifier);
6706 }
6707 else if (TREE_CODE (init) == STRING_CST
6708 && 0 > compare_tree_int (index,
6709 TREE_STRING_LENGTH (init)))
6710 {
6711 tree type = TREE_TYPE (TREE_TYPE (init));
6712 enum machine_mode mode = TYPE_MODE (type);
6713
6714 if (GET_MODE_CLASS (mode) == MODE_INT
6715 && GET_MODE_SIZE (mode) == 1)
6716 return (GEN_INT
6717 (TREE_STRING_POINTER
6718 (init)[TREE_INT_CST_LOW (index)]));
6719 }
6720 }
6721 }
6722 }
6723 /* Fall through. */
6724
6725 case COMPONENT_REF:
6726 case BIT_FIELD_REF:
6727 case ARRAY_RANGE_REF:
6728 /* If the operand is a CONSTRUCTOR, we can just extract the
6729 appropriate field if it is present. Don't do this if we have
6730 already written the data since we want to refer to that copy
6731 and varasm.c assumes that's what we'll do. */
6732 if (code == COMPONENT_REF
6733 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6734 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6735 {
6736 tree elt;
6737
6738 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6739 elt = TREE_CHAIN (elt))
6740 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6741 /* We can normally use the value of the field in the
6742 CONSTRUCTOR. However, if this is a bitfield in
6743 an integral mode that we can fit in a HOST_WIDE_INT,
6744 we must mask only the number of bits in the bitfield,
6745 since this is done implicitly by the constructor. If
6746 the bitfield does not meet either of those conditions,
6747 we can't do this optimization. */
6748 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6749 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6750 == MODE_INT)
6751 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6752 <= HOST_BITS_PER_WIDE_INT))))
6753 {
6754 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6755 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6756 {
6757 HOST_WIDE_INT bitsize
6758 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6759
6760 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6761 {
6762 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6763 op0 = expand_and (op0, op1, target);
6764 }
6765 else
6766 {
6767 enum machine_mode imode
6768 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6769 tree count
6770 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6771 0);
6772
6773 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6774 target, 0);
6775 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6776 target, 0);
6777 }
6778 }
6779
6780 return op0;
6781 }
6782 }
6783
6784 {
6785 enum machine_mode mode1;
6786 HOST_WIDE_INT bitsize, bitpos;
6787 tree offset;
6788 int volatilep = 0;
6789 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6790 &mode1, &unsignedp, &volatilep);
6791 rtx orig_op0;
6792
6793 /* If we got back the original object, something is wrong. Perhaps
6794 we are evaluating an expression too early. In any event, don't
6795 infinitely recurse. */
6796 if (tem == exp)
6797 abort ();
6798
6799 /* If TEM's type is a union of variable size, pass TARGET to the inner
6800 computation, since it will need a temporary and TARGET is known
6801 to suffice. This occurs in unchecked conversion in Ada. */
6802
6803 orig_op0 = op0
6804 = expand_expr (tem,
6805 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6806 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6807 != INTEGER_CST)
6808 ? target : NULL_RTX),
6809 VOIDmode,
6810 (modifier == EXPAND_INITIALIZER
6811 || modifier == EXPAND_CONST_ADDRESS)
6812 ? modifier : EXPAND_NORMAL);
6813
6814 /* If this is a constant, put it into a register if it is a
6815 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6816 if (CONSTANT_P (op0))
6817 {
6818 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6819 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6820 && offset == 0)
6821 op0 = force_reg (mode, op0);
6822 else
6823 op0 = validize_mem (force_const_mem (mode, op0));
6824 }
6825
6826 if (offset != 0)
6827 {
6828 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6829
6830 /* If this object is in a register, put it into memory.
6831 This case can't occur in C, but can in Ada if we have
6832 unchecked conversion of an expression from a scalar type to
6833 an array or record type. */
6834 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6835 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6836 {
6837 /* If the operand is a SAVE_EXPR, we can deal with this by
6838 forcing the SAVE_EXPR into memory. */
6839 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6840 {
6841 put_var_into_stack (TREE_OPERAND (exp, 0));
6842 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6843 }
6844 else
6845 {
6846 tree nt
6847 = build_qualified_type (TREE_TYPE (tem),
6848 (TYPE_QUALS (TREE_TYPE (tem))
6849 | TYPE_QUAL_CONST));
6850 rtx memloc = assign_temp (nt, 1, 1, 1);
6851
6852 emit_move_insn (memloc, op0);
6853 op0 = memloc;
6854 }
6855 }
6856
6857 if (GET_CODE (op0) != MEM)
6858 abort ();
6859
6860 if (GET_MODE (offset_rtx) != ptr_mode)
6861 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6862
6863 #ifdef POINTERS_EXTEND_UNSIGNED
6864 if (GET_MODE (offset_rtx) != Pmode)
6865 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6866 #endif
6867
6868 /* A constant address in OP0 can have VOIDmode; we must not
6869 call force_reg in that case, so avoid it. */
6870 if (GET_CODE (op0) == MEM
6871 && GET_MODE (op0) == BLKmode
6872 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6873 && bitsize != 0
6874 && (bitpos % bitsize) == 0
6875 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6876 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6877 {
6878 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6879
6880 if (GET_CODE (XEXP (temp, 0)) == REG)
6881 op0 = temp;
6882 else
6883 op0 = (replace_equiv_address
6884 (op0,
6885 force_reg (GET_MODE (XEXP (temp, 0)),
6886 XEXP (temp, 0))));
6887 bitpos = 0;
6888 }
6889
6890 op0 = offset_address (op0, offset_rtx,
6891 highest_pow2_factor (offset));
6892 }
6893
6894 /* Don't forget about volatility even if this is a bitfield. */
6895 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6896 {
6897 if (op0 == orig_op0)
6898 op0 = copy_rtx (op0);
6899
6900 MEM_VOLATILE_P (op0) = 1;
6901 }
6902
6903 /* In cases where an aligned union has an unaligned object
6904 as a field, we might be extracting a BLKmode value from
6905 an integer-mode (e.g., SImode) object. Handle this case
6906 by doing the extract into an object as wide as the field
6907 (which we know to be the width of a basic mode), then
6908 storing into memory, and changing the mode to BLKmode. */
6909 if (mode1 == VOIDmode
6910 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6911 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6912 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6913 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6914 && modifier != EXPAND_CONST_ADDRESS
6915 && modifier != EXPAND_INITIALIZER)
6916 /* If the field isn't aligned enough to fetch as a memref,
6917 fetch it as a bit field. */
6918 || (mode1 != BLKmode
6919 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6920 && ((TYPE_ALIGN (TREE_TYPE (tem))
6921 < GET_MODE_ALIGNMENT (mode))
6922 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6923 /* If the type and the field are a constant size and the
6924 size of the type isn't the same size as the bitfield,
6925 we must use bitfield operations. */
6926 || (bitsize >= 0
6927 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6928 == INTEGER_CST)
6929 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6930 bitsize)))
6931 {
6932 enum machine_mode ext_mode = mode;
6933
6934 if (ext_mode == BLKmode
6935 && ! (target != 0 && GET_CODE (op0) == MEM
6936 && GET_CODE (target) == MEM
6937 && bitpos % BITS_PER_UNIT == 0))
6938 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6939
6940 if (ext_mode == BLKmode)
6941 {
6942 /* In this case, BITPOS must start at a byte boundary and
6943 TARGET, if specified, must be a MEM. */
6944 if (GET_CODE (op0) != MEM
6945 || (target != 0 && GET_CODE (target) != MEM)
6946 || bitpos % BITS_PER_UNIT != 0)
6947 abort ();
6948
6949 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6950 if (target == 0)
6951 target = assign_temp (type, 0, 1, 1);
6952
6953 emit_block_move (target, op0,
6954 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6955 / BITS_PER_UNIT));
6956
6957 return target;
6958 }
6959
6960 op0 = validize_mem (op0);
6961
6962 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6963 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6964
6965 op0 = extract_bit_field (op0, bitsize, bitpos,
6966 unsignedp, target, ext_mode, ext_mode,
6967 int_size_in_bytes (TREE_TYPE (tem)));
6968
6969 /* If the result is a record type and BITSIZE is narrower than
6970 the mode of OP0, an integral mode, and this is a big endian
6971 machine, we must put the field into the high-order bits. */
6972 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6973 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6974 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6975 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6976 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6977 - bitsize),
6978 op0, 1);
6979
6980 if (mode == BLKmode)
6981 {
6982 rtx new = assign_temp (build_qualified_type
6983 (type_for_mode (ext_mode, 0),
6984 TYPE_QUAL_CONST), 0, 1, 1);
6985
6986 emit_move_insn (new, op0);
6987 op0 = copy_rtx (new);
6988 PUT_MODE (op0, BLKmode);
6989 set_mem_attributes (op0, exp, 1);
6990 }
6991
6992 return op0;
6993 }
6994
6995 /* If the result is BLKmode, use that to access the object
6996 now as well. */
6997 if (mode == BLKmode)
6998 mode1 = BLKmode;
6999
7000 /* Get a reference to just this component. */
7001 if (modifier == EXPAND_CONST_ADDRESS
7002 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7003 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7004 else
7005 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7006
7007 if (op0 == orig_op0)
7008 op0 = copy_rtx (op0);
7009
7010 set_mem_attributes (op0, exp, 0);
7011 if (GET_CODE (XEXP (op0, 0)) == REG)
7012 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7013
7014 MEM_VOLATILE_P (op0) |= volatilep;
7015 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7016 || modifier == EXPAND_CONST_ADDRESS
7017 || modifier == EXPAND_INITIALIZER)
7018 return op0;
7019 else if (target == 0)
7020 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7021
7022 convert_move (target, op0, unsignedp);
7023 return target;
7024 }
7025
7026 case VTABLE_REF:
7027 {
7028 rtx insn, before = get_last_insn (), vtbl_ref;
7029
7030 /* Evaluate the interior expression. */
7031 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7032 tmode, modifier);
7033
7034 /* Get or create an instruction off which to hang a note. */
7035 if (REG_P (subtarget))
7036 {
7037 target = subtarget;
7038 insn = get_last_insn ();
7039 if (insn == before)
7040 abort ();
7041 if (! INSN_P (insn))
7042 insn = prev_nonnote_insn (insn);
7043 }
7044 else
7045 {
7046 target = gen_reg_rtx (GET_MODE (subtarget));
7047 insn = emit_move_insn (target, subtarget);
7048 }
7049
7050 /* Collect the data for the note. */
7051 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7052 vtbl_ref = plus_constant (vtbl_ref,
7053 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7054 /* Discard the initial CONST that was added. */
7055 vtbl_ref = XEXP (vtbl_ref, 0);
7056
7057 REG_NOTES (insn)
7058 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7059
7060 return target;
7061 }
7062
7063 /* Intended for a reference to a buffer of a file-object in Pascal.
7064 But it's not certain that a special tree code will really be
7065 necessary for these. INDIRECT_REF might work for them. */
7066 case BUFFER_REF:
7067 abort ();
7068
7069 case IN_EXPR:
7070 {
7071 /* Pascal set IN expression.
7072
7073 Algorithm:
7074 rlo = set_low - (set_low%bits_per_word);
7075 the_word = set [ (index - rlo)/bits_per_word ];
7076 bit_index = index % bits_per_word;
7077 bitmask = 1 << bit_index;
7078 return !!(the_word & bitmask); */
7079
7080 tree set = TREE_OPERAND (exp, 0);
7081 tree index = TREE_OPERAND (exp, 1);
7082 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7083 tree set_type = TREE_TYPE (set);
7084 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7085 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7086 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7087 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7088 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7089 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7090 rtx setaddr = XEXP (setval, 0);
7091 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7092 rtx rlow;
7093 rtx diff, quo, rem, addr, bit, result;
7094
7095 /* If domain is empty, answer is no. Likewise if index is constant
7096 and out of bounds. */
7097 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7098 && TREE_CODE (set_low_bound) == INTEGER_CST
7099 && tree_int_cst_lt (set_high_bound, set_low_bound))
7100 || (TREE_CODE (index) == INTEGER_CST
7101 && TREE_CODE (set_low_bound) == INTEGER_CST
7102 && tree_int_cst_lt (index, set_low_bound))
7103 || (TREE_CODE (set_high_bound) == INTEGER_CST
7104 && TREE_CODE (index) == INTEGER_CST
7105 && tree_int_cst_lt (set_high_bound, index))))
7106 return const0_rtx;
7107
7108 if (target == 0)
7109 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7110
7111 /* If we get here, we have to generate the code for both cases
7112 (in range and out of range). */
7113
7114 op0 = gen_label_rtx ();
7115 op1 = gen_label_rtx ();
7116
7117 if (! (GET_CODE (index_val) == CONST_INT
7118 && GET_CODE (lo_r) == CONST_INT))
7119 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7120 GET_MODE (index_val), iunsignedp, op1);
7121
7122 if (! (GET_CODE (index_val) == CONST_INT
7123 && GET_CODE (hi_r) == CONST_INT))
7124 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7125 GET_MODE (index_val), iunsignedp, op1);
7126
7127 /* Calculate the element number of bit zero in the first word
7128 of the set. */
7129 if (GET_CODE (lo_r) == CONST_INT)
7130 rlow = GEN_INT (INTVAL (lo_r)
7131 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7132 else
7133 rlow = expand_binop (index_mode, and_optab, lo_r,
7134 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7135 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7136
7137 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7138 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7139
7140 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7141 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7142 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7143 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7144
7145 addr = memory_address (byte_mode,
7146 expand_binop (index_mode, add_optab, diff,
7147 setaddr, NULL_RTX, iunsignedp,
7148 OPTAB_LIB_WIDEN));
7149
7150 /* Extract the bit we want to examine. */
7151 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7152 gen_rtx_MEM (byte_mode, addr),
7153 make_tree (TREE_TYPE (index), rem),
7154 NULL_RTX, 1);
7155 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7156 GET_MODE (target) == byte_mode ? target : 0,
7157 1, OPTAB_LIB_WIDEN);
7158
7159 if (result != target)
7160 convert_move (target, result, 1);
7161
7162 /* Output the code to handle the out-of-range case. */
7163 emit_jump (op0);
7164 emit_label (op1);
7165 emit_move_insn (target, const0_rtx);
7166 emit_label (op0);
7167 return target;
7168 }
7169
7170 case WITH_CLEANUP_EXPR:
7171 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7172 {
7173 WITH_CLEANUP_EXPR_RTL (exp)
7174 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7175 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7176
7177 /* That's it for this cleanup. */
7178 TREE_OPERAND (exp, 1) = 0;
7179 }
7180 return WITH_CLEANUP_EXPR_RTL (exp);
7181
7182 case CLEANUP_POINT_EXPR:
7183 {
7184 /* Start a new binding layer that will keep track of all cleanup
7185 actions to be performed. */
7186 expand_start_bindings (2);
7187
7188 target_temp_slot_level = temp_slot_level;
7189
7190 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7191 /* If we're going to use this value, load it up now. */
7192 if (! ignore)
7193 op0 = force_not_mem (op0);
7194 preserve_temp_slots (op0);
7195 expand_end_bindings (NULL_TREE, 0, 0);
7196 }
7197 return op0;
7198
7199 case CALL_EXPR:
7200 /* Check for a built-in function. */
7201 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7202 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7203 == FUNCTION_DECL)
7204 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7205 {
7206 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7207 == BUILT_IN_FRONTEND)
7208 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7209 else
7210 return expand_builtin (exp, target, subtarget, tmode, ignore);
7211 }
7212
7213 return expand_call (exp, target, ignore);
7214
7215 case NON_LVALUE_EXPR:
7216 case NOP_EXPR:
7217 case CONVERT_EXPR:
7218 case REFERENCE_EXPR:
7219 if (TREE_OPERAND (exp, 0) == error_mark_node)
7220 return const0_rtx;
7221
7222 if (TREE_CODE (type) == UNION_TYPE)
7223 {
7224 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7225
7226 /* If both input and output are BLKmode, this conversion isn't doing
7227 anything except possibly changing the memory attributes. */
7228 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7229 {
7230 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7231 modifier);
7232
7233 result = copy_rtx (result);
7234 set_mem_attributes (result, exp, 0);
7235 return result;
7236 }
7237
7238 if (target == 0)
7239 target = assign_temp (type, 0, 1, 1);
7240
7241 if (GET_CODE (target) == MEM)
7242 /* Store data into beginning of memory target. */
7243 store_expr (TREE_OPERAND (exp, 0),
7244 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7245
7246 else if (GET_CODE (target) == REG)
7247 /* Store this field into a union of the proper type. */
7248 store_field (target,
7249 MIN ((int_size_in_bytes (TREE_TYPE
7250 (TREE_OPERAND (exp, 0)))
7251 * BITS_PER_UNIT),
7252 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7253 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7254 VOIDmode, 0, type, 0);
7255 else
7256 abort ();
7257
7258 /* Return the entire union. */
7259 return target;
7260 }
7261
7262 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7263 {
7264 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7265 modifier);
7266
7267 /* If the signedness of the conversion differs and OP0 is
7268 a promoted SUBREG, clear that indication since we now
7269 have to do the proper extension. */
7270 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7271 && GET_CODE (op0) == SUBREG)
7272 SUBREG_PROMOTED_VAR_P (op0) = 0;
7273
7274 return op0;
7275 }
7276
7277 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7278 if (GET_MODE (op0) == mode)
7279 return op0;
7280
7281 /* If OP0 is a constant, just convert it into the proper mode. */
7282 if (CONSTANT_P (op0))
7283 {
7284 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7285 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7286
7287 if (modifier == EXPAND_INITIALIZER)
7288 return simplify_gen_subreg (mode, op0, inner_mode,
7289 subreg_lowpart_offset (mode,
7290 inner_mode));
7291 else
7292 return convert_modes (mode, inner_mode, op0,
7293 TREE_UNSIGNED (inner_type));
7294 }
7295
7296 if (modifier == EXPAND_INITIALIZER)
7297 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7298
7299 if (target == 0)
7300 return
7301 convert_to_mode (mode, op0,
7302 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7303 else
7304 convert_move (target, op0,
7305 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7306 return target;
7307
7308 case VIEW_CONVERT_EXPR:
7309 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7310
7311 /* If the input and output modes are both the same, we are done.
7312 Otherwise, if neither mode is BLKmode and both are within a word, we
7313 can use gen_lowpart. If neither is true, make sure the operand is
7314 in memory and convert the MEM to the new mode. */
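/* For instance, on a typical target a VIEW_CONVERT_EXPR between a 32-bit
   'float' and a 32-bit 'int' (SFmode and SImode, both within a word) can
   be handled with gen_lowpart, whereas viewing a BLKmode structure as an
   integer has to take the memory path handled below.  */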
7315 if (TYPE_MODE (type) == GET_MODE (op0))
7316 ;
7317 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7318 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7319 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7320 op0 = gen_lowpart (TYPE_MODE (type), op0);
7321 else if (GET_CODE (op0) != MEM)
7322 {
7323 /* If the operand is not a MEM, force it into memory. Since we
7324 are going to be changing the mode of the MEM, don't call
7325 force_const_mem for constants because we don't allow pool
7326 constants to change mode. */
7327 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7328
7329 if (TREE_ADDRESSABLE (exp))
7330 abort ();
7331
7332 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7333 target
7334 = assign_stack_temp_for_type
7335 (TYPE_MODE (inner_type),
7336 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7337
7338 emit_move_insn (target, op0);
7339 op0 = target;
7340 }
7341
7342 /* At this point, OP0 is in the correct mode. If the output type is such
7343 that the operand is known to be aligned, indicate that it is.
7344 Otherwise, we need only be concerned about alignment for non-BLKmode
7345 results. */
7346 if (GET_CODE (op0) == MEM)
7347 {
7348 op0 = copy_rtx (op0);
7349
7350 if (TYPE_ALIGN_OK (type))
7351 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7352 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7353 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7354 {
7355 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7356 HOST_WIDE_INT temp_size
7357 = MAX (int_size_in_bytes (inner_type),
7358 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7359 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7360 temp_size, 0, type);
7361 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7362
7363 if (TREE_ADDRESSABLE (exp))
7364 abort ();
7365
7366 if (GET_MODE (op0) == BLKmode)
7367 emit_block_move (new_with_op0_mode, op0,
7368 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7369 else
7370 emit_move_insn (new_with_op0_mode, op0);
7371
7372 op0 = new;
7373 }
7374
7375 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7376 }
7377
7378 return op0;
7379
7380 case PLUS_EXPR:
7381 /* We come here from MINUS_EXPR when the second operand is a
7382 constant. */
7383 plus_expr:
7384 this_optab = ! unsignedp && flag_trapv
7385 && (GET_MODE_CLASS (mode) == MODE_INT)
7386 ? addv_optab : add_optab;
7387
7388 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7389 something else, make sure we add the register to the constant and
7390 then to the other thing. This case can occur during strength
7391 reduction and doing it this way will produce better code if the
7392 frame pointer or argument pointer is eliminated.
7393
7394 fold-const.c will ensure that the constant is always in the inner
7395 PLUS_EXPR, so the only case we need to do anything about is if
7396 sp, ap, or fp is our second argument, in which case we must swap
7397 the innermost first argument and our second argument. */
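/* As a rough illustration (hypothetical operands): given (x + 4) + FP,
   where FP is an RTL_EXPR holding the frame pointer, the swap below
   rewrites it as (FP + 4) + x, so that FP + 4 can be folded into a single
   offset when the frame pointer is eliminated.  */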
7398
7399 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7400 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7401 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7402 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7403 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7404 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7405 {
7406 tree t = TREE_OPERAND (exp, 1);
7407
7408 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7409 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7410 }
7411
7412 /* If the result is to be ptr_mode and we are adding an integer to
7413 something, we might be forming a constant. So try to use
7414 plus_constant. If it produces a sum and we can't accept it,
7415 use force_operand. This allows P = &ARR[const] to generate
7416 efficient code on machines where a SYMBOL_REF is not a valid
7417 address.
7418
7419 If this is an EXPAND_SUM call, always return the sum. */
7420 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7421 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7422 {
7423 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7424 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7425 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7426 {
7427 rtx constant_part;
7428
7429 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7430 EXPAND_SUM);
7431 /* Use immed_double_const to ensure that the constant is
7432 truncated according to the mode of OP1, then sign extended
7433 to a HOST_WIDE_INT. Using the constant directly can result
7434 in non-canonical RTL in a 64x32 cross compile. */
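/* For example, in a 64x32 cross the SImode constant 0x80000000 must be
   represented as the sign-extended CONST_INT -2147483648 rather than as
   2147483648; immed_double_const guarantees that form here.  */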
7435 constant_part
7436 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7437 (HOST_WIDE_INT) 0,
7438 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7439 op1 = plus_constant (op1, INTVAL (constant_part));
7440 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7441 op1 = force_operand (op1, target);
7442 return op1;
7443 }
7444
7445 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7446 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7447 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7448 {
7449 rtx constant_part;
7450
7451 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7452 EXPAND_SUM);
7453 if (! CONSTANT_P (op0))
7454 {
7455 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7456 VOIDmode, modifier);
7457 /* Don't go to both_summands if modifier
7458 says it's not right to return a PLUS. */
7459 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7460 goto binop2;
7461 goto both_summands;
7462 }
7463 /* Use immed_double_const to ensure that the constant is
7464 truncated according to the mode of OP0, then sign extended
7465 to a HOST_WIDE_INT. Using the constant directly can result
7466 in non-canonical RTL in a 64x32 cross compile. */
7467 constant_part
7468 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7469 (HOST_WIDE_INT) 0,
7470 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7471 op0 = plus_constant (op0, INTVAL (constant_part));
7472 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7473 op0 = force_operand (op0, target);
7474 return op0;
7475 }
7476 }
7477
7478 /* No sense saving up arithmetic to be done
7479 if it's all in the wrong mode to form part of an address.
7480 And force_operand won't know whether to sign-extend or
7481 zero-extend. */
7482 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7483 || mode != ptr_mode)
7484 goto binop;
7485
7486 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7487 subtarget = 0;
7488
7489 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7490 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7491
7492 both_summands:
7493 /* Make sure any term that's a sum with a constant comes last. */
7494 if (GET_CODE (op0) == PLUS
7495 && CONSTANT_P (XEXP (op0, 1)))
7496 {
7497 temp = op0;
7498 op0 = op1;
7499 op1 = temp;
7500 }
7501 /* If adding to a sum including a constant,
7502 associate it to put the constant outside. */
7503 if (GET_CODE (op1) == PLUS
7504 && CONSTANT_P (XEXP (op1, 1)))
7505 {
7506 rtx constant_term = const0_rtx;
7507
7508 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7509 if (temp != 0)
7510 op0 = temp;
7511 /* Ensure that MULT comes first if there is one. */
7512 else if (GET_CODE (op0) == MULT)
7513 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7514 else
7515 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7516
7517 /* Let's also eliminate constants from op0 if possible. */
7518 op0 = eliminate_constant_term (op0, &constant_term);
7519
7520 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7521 their sum should be a constant. Form it into OP1, since the
7522 result we want will then be OP0 + OP1. */
7523
7524 temp = simplify_binary_operation (PLUS, mode, constant_term,
7525 XEXP (op1, 1));
7526 if (temp != 0)
7527 op1 = temp;
7528 else
7529 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7530 }
7531
7532 /* Put a constant term last and put a multiplication first. */
7533 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7534 temp = op1, op1 = op0, op0 = temp;
7535
7536 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7537 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7538
7539 case MINUS_EXPR:
7540 /* For initializers, we are allowed to return a MINUS of two
7541 symbolic constants. Here we handle all cases when both operands
7542 are constant. */
7543 /* Handle difference of two symbolic constants,
7544 for the sake of an initializer. */
7545 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7546 && really_constant_p (TREE_OPERAND (exp, 0))
7547 && really_constant_p (TREE_OPERAND (exp, 1)))
7548 {
7549 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7550 modifier);
7551 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7552 modifier);
7553
7554 /* If the last operand is a CONST_INT, use plus_constant of
7555 the negated constant. Else make the MINUS. */
7556 if (GET_CODE (op1) == CONST_INT)
7557 return plus_constant (op0, - INTVAL (op1));
7558 else
7559 return gen_rtx_MINUS (mode, op0, op1);
7560 }
7561 /* Convert A - const to A + (-const). */
7562 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7563 {
7564 tree negated = fold (build1 (NEGATE_EXPR, type,
7565 TREE_OPERAND (exp, 1)));
7566
7567 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7568 /* If we can't negate the constant in TYPE, leave it alone and
7569 expand_binop will negate it for us. We used to try to do it
7570 here in the signed version of TYPE, but that doesn't work
7571 on POINTER_TYPEs. */;
7572 else
7573 {
7574 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7575 goto plus_expr;
7576 }
7577 }
7578 this_optab = ! unsignedp && flag_trapv
7579 && (GET_MODE_CLASS(mode) == MODE_INT)
7580 ? subv_optab : sub_optab;
7581 goto binop;
7582
7583 case MULT_EXPR:
7584 /* If first operand is constant, swap them.
7585 Thus the following special case checks need only
7586 check the second operand. */
7587 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7588 {
7589 tree t1 = TREE_OPERAND (exp, 0);
7590 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7591 TREE_OPERAND (exp, 1) = t1;
7592 }
7593
7594 /* Attempt to return something suitable for generating an
7595 indexed address, for machines that support that. */
7596
7597 if (modifier == EXPAND_SUM && mode == ptr_mode
7598 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7599 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7600 {
7601 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7602 EXPAND_SUM);
7603
7604 /* Apply distributive law if OP0 is x+c. */
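/* E.g. (x + 8) * 4 is rewritten below as x*4 + 32, a shape that fits
   directly into an indexed address (illustrative values).  */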
7605 if (GET_CODE (op0) == PLUS
7606 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7607 return
7608 gen_rtx_PLUS
7609 (mode,
7610 gen_rtx_MULT
7611 (mode, XEXP (op0, 0),
7612 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7613 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7614 * INTVAL (XEXP (op0, 1))));
7615
7616 if (GET_CODE (op0) != REG)
7617 op0 = force_operand (op0, NULL_RTX);
7618 if (GET_CODE (op0) != REG)
7619 op0 = copy_to_mode_reg (mode, op0);
7620
7621 return
7622 gen_rtx_MULT (mode, op0,
7623 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7624 }
7625
7626 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7627 subtarget = 0;
7628
7629 /* Check for multiplying things that have been extended
7630 from a narrower type. If this machine supports multiplying
7631 in that narrower type with a result in the desired type,
7632 do it that way, and avoid the explicit type-conversion. */
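/* E.g. for 'short' operands sa and sb (hypothetical names), the product
   (int) sa * (int) sb can use a HImode x HImode -> SImode widening
   multiply directly when the target provides one, instead of widening
   both operands to SImode first.  */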
7633 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7634 && TREE_CODE (type) == INTEGER_TYPE
7635 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7636 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7637 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7638 && int_fits_type_p (TREE_OPERAND (exp, 1),
7639 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7640 /* Don't use a widening multiply if a shift will do. */
7641 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7642 > HOST_BITS_PER_WIDE_INT)
7643 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7644 ||
7645 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7646 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7647 ==
7648 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7649 /* If both operands are extended, they must either both
7650 be zero-extended or both be sign-extended. */
7651 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7652 ==
7653 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7654 {
7655 enum machine_mode innermode
7656 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7657 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7658 ? smul_widen_optab : umul_widen_optab);
7659 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7660 ? umul_widen_optab : smul_widen_optab);
7661 if (mode == GET_MODE_WIDER_MODE (innermode))
7662 {
7663 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7664 {
7665 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7666 NULL_RTX, VOIDmode, 0);
7667 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7668 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7669 VOIDmode, 0);
7670 else
7671 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7672 NULL_RTX, VOIDmode, 0);
7673 goto binop2;
7674 }
7675 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7676 && innermode == word_mode)
7677 {
7678 rtx htem;
7679 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7680 NULL_RTX, VOIDmode, 0);
7681 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7682 op1 = convert_modes (innermode, mode,
7683 expand_expr (TREE_OPERAND (exp, 1),
7684 NULL_RTX, VOIDmode, 0),
7685 unsignedp);
7686 else
7687 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7688 NULL_RTX, VOIDmode, 0);
7689 temp = expand_binop (mode, other_optab, op0, op1, target,
7690 unsignedp, OPTAB_LIB_WIDEN);
7691 htem = expand_mult_highpart_adjust (innermode,
7692 gen_highpart (innermode, temp),
7693 op0, op1,
7694 gen_highpart (innermode, temp),
7695 unsignedp);
7696 emit_move_insn (gen_highpart (innermode, temp), htem);
7697 return temp;
7698 }
7699 }
7700 }
7701 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7702 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7703 return expand_mult (mode, op0, op1, target, unsignedp);
7704
7705 case TRUNC_DIV_EXPR:
7706 case FLOOR_DIV_EXPR:
7707 case CEIL_DIV_EXPR:
7708 case ROUND_DIV_EXPR:
7709 case EXACT_DIV_EXPR:
7710 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7711 subtarget = 0;
7712 /* Possible optimization: compute the dividend with EXPAND_SUM
7713 then, if the divisor is constant, we can optimize the case
7714 where some terms of the dividend have coefficients divisible by it. */
7715 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7716 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7717 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7718
7719 case RDIV_EXPR:
7720 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving
7721 an expensive divide. If not, combine will rebuild the original
7722 computation. */
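/* A sketch of the intended effect (illustrative expression): under
   -funsafe-math-optimizations, x/y + z/y becomes x*(1/y) + z*(1/y), so
   CSE can share the single reciprocal and the two divides collapse into
   one divide and two multiplies.  */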
7723 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7724 && !real_onep (TREE_OPERAND (exp, 0)))
7725 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7726 build (RDIV_EXPR, type,
7727 build_real (type, dconst1),
7728 TREE_OPERAND (exp, 1))),
7729 target, tmode, unsignedp);
7730 this_optab = sdiv_optab;
7731 goto binop;
7732
7733 case TRUNC_MOD_EXPR:
7734 case FLOOR_MOD_EXPR:
7735 case CEIL_MOD_EXPR:
7736 case ROUND_MOD_EXPR:
7737 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7738 subtarget = 0;
7739 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7740 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7741 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7742
7743 case FIX_ROUND_EXPR:
7744 case FIX_FLOOR_EXPR:
7745 case FIX_CEIL_EXPR:
7746 abort (); /* Not used for C. */
7747
7748 case FIX_TRUNC_EXPR:
7749 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7750 if (target == 0)
7751 target = gen_reg_rtx (mode);
7752 expand_fix (target, op0, unsignedp);
7753 return target;
7754
7755 case FLOAT_EXPR:
7756 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7757 if (target == 0)
7758 target = gen_reg_rtx (mode);
7759 /* expand_float can't figure out what to do if FROM has VOIDmode.
7760 So give it the correct mode. With -O, cse will optimize this. */
7761 if (GET_MODE (op0) == VOIDmode)
7762 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7763 op0);
7764 expand_float (target, op0,
7765 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7766 return target;
7767
7768 case NEGATE_EXPR:
7769 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7770 temp = expand_unop (mode,
7771 ! unsignedp && flag_trapv
7772 && (GET_MODE_CLASS(mode) == MODE_INT)
7773 ? negv_optab : neg_optab, op0, target, 0);
7774 if (temp == 0)
7775 abort ();
7776 return temp;
7777
7778 case ABS_EXPR:
7779 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7780
7781 /* Handle complex values specially. */
7782 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7783 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7784 return expand_complex_abs (mode, op0, target, unsignedp);
7785
7786 /* Unsigned abs is simply the operand. Testing here means we don't
7787 risk generating incorrect code below. */
7788 if (TREE_UNSIGNED (type))
7789 return op0;
7790
7791 return expand_abs (mode, op0, target, unsignedp,
7792 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7793
7794 case MAX_EXPR:
7795 case MIN_EXPR:
7796 target = original_target;
7797 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7798 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7799 || GET_MODE (target) != mode
7800 || (GET_CODE (target) == REG
7801 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7802 target = gen_reg_rtx (mode);
7803 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7804 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7805
7806 /* First try to do it with a special MIN or MAX instruction.
7807 If that does not win, use a conditional jump to select the proper
7808 value. */
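/* The jump-based fallback below amounts to, for MAX_EXPR:
     target = op0; if (target >= op1) goto done; target = op1; done:
   and the same with the comparison sense reversed (LE) for MIN_EXPR.  */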
7809 this_optab = (TREE_UNSIGNED (type)
7810 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7811 : (code == MIN_EXPR ? smin_optab : smax_optab));
7812
7813 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7814 OPTAB_WIDEN);
7815 if (temp != 0)
7816 return temp;
7817
7818 /* At this point, a MEM target is no longer useful; we will get better
7819 code without it. */
7820
7821 if (GET_CODE (target) == MEM)
7822 target = gen_reg_rtx (mode);
7823
7824 if (target != op0)
7825 emit_move_insn (target, op0);
7826
7827 op0 = gen_label_rtx ();
7828
7829 /* If this mode is an integer too wide to compare properly,
7830 compare word by word. Rely on cse to optimize constant cases. */
7831 if (GET_MODE_CLASS (mode) == MODE_INT
7832 && ! can_compare_p (GE, mode, ccp_jump))
7833 {
7834 if (code == MAX_EXPR)
7835 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7836 target, op1, NULL_RTX, op0);
7837 else
7838 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7839 op1, target, NULL_RTX, op0);
7840 }
7841 else
7842 {
7843 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7844 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7845 unsignedp, mode, NULL_RTX, NULL_RTX,
7846 op0);
7847 }
7848 emit_move_insn (target, op1);
7849 emit_label (op0);
7850 return target;
7851
7852 case BIT_NOT_EXPR:
7853 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7854 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7855 if (temp == 0)
7856 abort ();
7857 return temp;
7858
7859 case FFS_EXPR:
7860 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7861 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7862 if (temp == 0)
7863 abort ();
7864 return temp;
7865
7866 /* ??? Can optimize bitwise operations with one arg constant.
7867 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7868 and (a bitwise1 b) bitwise2 b (etc)
7869 but that is probably not worthwhile. */
7870
7871 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7872 boolean values when we want in all cases to compute both of them. In
7873 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7874 as actual zero-or-1 values and then bitwise anding. In cases where
7875 there cannot be any side effects, better code would be made by
7876 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7877 how to recognize those cases. */
7878
7879 case TRUTH_AND_EXPR:
7880 case BIT_AND_EXPR:
7881 this_optab = and_optab;
7882 goto binop;
7883
7884 case TRUTH_OR_EXPR:
7885 case BIT_IOR_EXPR:
7886 this_optab = ior_optab;
7887 goto binop;
7888
7889 case TRUTH_XOR_EXPR:
7890 case BIT_XOR_EXPR:
7891 this_optab = xor_optab;
7892 goto binop;
7893
7894 case LSHIFT_EXPR:
7895 case RSHIFT_EXPR:
7896 case LROTATE_EXPR:
7897 case RROTATE_EXPR:
7898 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7899 subtarget = 0;
7900 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7901 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7902 unsignedp);
7903
7904 /* Could determine the answer when only additive constants differ. Also,
7905 the addition of one can be handled by changing the condition. */
7906 case LT_EXPR:
7907 case LE_EXPR:
7908 case GT_EXPR:
7909 case GE_EXPR:
7910 case EQ_EXPR:
7911 case NE_EXPR:
7912 case UNORDERED_EXPR:
7913 case ORDERED_EXPR:
7914 case UNLT_EXPR:
7915 case UNLE_EXPR:
7916 case UNGT_EXPR:
7917 case UNGE_EXPR:
7918 case UNEQ_EXPR:
7919 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7920 if (temp != 0)
7921 return temp;
7922
7923 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7924 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7925 && original_target
7926 && GET_CODE (original_target) == REG
7927 && (GET_MODE (original_target)
7928 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7929 {
7930 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7931 VOIDmode, 0);
7932
7933 if (temp != original_target)
7934 temp = copy_to_reg (temp);
7935
7936 op1 = gen_label_rtx ();
7937 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7938 GET_MODE (temp), unsignedp, op1);
7939 emit_move_insn (temp, const1_rtx);
7940 emit_label (op1);
7941 return temp;
7942 }
7943
7944 /* If no set-flag instruction, must generate a conditional
7945 store into a temporary variable. Drop through
7946 and handle this like && and ||. */
7947
7948 case TRUTH_ANDIF_EXPR:
7949 case TRUTH_ORIF_EXPR:
7950 if (! ignore
7951 && (target == 0 || ! safe_from_p (target, exp, 1)
7952 /* Make sure we don't have a hard reg (such as function's return
7953 value) live across basic blocks, if not optimizing. */
7954 || (!optimize && GET_CODE (target) == REG
7955 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7956 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7957
7958 if (target)
7959 emit_clr_insn (target);
7960
7961 op1 = gen_label_rtx ();
7962 jumpifnot (exp, op1);
7963
7964 if (target)
7965 emit_0_to_1_insn (target);
7966
7967 emit_label (op1);
7968 return ignore ? const0_rtx : target;
7969
7970 case TRUTH_NOT_EXPR:
7971 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7972 /* The parser is careful to generate TRUTH_NOT_EXPR
7973 only with operands that are always zero or one. */
7974 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7975 target, 1, OPTAB_LIB_WIDEN);
7976 if (temp == 0)
7977 abort ();
7978 return temp;
7979
7980 case COMPOUND_EXPR:
7981 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7982 emit_queue ();
7983 return expand_expr (TREE_OPERAND (exp, 1),
7984 (ignore ? const0_rtx : target),
7985 VOIDmode, 0);
7986
7987 case COND_EXPR:
7988 /* If we would have a "singleton" (see below) were it not for a
7989 conversion in each arm, bring that conversion back out. */
7990 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7991 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7992 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7993 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7994 {
7995 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7996 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7997
7998 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7999 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8000 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8001 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8002 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8003 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8004 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8005 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8006 return expand_expr (build1 (NOP_EXPR, type,
8007 build (COND_EXPR, TREE_TYPE (iftrue),
8008 TREE_OPERAND (exp, 0),
8009 iftrue, iffalse)),
8010 target, tmode, modifier);
8011 }
8012
8013 {
8014 /* Note that COND_EXPRs whose type is a structure or union
8015 are required to be constructed to contain assignments of
8016 a temporary variable, so that we can evaluate them here
8017 for side effect only. If type is void, we must do likewise. */
8018
8019 /* If an arm of the branch requires a cleanup,
8020 only that cleanup is performed. */
8021
8022 tree singleton = 0;
8023 tree binary_op = 0, unary_op = 0;
8024
8025 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8026 convert it to our mode, if necessary. */
8027 if (integer_onep (TREE_OPERAND (exp, 1))
8028 && integer_zerop (TREE_OPERAND (exp, 2))
8029 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8030 {
8031 if (ignore)
8032 {
8033 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8034 modifier);
8035 return const0_rtx;
8036 }
8037
8038 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8039 if (GET_MODE (op0) == mode)
8040 return op0;
8041
8042 if (target == 0)
8043 target = gen_reg_rtx (mode);
8044 convert_move (target, op0, unsignedp);
8045 return target;
8046 }
8047
8048 /* Check for X ? A + B : A. If we have this, we can copy A to the
8049 output and conditionally add B. Similarly for unary operations.
8050 Don't do this if X has side-effects because those side effects
8051 might affect A or B and the "?" operation is a sequence point in
8052 ANSI. (operand_equal_p tests for side effects.) */
8053
8054 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8055 && operand_equal_p (TREE_OPERAND (exp, 2),
8056 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8057 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8058 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8059 && operand_equal_p (TREE_OPERAND (exp, 1),
8060 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8061 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8062 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8063 && operand_equal_p (TREE_OPERAND (exp, 2),
8064 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8065 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8066 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8067 && operand_equal_p (TREE_OPERAND (exp, 1),
8068 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8069 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8070
8071 /* If we are not to produce a result, we have no target. Otherwise,
8072 if a target was specified use it; it will not be used as an
8073 intermediate target unless it is safe. If no target, use a
8074 temporary. */
8075
8076 if (ignore)
8077 temp = 0;
8078 else if (original_target
8079 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8080 || (singleton && GET_CODE (original_target) == REG
8081 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8082 && original_target == var_rtx (singleton)))
8083 && GET_MODE (original_target) == mode
8084 #ifdef HAVE_conditional_move
8085 && (! can_conditionally_move_p (mode)
8086 || GET_CODE (original_target) == REG
8087 || TREE_ADDRESSABLE (type))
8088 #endif
8089 && (GET_CODE (original_target) != MEM
8090 || TREE_ADDRESSABLE (type)))
8091 temp = original_target;
8092 else if (TREE_ADDRESSABLE (type))
8093 abort ();
8094 else
8095 temp = assign_temp (type, 0, 0, 1);
8096
8097 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8098 do the test of X as a store-flag operation, do this as
8099 A + ((X != 0) << log C). Similarly for other simple binary
8100 operators. Only do for C == 1 if BRANCH_COST is low. */
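/* E.g. 'flag ? n + 4 : n' (hypothetical operands) can be expanded, when
   the condition is computable as a store-flag value, as
   n + ((flag != 0) << 2), trading the branch for a shift and an add.  */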
8101 if (temp && singleton && binary_op
8102 && (TREE_CODE (binary_op) == PLUS_EXPR
8103 || TREE_CODE (binary_op) == MINUS_EXPR
8104 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8105 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8106 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8107 : integer_onep (TREE_OPERAND (binary_op, 1)))
8108 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8109 {
8110 rtx result;
8111 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8112 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8113 ? addv_optab : add_optab)
8114 : TREE_CODE (binary_op) == MINUS_EXPR
8115 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8116 ? subv_optab : sub_optab)
8117 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8118 : xor_optab);
8119
8120 /* If we had X ? A : A + 1, do this as A + (X == 0).
8121
8122 We have to invert the truth value here and then put it
8123 back later if do_store_flag fails. We cannot simply copy
8124 TREE_OPERAND (exp, 0) to another variable and modify that
8125 because invert_truthvalue can modify the tree pointed to
8126 by its argument. */
8127 if (singleton == TREE_OPERAND (exp, 1))
8128 TREE_OPERAND (exp, 0)
8129 = invert_truthvalue (TREE_OPERAND (exp, 0));
8130
8131 result = do_store_flag (TREE_OPERAND (exp, 0),
8132 (safe_from_p (temp, singleton, 1)
8133 ? temp : NULL_RTX),
8134 mode, BRANCH_COST <= 1);
8135
8136 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8137 result = expand_shift (LSHIFT_EXPR, mode, result,
8138 build_int_2 (tree_log2
8139 (TREE_OPERAND
8140 (binary_op, 1)),
8141 0),
8142 (safe_from_p (temp, singleton, 1)
8143 ? temp : NULL_RTX), 0);
8144
8145 if (result)
8146 {
8147 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8148 return expand_binop (mode, boptab, op1, result, temp,
8149 unsignedp, OPTAB_LIB_WIDEN);
8150 }
8151 else if (singleton == TREE_OPERAND (exp, 1))
8152 TREE_OPERAND (exp, 0)
8153 = invert_truthvalue (TREE_OPERAND (exp, 0));
8154 }
8155
8156 do_pending_stack_adjust ();
8157 NO_DEFER_POP;
8158 op0 = gen_label_rtx ();
8159
8160 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8161 {
8162 if (temp != 0)
8163 {
8164 /* If the target conflicts with the other operand of the
8165 binary op, we can't use it. Also, we can't use the target
8166 if it is a hard register, because evaluating the condition
8167 might clobber it. */
8168 if ((binary_op
8169 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8170 || (GET_CODE (temp) == REG
8171 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8172 temp = gen_reg_rtx (mode);
8173 store_expr (singleton, temp, 0);
8174 }
8175 else
8176 expand_expr (singleton,
8177 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8178 if (singleton == TREE_OPERAND (exp, 1))
8179 jumpif (TREE_OPERAND (exp, 0), op0);
8180 else
8181 jumpifnot (TREE_OPERAND (exp, 0), op0);
8182
8183 start_cleanup_deferral ();
8184 if (binary_op && temp == 0)
8185 /* Just touch the other operand. */
8186 expand_expr (TREE_OPERAND (binary_op, 1),
8187 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8188 else if (binary_op)
8189 store_expr (build (TREE_CODE (binary_op), type,
8190 make_tree (type, temp),
8191 TREE_OPERAND (binary_op, 1)),
8192 temp, 0);
8193 else
8194 store_expr (build1 (TREE_CODE (unary_op), type,
8195 make_tree (type, temp)),
8196 temp, 0);
8197 op1 = op0;
8198 }
8199 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8200 comparison operator. If we have one of these cases, set the
8201 output to A, branch on A (cse will merge these two references),
8202 then set the output to FOO. */
8203 else if (temp
8204 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8205 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8206 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8207 TREE_OPERAND (exp, 1), 0)
8208 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8209 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8210 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8211 {
8212 if (GET_CODE (temp) == REG
8213 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8214 temp = gen_reg_rtx (mode);
8215 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8216 jumpif (TREE_OPERAND (exp, 0), op0);
8217
8218 start_cleanup_deferral ();
8219 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8220 op1 = op0;
8221 }
8222 else if (temp
8223 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8224 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8225 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8226 TREE_OPERAND (exp, 2), 0)
8227 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8228 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8229 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8230 {
8231 if (GET_CODE (temp) == REG
8232 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8233 temp = gen_reg_rtx (mode);
8234 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8235 jumpifnot (TREE_OPERAND (exp, 0), op0);
8236
8237 start_cleanup_deferral ();
8238 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8239 op1 = op0;
8240 }
8241 else
8242 {
8243 op1 = gen_label_rtx ();
8244 jumpifnot (TREE_OPERAND (exp, 0), op0);
8245
8246 start_cleanup_deferral ();
8247
8248 /* One branch of the cond can be void, if it never returns. For
8249 example A ? throw : E */
8250 if (temp != 0
8251 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8252 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8253 else
8254 expand_expr (TREE_OPERAND (exp, 1),
8255 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8256 end_cleanup_deferral ();
8257 emit_queue ();
8258 emit_jump_insn (gen_jump (op1));
8259 emit_barrier ();
8260 emit_label (op0);
8261 start_cleanup_deferral ();
8262 if (temp != 0
8263 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8264 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8265 else
8266 expand_expr (TREE_OPERAND (exp, 2),
8267 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8268 }
8269
8270 end_cleanup_deferral ();
8271
8272 emit_queue ();
8273 emit_label (op1);
8274 OK_DEFER_POP;
8275
8276 return temp;
8277 }
8278
8279 case TARGET_EXPR:
8280 {
8281 /* Something needs to be initialized, but we didn't know
8282 where that thing was when building the tree. For example,
8283 it could be the return value of a function, or a parameter
8284 to a function which is laid down in the stack, or a temporary
8285 variable which must be passed by reference.
8286
8287 We guarantee that the expression will either be constructed
8288 or copied into our original target. */
8289
8290 tree slot = TREE_OPERAND (exp, 0);
8291 tree cleanups = NULL_TREE;
8292 tree exp1;
8293
8294 if (TREE_CODE (slot) != VAR_DECL)
8295 abort ();
8296
8297 if (! ignore)
8298 target = original_target;
8299
8300 /* Set this here so that if we get a target that refers to a
8301 register variable that's already been used, put_reg_into_stack
8302 knows that it should fix up those uses. */
8303 TREE_USED (slot) = 1;
8304
8305 if (target == 0)
8306 {
8307 if (DECL_RTL_SET_P (slot))
8308 {
8309 target = DECL_RTL (slot);
8310 /* If we have already expanded the slot, don't do
8311 it again. (mrs) */
8312 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8313 return target;
8314 }
8315 else
8316 {
8317 target = assign_temp (type, 2, 0, 1);
8318 /* All temp slots at this level must not conflict. */
8319 preserve_temp_slots (target);
8320 SET_DECL_RTL (slot, target);
8321 if (TREE_ADDRESSABLE (slot))
8322 put_var_into_stack (slot);
8323
8324 /* Since SLOT is not known to the called function
8325 to belong to its stack frame, we must build an explicit
8326 cleanup. This case occurs when we must build up a reference
8327 to pass the reference as an argument. In this case,
8328 it is very likely that such a reference need not be
8329 built here. */
8330
8331 if (TREE_OPERAND (exp, 2) == 0)
8332 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8333 cleanups = TREE_OPERAND (exp, 2);
8334 }
8335 }
8336 else
8337 {
8338 /* This case does occur when expanding a parameter which
8339 needs to be constructed on the stack. The target
8340 is the actual stack address that we want to initialize.
8341 The function we call will perform the cleanup in this case. */
8342
8343 /* If we have already assigned it space, use that space,
8344 not the target that we were passed in, as our target
8345 parameter is only a hint. */
8346 if (DECL_RTL_SET_P (slot))
8347 {
8348 target = DECL_RTL (slot);
8349 /* If we have already expanded the slot, don't do
8350 it again. (mrs) */
8351 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8352 return target;
8353 }
8354 else
8355 {
8356 SET_DECL_RTL (slot, target);
8357 /* If we must have an addressable slot, then make sure that
8358 the RTL that we just stored in slot is OK. */
8359 if (TREE_ADDRESSABLE (slot))
8360 put_var_into_stack (slot);
8361 }
8362 }
8363
8364 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8365 /* Mark it as expanded. */
8366 TREE_OPERAND (exp, 1) = NULL_TREE;
8367
8368 store_expr (exp1, target, 0);
8369
8370 expand_decl_cleanup (NULL_TREE, cleanups);
8371
8372 return target;
8373 }
8374
8375 case INIT_EXPR:
8376 {
8377 tree lhs = TREE_OPERAND (exp, 0);
8378 tree rhs = TREE_OPERAND (exp, 1);
8379
8380 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8381 return temp;
8382 }
8383
8384 case MODIFY_EXPR:
8385 {
8386 /* If lhs is complex, expand calls in rhs before computing it.
8387 That's so we don't compute a pointer and save it over a
8388 call. If lhs is simple, compute it first so we can give it
8389 as a target if the rhs is just a call. This avoids an
8390 extra temp and copy, and prevents a partial subsumption
8391 which makes bad code. Actually we could treat
8392 component_ref's of vars like vars. */
8393
8394 tree lhs = TREE_OPERAND (exp, 0);
8395 tree rhs = TREE_OPERAND (exp, 1);
8396
8397 temp = 0;
8398
8399 /* Check for |= or &= of a bitfield of size one into another bitfield
8400 of size 1. In this case, (unless we need the result of the
8401 assignment) we can do this more efficiently with a
8402 test followed by an assignment, if necessary.
8403
8404 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8405 things change so we do, this code should be enhanced to
8406 support it. */
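/* In other words, with the result unused, 's.x |= t.y' on one-bit fields
   (hypothetical names) is emitted as 'if (t.y) s.x = 1;' and 's.x &= t.y'
   as 'if (!t.y) s.x = 0;', avoiding a read-modify-write of s.x.  */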
8407 if (ignore
8408 && TREE_CODE (lhs) == COMPONENT_REF
8409 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8410 || TREE_CODE (rhs) == BIT_AND_EXPR)
8411 && TREE_OPERAND (rhs, 0) == lhs
8412 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8413 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8414 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8415 {
8416 rtx label = gen_label_rtx ();
8417
8418 do_jump (TREE_OPERAND (rhs, 1),
8419 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8420 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8421 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8422 (TREE_CODE (rhs) == BIT_IOR_EXPR
8423 ? integer_one_node
8424 : integer_zero_node)),
8425 0, 0);
8426 do_pending_stack_adjust ();
8427 emit_label (label);
8428 return const0_rtx;
8429 }
8430
8431 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8432
8433 return temp;
8434 }
8435
8436 case RETURN_EXPR:
8437 if (!TREE_OPERAND (exp, 0))
8438 expand_null_return ();
8439 else
8440 expand_return (TREE_OPERAND (exp, 0));
8441 return const0_rtx;
8442
8443 case PREINCREMENT_EXPR:
8444 case PREDECREMENT_EXPR:
8445 return expand_increment (exp, 0, ignore);
8446
8447 case POSTINCREMENT_EXPR:
8448 case POSTDECREMENT_EXPR:
8449 /* Faster to treat as pre-increment if result is not used. */
8450 return expand_increment (exp, ! ignore, ignore);
8451
8452 case ADDR_EXPR:
8453 /* Are we taking the address of a nested function? */
8454 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8455 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8456 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8457 && ! TREE_STATIC (exp))
8458 {
8459 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8460 op0 = force_operand (op0, target);
8461 }
8462 /* If we are taking the address of something erroneous, just
8463 return a zero. */
8464 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8465 return const0_rtx;
8466 /* If we are taking the address of a constant and are at the
8467 top level, we have to use output_constant_def since we can't
8468 call force_const_mem at top level. */
8469 else if (cfun == 0
8470 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8471 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8472 == 'c')))
8473 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8474 else
8475 {
8476 /* We make sure to pass const0_rtx down if we came in with
8477 ignore set, to avoid doing the cleanups twice for something. */
8478 op0 = expand_expr (TREE_OPERAND (exp, 0),
8479 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8480 (modifier == EXPAND_INITIALIZER
8481 ? modifier : EXPAND_CONST_ADDRESS));
8482
8483 /* If we are going to ignore the result, OP0 will have been set
8484 to const0_rtx, so just return it. Don't get confused and
8485 think we are taking the address of the constant. */
8486 if (ignore)
8487 return op0;
8488
8489 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8490 clever and return a REG when given a MEM. */
8491 op0 = protect_from_queue (op0, 1);
8492
8493 /* We would like the object in memory. If it is a constant, we can
8494 have it be statically allocated into memory. For a non-constant,
8495 we need to allocate some memory and store the value into it. */
8496
8497 if (CONSTANT_P (op0))
8498 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8499 op0);
8500 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8501 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8502 || GET_CODE (op0) == PARALLEL)
8503 {
8504 /* If the operand is a SAVE_EXPR, we can deal with this by
8505 forcing the SAVE_EXPR into memory. */
8506 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8507 {
8508 put_var_into_stack (TREE_OPERAND (exp, 0));
8509 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8510 }
8511 else
8512 {
8513 /* If this object is in a register, it can't be BLKmode. */
8514 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8515 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8516
8517 if (GET_CODE (op0) == PARALLEL)
8518 /* Handle calls that pass values in multiple
8519 non-contiguous locations. The Irix 6 ABI has examples
8520 of this. */
8521 emit_group_store (memloc, op0,
8522 int_size_in_bytes (inner_type));
8523 else
8524 emit_move_insn (memloc, op0);
8525
8526 op0 = memloc;
8527 }
8528 }
8529
8530 if (GET_CODE (op0) != MEM)
8531 abort ();
8532
8533 mark_temp_addr_taken (op0);
8534 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8535 {
8536 op0 = XEXP (op0, 0);
8537 #ifdef POINTERS_EXTEND_UNSIGNED
8538 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8539 && mode == ptr_mode)
8540 op0 = convert_memory_address (ptr_mode, op0);
8541 #endif
8542 return op0;
8543 }
8544
8545 /* If OP0 is not aligned at least as much as the type requires, we
8546 need to make a temporary, copy OP0 to it, and take the address of
8547 the temporary. We want to use the alignment of the type, not of
8548 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8549 the test for BLKmode means that can't happen. The test for
8550 BLKmode is because we never make mis-aligned MEMs with
8551 non-BLKmode.
8552
8553 We don't need to do this at all if the machine doesn't have
8554 strict alignment. */
8555 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8556 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8557 > MEM_ALIGN (op0))
8558 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8559 {
8560 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8561 rtx new
8562 = assign_stack_temp_for_type
8563 (TYPE_MODE (inner_type),
8564 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8565 : int_size_in_bytes (inner_type),
8566 1, build_qualified_type (inner_type,
8567 (TYPE_QUALS (inner_type)
8568 | TYPE_QUAL_CONST)));
8569
8570 if (TYPE_ALIGN_OK (inner_type))
8571 abort ();
8572
8573 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8574 op0 = new;
8575 }
8576
8577 op0 = force_operand (XEXP (op0, 0), target);
8578 }
8579
8580 if (flag_force_addr
8581 && GET_CODE (op0) != REG
8582 && modifier != EXPAND_CONST_ADDRESS
8583 && modifier != EXPAND_INITIALIZER
8584 && modifier != EXPAND_SUM)
8585 op0 = force_reg (Pmode, op0);
8586
8587 if (GET_CODE (op0) == REG
8588 && ! REG_USERVAR_P (op0))
8589 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8590
8591 #ifdef POINTERS_EXTEND_UNSIGNED
8592 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8593 && mode == ptr_mode)
8594 op0 = convert_memory_address (ptr_mode, op0);
8595 #endif
8596
8597 return op0;
8598
8599 case ENTRY_VALUE_EXPR:
8600 abort ();
8601
8602 /* COMPLEX type for Extended Pascal & Fortran */
8603 case COMPLEX_EXPR:
8604 {
8605 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8606 rtx insns;
8607
8608 /* Get the rtx for the operands. */
8609 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8610 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8611
8612 if (! target)
8613 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8614
8615 start_sequence ();
8616
8617 /* Move the real (op0) and imaginary (op1) parts to their location. */
8618 emit_move_insn (gen_realpart (mode, target), op0);
8619 emit_move_insn (gen_imagpart (mode, target), op1);
8620
8621 insns = get_insns ();
8622 end_sequence ();
8623
8624 /* Complex construction should appear as a single unit. */
8625 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8626 each with a separate pseudo as destination.
8627 It's not correct for flow to treat them as a unit. */
8628 if (GET_CODE (target) != CONCAT)
8629 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8630 else
8631 emit_insns (insns);
8632
8633 return target;
8634 }
8635
8636 case REALPART_EXPR:
8637 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8638 return gen_realpart (mode, op0);
8639
8640 case IMAGPART_EXPR:
8641 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8642 return gen_imagpart (mode, op0);
8643
8644 case CONJ_EXPR:
8645 {
8646 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8647 rtx imag_t;
8648 rtx insns;
8649
8650 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8651
8652 if (! target)
8653 target = gen_reg_rtx (mode);
8654
8655 start_sequence ();
8656
8657 /* Store the realpart and the negated imagpart to target. */
8658 emit_move_insn (gen_realpart (partmode, target),
8659 gen_realpart (partmode, op0));
8660
8661 imag_t = gen_imagpart (partmode, target);
8662 temp = expand_unop (partmode,
8663 ! unsignedp && flag_trapv
8664 && (GET_MODE_CLASS(partmode) == MODE_INT)
8665 ? negv_optab : neg_optab,
8666 gen_imagpart (partmode, op0), imag_t, 0);
8667 if (temp != imag_t)
8668 emit_move_insn (imag_t, temp);
8669
8670 insns = get_insns ();
8671 end_sequence ();
8672
8673 /* Conjugate should appear as a single unit.
8674 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8675 each with a separate pseudo as destination.
8676 It's not correct for flow to treat them as a unit. */
8677 if (GET_CODE (target) != CONCAT)
8678 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8679 else
8680 emit_insns (insns);
8681
8682 return target;
8683 }
8684
8685 case TRY_CATCH_EXPR:
8686 {
8687 tree handler = TREE_OPERAND (exp, 1);
8688
8689 expand_eh_region_start ();
8690
8691 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8692
8693 expand_eh_region_end_cleanup (handler);
8694
8695 return op0;
8696 }
8697
8698 case TRY_FINALLY_EXPR:
8699 {
8700 tree try_block = TREE_OPERAND (exp, 0);
8701 tree finally_block = TREE_OPERAND (exp, 1);
8702 rtx finally_label = gen_label_rtx ();
8703 rtx done_label = gen_label_rtx ();
8704 rtx return_link = gen_reg_rtx (Pmode);
8705 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8706 (tree) finally_label, (tree) return_link);
8707 TREE_SIDE_EFFECTS (cleanup) = 1;
8708
8709 /* Start a new binding layer that will keep track of all cleanup
8710 actions to be performed. */
8711 expand_start_bindings (2);
8712
8713 target_temp_slot_level = temp_slot_level;
8714
8715 expand_decl_cleanup (NULL_TREE, cleanup);
8716 op0 = expand_expr (try_block, target, tmode, modifier);
8717
8718 preserve_temp_slots (op0);
8719 expand_end_bindings (NULL_TREE, 0, 0);
8720 emit_jump (done_label);
8721 emit_label (finally_label);
8722 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8723 emit_indirect_jump (return_link);
8724 emit_label (done_label);
8725 return op0;
8726 }
8727
8728 case GOTO_SUBROUTINE_EXPR:
8729 {
8730 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8731 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8732 rtx return_address = gen_label_rtx ();
8733 emit_move_insn (return_link,
8734 gen_rtx_LABEL_REF (Pmode, return_address));
8735 emit_jump (subr);
8736 emit_label (return_address);
8737 return const0_rtx;
8738 }
8739
8740 case VA_ARG_EXPR:
8741 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8742
8743 case EXC_PTR_EXPR:
8744 return get_exception_pointer (cfun);
8745
8746 case FDESC_EXPR:
8747 /* Function descriptors are not valid except as
8748 initialization constants, and should not be expanded. */
8749 abort ();
8750
8751 default:
8752 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8753 }
8754
8755 /* Here to do an ordinary binary operator, generating an instruction
8756 from the optab already placed in `this_optab'. */
8757 binop:
8758 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8759 subtarget = 0;
8760 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8761 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8762 binop2:
8763 temp = expand_binop (mode, this_optab, op0, op1, target,
8764 unsignedp, OPTAB_LIB_WIDEN);
8765 if (temp == 0)
8766 abort ();
8767 return temp;
8768 }
8769 \f
8770 /* Return the tree node if ARG corresponds to a string constant, or zero
8771 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8772 in bytes within the string that ARG is accessing. The type of the
8773 offset will be `sizetype'. */
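/* Illustrative example (added commentary, not from the original
   sources): for the C expression "hello" + 2, ARG is typically a
   PLUS_EXPR whose first operand is an ADDR_EXPR of the STRING_CST
   "hello"; the STRING_CST is returned and *PTR_OFFSET is set to 2.  */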
8774
8775 tree
8776 string_constant (arg, ptr_offset)
8777 tree arg;
8778 tree *ptr_offset;
8779 {
8780 STRIP_NOPS (arg);
8781
8782 if (TREE_CODE (arg) == ADDR_EXPR
8783 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8784 {
8785 *ptr_offset = size_zero_node;
8786 return TREE_OPERAND (arg, 0);
8787 }
8788 else if (TREE_CODE (arg) == PLUS_EXPR)
8789 {
8790 tree arg0 = TREE_OPERAND (arg, 0);
8791 tree arg1 = TREE_OPERAND (arg, 1);
8792
8793 STRIP_NOPS (arg0);
8794 STRIP_NOPS (arg1);
8795
8796 if (TREE_CODE (arg0) == ADDR_EXPR
8797 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8798 {
8799 *ptr_offset = convert (sizetype, arg1);
8800 return TREE_OPERAND (arg0, 0);
8801 }
8802 else if (TREE_CODE (arg1) == ADDR_EXPR
8803 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8804 {
8805 *ptr_offset = convert (sizetype, arg0);
8806 return TREE_OPERAND (arg1, 0);
8807 }
8808 }
8809
8810 return 0;
8811 }
8812 \f
8813 /* Expand code for a post- or pre- increment or decrement
8814 and return the RTX for the result.
8815 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
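/* Illustrative note (added commentary): for a postincrement such as
   `y = x++;' the rtx returned holds the value of X before the
   increment, while for a preincrement `y = ++x;' it holds the value
   after the increment.  In the postincrement case the increment itself
   may merely be queued rather than emitted immediately.  */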
8816
8817 static rtx
8818 expand_increment (exp, post, ignore)
8819 tree exp;
8820 int post, ignore;
8821 {
8822 rtx op0, op1;
8823 rtx temp, value;
8824 tree incremented = TREE_OPERAND (exp, 0);
8825 optab this_optab = add_optab;
8826 int icode;
8827 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8828 int op0_is_copy = 0;
8829 int single_insn = 0;
8830 /* 1 means we can't store into OP0 directly,
8831 because it is a subreg narrower than a word,
8832 and we don't dare clobber the rest of the word. */
8833 int bad_subreg = 0;
8834
8835 /* Stabilize any component ref that might need to be
8836 evaluated more than once below. */
8837 if (!post
8838 || TREE_CODE (incremented) == BIT_FIELD_REF
8839 || (TREE_CODE (incremented) == COMPONENT_REF
8840 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8841 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8842 incremented = stabilize_reference (incremented);
8843 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8844 ones into save exprs so that they don't accidentally get evaluated
8845 more than once by the code below. */
8846 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8847 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8848 incremented = save_expr (incremented);
8849
8850 /* Compute the operands as RTX.
8851 Note whether OP0 is the actual lvalue or a copy of it:
8852 I believe it is a copy iff it is a register or subreg
8853 and insns were generated in computing it. */
8854
8855 temp = get_last_insn ();
8856 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8857
8858 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8859 in place but instead must do sign- or zero-extension during assignment,
8860 so we copy it into a new register and let the code below use it as
8861 a copy.
8862
8863 Note that we can safely modify this SUBREG since it is known not to be
8864 shared (it was made by the expand_expr call above). */
8865
8866 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8867 {
8868 if (post)
8869 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8870 else
8871 bad_subreg = 1;
8872 }
8873 else if (GET_CODE (op0) == SUBREG
8874 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8875 {
8876 /* We cannot increment this SUBREG in place. If we are
8877 post-incrementing, get a copy of the old value. Otherwise,
8878 just mark that we cannot increment in place. */
8879 if (post)
8880 op0 = copy_to_reg (op0);
8881 else
8882 bad_subreg = 1;
8883 }
8884
8885 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8886 && temp != get_last_insn ());
8887 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8888
8889 /* Decide whether incrementing or decrementing. */
8890 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8891 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8892 this_optab = sub_optab;
8893
8894 /* Convert decrement by a constant into a negative increment. */
8895 if (this_optab == sub_optab
8896 && GET_CODE (op1) == CONST_INT)
8897 {
8898 op1 = GEN_INT (-INTVAL (op1));
8899 this_optab = add_optab;
8900 }
8901
8902 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8903 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8904
8905 /* For a preincrement, see if we can do this with a single instruction. */
8906 if (!post)
8907 {
8908 icode = (int) this_optab->handlers[(int) mode].insn_code;
8909 if (icode != (int) CODE_FOR_nothing
8910 /* Make sure that OP0 is valid for operands 0 and 1
8911 of the insn we want to queue. */
8912 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8913 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8914 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8915 single_insn = 1;
8916 }
8917
8918 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8919 then we cannot just increment OP0. We must therefore contrive to
8920 increment the original value. Then, for postincrement, we can return
8921 OP0 since it is a copy of the old value. For preincrement, expand here
8922 unless we can do it with a single insn.
8923
8924 Likewise if storing directly into OP0 would clobber high bits
8925 we need to preserve (bad_subreg). */
8926 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8927 {
8928 /* This is the easiest way to increment the value wherever it is.
8929 Problems with multiple evaluation of INCREMENTED are prevented
8930 because either (1) it is a component_ref or preincrement,
8931 in which case it was stabilized above, or (2) it is an array_ref
8932 with constant index in an array in a register, which is
8933 safe to reevaluate. */
8934 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8935 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8936 ? MINUS_EXPR : PLUS_EXPR),
8937 TREE_TYPE (exp),
8938 incremented,
8939 TREE_OPERAND (exp, 1));
8940
8941 while (TREE_CODE (incremented) == NOP_EXPR
8942 || TREE_CODE (incremented) == CONVERT_EXPR)
8943 {
8944 newexp = convert (TREE_TYPE (incremented), newexp);
8945 incremented = TREE_OPERAND (incremented, 0);
8946 }
8947
8948 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8949 return post ? op0 : temp;
8950 }
8951
8952 if (post)
8953 {
8954 /* We have a true reference to the value in OP0.
8955 If there is an insn to add or subtract in this mode, queue it.
8956 Queueing the increment insn avoids the register shuffling
8957 that often results if we must increment now and first save
8958 the old value for subsequent use. */
8959
8960 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8961 op0 = stabilize (op0);
8962 #endif
8963
8964 icode = (int) this_optab->handlers[(int) mode].insn_code;
8965 if (icode != (int) CODE_FOR_nothing
8966 /* Make sure that OP0 is valid for operands 0 and 1
8967 of the insn we want to queue. */
8968 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8969 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8970 {
8971 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8972 op1 = force_reg (mode, op1);
8973
8974 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8975 }
8976 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8977 {
8978 rtx addr = (general_operand (XEXP (op0, 0), mode)
8979 ? force_reg (Pmode, XEXP (op0, 0))
8980 : copy_to_reg (XEXP (op0, 0)));
8981 rtx temp, result;
8982
8983 op0 = replace_equiv_address (op0, addr);
8984 temp = force_reg (GET_MODE (op0), op0);
8985 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8986 op1 = force_reg (mode, op1);
8987
8988 /* The increment queue is LIFO, so we have to `queue'
8989 the instructions in reverse order. */
8990 enqueue_insn (op0, gen_move_insn (op0, temp));
8991 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8992 return result;
8993 }
8994 }
8995
8996 /* Preincrement, or we can't increment with one simple insn. */
8997 if (post)
8998 /* Save a copy of the value before inc or dec, to return it later. */
8999 temp = value = copy_to_reg (op0);
9000 else
9001 /* Arrange to return the incremented value. */
9002 /* Copy the rtx because expand_binop will protect from the queue,
9003 and the results of that would be invalid for us to return
9004 if our caller does emit_queue before using our result. */
9005 temp = copy_rtx (value = op0);
9006
9007 /* Increment however we can. */
9008 op1 = expand_binop (mode, this_optab, value, op1, op0,
9009 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9010
9011 /* Make sure the value is stored into OP0. */
9012 if (op1 != op0)
9013 emit_move_insn (op0, op1);
9014
9015 return temp;
9016 }
9017 \f
9018 /* At the start of a function, record that we have no previously-pushed
9019 arguments waiting to be popped. */
9020
9021 void
9022 init_pending_stack_adjust ()
9023 {
9024 pending_stack_adjust = 0;
9025 }
9026
9027 /* When exiting from function, if safe, clear out any pending stack adjust
9028 so the adjustment won't get done.
9029
9030 Note, if the current function calls alloca, then it must have a
9031 frame pointer regardless of the value of flag_omit_frame_pointer. */
9032
9033 void
9034 clear_pending_stack_adjust ()
9035 {
9036 #ifdef EXIT_IGNORE_STACK
9037 if (optimize > 0
9038 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9039 && EXIT_IGNORE_STACK
9040 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9041 && ! flag_inline_functions)
9042 {
9043 stack_pointer_delta -= pending_stack_adjust;
9044 pending_stack_adjust = 0;
9045 }
9046 #endif
9047 }
9048
9049 /* Pop any previously-pushed arguments that have not been popped yet. */
9050
9051 void
9052 do_pending_stack_adjust ()
9053 {
9054 if (inhibit_defer_pop == 0)
9055 {
9056 if (pending_stack_adjust != 0)
9057 adjust_stack (GEN_INT (pending_stack_adjust));
9058 pending_stack_adjust = 0;
9059 }
9060 }
9061 \f
9062 /* Expand conditional expressions. */
9063
9064 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9065 LABEL is an rtx of code CODE_LABEL, in this function and all the
9066 functions here. */
9067
9068 void
9069 jumpifnot (exp, label)
9070 tree exp;
9071 rtx label;
9072 {
9073 do_jump (exp, label, NULL_RTX);
9074 }
9075
9076 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9077
9078 void
9079 jumpif (exp, label)
9080 tree exp;
9081 rtx label;
9082 {
9083 do_jump (exp, NULL_RTX, label);
9084 }
9085
9086 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9087 the result is zero, or IF_TRUE_LABEL if the result is one.
9088 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9089 meaning fall through in that case.
9090
9091 do_jump always does any pending stack adjust except when it does not
9092 actually perform a jump. An example where there is no jump
9093 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9094
9095 This function is responsible for optimizing cases such as
9096 &&, || and comparison operators in EXP. */
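/* Example (added commentary): for `if (a && b)' expanded as a
   TRUTH_ANDIF_EXPR, the code below jumps to IF_FALSE_LABEL as soon as
   A evaluates to zero and only evaluates B when A is nonzero, so C's
   short-circuit semantics fall out of the jump structure directly.  */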
9097
9098 void
9099 do_jump (exp, if_false_label, if_true_label)
9100 tree exp;
9101 rtx if_false_label, if_true_label;
9102 {
9103 enum tree_code code = TREE_CODE (exp);
9104 /* Some cases need to create a label to jump to
9105 in order to properly fall through.
9106 These cases set DROP_THROUGH_LABEL nonzero. */
9107 rtx drop_through_label = 0;
9108 rtx temp;
9109 int i;
9110 tree type;
9111 enum machine_mode mode;
9112
9113 #ifdef MAX_INTEGER_COMPUTATION_MODE
9114 check_max_integer_computation_mode (exp);
9115 #endif
9116
9117 emit_queue ();
9118
9119 switch (code)
9120 {
9121 case ERROR_MARK:
9122 break;
9123
9124 case INTEGER_CST:
9125 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9126 if (temp)
9127 emit_jump (temp);
9128 break;
9129
9130 #if 0
9131 /* This is not true with #pragma weak */
9132 case ADDR_EXPR:
9133 /* The address of something can never be zero. */
9134 if (if_true_label)
9135 emit_jump (if_true_label);
9136 break;
9137 #endif
9138
9139 case NOP_EXPR:
9140 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9141 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9142 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9143 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9144 goto normal;
9145 case CONVERT_EXPR:
9146 /* If we are narrowing the operand, we have to do the compare in the
9147 narrower mode. */
9148 if ((TYPE_PRECISION (TREE_TYPE (exp))
9149 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9150 goto normal;
9151 case NON_LVALUE_EXPR:
9152 case REFERENCE_EXPR:
9153 case ABS_EXPR:
9154 case NEGATE_EXPR:
9155 case LROTATE_EXPR:
9156 case RROTATE_EXPR:
9157 /* These cannot change zero->non-zero or vice versa. */
9158 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9159 break;
9160
9161 case WITH_RECORD_EXPR:
9162 /* Put the object on the placeholder list, recurse through our first
9163 operand, and pop the list. */
9164 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9165 placeholder_list);
9166 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9167 placeholder_list = TREE_CHAIN (placeholder_list);
9168 break;
9169
9170 #if 0
9171 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9172 a test, and can be longer if the test is eliminated. */
9173 case PLUS_EXPR:
9174 /* Reduce to minus. */
9175 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9176 TREE_OPERAND (exp, 0),
9177 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9178 TREE_OPERAND (exp, 1))));
9179 /* Process as MINUS. */
9180 #endif
9181
9182 case MINUS_EXPR:
9183 /* Non-zero iff operands of minus differ. */
9184 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9185 TREE_OPERAND (exp, 0),
9186 TREE_OPERAND (exp, 1)),
9187 NE, NE, if_false_label, if_true_label);
9188 break;
9189
9190 case BIT_AND_EXPR:
9191 /* If we are AND'ing with a small constant, do this comparison in the
9192 smallest type that fits. If the machine doesn't have comparisons
9193 that small, it will be converted back to the wider comparison.
9194 This helps if we are testing the sign bit of a narrower object.
9195 combine can't do this for us because it can't know whether a
9196 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
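      /* For instance (illustrative example): a test of `x & 0x80' where
         X is an `unsigned char' can be carried out as a QImode
         comparison instead of widening X to SImode first.  */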
9197
9198 if (! SLOW_BYTE_ACCESS
9199 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9200 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9201 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9202 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9203 && (type = type_for_mode (mode, 1)) != 0
9204 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9205 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9206 != CODE_FOR_nothing))
9207 {
9208 do_jump (convert (type, exp), if_false_label, if_true_label);
9209 break;
9210 }
9211 goto normal;
9212
9213 case TRUTH_NOT_EXPR:
9214 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9215 break;
9216
9217 case TRUTH_ANDIF_EXPR:
9218 if (if_false_label == 0)
9219 if_false_label = drop_through_label = gen_label_rtx ();
9220 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9221 start_cleanup_deferral ();
9222 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9223 end_cleanup_deferral ();
9224 break;
9225
9226 case TRUTH_ORIF_EXPR:
9227 if (if_true_label == 0)
9228 if_true_label = drop_through_label = gen_label_rtx ();
9229 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9230 start_cleanup_deferral ();
9231 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9232 end_cleanup_deferral ();
9233 break;
9234
9235 case COMPOUND_EXPR:
9236 push_temp_slots ();
9237 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9238 preserve_temp_slots (NULL_RTX);
9239 free_temp_slots ();
9240 pop_temp_slots ();
9241 emit_queue ();
9242 do_pending_stack_adjust ();
9243 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9244 break;
9245
9246 case COMPONENT_REF:
9247 case BIT_FIELD_REF:
9248 case ARRAY_REF:
9249 case ARRAY_RANGE_REF:
9250 {
9251 HOST_WIDE_INT bitsize, bitpos;
9252 int unsignedp;
9253 enum machine_mode mode;
9254 tree type;
9255 tree offset;
9256 int volatilep = 0;
9257
9258 /* Get description of this reference. We don't actually care
9259 about the underlying object here. */
9260 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9261 &unsignedp, &volatilep);
9262
9263 type = type_for_size (bitsize, unsignedp);
9264 if (! SLOW_BYTE_ACCESS
9265 && type != 0 && bitsize >= 0
9266 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9267 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9268 != CODE_FOR_nothing))
9269 {
9270 do_jump (convert (type, exp), if_false_label, if_true_label);
9271 break;
9272 }
9273 goto normal;
9274 }
9275
9276 case COND_EXPR:
9277 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9278 if (integer_onep (TREE_OPERAND (exp, 1))
9279 && integer_zerop (TREE_OPERAND (exp, 2)))
9280 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9281
9282 else if (integer_zerop (TREE_OPERAND (exp, 1))
9283 && integer_onep (TREE_OPERAND (exp, 2)))
9284 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9285
9286 else
9287 {
9288 rtx label1 = gen_label_rtx ();
9289 drop_through_label = gen_label_rtx ();
9290
9291 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9292
9293 start_cleanup_deferral ();
9294 /* Now the THEN-expression. */
9295 do_jump (TREE_OPERAND (exp, 1),
9296 if_false_label ? if_false_label : drop_through_label,
9297 if_true_label ? if_true_label : drop_through_label);
9298 /* In case the do_jump just above never jumps. */
9299 do_pending_stack_adjust ();
9300 emit_label (label1);
9301
9302 /* Now the ELSE-expression. */
9303 do_jump (TREE_OPERAND (exp, 2),
9304 if_false_label ? if_false_label : drop_through_label,
9305 if_true_label ? if_true_label : drop_through_label);
9306 end_cleanup_deferral ();
9307 }
9308 break;
9309
9310 case EQ_EXPR:
9311 {
9312 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9313
9314 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9315 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9316 {
9317 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9318 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9319 do_jump
9320 (fold
9321 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9322 fold (build (EQ_EXPR, TREE_TYPE (exp),
9323 fold (build1 (REALPART_EXPR,
9324 TREE_TYPE (inner_type),
9325 exp0)),
9326 fold (build1 (REALPART_EXPR,
9327 TREE_TYPE (inner_type),
9328 exp1)))),
9329 fold (build (EQ_EXPR, TREE_TYPE (exp),
9330 fold (build1 (IMAGPART_EXPR,
9331 TREE_TYPE (inner_type),
9332 exp0)),
9333 fold (build1 (IMAGPART_EXPR,
9334 TREE_TYPE (inner_type),
9335 exp1)))))),
9336 if_false_label, if_true_label);
9337 }
9338
9339 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9340 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9341
9342 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9343 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9344 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9345 else
9346 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9347 break;
9348 }
9349
9350 case NE_EXPR:
9351 {
9352 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9353
9354 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9355 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9356 {
9357 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9358 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9359 do_jump
9360 (fold
9361 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9362 fold (build (NE_EXPR, TREE_TYPE (exp),
9363 fold (build1 (REALPART_EXPR,
9364 TREE_TYPE (inner_type),
9365 exp0)),
9366 fold (build1 (REALPART_EXPR,
9367 TREE_TYPE (inner_type),
9368 exp1)))),
9369 fold (build (NE_EXPR, TREE_TYPE (exp),
9370 fold (build1 (IMAGPART_EXPR,
9371 TREE_TYPE (inner_type),
9372 exp0)),
9373 fold (build1 (IMAGPART_EXPR,
9374 TREE_TYPE (inner_type),
9375 exp1)))))),
9376 if_false_label, if_true_label);
9377 }
9378
9379 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9380 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9381
9382 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9383 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9384 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9385 else
9386 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9387 break;
9388 }
9389
9390 case LT_EXPR:
9391 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9392 if (GET_MODE_CLASS (mode) == MODE_INT
9393 && ! can_compare_p (LT, mode, ccp_jump))
9394 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9395 else
9396 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9397 break;
9398
9399 case LE_EXPR:
9400 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9401 if (GET_MODE_CLASS (mode) == MODE_INT
9402 && ! can_compare_p (LE, mode, ccp_jump))
9403 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9404 else
9405 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9406 break;
9407
9408 case GT_EXPR:
9409 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9410 if (GET_MODE_CLASS (mode) == MODE_INT
9411 && ! can_compare_p (GT, mode, ccp_jump))
9412 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9413 else
9414 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9415 break;
9416
9417 case GE_EXPR:
9418 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9419 if (GET_MODE_CLASS (mode) == MODE_INT
9420 && ! can_compare_p (GE, mode, ccp_jump))
9421 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9422 else
9423 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9424 break;
9425
9426 case UNORDERED_EXPR:
9427 case ORDERED_EXPR:
9428 {
9429 enum rtx_code cmp, rcmp;
9430 int do_rev;
9431
9432 if (code == UNORDERED_EXPR)
9433 cmp = UNORDERED, rcmp = ORDERED;
9434 else
9435 cmp = ORDERED, rcmp = UNORDERED;
9436 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9437
9438 do_rev = 0;
9439 if (! can_compare_p (cmp, mode, ccp_jump)
9440 && (can_compare_p (rcmp, mode, ccp_jump)
9441 /* If the target doesn't provide either UNORDERED or ORDERED
9442 comparisons, canonicalize on UNORDERED for the library. */
9443 || rcmp == UNORDERED))
9444 do_rev = 1;
9445
9446 if (! do_rev)
9447 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9448 else
9449 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9450 }
9451 break;
9452
9453 {
9454 enum rtx_code rcode1;
9455 enum tree_code tcode2;
9456
9457 case UNLT_EXPR:
9458 rcode1 = UNLT;
9459 tcode2 = LT_EXPR;
9460 goto unordered_bcc;
9461 case UNLE_EXPR:
9462 rcode1 = UNLE;
9463 tcode2 = LE_EXPR;
9464 goto unordered_bcc;
9465 case UNGT_EXPR:
9466 rcode1 = UNGT;
9467 tcode2 = GT_EXPR;
9468 goto unordered_bcc;
9469 case UNGE_EXPR:
9470 rcode1 = UNGE;
9471 tcode2 = GE_EXPR;
9472 goto unordered_bcc;
9473 case UNEQ_EXPR:
9474 rcode1 = UNEQ;
9475 tcode2 = EQ_EXPR;
9476 goto unordered_bcc;
9477
9478 unordered_bcc:
9479 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9480 if (can_compare_p (rcode1, mode, ccp_jump))
9481 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9482 if_true_label);
9483 else
9484 {
9485 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9486 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9487 tree cmp0, cmp1;
9488
9489 /* If the target doesn't support combined unordered
9490 compares, decompose into UNORDERED + comparison. */
9491 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9492 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9493 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9494 do_jump (exp, if_false_label, if_true_label);
9495 }
9496 }
9497 break;
9498
9499 /* Special case:
9500 __builtin_expect (<test>, 0) and
9501 __builtin_expect (<test>, 1)
9502
9503 We need to do this here, so that <test> is not converted to a SCC
9504 operation on machines that use condition code registers and COMPARE
9505 like the PowerPC, and then the jump is done based on whether the SCC
9506 operation produced a 1 or 0. */
9507 case CALL_EXPR:
9508 /* Check for a built-in function. */
9509 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9510 {
9511 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9512 tree arglist = TREE_OPERAND (exp, 1);
9513
9514 if (TREE_CODE (fndecl) == FUNCTION_DECL
9515 && DECL_BUILT_IN (fndecl)
9516 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9517 && arglist != NULL_TREE
9518 && TREE_CHAIN (arglist) != NULL_TREE)
9519 {
9520 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9521 if_true_label);
9522
9523 if (seq != NULL_RTX)
9524 {
9525 emit_insn (seq);
9526 return;
9527 }
9528 }
9529 }
9530 /* fall through and generate the normal code. */
9531
9532 default:
9533 normal:
9534 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9535 #if 0
9536 /* This is not needed any more and causes poor code since it causes
9537 comparisons and tests from non-SI objects to have different code
9538 sequences. */
9539 /* Copy to register to avoid generating bad insns by cse
9540 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9541 if (!cse_not_expected && GET_CODE (temp) == MEM)
9542 temp = copy_to_reg (temp);
9543 #endif
9544 do_pending_stack_adjust ();
9545 /* Do any postincrements in the expression that was tested. */
9546 emit_queue ();
9547
9548 if (GET_CODE (temp) == CONST_INT
9549 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9550 || GET_CODE (temp) == LABEL_REF)
9551 {
9552 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9553 if (target)
9554 emit_jump (target);
9555 }
9556 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9557 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9558 /* Note swapping the labels gives us not-equal. */
9559 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9560 else if (GET_MODE (temp) != VOIDmode)
9561 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9562 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9563 GET_MODE (temp), NULL_RTX,
9564 if_false_label, if_true_label);
9565 else
9566 abort ();
9567 }
9568
9569 if (drop_through_label)
9570 {
9571 /* If do_jump produces code that might be jumped around,
9572 do any stack adjusts from that code, before the place
9573 where control merges in. */
9574 do_pending_stack_adjust ();
9575 emit_label (drop_through_label);
9576 }
9577 }
9578 \f
9579 /* Given a comparison expression EXP for values too wide to be compared
9580 with one insn, test the comparison and jump to the appropriate label.
9581 The code of EXP is ignored; we always test GT if SWAP is 0,
9582 and LT if SWAP is 1. */
9583
9584 static void
9585 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9586 tree exp;
9587 int swap;
9588 rtx if_false_label, if_true_label;
9589 {
9590 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9591 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9592 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9593 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9594
9595 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9596 }
9597
9598 /* Compare OP0 with OP1, word at a time, in mode MODE.
9599 UNSIGNEDP says to do unsigned comparison.
9600 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
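/* Sketch of the approach (added commentary): on a 32-bit target a
   DImode comparison is handled one SImode word at a time, most
   significant word first.  If the high words differ, the outcome is
   decided there; only when they are equal are the lower words
   examined, and every word below the most significant one is compared
   unsigned.  */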
9601
9602 void
9603 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9604 enum machine_mode mode;
9605 int unsignedp;
9606 rtx op0, op1;
9607 rtx if_false_label, if_true_label;
9608 {
9609 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9610 rtx drop_through_label = 0;
9611 int i;
9612
9613 if (! if_true_label || ! if_false_label)
9614 drop_through_label = gen_label_rtx ();
9615 if (! if_true_label)
9616 if_true_label = drop_through_label;
9617 if (! if_false_label)
9618 if_false_label = drop_through_label;
9619
9620 /* Compare a word at a time, high order first. */
9621 for (i = 0; i < nwords; i++)
9622 {
9623 rtx op0_word, op1_word;
9624
9625 if (WORDS_BIG_ENDIAN)
9626 {
9627 op0_word = operand_subword_force (op0, i, mode);
9628 op1_word = operand_subword_force (op1, i, mode);
9629 }
9630 else
9631 {
9632 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9633 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9634 }
9635
9636 /* All but the high-order word must be compared as unsigned. */
9637 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9638 (unsignedp || i > 0), word_mode, NULL_RTX,
9639 NULL_RTX, if_true_label);
9640
9641 /* Consider lower words only if these are equal. */
9642 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9643 NULL_RTX, NULL_RTX, if_false_label);
9644 }
9645
9646 if (if_false_label)
9647 emit_jump (if_false_label);
9648 if (drop_through_label)
9649 emit_label (drop_through_label);
9650 }
9651
9652 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9653 with one insn, test the comparison and jump to the appropriate label. */
9654
9655 static void
9656 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9657 tree exp;
9658 rtx if_false_label, if_true_label;
9659 {
9660 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9661 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9662 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9663 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9664 int i;
9665 rtx drop_through_label = 0;
9666
9667 if (! if_false_label)
9668 drop_through_label = if_false_label = gen_label_rtx ();
9669
9670 for (i = 0; i < nwords; i++)
9671 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9672 operand_subword_force (op1, i, mode),
9673 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9674 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9675
9676 if (if_true_label)
9677 emit_jump (if_true_label);
9678 if (drop_through_label)
9679 emit_label (drop_through_label);
9680 }
9681 \f
9682 /* Jump according to whether OP0 is 0.
9683 We assume that OP0 has an integer mode that is too wide
9684 for the available compare insns. */
9685
9686 void
9687 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9688 rtx op0;
9689 rtx if_false_label, if_true_label;
9690 {
9691 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9692 rtx part;
9693 int i;
9694 rtx drop_through_label = 0;
9695
9696 /* The fastest way of doing this comparison on almost any machine is to
9697 "or" all the words and compare the result. If all have to be loaded
9698 from memory and this is a very wide item, this may possibly be
9699 slower, but that's highly unlikely. */
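   /* For example (illustrative): for a DImode OP0 on a 32-bit target
      the two SImode words are IORed into a single register and that
      one result is compared against zero, rather than branching on
      each word separately.  */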
9700
9701 part = gen_reg_rtx (word_mode);
9702 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9703 for (i = 1; i < nwords && part != 0; i++)
9704 part = expand_binop (word_mode, ior_optab, part,
9705 operand_subword_force (op0, i, GET_MODE (op0)),
9706 part, 1, OPTAB_WIDEN);
9707
9708 if (part != 0)
9709 {
9710 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9711 NULL_RTX, if_false_label, if_true_label);
9712
9713 return;
9714 }
9715
9716 /* If we couldn't do the "or" simply, do this with a series of compares. */
9717 if (! if_false_label)
9718 drop_through_label = if_false_label = gen_label_rtx ();
9719
9720 for (i = 0; i < nwords; i++)
9721 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9722 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9723 if_false_label, NULL_RTX);
9724
9725 if (if_true_label)
9726 emit_jump (if_true_label);
9727
9728 if (drop_through_label)
9729 emit_label (drop_through_label);
9730 }
9731 \f
9732 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE,
9733 OP0 and OP1 being rtx values that have already been computed,
9734 and set (CC0) according to the result.
9735 The decision as to signed or unsigned comparison must be made by the caller.
9736
9737 We force a stack adjustment unless there are currently
9738 things pushed on the stack that aren't yet used.
9739
9740 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9741 compared. */
9742
9743 rtx
9744 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9745 rtx op0, op1;
9746 enum rtx_code code;
9747 int unsignedp;
9748 enum machine_mode mode;
9749 rtx size;
9750 {
9751 rtx tem;
9752
9753 /* If one operand is constant, make it the second one. Only do this
9754 if the other operand is not constant as well. */
9755
9756 if (swap_commutative_operands_p (op0, op1))
9757 {
9758 tem = op0;
9759 op0 = op1;
9760 op1 = tem;
9761 code = swap_condition (code);
9762 }
9763
9764 if (flag_force_mem)
9765 {
9766 op0 = force_not_mem (op0);
9767 op1 = force_not_mem (op1);
9768 }
9769
9770 do_pending_stack_adjust ();
9771
9772 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9773 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9774 return tem;
9775
9776 #if 0
9777 /* There's no need to do this now that combine.c can eliminate lots of
9778 sign extensions. This can be less efficient in certain cases on other
9779 machines. */
9780
9781 /* If this is a signed equality comparison, we can do it as an
9782 unsigned comparison since zero-extension is cheaper than sign
9783 extension and comparisons with zero are done as unsigned. This is
9784 the case even on machines that can do fast sign extension, since
9785 zero-extension is easier to combine with other operations than
9786 sign-extension is. If we are comparing against a constant, we must
9787 convert it to what it would look like unsigned. */
9788 if ((code == EQ || code == NE) && ! unsignedp
9789 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9790 {
9791 if (GET_CODE (op1) == CONST_INT
9792 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9793 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9794 unsignedp = 1;
9795 }
9796 #endif
9797
9798 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9799
9800 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9801 }
9802
9803 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9804 The decision as to signed or unsigned comparison must be made by the caller.
9805
9806 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9807 compared. */
9808
9809 void
9810 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9811 if_false_label, if_true_label)
9812 rtx op0, op1;
9813 enum rtx_code code;
9814 int unsignedp;
9815 enum machine_mode mode;
9816 rtx size;
9817 rtx if_false_label, if_true_label;
9818 {
9819 rtx tem;
9820 int dummy_true_label = 0;
9821
9822 /* Reverse the comparison if that is safe and we want to jump if it is
9823 false. */
9824 if (! if_true_label && ! FLOAT_MODE_P (mode))
9825 {
9826 if_true_label = if_false_label;
9827 if_false_label = 0;
9828 code = reverse_condition (code);
9829 }
9830
9831 /* If one operand is constant, make it the second one. Only do this
9832 if the other operand is not constant as well. */
9833
9834 if (swap_commutative_operands_p (op0, op1))
9835 {
9836 tem = op0;
9837 op0 = op1;
9838 op1 = tem;
9839 code = swap_condition (code);
9840 }
9841
9842 if (flag_force_mem)
9843 {
9844 op0 = force_not_mem (op0);
9845 op1 = force_not_mem (op1);
9846 }
9847
9848 do_pending_stack_adjust ();
9849
9850 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9851 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9852 {
9853 if (tem == const_true_rtx)
9854 {
9855 if (if_true_label)
9856 emit_jump (if_true_label);
9857 }
9858 else
9859 {
9860 if (if_false_label)
9861 emit_jump (if_false_label);
9862 }
9863 return;
9864 }
9865
9866 #if 0
9867 /* There's no need to do this now that combine.c can eliminate lots of
9868 sign extensions. This can be less efficient in certain cases on other
9869 machines. */
9870
9871 /* If this is a signed equality comparison, we can do it as an
9872 unsigned comparison since zero-extension is cheaper than sign
9873 extension and comparisons with zero are done as unsigned. This is
9874 the case even on machines that can do fast sign extension, since
9875 zero-extension is easier to combine with other operations than
9876 sign-extension is. If we are comparing against a constant, we must
9877 convert it to what it would look like unsigned. */
9878 if ((code == EQ || code == NE) && ! unsignedp
9879 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9880 {
9881 if (GET_CODE (op1) == CONST_INT
9882 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9883 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9884 unsignedp = 1;
9885 }
9886 #endif
9887
9888 if (! if_true_label)
9889 {
9890 dummy_true_label = 1;
9891 if_true_label = gen_label_rtx ();
9892 }
9893
9894 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9895 if_true_label);
9896
9897 if (if_false_label)
9898 emit_jump (if_false_label);
9899 if (dummy_true_label)
9900 emit_label (if_true_label);
9901 }
9902
9903 /* Generate code for a comparison expression EXP (including code to compute
9904 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9905 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9906 generated code will drop through.
9907 SIGNED_CODE should be the rtx operation for this comparison for
9908 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9909
9910 We force a stack adjustment unless there are currently
9911 things pushed on the stack that aren't yet used. */
9912
9913 static void
9914 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9915 if_true_label)
9916 tree exp;
9917 enum rtx_code signed_code, unsigned_code;
9918 rtx if_false_label, if_true_label;
9919 {
9920 rtx op0, op1;
9921 tree type;
9922 enum machine_mode mode;
9923 int unsignedp;
9924 enum rtx_code code;
9925
9926 /* Don't crash if the comparison was erroneous. */
9927 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9928 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9929 return;
9930
9931 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9932 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9933 return;
9934
9935 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9936 mode = TYPE_MODE (type);
9937 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9938 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9939 || (GET_MODE_BITSIZE (mode)
9940 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9941 1)))))))
9942 {
9943 /* op0 might have been replaced by a promoted constant, in which
9944 case the type of the second argument should be used.
9945 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9946 mode = TYPE_MODE (type);
9947 }
9948 unsignedp = TREE_UNSIGNED (type);
9949 code = unsignedp ? unsigned_code : signed_code;
9950
9951 #ifdef HAVE_canonicalize_funcptr_for_compare
9952 /* If function pointers need to be "canonicalized" before they can
9953 be reliably compared, then canonicalize them. */
9954 if (HAVE_canonicalize_funcptr_for_compare
9955 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9956 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9957 == FUNCTION_TYPE))
9958 {
9959 rtx new_op0 = gen_reg_rtx (mode);
9960
9961 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9962 op0 = new_op0;
9963 }
9964
9965 if (HAVE_canonicalize_funcptr_for_compare
9966 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9967 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9968 == FUNCTION_TYPE))
9969 {
9970 rtx new_op1 = gen_reg_rtx (mode);
9971
9972 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9973 op1 = new_op1;
9974 }
9975 #endif
9976
9977 /* Do any postincrements in the expression that was tested. */
9978 emit_queue ();
9979
9980 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9981 ((mode == BLKmode)
9982 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9983 if_false_label, if_true_label);
9984 }
9985 \f
9986 /* Generate code to calculate EXP using a store-flag instruction
9987 and return an rtx for the result. EXP is either a comparison
9988 or a TRUTH_NOT_EXPR whose operand is a comparison.
9989
9990 If TARGET is nonzero, store the result there if convenient.
9991
9992 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9993 cheap.
9994
9995 Return zero if there is no suitable set-flag instruction
9996 available on this machine.
9997
9998 Once expand_expr has been called on the arguments of the comparison,
9999 we are committed to doing the store flag, since it is not safe to
10000 re-evaluate the expression. We emit the store-flag insn by calling
10001 emit_store_flag, but only expand the arguments if we have a reason
10002 to believe that emit_store_flag will be successful. If we think that
10003 it will, but it isn't, we have to simulate the store-flag with a
10004 set/jump/set sequence. */
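/* Roughly (added commentary), the fallback set/jump/set sequence
   produced at the end of this function has the shape

	target = 1;
	if (<comparison holds>) goto label;
	target = 0;
      label:

   with the two constants exchanged when the result must be inverted.  */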
10005
10006 static rtx
10007 do_store_flag (exp, target, mode, only_cheap)
10008 tree exp;
10009 rtx target;
10010 enum machine_mode mode;
10011 int only_cheap;
10012 {
10013 enum rtx_code code;
10014 tree arg0, arg1, type;
10015 tree tem;
10016 enum machine_mode operand_mode;
10017 int invert = 0;
10018 int unsignedp;
10019 rtx op0, op1;
10020 enum insn_code icode;
10021 rtx subtarget = target;
10022 rtx result, label;
10023
10024 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10025 result at the end. We can't simply invert the test since it would
10026 have already been inverted if it were valid. This case occurs for
10027 some floating-point comparisons. */
10028
10029 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10030 invert = 1, exp = TREE_OPERAND (exp, 0);
10031
10032 arg0 = TREE_OPERAND (exp, 0);
10033 arg1 = TREE_OPERAND (exp, 1);
10034
10035 /* Don't crash if the comparison was erroneous. */
10036 if (arg0 == error_mark_node || arg1 == error_mark_node)
10037 return const0_rtx;
10038
10039 type = TREE_TYPE (arg0);
10040 operand_mode = TYPE_MODE (type);
10041 unsignedp = TREE_UNSIGNED (type);
10042
10043 /* We won't bother with BLKmode store-flag operations because it would mean
10044 passing a lot of information to emit_store_flag. */
10045 if (operand_mode == BLKmode)
10046 return 0;
10047
10048 /* We won't bother with store-flag operations involving function pointers
10049 when function pointers must be canonicalized before comparisons. */
10050 #ifdef HAVE_canonicalize_funcptr_for_compare
10051 if (HAVE_canonicalize_funcptr_for_compare
10052 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10053 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10054 == FUNCTION_TYPE))
10055 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10056 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10057 == FUNCTION_TYPE))))
10058 return 0;
10059 #endif
10060
10061 STRIP_NOPS (arg0);
10062 STRIP_NOPS (arg1);
10063
10064 /* Get the rtx comparison code to use. We know that EXP is a comparison
10065 operation of some type. Some comparisons against 1 and -1 can be
10066 converted to comparisons with zero. Do so here so that the tests
10067 below will be aware that we have a comparison with zero. These
10068 tests will not catch constants in the first operand, but constants
10069 are rarely passed as the first operand. */
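  /* E.g. (illustrative): `x < 1' becomes `x <= 0', `x >= 1' becomes
     `x > 0', and for signed operands `x <= -1' becomes `x < 0', so the
     code further down only has to recognize comparisons with zero.  */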
10070
10071 switch (TREE_CODE (exp))
10072 {
10073 case EQ_EXPR:
10074 code = EQ;
10075 break;
10076 case NE_EXPR:
10077 code = NE;
10078 break;
10079 case LT_EXPR:
10080 if (integer_onep (arg1))
10081 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10082 else
10083 code = unsignedp ? LTU : LT;
10084 break;
10085 case LE_EXPR:
10086 if (! unsignedp && integer_all_onesp (arg1))
10087 arg1 = integer_zero_node, code = LT;
10088 else
10089 code = unsignedp ? LEU : LE;
10090 break;
10091 case GT_EXPR:
10092 if (! unsignedp && integer_all_onesp (arg1))
10093 arg1 = integer_zero_node, code = GE;
10094 else
10095 code = unsignedp ? GTU : GT;
10096 break;
10097 case GE_EXPR:
10098 if (integer_onep (arg1))
10099 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10100 else
10101 code = unsignedp ? GEU : GE;
10102 break;
10103
10104 case UNORDERED_EXPR:
10105 code = UNORDERED;
10106 break;
10107 case ORDERED_EXPR:
10108 code = ORDERED;
10109 break;
10110 case UNLT_EXPR:
10111 code = UNLT;
10112 break;
10113 case UNLE_EXPR:
10114 code = UNLE;
10115 break;
10116 case UNGT_EXPR:
10117 code = UNGT;
10118 break;
10119 case UNGE_EXPR:
10120 code = UNGE;
10121 break;
10122 case UNEQ_EXPR:
10123 code = UNEQ;
10124 break;
10125
10126 default:
10127 abort ();
10128 }
10129
10130 /* Put a constant second. */
10131 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10132 {
10133 tem = arg0; arg0 = arg1; arg1 = tem;
10134 code = swap_condition (code);
10135 }
10136
10137 /* If this is an equality or inequality test of a single bit, we can
10138 do this by shifting the bit being tested to the low-order bit and
10139 masking the result with the constant 1. If the condition was EQ,
10140 we xor it with 1. This does not require an scc insn and is faster
10141 than an scc insn even if we have it. */
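  /* For example (illustrative): `(x & 8) != 0' is computed roughly as
     `(x >> 3) & 1', and the EQ form gets an additional XOR with 1,
     subject to the adjustments made below.  */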
10142
10143 if ((code == NE || code == EQ)
10144 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10145 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10146 {
10147 tree inner = TREE_OPERAND (arg0, 0);
10148 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10149 int ops_unsignedp;
10150
10151 /* If INNER is a right shift by a constant and the shift count plus
10152 BITNUM does not overflow, adjust BITNUM and INNER. */
10153
10154 if (TREE_CODE (inner) == RSHIFT_EXPR
10155 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10156 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10157 && bitnum < TYPE_PRECISION (type)
10158 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10159 bitnum - TYPE_PRECISION (type)))
10160 {
10161 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10162 inner = TREE_OPERAND (inner, 0);
10163 }
10164
10165 /* If we are going to be able to omit the AND below, we must do our
10166 operations as unsigned. If we must use the AND, we have a choice.
10167 Normally unsigned is faster, but for some machines signed is. */
10168 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10169 #ifdef LOAD_EXTEND_OP
10170 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10171 #else
10172 : 1
10173 #endif
10174 );
10175
10176 if (! get_subtarget (subtarget)
10177 || GET_MODE (subtarget) != operand_mode
10178 || ! safe_from_p (subtarget, inner, 1))
10179 subtarget = 0;
10180
10181 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10182
10183 if (bitnum != 0)
10184 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10185 size_int (bitnum), subtarget, ops_unsignedp);
10186
10187 if (GET_MODE (op0) != mode)
10188 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10189
10190 if ((code == EQ && ! invert) || (code == NE && invert))
10191 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10192 ops_unsignedp, OPTAB_LIB_WIDEN);
10193
10194 /* Put the AND last so it can combine with more things. */
10195 if (bitnum != TYPE_PRECISION (type) - 1)
10196 op0 = expand_and (op0, const1_rtx, subtarget);
10197
10198 return op0;
10199 }
10200
10201 /* Now see if we are likely to be able to do this. Return if not. */
10202 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10203 return 0;
10204
10205 icode = setcc_gen_code[(int) code];
10206 if (icode == CODE_FOR_nothing
10207 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10208 {
10209 /* We can only do this if it is one of the special cases that
10210 can be handled without an scc insn. */
10211 if ((code == LT && integer_zerop (arg1))
10212 || (! only_cheap && code == GE && integer_zerop (arg1)))
10213 ;
10214 else if (BRANCH_COST >= 0
10215 && ! only_cheap && (code == NE || code == EQ)
10216 && TREE_CODE (type) != REAL_TYPE
10217 && ((abs_optab->handlers[(int) operand_mode].insn_code
10218 != CODE_FOR_nothing)
10219 || (ffs_optab->handlers[(int) operand_mode].insn_code
10220 != CODE_FOR_nothing)))
10221 ;
10222 else
10223 return 0;
10224 }
10225
10226 if (! get_subtarget (target)
10227 || GET_MODE (subtarget) != operand_mode
10228 || ! safe_from_p (subtarget, arg1, 1))
10229 subtarget = 0;
10230
10231 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10232 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10233
10234 if (target == 0)
10235 target = gen_reg_rtx (mode);
10236
10237 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10238 because, if emit_store_flag does anything, it will succeed and
10239 OP0 and OP1 will not be used subsequently. */
10240
10241 result = emit_store_flag (target, code,
10242 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10243 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10244 operand_mode, unsignedp, 1);
10245
10246 if (result)
10247 {
10248 if (invert)
10249 result = expand_binop (mode, xor_optab, result, const1_rtx,
10250 result, 0, OPTAB_LIB_WIDEN);
10251 return result;
10252 }
10253
10254 /* If this failed, we have to do this with set/compare/jump/set code. */
10255 if (GET_CODE (target) != REG
10256 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10257 target = gen_reg_rtx (GET_MODE (target));
10258
10259 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10260 result = compare_from_rtx (op0, op1, code, unsignedp,
10261 operand_mode, NULL_RTX);
10262 if (GET_CODE (result) == CONST_INT)
10263 return (((result == const0_rtx && ! invert)
10264 || (result != const0_rtx && invert))
10265 ? const0_rtx : const1_rtx);
10266
10267 /* The code of RESULT may not match CODE if compare_from_rtx
10268 decided to swap its operands and reverse the original code.
10269
10270 We know that compare_from_rtx returns either a CONST_INT or
10271 a new comparison code, so it is safe to just extract the
10272 code from RESULT. */
10273 code = GET_CODE (result);
10274
10275 label = gen_label_rtx ();
10276 if (bcc_gen_fctn[(int) code] == 0)
10277 abort ();
10278
10279 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10280 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10281 emit_label (label);
10282
10283 return target;
10284 }
10285 \f
10286
10287 /* Stubs in case we haven't got a casesi insn. */
10288 #ifndef HAVE_casesi
10289 # define HAVE_casesi 0
10290 # define gen_casesi(a, b, c, d, e) (0)
10291 # define CODE_FOR_casesi CODE_FOR_nothing
10292 #endif
10293
10294 /* If the machine does not have a case insn that compares the bounds,
10295 this means extra overhead for dispatch tables, which raises the
10296 threshold for using them. */
10297 #ifndef CASE_VALUES_THRESHOLD
10298 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10299 #endif /* CASE_VALUES_THRESHOLD */
10300
10301 unsigned int
10302 case_values_threshold ()
10303 {
10304 return CASE_VALUES_THRESHOLD;
10305 }
10306
10307 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10308 0 otherwise (i.e. if there is no casesi instruction). */
10309 int
10310 try_casesi (index_type, index_expr, minval, range,
10311 table_label, default_label)
10312 tree index_type, index_expr, minval, range;
10313 rtx table_label ATTRIBUTE_UNUSED;
10314 rtx default_label;
10315 {
10316 enum machine_mode index_mode = SImode;
10317 int index_bits = GET_MODE_BITSIZE (index_mode);
10318 rtx op1, op2, index;
10319 enum machine_mode op_mode;
10320
10321 if (! HAVE_casesi)
10322 return 0;
10323
10324 /* Convert the index to SImode. */
10325 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10326 {
10327 enum machine_mode omode = TYPE_MODE (index_type);
10328 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10329
10330 /* We must handle the endpoints in the original mode. */
10331 index_expr = build (MINUS_EXPR, index_type,
10332 index_expr, minval);
10333 minval = integer_zero_node;
10334 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10335 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10336 omode, 1, default_label);
10337 /* Now we can safely truncate. */
10338 index = convert_to_mode (index_mode, index, 0);
10339 }
10340 else
10341 {
10342 if (TYPE_MODE (index_type) != index_mode)
10343 {
10344 index_expr = convert (type_for_size (index_bits, 0),
10345 index_expr);
10346 index_type = TREE_TYPE (index_expr);
10347 }
10348
10349 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10350 }
10351 emit_queue ();
10352 index = protect_from_queue (index, 0);
10353 do_pending_stack_adjust ();
10354
10355 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10356 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10357 (index, op_mode))
10358 index = copy_to_mode_reg (op_mode, index);
10359
10360 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10361
10362 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10363 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10364 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10365 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10366 (op1, op_mode))
10367 op1 = copy_to_mode_reg (op_mode, op1);
10368
10369 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10370
10371 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10372 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10373 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10374 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10375 (op2, op_mode))
10376 op2 = copy_to_mode_reg (op_mode, op2);
10377
10378 emit_jump_insn (gen_casesi (index, op1, op2,
10379 table_label, default_label));
10380 return 1;
10381 }
10382
10383 /* Attempt to generate a tablejump instruction; same concept as the casesi handling above. */
10384 #ifndef HAVE_tablejump
10385 #define HAVE_tablejump 0
10386 #define gen_tablejump(x, y) (0)
10387 #endif
10388
10389 /* Subroutine of try_tablejump below.
10390
10391 INDEX is the value being switched on, with the lowest value
10392 in the table already subtracted.
10393 MODE is its expected mode (needed if INDEX is constant).
10394 RANGE is the length of the jump table.
10395 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10396
10397 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10398 index value is out of range. */
10399
10400 static void
10401 do_tablejump (index, mode, range, table_label, default_label)
10402 rtx index, range, table_label, default_label;
10403 enum machine_mode mode;
10404 {
10405 rtx temp, vector;
10406
10407 /* Do an unsigned comparison (in the proper mode) between the index
10408 expression and the value which represents the length of the range.
10409 Since we just finished subtracting the lower bound of the range
10410 from the index expression, this comparison allows us to simultaneously
10411 check that the original index expression value is both greater than
10412 or equal to the minimum value of the range and less than or equal to
10413 the maximum value of the range. */
10414
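  /* For example, for case values 5 .. 12 the caller passes INDEX - 5 and
     RANGE = 7; the single unsigned test (INDEX - 5) > 7 fires exactly for
     original values below 5 or above 12, because values below 5 wrap
     around to very large unsigned numbers.  */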
10415 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10416 default_label);
10417
10418 /* If index is in range, it must fit in Pmode.
10419 Convert to Pmode so we can index with it. */
10420 if (mode != Pmode)
10421 index = convert_to_mode (Pmode, index, 1);
10422
10423 /* Don't let a MEM slip through, because then the INDEX that comes
10424 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10425 and break_out_memory_refs will go to work on it and mess it up. */
10426 #ifdef PIC_CASE_VECTOR_ADDRESS
10427 if (flag_pic && GET_CODE (index) != REG)
10428 index = copy_to_mode_reg (Pmode, index);
10429 #endif
10430
10431 /* If flag_force_addr were to affect this address
10432 it could interfere with the tricky assumptions made
10433 about addresses that contain label-refs,
10434 which may be valid only very near the tablejump itself. */
10435 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10436 GET_MODE_SIZE, because that is the size of a dispatch-table entry. The
10437 other uses should all be Pmode, because they are addresses. This code
10438 could fail if addresses and table entries are not the same size. */
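  /* Form the address TABLE_LABEL + INDEX * (size of one table entry).  */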
10439 index = gen_rtx_PLUS (Pmode,
10440 gen_rtx_MULT (Pmode, index,
10441 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10442 gen_rtx_LABEL_REF (Pmode, table_label));
10443 #ifdef PIC_CASE_VECTOR_ADDRESS
10444 if (flag_pic)
10445 index = PIC_CASE_VECTOR_ADDRESS (index);
10446 else
10447 #endif
10448 index = memory_address_noforce (CASE_VECTOR_MODE, index);
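  /* Load the selected dispatch-table entry into a register for the
     tablejump pattern.  */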
10449 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10450 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10451 RTX_UNCHANGING_P (vector) = 1;
10452 convert_move (temp, vector, 0);
10453
10454 emit_jump_insn (gen_tablejump (temp, table_label));
10455
10456 /* If we are generating PIC code or if the table is PC-relative, the
10457 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10458 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10459 emit_barrier ();
10460 }
10461
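/* Attempt to generate a tablejump instruction for a dispatch table.
   The arguments are as for try_casesi above.  Returns 1 if successful,
   0 otherwise (i.e. if there is no tablejump instruction).  */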
10462 int
10463 try_tablejump (index_type, index_expr, minval, range,
10464 table_label, default_label)
10465 tree index_type, index_expr, minval, range;
10466 rtx table_label, default_label;
10467 {
10468 rtx index;
10469
10470 if (! HAVE_tablejump)
10471 return 0;
10472
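  /* Rebase the index so that the lowest case value becomes zero; this
     lets do_tablejump make a single unsigned comparison against RANGE.  */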
10473 index_expr = fold (build (MINUS_EXPR, index_type,
10474 convert (index_type, index_expr),
10475 convert (index_type, minval)));
10476 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10477 emit_queue ();
10478 index = protect_from_queue (index, 0);
10479 do_pending_stack_adjust ();
10480
10481 do_tablejump (index, TYPE_MODE (index_type),
10482 convert_modes (TYPE_MODE (index_type),
10483 TYPE_MODE (TREE_TYPE (range)),
10484 expand_expr (range, NULL_RTX,
10485 VOIDmode, 0),
10486 TREE_UNSIGNED (TREE_TYPE (range))),
10487 table_label, default_label);
10488 return 1;
10489 }