gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #endif
60
61 #endif
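
/* For instance, on a machine where the stack grows downward but argument
   addresses grow upward (STACK_GROWS_DOWNWARD defined, ARGS_GROW_DOWNWARD
   not), the two macros differ, PUSH_ARGS_REVERSED is defined, and the last
   argument is pushed first.  This is only an illustration; the actual
   choice always comes from the target's configuration headers.  */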
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
74 #endif
75
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
83
84 /* Don't check memory usage, since code is being emitted to check memory
85 usage. Used when current_function_check_memory_usage is true, to avoid
86 infinite recursion. */
87 static int in_check_memory_usage;
88
89 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
90 static tree placeholder_list = 0;
91
92 /* This structure is used by move_by_pieces to describe the move to
93 be performed. */
94 struct move_by_pieces
95 {
96 rtx to;
97 rtx to_addr;
98 int autinc_to;
99 int explicit_inc_to;
100 rtx from;
101 rtx from_addr;
102 int autinc_from;
103 int explicit_inc_from;
104 unsigned HOST_WIDE_INT len;
105 HOST_WIDE_INT offset;
106 int reverse;
107 };
108
109 /* This structure is used by store_by_pieces to describe the clear to
110 be performed. */
111
112 struct store_by_pieces
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 unsigned HOST_WIDE_INT len;
119 HOST_WIDE_INT offset;
120 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
121 PTR constfundata;
122 int reverse;
123 };
124
125 extern struct obstack permanent_obstack;
126
127 static rtx get_push_address PARAMS ((int));
128
129 static rtx enqueue_insn PARAMS ((rtx, rtx));
130 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
131 PARAMS ((unsigned HOST_WIDE_INT,
132 unsigned int));
133 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
134 struct move_by_pieces *));
135 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
136 enum machine_mode));
137 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
138 unsigned int));
139 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
140 unsigned int));
141 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
142 enum machine_mode,
143 struct store_by_pieces *));
144 static rtx get_subtarget PARAMS ((rtx));
145 static int is_zeros_p PARAMS ((tree));
146 static int mostly_zeros_p PARAMS ((tree));
147 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
148 HOST_WIDE_INT, enum machine_mode,
149 tree, tree, int, int));
150 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
151 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
152 HOST_WIDE_INT, enum machine_mode,
153 tree, enum machine_mode, int, tree,
154 int));
155 static enum memory_use_mode
156 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
157 static rtx var_rtx PARAMS ((tree));
158 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
159 static rtx expand_increment PARAMS ((tree, int, int));
160 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
161 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
162 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
163 rtx, rtx));
164 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
165 #ifdef PUSH_ROUNDING
166 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
167 #endif
168 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
169
170 /* Record for each mode whether we can move a register directly to or
171 from an object of that mode in memory. If we can't, we won't try
172 to use that mode directly when accessing a field of that mode. */
173
174 static char direct_load[NUM_MACHINE_MODES];
175 static char direct_store[NUM_MACHINE_MODES];
176
177 /* If a memory-to-memory move would take MOVE_RATIO or more simple
178 move-instruction sequences, we will do a movstr or libcall instead. */
179
180 #ifndef MOVE_RATIO
181 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
182 #define MOVE_RATIO 2
183 #else
184 /* If we are optimizing for space (-Os), cut down the default move ratio. */
185 #define MOVE_RATIO (optimize_size ? 3 : 15)
186 #endif
187 #endif
188
189 /* This macro is used to determine whether move_by_pieces should be called
190 to perform a structure copy. */
191 #ifndef MOVE_BY_PIECES_P
192 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
194 #endif
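
/* As a rough illustration (the real numbers depend on the target's mov
   patterns, MOVE_MAX and the operand alignment): copying 32 well-aligned
   bytes with 4-byte word moves takes 8 move insns, which is below the
   fallback MOVE_RATIO of 15 used when optimizing for speed, so
   MOVE_BY_PIECES_P allows an inline expansion; at -Os the fallback ratio
   drops to 3, and emit_block_move falls back to a movstr pattern or a
   library call instead.  */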
195
196 /* This array records the insn_code of insns to perform block moves. */
197 enum insn_code movstr_optab[NUM_MACHINE_MODES];
198
199 /* This array records the insn_code of insns to perform block clears. */
200 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
201
202 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
203
204 #ifndef SLOW_UNALIGNED_ACCESS
205 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
206 #endif
207 \f
208 /* This is run once per compilation to set up which modes can be used
209 directly in memory and to initialize the block move optab. */
210
211 void
212 init_expr_once ()
213 {
214 rtx insn, pat;
215 enum machine_mode mode;
216 int num_clobbers;
217 rtx mem, mem1;
218
219 start_sequence ();
220
221 /* Try indexing by frame ptr and try by stack ptr.
222 It is known that on the Convex the stack ptr isn't a valid index.
223 With luck, one or the other is valid on any machine. */
224 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
225 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
226
227 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
228 pat = PATTERN (insn);
229
230 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
231 mode = (enum machine_mode) ((int) mode + 1))
232 {
233 int regno;
234 rtx reg;
235
236 direct_load[(int) mode] = direct_store[(int) mode] = 0;
237 PUT_MODE (mem, mode);
238 PUT_MODE (mem1, mode);
239
240 /* See if there is some register that can be used in this mode and
241 directly loaded or stored from memory. */
242
243 if (mode != VOIDmode && mode != BLKmode)
244 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
245 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
246 regno++)
247 {
248 if (! HARD_REGNO_MODE_OK (regno, mode))
249 continue;
250
251 reg = gen_rtx_REG (mode, regno);
252
253 SET_SRC (pat) = mem;
254 SET_DEST (pat) = reg;
255 if (recog (pat, insn, &num_clobbers) >= 0)
256 direct_load[(int) mode] = 1;
257
258 SET_SRC (pat) = mem1;
259 SET_DEST (pat) = reg;
260 if (recog (pat, insn, &num_clobbers) >= 0)
261 direct_load[(int) mode] = 1;
262
263 SET_SRC (pat) = reg;
264 SET_DEST (pat) = mem;
265 if (recog (pat, insn, &num_clobbers) >= 0)
266 direct_store[(int) mode] = 1;
267
268 SET_SRC (pat) = reg;
269 SET_DEST (pat) = mem1;
270 if (recog (pat, insn, &num_clobbers) >= 0)
271 direct_store[(int) mode] = 1;
272 }
273 }
274
275 end_sequence ();
276 }
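
/* The flags recorded here are consulted later; convert_move, for example,
   checks direct_load when deciding whether a narrowing conversion can
   refer to a memory operand directly in the narrower mode instead of
   copying it into a register first.  */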
277
278 /* This is run at the start of compiling a function. */
279
280 void
281 init_expr ()
282 {
283 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
284
285 pending_chain = 0;
286 pending_stack_adjust = 0;
287 stack_pointer_delta = 0;
288 inhibit_defer_pop = 0;
289 saveregs_value = 0;
290 apply_args_value = 0;
291 forced_labels = 0;
292 }
293
294 void
295 mark_expr_status (p)
296 struct expr_status *p;
297 {
298 if (p == NULL)
299 return;
300
301 ggc_mark_rtx (p->x_saveregs_value);
302 ggc_mark_rtx (p->x_apply_args_value);
303 ggc_mark_rtx (p->x_forced_labels);
304 }
305
306 void
307 free_expr_status (f)
308 struct function *f;
309 {
310 free (f->expr);
311 f->expr = NULL;
312 }
313
314 /* Small sanity check that the queue is empty at the end of a function. */
315
316 void
317 finish_expr_for_function ()
318 {
319 if (pending_chain)
320 abort ();
321 }
322 \f
323 /* Manage the queue of increment instructions to be output
324 for POSTINCREMENT_EXPR expressions, etc. */
325
326 /* Queue up to increment (or change) VAR later. BODY says how:
327 BODY should be the same thing you would pass to emit_insn
328 to increment right away. It will go to emit_insn later on.
329
330 The value is a QUEUED expression to be used in place of VAR
331 where you want to guarantee the pre-incrementation value of VAR. */
332
333 static rtx
334 enqueue_insn (var, body)
335 rtx var, body;
336 {
337 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
338 body, pending_chain);
339 return pending_chain;
340 }
341
342 /* Use protect_from_queue to convert a QUEUED expression
343 into something that you can put immediately into an instruction.
344 If the queued incrementation has not happened yet,
345 protect_from_queue returns the variable itself.
346 If the incrementation has happened, protect_from_queue returns a temp
347 that contains a copy of the old value of the variable.
348
349 Any time an rtx which might possibly be a QUEUED is to be put
350 into an instruction, it must be passed through protect_from_queue first.
351 QUEUED expressions are not meaningful in instructions.
352
353 Do not pass a value through protect_from_queue and then hold
354 on to it for a while before putting it in an instruction!
355 If the queue is flushed in between, incorrect code will result. */
356
357 rtx
358 protect_from_queue (x, modify)
359 rtx x;
360 int modify;
361 {
362 RTX_CODE code = GET_CODE (x);
363
364 #if 0 /* A QUEUED can hang around after the queue is forced out. */
365 /* Shortcut for most common case. */
366 if (pending_chain == 0)
367 return x;
368 #endif
369
370 if (code != QUEUED)
371 {
372 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
373 use of autoincrement. Make a copy of the contents of the memory
374 location rather than a copy of the address, but not if the value is
375 of mode BLKmode. Don't modify X in place since it might be
376 shared. */
377 if (code == MEM && GET_MODE (x) != BLKmode
378 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
379 {
380 rtx y = XEXP (x, 0);
381 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
382
383 if (QUEUED_INSN (y))
384 {
385 rtx temp = gen_reg_rtx (GET_MODE (x));
386
387 emit_insn_before (gen_move_insn (temp, new),
388 QUEUED_INSN (y));
389 return temp;
390 }
391
392 /* Copy the address into a pseudo, so that the returned value
393 remains correct across calls to emit_queue. */
394 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
395 }
396
397 /* Otherwise, recursively protect the subexpressions of all
398 the kinds of rtx's that can contain a QUEUED. */
399 if (code == MEM)
400 {
401 rtx tem = protect_from_queue (XEXP (x, 0), 0);
402 if (tem != XEXP (x, 0))
403 {
404 x = copy_rtx (x);
405 XEXP (x, 0) = tem;
406 }
407 }
408 else if (code == PLUS || code == MULT)
409 {
410 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
411 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
412 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
413 {
414 x = copy_rtx (x);
415 XEXP (x, 0) = new0;
416 XEXP (x, 1) = new1;
417 }
418 }
419 return x;
420 }
421 /* If the increment has not happened, use the variable itself. Copy it
422 into a new pseudo so that the value remains correct across calls to
423 emit_queue. */
424 if (QUEUED_INSN (x) == 0)
425 return copy_to_reg (QUEUED_VAR (x));
426 /* If the increment has happened and a pre-increment copy exists,
427 use that copy. */
428 if (QUEUED_COPY (x) != 0)
429 return QUEUED_COPY (x);
430 /* The increment has happened but we haven't set up a pre-increment copy.
431 Set one up now, and use it. */
432 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
433 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
434 QUEUED_INSN (x));
435 return QUEUED_COPY (x);
436 }
437
438 /* Return nonzero if X contains a QUEUED expression:
439 if it contains anything that will be altered by a queued increment.
440 We handle only combinations of MEM, PLUS, MINUS and MULT operators
441 since memory addresses generally contain only those. */
442
443 int
444 queued_subexp_p (x)
445 rtx x;
446 {
447 enum rtx_code code = GET_CODE (x);
448 switch (code)
449 {
450 case QUEUED:
451 return 1;
452 case MEM:
453 return queued_subexp_p (XEXP (x, 0));
454 case MULT:
455 case PLUS:
456 case MINUS:
457 return (queued_subexp_p (XEXP (x, 0))
458 || queued_subexp_p (XEXP (x, 1)));
459 default:
460 return 0;
461 }
462 }
463
464 /* Perform all the pending incrementations. */
465
466 void
467 emit_queue ()
468 {
469 rtx p;
470 while ((p = pending_chain))
471 {
472 rtx body = QUEUED_BODY (p);
473
474 if (GET_CODE (body) == SEQUENCE)
475 {
476 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
477 emit_insn (QUEUED_BODY (p));
478 }
479 else
480 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
481 pending_chain = QUEUED_NEXT (p);
482 }
483 }
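
/* A sketch of how the queue is meant to be used when expanding
   POSTINCREMENT_EXPR and friends (the variables below are hypothetical):
   the increment is queued, the returned QUEUED rtx stands for the
   pre-increment value, anything that might contain a QUEUED is passed
   through protect_from_queue before going into an insn, and emit_queue
   flushes the pending increments at the end of the statement.  */
#if 0
  rtx queued = enqueue_insn (var, gen_move_insn (var, incremented));
  rtx safe = protect_from_queue (queued, 0);	/* pre-increment value */
  /* ... use SAFE as an operand in the insns being emitted ... */
  emit_queue ();				/* now perform the increment */
#endif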
484 \f
485 /* Copy data from FROM to TO, where the machine modes are not the same.
486 Both modes may be integer, or both may be floating.
487 UNSIGNEDP should be nonzero if FROM is an unsigned type.
488 This causes zero-extension instead of sign-extension. */
489
490 void
491 convert_move (to, from, unsignedp)
492 rtx to, from;
493 int unsignedp;
494 {
495 enum machine_mode to_mode = GET_MODE (to);
496 enum machine_mode from_mode = GET_MODE (from);
497 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
498 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
499 enum insn_code code;
500 rtx libcall;
501
502 /* rtx code for making an equivalent value. */
503 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
504
505 to = protect_from_queue (to, 1);
506 from = protect_from_queue (from, 0);
507
508 if (to_real != from_real)
509 abort ();
510
511 /* If FROM is a SUBREG that indicates that we have already done at least
512 the required extension, strip it. We don't handle such SUBREGs as
513 TO here. */
514
515 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
516 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
517 >= GET_MODE_SIZE (to_mode))
518 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
519 from = gen_lowpart (to_mode, from), from_mode = to_mode;
520
521 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
522 abort ();
523
524 if (to_mode == from_mode
525 || (from_mode == VOIDmode && CONSTANT_P (from)))
526 {
527 emit_move_insn (to, from);
528 return;
529 }
530
531 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
532 {
533 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
534 abort ();
535
536 if (VECTOR_MODE_P (to_mode))
537 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
538 else
539 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
540
541 emit_move_insn (to, from);
542 return;
543 }
544
545 if (to_real != from_real)
546 abort ();
547
548 if (to_real)
549 {
550 rtx value, insns;
551
552 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
553 {
554 /* Try converting directly if the insn is supported. */
555 if ((code = can_extend_p (to_mode, from_mode, 0))
556 != CODE_FOR_nothing)
557 {
558 emit_unop_insn (code, to, from, UNKNOWN);
559 return;
560 }
561 }
562
563 #ifdef HAVE_trunchfqf2
564 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
565 {
566 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
567 return;
568 }
569 #endif
570 #ifdef HAVE_trunctqfqf2
571 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
572 {
573 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
574 return;
575 }
576 #endif
577 #ifdef HAVE_truncsfqf2
578 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
579 {
580 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
581 return;
582 }
583 #endif
584 #ifdef HAVE_truncdfqf2
585 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
586 {
587 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
588 return;
589 }
590 #endif
591 #ifdef HAVE_truncxfqf2
592 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
593 {
594 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
595 return;
596 }
597 #endif
598 #ifdef HAVE_trunctfqf2
599 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
600 {
601 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
602 return;
603 }
604 #endif
605
606 #ifdef HAVE_trunctqfhf2
607 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
608 {
609 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
610 return;
611 }
612 #endif
613 #ifdef HAVE_truncsfhf2
614 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
615 {
616 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
617 return;
618 }
619 #endif
620 #ifdef HAVE_truncdfhf2
621 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
622 {
623 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
624 return;
625 }
626 #endif
627 #ifdef HAVE_truncxfhf2
628 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
629 {
630 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
631 return;
632 }
633 #endif
634 #ifdef HAVE_trunctfhf2
635 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
636 {
637 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641
642 #ifdef HAVE_truncsftqf2
643 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
644 {
645 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
646 return;
647 }
648 #endif
649 #ifdef HAVE_truncdftqf2
650 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
651 {
652 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
653 return;
654 }
655 #endif
656 #ifdef HAVE_truncxftqf2
657 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
658 {
659 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
660 return;
661 }
662 #endif
663 #ifdef HAVE_trunctftqf2
664 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
665 {
666 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
667 return;
668 }
669 #endif
670
671 #ifdef HAVE_truncdfsf2
672 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
673 {
674 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
675 return;
676 }
677 #endif
678 #ifdef HAVE_truncxfsf2
679 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
680 {
681 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
682 return;
683 }
684 #endif
685 #ifdef HAVE_trunctfsf2
686 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
687 {
688 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
689 return;
690 }
691 #endif
692 #ifdef HAVE_truncxfdf2
693 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
694 {
695 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
696 return;
697 }
698 #endif
699 #ifdef HAVE_trunctfdf2
700 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
701 {
702 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
703 return;
704 }
705 #endif
706
707 libcall = (rtx) 0;
708 switch (from_mode)
709 {
710 case SFmode:
711 switch (to_mode)
712 {
713 case DFmode:
714 libcall = extendsfdf2_libfunc;
715 break;
716
717 case XFmode:
718 libcall = extendsfxf2_libfunc;
719 break;
720
721 case TFmode:
722 libcall = extendsftf2_libfunc;
723 break;
724
725 default:
726 break;
727 }
728 break;
729
730 case DFmode:
731 switch (to_mode)
732 {
733 case SFmode:
734 libcall = truncdfsf2_libfunc;
735 break;
736
737 case XFmode:
738 libcall = extenddfxf2_libfunc;
739 break;
740
741 case TFmode:
742 libcall = extenddftf2_libfunc;
743 break;
744
745 default:
746 break;
747 }
748 break;
749
750 case XFmode:
751 switch (to_mode)
752 {
753 case SFmode:
754 libcall = truncxfsf2_libfunc;
755 break;
756
757 case DFmode:
758 libcall = truncxfdf2_libfunc;
759 break;
760
761 default:
762 break;
763 }
764 break;
765
766 case TFmode:
767 switch (to_mode)
768 {
769 case SFmode:
770 libcall = trunctfsf2_libfunc;
771 break;
772
773 case DFmode:
774 libcall = trunctfdf2_libfunc;
775 break;
776
777 default:
778 break;
779 }
780 break;
781
782 default:
783 break;
784 }
785
786 if (libcall == (rtx) 0)
787 /* This conversion is not implemented yet. */
788 abort ();
789
790 start_sequence ();
791 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
792 1, from, from_mode);
793 insns = get_insns ();
794 end_sequence ();
795 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
796 from));
797 return;
798 }
799
800 /* Now both modes are integers. */
801
802 /* Handle expanding beyond a word. */
803 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
804 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
805 {
806 rtx insns;
807 rtx lowpart;
808 rtx fill_value;
809 rtx lowfrom;
810 int i;
811 enum machine_mode lowpart_mode;
812 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
813
814 /* Try converting directly if the insn is supported. */
815 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
816 != CODE_FOR_nothing)
817 {
818 /* If FROM is a SUBREG, put it into a register. Do this
819 so that we always generate the same set of insns for
820 better cse'ing; if an intermediate assignment occurred,
821 we won't be doing the operation directly on the SUBREG. */
822 if (optimize > 0 && GET_CODE (from) == SUBREG)
823 from = force_reg (from_mode, from);
824 emit_unop_insn (code, to, from, equiv_code);
825 return;
826 }
827 /* Next, try converting via full word. */
828 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
829 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
830 != CODE_FOR_nothing))
831 {
832 if (GET_CODE (to) == REG)
833 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
834 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
835 emit_unop_insn (code, to,
836 gen_lowpart (word_mode, to), equiv_code);
837 return;
838 }
839
840 /* No special multiword conversion insn; do it by hand. */
841 start_sequence ();
842
843 /* Since we will turn this into a no conflict block, we must ensure
844 that the source does not overlap the target. */
845
846 if (reg_overlap_mentioned_p (to, from))
847 from = force_reg (from_mode, from);
848
849 /* Get a copy of FROM widened to a word, if necessary. */
850 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
851 lowpart_mode = word_mode;
852 else
853 lowpart_mode = from_mode;
854
855 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
856
857 lowpart = gen_lowpart (lowpart_mode, to);
858 emit_move_insn (lowpart, lowfrom);
859
860 /* Compute the value to put in each remaining word. */
861 if (unsignedp)
862 fill_value = const0_rtx;
863 else
864 {
865 #ifdef HAVE_slt
866 if (HAVE_slt
867 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
868 && STORE_FLAG_VALUE == -1)
869 {
870 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
871 lowpart_mode, 0);
872 fill_value = gen_reg_rtx (word_mode);
873 emit_insn (gen_slt (fill_value));
874 }
875 else
876 #endif
877 {
878 fill_value
879 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
880 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
881 NULL_RTX, 0);
882 fill_value = convert_to_mode (word_mode, fill_value, 1);
883 }
884 }
885
886 /* Fill the remaining words. */
887 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
888 {
889 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
890 rtx subword = operand_subword (to, index, 1, to_mode);
891
892 if (subword == 0)
893 abort ();
894
895 if (fill_value != subword)
896 emit_move_insn (subword, fill_value);
897 }
898
899 insns = get_insns ();
900 end_sequence ();
901
902 emit_no_conflict_block (insns, to, from, NULL_RTX,
903 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
904 return;
905 }
906
907 /* Truncating multi-word to a word or less. */
908 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
909 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
910 {
911 if (!((GET_CODE (from) == MEM
912 && ! MEM_VOLATILE_P (from)
913 && direct_load[(int) to_mode]
914 && ! mode_dependent_address_p (XEXP (from, 0)))
915 || GET_CODE (from) == REG
916 || GET_CODE (from) == SUBREG))
917 from = force_reg (from_mode, from);
918 convert_move (to, gen_lowpart (word_mode, from), 0);
919 return;
920 }
921
922 /* Handle pointer conversion. */ /* SPEE 900220. */
923 if (to_mode == PQImode)
924 {
925 if (from_mode != QImode)
926 from = convert_to_mode (QImode, from, unsignedp);
927
928 #ifdef HAVE_truncqipqi2
929 if (HAVE_truncqipqi2)
930 {
931 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
932 return;
933 }
934 #endif /* HAVE_truncqipqi2 */
935 abort ();
936 }
937
938 if (from_mode == PQImode)
939 {
940 if (to_mode != QImode)
941 {
942 from = convert_to_mode (QImode, from, unsignedp);
943 from_mode = QImode;
944 }
945 else
946 {
947 #ifdef HAVE_extendpqiqi2
948 if (HAVE_extendpqiqi2)
949 {
950 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
951 return;
952 }
953 #endif /* HAVE_extendpqiqi2 */
954 abort ();
955 }
956 }
957
958 if (to_mode == PSImode)
959 {
960 if (from_mode != SImode)
961 from = convert_to_mode (SImode, from, unsignedp);
962
963 #ifdef HAVE_truncsipsi2
964 if (HAVE_truncsipsi2)
965 {
966 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
967 return;
968 }
969 #endif /* HAVE_truncsipsi2 */
970 abort ();
971 }
972
973 if (from_mode == PSImode)
974 {
975 if (to_mode != SImode)
976 {
977 from = convert_to_mode (SImode, from, unsignedp);
978 from_mode = SImode;
979 }
980 else
981 {
982 #ifdef HAVE_extendpsisi2
983 if (! unsignedp && HAVE_extendpsisi2)
984 {
985 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
986 return;
987 }
988 #endif /* HAVE_extendpsisi2 */
989 #ifdef HAVE_zero_extendpsisi2
990 if (unsignedp && HAVE_zero_extendpsisi2)
991 {
992 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
993 return;
994 }
995 #endif /* HAVE_zero_extendpsisi2 */
996 abort ();
997 }
998 }
999
1000 if (to_mode == PDImode)
1001 {
1002 if (from_mode != DImode)
1003 from = convert_to_mode (DImode, from, unsignedp);
1004
1005 #ifdef HAVE_truncdipdi2
1006 if (HAVE_truncdipdi2)
1007 {
1008 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1009 return;
1010 }
1011 #endif /* HAVE_truncdipdi2 */
1012 abort ();
1013 }
1014
1015 if (from_mode == PDImode)
1016 {
1017 if (to_mode != DImode)
1018 {
1019 from = convert_to_mode (DImode, from, unsignedp);
1020 from_mode = DImode;
1021 }
1022 else
1023 {
1024 #ifdef HAVE_extendpdidi2
1025 if (HAVE_extendpdidi2)
1026 {
1027 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1028 return;
1029 }
1030 #endif /* HAVE_extendpdidi2 */
1031 abort ();
1032 }
1033 }
1034
1035 /* Now follow all the conversions between integers
1036 no more than a word long. */
1037
1038 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1039 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1040 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1041 GET_MODE_BITSIZE (from_mode)))
1042 {
1043 if (!((GET_CODE (from) == MEM
1044 && ! MEM_VOLATILE_P (from)
1045 && direct_load[(int) to_mode]
1046 && ! mode_dependent_address_p (XEXP (from, 0)))
1047 || GET_CODE (from) == REG
1048 || GET_CODE (from) == SUBREG))
1049 from = force_reg (from_mode, from);
1050 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1051 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1052 from = copy_to_reg (from);
1053 emit_move_insn (to, gen_lowpart (to_mode, from));
1054 return;
1055 }
1056
1057 /* Handle extension. */
1058 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1059 {
1060 /* Convert directly if that works. */
1061 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1062 != CODE_FOR_nothing)
1063 {
1064 emit_unop_insn (code, to, from, equiv_code);
1065 return;
1066 }
1067 else
1068 {
1069 enum machine_mode intermediate;
1070 rtx tmp;
1071 tree shift_amount;
1072
1073 /* Search for a mode to convert via. */
1074 for (intermediate = from_mode; intermediate != VOIDmode;
1075 intermediate = GET_MODE_WIDER_MODE (intermediate))
1076 if (((can_extend_p (to_mode, intermediate, unsignedp)
1077 != CODE_FOR_nothing)
1078 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1079 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1080 GET_MODE_BITSIZE (intermediate))))
1081 && (can_extend_p (intermediate, from_mode, unsignedp)
1082 != CODE_FOR_nothing))
1083 {
1084 convert_move (to, convert_to_mode (intermediate, from,
1085 unsignedp), unsignedp);
1086 return;
1087 }
1088
1089 /* No suitable intermediate mode.
1090 Generate what we need with shifts. */
1091 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1092 - GET_MODE_BITSIZE (from_mode), 0);
1093 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1094 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1095 to, unsignedp);
1096 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1097 to, unsignedp);
1098 if (tmp != to)
1099 emit_move_insn (to, tmp);
1100 return;
1101 }
1102 }
1103
1104 /* Support special truncate insns for certain modes. */
1105
1106 if (from_mode == DImode && to_mode == SImode)
1107 {
1108 #ifdef HAVE_truncdisi2
1109 if (HAVE_truncdisi2)
1110 {
1111 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1112 return;
1113 }
1114 #endif
1115 convert_move (to, force_reg (from_mode, from), unsignedp);
1116 return;
1117 }
1118
1119 if (from_mode == DImode && to_mode == HImode)
1120 {
1121 #ifdef HAVE_truncdihi2
1122 if (HAVE_truncdihi2)
1123 {
1124 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1125 return;
1126 }
1127 #endif
1128 convert_move (to, force_reg (from_mode, from), unsignedp);
1129 return;
1130 }
1131
1132 if (from_mode == DImode && to_mode == QImode)
1133 {
1134 #ifdef HAVE_truncdiqi2
1135 if (HAVE_truncdiqi2)
1136 {
1137 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1138 return;
1139 }
1140 #endif
1141 convert_move (to, force_reg (from_mode, from), unsignedp);
1142 return;
1143 }
1144
1145 if (from_mode == SImode && to_mode == HImode)
1146 {
1147 #ifdef HAVE_truncsihi2
1148 if (HAVE_truncsihi2)
1149 {
1150 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1151 return;
1152 }
1153 #endif
1154 convert_move (to, force_reg (from_mode, from), unsignedp);
1155 return;
1156 }
1157
1158 if (from_mode == SImode && to_mode == QImode)
1159 {
1160 #ifdef HAVE_truncsiqi2
1161 if (HAVE_truncsiqi2)
1162 {
1163 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1164 return;
1165 }
1166 #endif
1167 convert_move (to, force_reg (from_mode, from), unsignedp);
1168 return;
1169 }
1170
1171 if (from_mode == HImode && to_mode == QImode)
1172 {
1173 #ifdef HAVE_trunchiqi2
1174 if (HAVE_trunchiqi2)
1175 {
1176 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1177 return;
1178 }
1179 #endif
1180 convert_move (to, force_reg (from_mode, from), unsignedp);
1181 return;
1182 }
1183
1184 if (from_mode == TImode && to_mode == DImode)
1185 {
1186 #ifdef HAVE_trunctidi2
1187 if (HAVE_trunctidi2)
1188 {
1189 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1190 return;
1191 }
1192 #endif
1193 convert_move (to, force_reg (from_mode, from), unsignedp);
1194 return;
1195 }
1196
1197 if (from_mode == TImode && to_mode == SImode)
1198 {
1199 #ifdef HAVE_trunctisi2
1200 if (HAVE_trunctisi2)
1201 {
1202 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1203 return;
1204 }
1205 #endif
1206 convert_move (to, force_reg (from_mode, from), unsignedp);
1207 return;
1208 }
1209
1210 if (from_mode == TImode && to_mode == HImode)
1211 {
1212 #ifdef HAVE_trunctihi2
1213 if (HAVE_trunctihi2)
1214 {
1215 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1216 return;
1217 }
1218 #endif
1219 convert_move (to, force_reg (from_mode, from), unsignedp);
1220 return;
1221 }
1222
1223 if (from_mode == TImode && to_mode == QImode)
1224 {
1225 #ifdef HAVE_trunctiqi2
1226 if (HAVE_trunctiqi2)
1227 {
1228 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1229 return;
1230 }
1231 #endif
1232 convert_move (to, force_reg (from_mode, from), unsignedp);
1233 return;
1234 }
1235
1236 /* Handle truncation of volatile memrefs, and so on;
1237 the things that couldn't be truncated directly,
1238 and for which there was no special instruction. */
1239 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1240 {
1241 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1242 emit_move_insn (to, temp);
1243 return;
1244 }
1245
1246 /* Mode combination is not recognized. */
1247 abort ();
1248 }
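
/* A minimal illustration of the interface (SRC and DEST are hypothetical
   pseudos; the mode pair is just an example): widening a signed SImode
   value into DImode, then producing a narrowed SImode copy again.  */
#if 0
  rtx src = gen_reg_rtx (SImode);
  rtx dest = gen_reg_rtx (DImode);

  convert_move (dest, src, 0);		/* sign-extends, since UNSIGNEDP == 0 */
  rtx narrow = convert_to_mode (SImode, dest, 0);
#endif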
1249
1250 /* Return an rtx for a value that would result
1251 from converting X to mode MODE.
1252 Both X and MODE may be floating, or both integer.
1253 UNSIGNEDP is nonzero if X is an unsigned value.
1254 This can be done by referring to a part of X in place
1255 or by copying to a new temporary with conversion.
1256
1257 This function *must not* call protect_from_queue
1258 except when putting X into an insn (in which case convert_move does it). */
1259
1260 rtx
1261 convert_to_mode (mode, x, unsignedp)
1262 enum machine_mode mode;
1263 rtx x;
1264 int unsignedp;
1265 {
1266 return convert_modes (mode, VOIDmode, x, unsignedp);
1267 }
1268
1269 /* Return an rtx for a value that would result
1270 from converting X from mode OLDMODE to mode MODE.
1271 Both modes may be floating, or both integer.
1272 UNSIGNEDP is nonzero if X is an unsigned value.
1273
1274 This can be done by referring to a part of X in place
1275 or by copying to a new temporary with conversion.
1276
1277 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1278
1279 This function *must not* call protect_from_queue
1280 except when putting X into an insn (in which case convert_move does it). */
1281
1282 rtx
1283 convert_modes (mode, oldmode, x, unsignedp)
1284 enum machine_mode mode, oldmode;
1285 rtx x;
1286 int unsignedp;
1287 {
1288 rtx temp;
1289
1290 /* If FROM is a SUBREG that indicates that we have already done at least
1291 the required extension, strip it. */
1292
1293 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1294 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1295 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1296 x = gen_lowpart (mode, x);
1297
1298 if (GET_MODE (x) != VOIDmode)
1299 oldmode = GET_MODE (x);
1300
1301 if (mode == oldmode)
1302 return x;
1303
1304 /* There is one case that we must handle specially: If we are converting
1305 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1306 we are to interpret the constant as unsigned, gen_lowpart will do
1307 the wrong if the constant appears negative. What we want to do is
1308 make the high-order word of the constant zero, not all ones. */
1309
1310 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1311 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1312 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1313 {
1314 HOST_WIDE_INT val = INTVAL (x);
1315
1316 if (oldmode != VOIDmode
1317 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1318 {
1319 int width = GET_MODE_BITSIZE (oldmode);
1320
1321 /* We need to zero extend VAL. */
1322 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1323 }
1324
1325 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1326 }
1327
1328 /* We can do this with a gen_lowpart if both desired and current modes
1329 are integer, and this is either a constant integer, a register, or a
1330 non-volatile MEM. Except for the constant case where MODE is no
1331 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1332
1333 if ((GET_CODE (x) == CONST_INT
1334 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1335 || (GET_MODE_CLASS (mode) == MODE_INT
1336 && GET_MODE_CLASS (oldmode) == MODE_INT
1337 && (GET_CODE (x) == CONST_DOUBLE
1338 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1339 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1340 && direct_load[(int) mode])
1341 || (GET_CODE (x) == REG
1342 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1343 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1344 {
1345 /* ?? If we don't know OLDMODE, we have to assume here that
1346 X does not need sign- or zero-extension. This may not be
1347 the case, but it's the best we can do. */
1348 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1349 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1350 {
1351 HOST_WIDE_INT val = INTVAL (x);
1352 int width = GET_MODE_BITSIZE (oldmode);
1353
1354 /* We must sign or zero-extend in this case. Start by
1355 zero-extending, then sign extend if we need to. */
1356 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1357 if (! unsignedp
1358 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1359 val |= (HOST_WIDE_INT) (-1) << width;
1360
1361 return GEN_INT (trunc_int_for_mode (val, mode));
1362 }
1363
1364 return gen_lowpart (mode, x);
1365 }
1366
1367 temp = gen_reg_rtx (mode);
1368 convert_move (temp, x, unsignedp);
1369 return temp;
1370 }
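
/* Worked example of the CONST_INT handling above: with UNSIGNEDP set,
   convert_modes (SImode, QImode, GEN_INT (-1), 1) re-interprets the
   constant as an unsigned QImode value, masks it down to 8 bits, and
   returns (const_int 255) rather than a sign-extended -1.  */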
1371 \f
1372 /* This macro is used to determine the largest unit size that
1373 move_by_pieces can use. */
1374
1375 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1376 move efficiently, as opposed to MOVE_MAX which is the maximum
1377 number of bytes we can move with a single instruction. */
1378
1379 #ifndef MOVE_MAX_PIECES
1380 #define MOVE_MAX_PIECES MOVE_MAX
1381 #endif
1382
1383 /* Generate several move instructions to copy LEN bytes from block FROM to
1384 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1385 and TO through protect_from_queue before calling.
1386
1387 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1388 used to push FROM to the stack.
1389
1390 ALIGN is the maximum alignment we can assume. */
1391
1392 void
1393 move_by_pieces (to, from, len, align)
1394 rtx to, from;
1395 unsigned HOST_WIDE_INT len;
1396 unsigned int align;
1397 {
1398 struct move_by_pieces data;
1399 rtx to_addr, from_addr = XEXP (from, 0);
1400 unsigned int max_size = MOVE_MAX_PIECES + 1;
1401 enum machine_mode mode = VOIDmode, tmode;
1402 enum insn_code icode;
1403
1404 data.offset = 0;
1405 data.from_addr = from_addr;
1406 if (to)
1407 {
1408 to_addr = XEXP (to, 0);
1409 data.to = to;
1410 data.autinc_to
1411 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1412 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1413 data.reverse
1414 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1415 }
1416 else
1417 {
1418 to_addr = NULL_RTX;
1419 data.to = NULL_RTX;
1420 data.autinc_to = 1;
1421 #ifdef STACK_GROWS_DOWNWARD
1422 data.reverse = 1;
1423 #else
1424 data.reverse = 0;
1425 #endif
1426 }
1427 data.to_addr = to_addr;
1428 data.from = from;
1429 data.autinc_from
1430 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1431 || GET_CODE (from_addr) == POST_INC
1432 || GET_CODE (from_addr) == POST_DEC);
1433
1434 data.explicit_inc_from = 0;
1435 data.explicit_inc_to = 0;
1436 if (data.reverse) data.offset = len;
1437 data.len = len;
1438
1439 /* If copying requires more than two move insns,
1440 copy addresses to registers (to make displacements shorter)
1441 and use post-increment if available. */
1442 if (!(data.autinc_from && data.autinc_to)
1443 && move_by_pieces_ninsns (len, align) > 2)
1444 {
1445 /* Find the mode of the largest move... */
1446 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1447 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1448 if (GET_MODE_SIZE (tmode) < max_size)
1449 mode = tmode;
1450
1451 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1452 {
1453 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = -1;
1456 }
1457 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1458 {
1459 data.from_addr = copy_addr_to_reg (from_addr);
1460 data.autinc_from = 1;
1461 data.explicit_inc_from = 1;
1462 }
1463 if (!data.autinc_from && CONSTANT_P (from_addr))
1464 data.from_addr = copy_addr_to_reg (from_addr);
1465 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1466 {
1467 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1468 data.autinc_to = 1;
1469 data.explicit_inc_to = -1;
1470 }
1471 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1472 {
1473 data.to_addr = copy_addr_to_reg (to_addr);
1474 data.autinc_to = 1;
1475 data.explicit_inc_to = 1;
1476 }
1477 if (!data.autinc_to && CONSTANT_P (to_addr))
1478 data.to_addr = copy_addr_to_reg (to_addr);
1479 }
1480
1481 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1482 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1483 align = MOVE_MAX * BITS_PER_UNIT;
1484
1485 /* First move what we can in the largest integer mode, then go to
1486 successively smaller modes. */
1487
1488 while (max_size > 1)
1489 {
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1493 mode = tmode;
1494
1495 if (mode == VOIDmode)
1496 break;
1497
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1500 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1501
1502 max_size = GET_MODE_SIZE (mode);
1503 }
1504
1505 /* The code above should have handled everything. */
1506 if (data.len > 0)
1507 abort ();
1508 }
1509
1510 /* Return number of insns required to move L bytes by pieces.
1511 ALIGN (in bits) is the maximum alignment we can assume. */
1512
1513 static unsigned HOST_WIDE_INT
1514 move_by_pieces_ninsns (l, align)
1515 unsigned HOST_WIDE_INT l;
1516 unsigned int align;
1517 {
1518 unsigned HOST_WIDE_INT n_insns = 0;
1519 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1520
1521 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1522 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1523 align = MOVE_MAX * BITS_PER_UNIT;
1524
1525 while (max_size > 1)
1526 {
1527 enum machine_mode mode = VOIDmode, tmode;
1528 enum insn_code icode;
1529
1530 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1531 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1532 if (GET_MODE_SIZE (tmode) < max_size)
1533 mode = tmode;
1534
1535 if (mode == VOIDmode)
1536 break;
1537
1538 icode = mov_optab->handlers[(int) mode].insn_code;
1539 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1540 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1541
1542 max_size = GET_MODE_SIZE (mode);
1543 }
1544
1545 if (l)
1546 abort ();
1547 return n_insns;
1548 }
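
/* For example, on a target where MOVE_MAX is 4 and the operands are
   word-aligned (and assuming QI, HI and SI moves all exist), a length of
   7 decomposes into one SImode, one HImode and one QImode move, so the
   function returns 3.  */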
1549
1550 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1551 with move instructions for mode MODE. GENFUN is the gen_... function
1552 to make a move insn for that mode. DATA has all the other info. */
1553
1554 static void
1555 move_by_pieces_1 (genfun, mode, data)
1556 rtx (*genfun) PARAMS ((rtx, ...));
1557 enum machine_mode mode;
1558 struct move_by_pieces *data;
1559 {
1560 unsigned int size = GET_MODE_SIZE (mode);
1561 rtx to1 = NULL_RTX, from1;
1562
1563 while (data->len >= size)
1564 {
1565 if (data->reverse)
1566 data->offset -= size;
1567
1568 if (data->to)
1569 {
1570 if (data->autinc_to)
1571 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1572 data->offset);
1573 else
1574 to1 = adjust_address (data->to, mode, data->offset);
1575 }
1576
1577 if (data->autinc_from)
1578 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1579 data->offset);
1580 else
1581 from1 = adjust_address (data->from, mode, data->offset);
1582
1583 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1584 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1585 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1586 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1587
1588 if (data->to)
1589 emit_insn ((*genfun) (to1, from1));
1590 else
1591 {
1592 #ifdef PUSH_ROUNDING
1593 emit_single_push_insn (mode, from1, NULL);
1594 #else
1595 abort ();
1596 #endif
1597 }
1598
1599 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1600 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1601 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1602 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1603
1604 if (! data->reverse)
1605 data->offset += size;
1606
1607 data->len -= size;
1608 }
1609 }
1610 \f
1611 /* Emit code to move a block Y to a block X.
1612 This may be done with string-move instructions,
1613 with multiple scalar move instructions, or with a library call.
1614
1615 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1616 with mode BLKmode.
1617 SIZE is an rtx that says how long they are.
1618 ALIGN is the maximum alignment we can assume they have.
1619
1620 Return the address of the new block, if memcpy is called and returns it,
1621 0 otherwise. */
1622
1623 rtx
1624 emit_block_move (x, y, size)
1625 rtx x, y;
1626 rtx size;
1627 {
1628 rtx retval = 0;
1629 #ifdef TARGET_MEM_FUNCTIONS
1630 static tree fn;
1631 tree call_expr, arg_list;
1632 #endif
1633 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1634
1635 if (GET_MODE (x) != BLKmode)
1636 abort ();
1637
1638 if (GET_MODE (y) != BLKmode)
1639 abort ();
1640
1641 x = protect_from_queue (x, 1);
1642 y = protect_from_queue (y, 0);
1643 size = protect_from_queue (size, 0);
1644
1645 if (GET_CODE (x) != MEM)
1646 abort ();
1647 if (GET_CODE (y) != MEM)
1648 abort ();
1649 if (size == 0)
1650 abort ();
1651
1652 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1653 move_by_pieces (x, y, INTVAL (size), align);
1654 else
1655 {
1656 /* Try the most limited insn first, because there's no point
1657 including more than one in the machine description unless
1658 the more limited one has some advantage. */
1659
1660 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1661 enum machine_mode mode;
1662
1663 /* Since this is a move insn, we don't care about volatility. */
1664 volatile_ok = 1;
1665
1666 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1667 mode = GET_MODE_WIDER_MODE (mode))
1668 {
1669 enum insn_code code = movstr_optab[(int) mode];
1670 insn_operand_predicate_fn pred;
1671
1672 if (code != CODE_FOR_nothing
1673 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1674 here because if SIZE is less than the mode mask, as it is
1675 returned by the macro, it will definitely be less than the
1676 actual mode mask. */
1677 && ((GET_CODE (size) == CONST_INT
1678 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1679 <= (GET_MODE_MASK (mode) >> 1)))
1680 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1681 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1682 || (*pred) (x, BLKmode))
1683 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1684 || (*pred) (y, BLKmode))
1685 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1686 || (*pred) (opalign, VOIDmode)))
1687 {
1688 rtx op2;
1689 rtx last = get_last_insn ();
1690 rtx pat;
1691
1692 op2 = convert_to_mode (mode, size, 1);
1693 pred = insn_data[(int) code].operand[2].predicate;
1694 if (pred != 0 && ! (*pred) (op2, mode))
1695 op2 = copy_to_mode_reg (mode, op2);
1696
1697 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1698 if (pat)
1699 {
1700 emit_insn (pat);
1701 volatile_ok = 0;
1702 return 0;
1703 }
1704 else
1705 delete_insns_since (last);
1706 }
1707 }
1708
1709 volatile_ok = 0;
1710
1711 /* X, Y, or SIZE may have been passed through protect_from_queue.
1712
1713 It is unsafe to save the value generated by protect_from_queue
1714 and reuse it later. Consider what happens if emit_queue is
1715 called before the return value from protect_from_queue is used.
1716
1717 Expansion of the CALL_EXPR below will call emit_queue before
1718 we are finished emitting RTL for argument setup. So if we are
1719 not careful we could get the wrong value for an argument.
1720
1721 To avoid this problem we go ahead and emit code to copy X, Y &
1722 SIZE into new pseudos. We can then place those new pseudos
1723 into an RTL_EXPR and use them later, even after a call to
1724 emit_queue.
1725
1726 Note this is not strictly needed for library calls since they
1727 do not call emit_queue before loading their arguments. However,
1728 we may need to have library calls call emit_queue in the future
1729 since failing to do so could cause problems for targets which
1730 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1731 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1732 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1733
1734 #ifdef TARGET_MEM_FUNCTIONS
1735 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1736 #else
1737 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1738 TREE_UNSIGNED (integer_type_node));
1739 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1740 #endif
1741
1742 #ifdef TARGET_MEM_FUNCTIONS
1743 /* It is incorrect to use the libcall calling conventions to call
1744 memcpy in this context.
1745
1746 This could be a user call to memcpy and the user may wish to
1747 examine the return value from memcpy.
1748
1749 For targets where libcalls and normal calls have different conventions
1750 for returning pointers, we could end up generating incorrect code.
1751
1752 So instead of using a libcall sequence we build up a suitable
1753 CALL_EXPR and expand the call in the normal fashion. */
1754 if (fn == NULL_TREE)
1755 {
1756 tree fntype;
1757
1758 /* This was copied from except.c, I don't know if all this is
1759 necessary in this context or not. */
1760 fn = get_identifier ("memcpy");
1761 fntype = build_pointer_type (void_type_node);
1762 fntype = build_function_type (fntype, NULL_TREE);
1763 fn = build_decl (FUNCTION_DECL, fn, fntype);
1764 ggc_add_tree_root (&fn, 1);
1765 DECL_EXTERNAL (fn) = 1;
1766 TREE_PUBLIC (fn) = 1;
1767 DECL_ARTIFICIAL (fn) = 1;
1768 TREE_NOTHROW (fn) = 1;
1769 make_decl_rtl (fn, NULL);
1770 assemble_external (fn);
1771 }
1772
1773 /* We need to make an argument list for the function call.
1774
1775 memcpy has three arguments, the first two are void * addresses and
1776 the last is a size_t byte count for the copy. */
1777 arg_list
1778 = build_tree_list (NULL_TREE,
1779 make_tree (build_pointer_type (void_type_node), x));
1780 TREE_CHAIN (arg_list)
1781 = build_tree_list (NULL_TREE,
1782 make_tree (build_pointer_type (void_type_node), y));
1783 TREE_CHAIN (TREE_CHAIN (arg_list))
1784 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1785 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1786
1787 /* Now we have to build up the CALL_EXPR itself. */
1788 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1789 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1790 call_expr, arg_list, NULL_TREE);
1791 TREE_SIDE_EFFECTS (call_expr) = 1;
1792
1793 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1794 #else
1795 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1796 VOIDmode, 3, y, Pmode, x, Pmode,
1797 convert_to_mode (TYPE_MODE (integer_type_node), size,
1798 TREE_UNSIGNED (integer_type_node)),
1799 TYPE_MODE (integer_type_node));
1800 #endif
1801
1802 /* If we are initializing a readonly value, show the above call
1803 clobbered it. Otherwise, a load from it may erroneously be hoisted
1804 from a loop. */
1805 if (RTX_UNCHANGING_P (x))
1806 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1807 }
1808
1809 return retval;
1810 }
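
/* Illustrative call (X and Y stand for BLKmode MEM operands the caller
   has already constructed): copy 64 bytes from Y to X, letting
   emit_block_move pick move_by_pieces, a movstr pattern, or the
   memcpy/bcopy fallback as appropriate.  */
#if 0
  emit_block_move (x, y, GEN_INT (64));
#endif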
1811 \f
1812 /* Copy all or part of a value X into registers starting at REGNO.
1813 The number of registers to be filled is NREGS. */
1814
1815 void
1816 move_block_to_reg (regno, x, nregs, mode)
1817 int regno;
1818 rtx x;
1819 int nregs;
1820 enum machine_mode mode;
1821 {
1822 int i;
1823 #ifdef HAVE_load_multiple
1824 rtx pat;
1825 rtx last;
1826 #endif
1827
1828 if (nregs == 0)
1829 return;
1830
1831 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1832 x = validize_mem (force_const_mem (mode, x));
1833
1834 /* See if the machine can do this with a load multiple insn. */
1835 #ifdef HAVE_load_multiple
1836 if (HAVE_load_multiple)
1837 {
1838 last = get_last_insn ();
1839 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1840 GEN_INT (nregs));
1841 if (pat)
1842 {
1843 emit_insn (pat);
1844 return;
1845 }
1846 else
1847 delete_insns_since (last);
1848 }
1849 #endif
1850
1851 for (i = 0; i < nregs; i++)
1852 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1853 operand_subword_force (x, i, mode));
1854 }
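
/* Sketch of a call (the register number and count are made up): copy the
   first two words of X into hard registers 4 and 5, as when loading a
   small argument block that is passed in registers.  */
#if 0
  move_block_to_reg (4, x, 2, GET_MODE (x));
#endif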
1855
1856 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1857 The number of registers to be filled is NREGS. SIZE indicates the number
1858 of bytes in the object X. */
1859
1860 void
1861 move_block_from_reg (regno, x, nregs, size)
1862 int regno;
1863 rtx x;
1864 int nregs;
1865 int size;
1866 {
1867 int i;
1868 #ifdef HAVE_store_multiple
1869 rtx pat;
1870 rtx last;
1871 #endif
1872 enum machine_mode mode;
1873
1874 if (nregs == 0)
1875 return;
1876
1877 /* If SIZE is that of a mode no bigger than a word, just use that
1878 mode's store operation. */
1879 if (size <= UNITS_PER_WORD
1880 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1881 {
1882 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1883 return;
1884 }
1885
1886 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1887 to the left before storing to memory. Note that the previous test
1888 doesn't handle all cases (e.g. SIZE == 3). */
1889 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1890 {
1891 rtx tem = operand_subword (x, 0, 1, BLKmode);
1892 rtx shift;
1893
1894 if (tem == 0)
1895 abort ();
1896
1897 shift = expand_shift (LSHIFT_EXPR, word_mode,
1898 gen_rtx_REG (word_mode, regno),
1899 build_int_2 ((UNITS_PER_WORD - size)
1900 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1901 emit_move_insn (tem, shift);
1902 return;
1903 }
1904
1905 /* See if the machine can do this with a store multiple insn. */
1906 #ifdef HAVE_store_multiple
1907 if (HAVE_store_multiple)
1908 {
1909 last = get_last_insn ();
1910 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1911 GEN_INT (nregs));
1912 if (pat)
1913 {
1914 emit_insn (pat);
1915 return;
1916 }
1917 else
1918 delete_insns_since (last);
1919 }
1920 #endif
1921
1922 for (i = 0; i < nregs; i++)
1923 {
1924 rtx tem = operand_subword (x, i, 1, BLKmode);
1925
1926 if (tem == 0)
1927 abort ();
1928
1929 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1930 }
1931 }
1932
1933 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1934 registers represented by a PARALLEL. SSIZE represents the total size of
1935 block SRC in bytes, or -1 if not known. */
1936 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1937 the balance will be in what would be the low-order memory addresses, i.e.
1938 left justified for big endian, right justified for little endian. This
1939 happens to be true for the targets currently using this support. If this
1940 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1941 would be needed. */
1942
1943 void
1944 emit_group_load (dst, orig_src, ssize)
1945 rtx dst, orig_src;
1946 int ssize;
1947 {
1948 rtx *tmps, src;
1949 int start, i;
1950
1951 if (GET_CODE (dst) != PARALLEL)
1952 abort ();
1953
1954 /* Check for a NULL entry, used to indicate that the parameter goes
1955 both on the stack and in registers. */
1956 if (XEXP (XVECEXP (dst, 0, 0), 0))
1957 start = 0;
1958 else
1959 start = 1;
1960
1961 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1962
1963 /* Process the pieces. */
1964 for (i = start; i < XVECLEN (dst, 0); i++)
1965 {
1966 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1967 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1968 unsigned int bytelen = GET_MODE_SIZE (mode);
1969 int shift = 0;
1970
1971 /* Handle trailing fragments that run over the size of the struct. */
1972 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1973 {
1974 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1975 bytelen = ssize - bytepos;
1976 if (bytelen <= 0)
1977 abort ();
1978 }
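          /* Worked example (illustrative): with ssize == 6 and a 4-byte
             piece at bytepos == 4, only 6 - 4 = 2 bytes are real, so
             bytelen becomes 2 and shift becomes (4 - 2) * 8 = 16; on a
             big-endian target the extracted value is shifted left by
             that amount further below, leaving the fragment
             left-justified.  */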
1979
1980 /* If we won't be loading directly from memory, protect the real source
1981 from strange tricks we might play; but make sure that the source can
1982 be loaded directly into the destination. */
1983 src = orig_src;
1984 if (GET_CODE (orig_src) != MEM
1985 && (!CONSTANT_P (orig_src)
1986 || (GET_MODE (orig_src) != mode
1987 && GET_MODE (orig_src) != VOIDmode)))
1988 {
1989 if (GET_MODE (orig_src) == VOIDmode)
1990 src = gen_reg_rtx (mode);
1991 else
1992 src = gen_reg_rtx (GET_MODE (orig_src));
1993
1994 emit_move_insn (src, orig_src);
1995 }
1996
1997 /* Optimize the access just a bit. */
1998 if (GET_CODE (src) == MEM
1999 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2000 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2001 && bytelen == GET_MODE_SIZE (mode))
2002 {
2003 tmps[i] = gen_reg_rtx (mode);
2004 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2005 }
2006 else if (GET_CODE (src) == CONCAT)
2007 {
2008 if (bytepos == 0
2009 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2010 tmps[i] = XEXP (src, 0);
2011 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2012 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2013 tmps[i] = XEXP (src, 1);
2014 else if (bytepos == 0)
2015 {
2016 rtx mem = assign_stack_temp (GET_MODE (src),
2017 GET_MODE_SIZE (GET_MODE (src)), 0);
2018 emit_move_insn (mem, src);
2019 tmps[i] = adjust_address (mem, mode, 0);
2020 }
2021 else
2022 abort ();
2023 }
2024 else if (CONSTANT_P (src)
2025 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2026 tmps[i] = src;
2027 else
2028 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2029 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2030 mode, mode, ssize);
2031
2032 if (BYTES_BIG_ENDIAN && shift)
2033 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2034 tmps[i], 0, OPTAB_WIDEN);
2035 }
2036
2037 emit_queue ();
2038
2039 /* Copy the extracted pieces into the proper (probable) hard regs. */
2040 for (i = start; i < XVECLEN (dst, 0); i++)
2041 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2042 }
2043
2044 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2045 registers represented by a PARALLEL. SSIZE represents the total size of
2046 block DST, or -1 if not known. */
2047
2048 void
2049 emit_group_store (orig_dst, src, ssize)
2050 rtx orig_dst, src;
2051 int ssize;
2052 {
2053 rtx *tmps, dst;
2054 int start, i;
2055
2056 if (GET_CODE (src) != PARALLEL)
2057 abort ();
2058
2059 /* Check for a NULL entry, used to indicate that the parameter goes
2060 both on the stack and in registers. */
2061 if (XEXP (XVECEXP (src, 0, 0), 0))
2062 start = 0;
2063 else
2064 start = 1;
2065
2066 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2067
2068 /* Copy the (probable) hard regs into pseudos. */
2069 for (i = start; i < XVECLEN (src, 0); i++)
2070 {
2071 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2072 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2073 emit_move_insn (tmps[i], reg);
2074 }
2075 emit_queue ();
2076
2077 /* If we won't be storing directly into memory, protect the real destination
2078 from strange tricks we might play. */
2079 dst = orig_dst;
2080 if (GET_CODE (dst) == PARALLEL)
2081 {
2082 rtx temp;
2083
2084 /* We can get a PARALLEL dst if there is a conditional expression in
2085 a return statement. In that case, the dst and src are the same,
2086 so no action is necessary. */
2087 if (rtx_equal_p (dst, src))
2088 return;
2089
2090 /* It is unclear if we can ever reach here, but we may as well handle
2091 it. Allocate a temporary, and split this into a store/load to/from
2092 the temporary. */
2093
2094 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2095 emit_group_store (temp, src, ssize);
2096 emit_group_load (dst, temp, ssize);
2097 return;
2098 }
2099 else if (GET_CODE (dst) != MEM)
2100 {
2101 dst = gen_reg_rtx (GET_MODE (orig_dst));
2102 /* Make life a bit easier for combine. */
2103 emit_move_insn (dst, const0_rtx);
2104 }
2105
2106 /* Process the pieces. */
2107 for (i = start; i < XVECLEN (src, 0); i++)
2108 {
2109 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2110 enum machine_mode mode = GET_MODE (tmps[i]);
2111 unsigned int bytelen = GET_MODE_SIZE (mode);
2112
2113 /* Handle trailing fragments that run over the size of the struct. */
2114 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2115 {
2116 if (BYTES_BIG_ENDIAN)
2117 {
2118 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2119 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2120 tmps[i], 0, OPTAB_WIDEN);
2121 }
2122 bytelen = ssize - bytepos;
2123 }
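          /* Worked example (illustrative): mirroring emit_group_load,
             with ssize == 6 and a 4-byte piece at bytepos == 4, a
             big-endian target shifts the register right by
             (4 - 2) * 8 = 16 bits so the two meaningful bytes reach the
             low end before the 2-byte store below.  */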
2124
2125 /* Optimize the access just a bit. */
2126 if (GET_CODE (dst) == MEM
2127 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2128 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2129 && bytelen == GET_MODE_SIZE (mode))
2130 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2131 else
2132 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2133 mode, tmps[i], ssize);
2134 }
2135
2136 emit_queue ();
2137
2138 /* Copy from the pseudo into the (probable) hard reg. */
2139 if (GET_CODE (dst) == REG)
2140 emit_move_insn (orig_dst, dst);
2141 }
2142
2143 /* Generate code to copy a BLKmode object of TYPE out of a
2144 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2145 is null, a stack temporary is created. TGTBLK is returned.
2146
2147 The primary purpose of this routine is to handle functions
2148 that return BLKmode structures in registers. Some machines
2149 (the PA for example) want to return all small structures
2150 in registers regardless of the structure's alignment. */
2151
2152 rtx
2153 copy_blkmode_from_reg (tgtblk, srcreg, type)
2154 rtx tgtblk;
2155 rtx srcreg;
2156 tree type;
2157 {
2158 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2159 rtx src = NULL, dst = NULL;
2160 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2161 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2162
2163 if (tgtblk == 0)
2164 {
2165 tgtblk = assign_temp (build_qualified_type (type,
2166 (TYPE_QUALS (type)
2167 | TYPE_QUAL_CONST)),
2168 0, 1, 1);
2169 preserve_temp_slots (tgtblk);
2170 }
2171
2172 /* This code assumes srcreg is at least a full word. If it isn't,
2173 copy it into a new pseudo which is a full word. */
2174 if (GET_MODE (srcreg) != BLKmode
2175 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2176 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2177
2178 /* Structures whose size is not a multiple of a word are aligned
2179 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2180 machine, this means we must skip the empty high order bytes when
2181 calculating the bit offset. */
2182 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2183 big_endian_correction
2184 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
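  /* Worked example (illustrative): on a 32-bit big-endian target a
     6-byte structure leaves 6 % 4 = 2 bytes in its last word, so the
     correction is 32 - 2 * 8 = 16 bits of empty high-order space to
     skip.  */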
2185
2186 /* Copy the structure BITSIZE bits at a time.
2187
2188 We could probably emit more efficient code for machines which do not use
2189 strict alignment, but it doesn't seem worth the effort at the current
2190 time. */
2191 for (bitpos = 0, xbitpos = big_endian_correction;
2192 bitpos < bytes * BITS_PER_UNIT;
2193 bitpos += bitsize, xbitpos += bitsize)
2194 {
2195 /* We need a new source operand each time xbitpos is on a
2196 word boundary and when xbitpos == big_endian_correction
2197 (the first time through). */
2198 if (xbitpos % BITS_PER_WORD == 0
2199 || xbitpos == big_endian_correction)
2200 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2201 GET_MODE (srcreg));
2202
2203 /* We need a new destination operand each time bitpos is on
2204 a word boundary. */
2205 if (bitpos % BITS_PER_WORD == 0)
2206 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2207
2208 /* Use xbitpos for the source extraction (right justified) and
2209 bitpos for the destination store (left justified). */
2210 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2211 extract_bit_field (src, bitsize,
2212 xbitpos % BITS_PER_WORD, 1,
2213 NULL_RTX, word_mode, word_mode,
2214 BITS_PER_WORD),
2215 BITS_PER_WORD);
2216 }
2217
2218 return tgtblk;
2219 }
2220
2221 /* Add a USE expression for REG to the (possibly empty) list pointed
2222 to by CALL_FUSAGE. REG must denote a hard register. */
2223
2224 void
2225 use_reg (call_fusage, reg)
2226 rtx *call_fusage, reg;
2227 {
2228 if (GET_CODE (reg) != REG
2229 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2230 abort ();
2231
2232 *call_fusage
2233 = gen_rtx_EXPR_LIST (VOIDmode,
2234 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2235 }
2236
2237 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2238 starting at REGNO. All of these registers must be hard registers. */
2239
2240 void
2241 use_regs (call_fusage, regno, nregs)
2242 rtx *call_fusage;
2243 int regno;
2244 int nregs;
2245 {
2246 int i;
2247
2248 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2249 abort ();
2250
2251 for (i = 0; i < nregs; i++)
2252 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2253 }
2254
2255 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2256 PARALLEL REGS. This is for calls that pass values in multiple
2257 non-contiguous locations. The Irix 6 ABI has examples of this. */
2258
2259 void
2260 use_group_regs (call_fusage, regs)
2261 rtx *call_fusage;
2262 rtx regs;
2263 {
2264 int i;
2265
2266 for (i = 0; i < XVECLEN (regs, 0); i++)
2267 {
2268 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2269
2270 /* A NULL entry means the parameter goes both on the stack and in
2271 registers. This can also be a MEM for targets that pass values
2272 partially on the stack and partially in registers. */
2273 if (reg != 0 && GET_CODE (reg) == REG)
2274 use_reg (call_fusage, reg);
2275 }
2276 }
2277 \f
2278
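/* Return nonzero if LEN bytes generated by CONSTFUN -- called as
   (*constfun) (constfundata, offset, mode) -- can be stored to memory
   using several move instructions, given a destination known to be
   aligned to ALIGN bits.  Every constant that CONSTFUN returns must
   satisfy LEGITIMATE_CONSTANT_P.  */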
2279 int
2280 can_store_by_pieces (len, constfun, constfundata, align)
2281 unsigned HOST_WIDE_INT len;
2282 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2283 PTR constfundata;
2284 unsigned int align;
2285 {
2286 unsigned HOST_WIDE_INT max_size, l;
2287 HOST_WIDE_INT offset = 0;
2288 enum machine_mode mode, tmode;
2289 enum insn_code icode;
2290 int reverse;
2291 rtx cst;
2292
2293 if (! MOVE_BY_PIECES_P (len, align))
2294 return 0;
2295
2296 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2297 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2298 align = MOVE_MAX * BITS_PER_UNIT;
2299
2300 /* We would first store what we can in the largest integer mode, then go to
2301 successively smaller modes. */
2302
2303 for (reverse = 0;
2304 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2305 reverse++)
2306 {
2307 l = len;
2308 mode = VOIDmode;
2309 max_size = MOVE_MAX_PIECES + 1;
2310 while (max_size > 1)
2311 {
2312 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2313 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2314 if (GET_MODE_SIZE (tmode) < max_size)
2315 mode = tmode;
2316
2317 if (mode == VOIDmode)
2318 break;
2319
2320 icode = mov_optab->handlers[(int) mode].insn_code;
2321 if (icode != CODE_FOR_nothing
2322 && align >= GET_MODE_ALIGNMENT (mode))
2323 {
2324 unsigned int size = GET_MODE_SIZE (mode);
2325
2326 while (l >= size)
2327 {
2328 if (reverse)
2329 offset -= size;
2330
2331 cst = (*constfun) (constfundata, offset, mode);
2332 if (!LEGITIMATE_CONSTANT_P (cst))
2333 return 0;
2334
2335 if (!reverse)
2336 offset += size;
2337
2338 l -= size;
2339 }
2340 }
2341
2342 max_size = GET_MODE_SIZE (mode);
2343 }
2344
2345 /* The code above should have handled everything. */
2346 if (l != 0)
2347 abort ();
2348 }
2349
2350 return 1;
2351 }
2352
2353 /* Generate several move instructions to store LEN bytes generated by
2354 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2355 pointer which will be passed as argument in every CONSTFUN call.
2356 ALIGN is maximum alignment we can assume. */
2357
2358 void
2359 store_by_pieces (to, len, constfun, constfundata, align)
2360 rtx to;
2361 unsigned HOST_WIDE_INT len;
2362 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2363 PTR constfundata;
2364 unsigned int align;
2365 {
2366 struct store_by_pieces data;
2367
2368 if (! MOVE_BY_PIECES_P (len, align))
2369 abort ();
2370 to = protect_from_queue (to, 1);
2371 data.constfun = constfun;
2372 data.constfundata = constfundata;
2373 data.len = len;
2374 data.to = to;
2375 store_by_pieces_1 (&data, align);
2376 }
2377
2378 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2379 rtx with BLKmode). The caller must pass TO through protect_from_queue
2380 before calling. ALIGN is maximum alignment we can assume. */
2381
2382 static void
2383 clear_by_pieces (to, len, align)
2384 rtx to;
2385 unsigned HOST_WIDE_INT len;
2386 unsigned int align;
2387 {
2388 struct store_by_pieces data;
2389
2390 data.constfun = clear_by_pieces_1;
2391 data.constfundata = NULL;
2392 data.len = len;
2393 data.to = to;
2394 store_by_pieces_1 (&data, align);
2395 }
2396
2397 /* Callback routine for clear_by_pieces.
2398 Return const0_rtx unconditionally. */
2399
2400 static rtx
2401 clear_by_pieces_1 (data, offset, mode)
2402 PTR data ATTRIBUTE_UNUSED;
2403 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2404 enum machine_mode mode ATTRIBUTE_UNUSED;
2405 {
2406 return const0_rtx;
2407 }
2408
2409 /* Subroutine of clear_by_pieces and store_by_pieces.
2410 Generate several move instructions to store LEN bytes of block TO. (A MEM
2411 rtx with BLKmode). The caller must pass TO through protect_from_queue
2412 before calling. ALIGN is maximum alignment we can assume. */
2413
2414 static void
2415 store_by_pieces_1 (data, align)
2416 struct store_by_pieces *data;
2417 unsigned int align;
2418 {
2419 rtx to_addr = XEXP (data->to, 0);
2420 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2421 enum machine_mode mode = VOIDmode, tmode;
2422 enum insn_code icode;
2423
2424 data->offset = 0;
2425 data->to_addr = to_addr;
2426 data->autinc_to
2427 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2428 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2429
2430 data->explicit_inc_to = 0;
2431 data->reverse
2432 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2433 if (data->reverse)
2434 data->offset = data->len;
2435
2436 /* If storing requires more than two move insns,
2437 copy addresses to registers (to make displacements shorter)
2438 and use post-increment if available. */
2439 if (!data->autinc_to
2440 && move_by_pieces_ninsns (data->len, align) > 2)
2441 {
2442 /* Determine the main mode we'll be using. */
2443 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2444 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2445 if (GET_MODE_SIZE (tmode) < max_size)
2446 mode = tmode;
2447
2448 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2449 {
2450 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2451 data->autinc_to = 1;
2452 data->explicit_inc_to = -1;
2453 }
2454
2455 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2456 && ! data->autinc_to)
2457 {
2458 data->to_addr = copy_addr_to_reg (to_addr);
2459 data->autinc_to = 1;
2460 data->explicit_inc_to = 1;
2461 }
2462
2463 if ( !data->autinc_to && CONSTANT_P (to_addr))
2464 data->to_addr = copy_addr_to_reg (to_addr);
2465 }
2466
2467 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2468 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2469 align = MOVE_MAX * BITS_PER_UNIT;
2470
2471 /* First store what we can in the largest integer mode, then go to
2472 successively smaller modes. */
2473
2474 while (max_size > 1)
2475 {
2476 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2477 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2478 if (GET_MODE_SIZE (tmode) < max_size)
2479 mode = tmode;
2480
2481 if (mode == VOIDmode)
2482 break;
2483
2484 icode = mov_optab->handlers[(int) mode].insn_code;
2485 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2486 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2487
2488 max_size = GET_MODE_SIZE (mode);
2489 }
2490
2491 /* The code above should have handled everything. */
2492 if (data->len != 0)
2493 abort ();
2494 }
2495
2496 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2497 with move instructions for mode MODE. GENFUN is the gen_... function
2498 to make a move insn for that mode. DATA has all the other info. */
2499
2500 static void
2501 store_by_pieces_2 (genfun, mode, data)
2502 rtx (*genfun) PARAMS ((rtx, ...));
2503 enum machine_mode mode;
2504 struct store_by_pieces *data;
2505 {
2506 unsigned int size = GET_MODE_SIZE (mode);
2507 rtx to1, cst;
2508
2509 while (data->len >= size)
2510 {
2511 if (data->reverse)
2512 data->offset -= size;
2513
2514 if (data->autinc_to)
2515 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2516 data->offset);
2517 else
2518 to1 = adjust_address (data->to, mode, data->offset);
2519
2520 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2521 emit_insn (gen_add2_insn (data->to_addr,
2522 GEN_INT (-(HOST_WIDE_INT) size)));
2523
2524 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2525 emit_insn ((*genfun) (to1, cst));
2526
2527 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2528 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2529
2530 if (! data->reverse)
2531 data->offset += size;
2532
2533 data->len -= size;
2534 }
2535 }
2536 \f
2537 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2538 its length in bytes. */
2539
2540 rtx
2541 clear_storage (object, size)
2542 rtx object;
2543 rtx size;
2544 {
2545 #ifdef TARGET_MEM_FUNCTIONS
2546 static tree fn;
2547 tree call_expr, arg_list;
2548 #endif
2549 rtx retval = 0;
2550 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2551 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2552
2553 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2554 just move a zero. Otherwise, do this a piece at a time. */
2555 if (GET_MODE (object) != BLKmode
2556 && GET_CODE (size) == CONST_INT
2557 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2558 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2559 else
2560 {
2561 object = protect_from_queue (object, 1);
2562 size = protect_from_queue (size, 0);
2563
2564 if (GET_CODE (size) == CONST_INT
2565 && MOVE_BY_PIECES_P (INTVAL (size), align))
2566 clear_by_pieces (object, INTVAL (size), align);
2567 else
2568 {
2569 /* Try the most limited insn first, because there's no point
2570 including more than one in the machine description unless
2571 the more limited one has some advantage. */
2572
2573 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2574 enum machine_mode mode;
2575
2576 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2577 mode = GET_MODE_WIDER_MODE (mode))
2578 {
2579 enum insn_code code = clrstr_optab[(int) mode];
2580 insn_operand_predicate_fn pred;
2581
2582 if (code != CODE_FOR_nothing
2583 /* We don't need MODE to be narrower than
2584 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2585 the mode mask, as it is returned by the macro, it will
2586 definitely be less than the actual mode mask. */
2587 && ((GET_CODE (size) == CONST_INT
2588 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2589 <= (GET_MODE_MASK (mode) >> 1)))
2590 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2591 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2592 || (*pred) (object, BLKmode))
2593 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2594 || (*pred) (opalign, VOIDmode)))
2595 {
2596 rtx op1;
2597 rtx last = get_last_insn ();
2598 rtx pat;
2599
2600 op1 = convert_to_mode (mode, size, 1);
2601 pred = insn_data[(int) code].operand[1].predicate;
2602 if (pred != 0 && ! (*pred) (op1, mode))
2603 op1 = copy_to_mode_reg (mode, op1);
2604
2605 pat = GEN_FCN ((int) code) (object, op1, opalign);
2606 if (pat)
2607 {
2608 emit_insn (pat);
2609 return 0;
2610 }
2611 else
2612 delete_insns_since (last);
2613 }
2614 }
2615
2616 /* OBJECT or SIZE may have been passed through protect_from_queue.
2617
2618 It is unsafe to save the value generated by protect_from_queue
2619 and reuse it later. Consider what happens if emit_queue is
2620 called before the return value from protect_from_queue is used.
2621
2622 Expansion of the CALL_EXPR below will call emit_queue before
2623 we are finished emitting RTL for argument setup. So if we are
2624 not careful we could get the wrong value for an argument.
2625
2626 To avoid this problem we go ahead and emit code to copy OBJECT
2627 and SIZE into new pseudos. We can then place those new pseudos
2628 into an RTL_EXPR and use them later, even after a call to
2629 emit_queue.
2630
2631 Note this is not strictly needed for library calls since they
2632 do not call emit_queue before loading their arguments. However,
2633 we may need to have library calls call emit_queue in the future
2634 since failing to do so could cause problems for targets which
2635 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2636 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2637
2638 #ifdef TARGET_MEM_FUNCTIONS
2639 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2640 #else
2641 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2642 TREE_UNSIGNED (integer_type_node));
2643 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2644 #endif
2645
2646 #ifdef TARGET_MEM_FUNCTIONS
2647 /* It is incorrect to use the libcall calling conventions to call
2648 memset in this context.
2649
2650 This could be a user call to memset and the user may wish to
2651 examine the return value from memset.
2652
2653 For targets where libcalls and normal calls have different
2654 conventions for returning pointers, we could end up generating
2655 incorrect code.
2656
2657 So instead of using a libcall sequence we build up a suitable
2658 CALL_EXPR and expand the call in the normal fashion. */
2659 if (fn == NULL_TREE)
2660 {
2661 tree fntype;
2662
2663 /* This was copied from except.c; I don't know whether all of this
2664 is necessary in this context. */
2665 fn = get_identifier ("memset");
2666 fntype = build_pointer_type (void_type_node);
2667 fntype = build_function_type (fntype, NULL_TREE);
2668 fn = build_decl (FUNCTION_DECL, fn, fntype);
2669 ggc_add_tree_root (&fn, 1);
2670 DECL_EXTERNAL (fn) = 1;
2671 TREE_PUBLIC (fn) = 1;
2672 DECL_ARTIFICIAL (fn) = 1;
2673 TREE_NOTHROW (fn) = 1;
2674 make_decl_rtl (fn, NULL);
2675 assemble_external (fn);
2676 }
2677
2678 /* We need to make an argument list for the function call.
2679
2680 memset has three arguments: the first is a void * address, the
2681 second an integer with the initialization value, and the last is a
2682 size_t byte count. */
2683 arg_list
2684 = build_tree_list (NULL_TREE,
2685 make_tree (build_pointer_type (void_type_node),
2686 object));
2687 TREE_CHAIN (arg_list)
2688 = build_tree_list (NULL_TREE,
2689 make_tree (integer_type_node, const0_rtx));
2690 TREE_CHAIN (TREE_CHAIN (arg_list))
2691 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2692 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
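              /* The argument list built above corresponds to a
                 source-level call of the form memset (OBJECT, 0, SIZE).  */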
2693
2694 /* Now we have to build up the CALL_EXPR itself. */
2695 call_expr = build1 (ADDR_EXPR,
2696 build_pointer_type (TREE_TYPE (fn)), fn);
2697 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2698 call_expr, arg_list, NULL_TREE);
2699 TREE_SIDE_EFFECTS (call_expr) = 1;
2700
2701 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2702 #else
2703 emit_library_call (bzero_libfunc, LCT_NORMAL,
2704 VOIDmode, 2, object, Pmode, size,
2705 TYPE_MODE (integer_type_node));
2706 #endif
2707
2708 /* If we are initializing a readonly value, show the above call
2709 clobbered it. Otherwise, a load from it may erroneously be
2710 hoisted from a loop. */
2711 if (RTX_UNCHANGING_P (object))
2712 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2713 }
2714 }
2715
2716 return retval;
2717 }
2718
2719 /* Generate code to copy Y into X.
2720 Both Y and X must have the same mode, except that
2721 Y can be a constant with VOIDmode.
2722 This mode cannot be BLKmode; use emit_block_move for that.
2723
2724 Return the last instruction emitted. */
2725
2726 rtx
2727 emit_move_insn (x, y)
2728 rtx x, y;
2729 {
2730 enum machine_mode mode = GET_MODE (x);
2731 rtx y_cst = NULL_RTX;
2732 rtx last_insn;
2733
2734 x = protect_from_queue (x, 1);
2735 y = protect_from_queue (y, 0);
2736
2737 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2738 abort ();
2739
2740 /* Never force constant_p_rtx to memory. */
2741 if (GET_CODE (y) == CONSTANT_P_RTX)
2742 ;
2743 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2744 {
2745 y_cst = y;
2746 y = force_const_mem (mode, y);
2747 }
2748
2749 /* If X or Y are memory references, verify that their addresses are valid
2750 for the machine. */
2751 if (GET_CODE (x) == MEM
2752 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2753 && ! push_operand (x, GET_MODE (x)))
2754 || (flag_force_addr
2755 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2756 x = validize_mem (x);
2757
2758 if (GET_CODE (y) == MEM
2759 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2760 || (flag_force_addr
2761 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2762 y = validize_mem (y);
2763
2764 if (mode == BLKmode)
2765 abort ();
2766
2767 last_insn = emit_move_insn_1 (x, y);
2768
2769 if (y_cst && GET_CODE (x) == REG)
2770 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2771
2772 return last_insn;
2773 }
2774
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
2778
2779 rtx
2780 emit_move_insn_1 (x, y)
2781 rtx x, y;
2782 {
2783 enum machine_mode mode = GET_MODE (x);
2784 enum machine_mode submode;
2785 enum mode_class class = GET_MODE_CLASS (mode);
2786 unsigned int i;
2787
2788 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2789 abort ();
2790
2791 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2792 return
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2794
2795 /* Expand complex moves by moving real part and imag part, if possible. */
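  /* For instance (illustrative), a complex-double value whose parts are
     DFmode-sized is handled as two DFmode moves, one for the real part
     and one for the imaginary part, whenever a DFmode move pattern is
     available.  */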
2796 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2797 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2798 * BITS_PER_UNIT),
2799 (class == MODE_COMPLEX_INT
2800 ? MODE_INT : MODE_FLOAT),
2801 0))
2802 && (mov_optab->handlers[(int) submode].insn_code
2803 != CODE_FOR_nothing))
2804 {
2805 /* Don't split destination if it is a stack push. */
2806 int stack = push_operand (x, GET_MODE (x));
2807
2808 #ifdef PUSH_ROUNDING
2809 /* In case we output to the stack, but the size is smaller than the
2810 machine can push exactly, we need to use move instructions. */
2811 if (stack
2812 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2813 {
2814 rtx temp;
2815 int offset1, offset2;
2816
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp = expand_binop (Pmode,
2820 #ifdef STACK_GROWS_DOWNWARD
2821 sub_optab,
2822 #else
2823 add_optab,
2824 #endif
2825 stack_pointer_rtx,
2826 GEN_INT
2827 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2828 stack_pointer_rtx,
2829 0,
2830 OPTAB_LIB_WIDEN);
2831 if (temp != stack_pointer_rtx)
2832 emit_move_insn (stack_pointer_rtx, temp);
2833 #ifdef STACK_GROWS_DOWNWARD
2834 offset1 = 0;
2835 offset2 = GET_MODE_SIZE (submode);
2836 #else
2837 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2838 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2839 + GET_MODE_SIZE (submode));
2840 #endif
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2843 stack_pointer_rtx,
2844 GEN_INT (offset1))),
2845 gen_realpart (submode, y));
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2848 stack_pointer_rtx,
2849 GEN_INT (offset2))),
2850 gen_imagpart (submode, y));
2851 }
2852 else
2853 #endif
2854 /* If this is a stack, push the highpart first, so it
2855 will be in the argument order.
2856
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2859 if (stack)
2860 {
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of machine's endianness. */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_imagpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_realpart (submode, y)));
2870 #else
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_imagpart (submode, y)));
2877 #endif
2878 }
2879 else
2880 {
2881 rtx realpart_x, realpart_y;
2882 rtx imagpart_x, imagpart_y;
2883
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2894 && (reload_in_progress | reload_completed) == 0)
2895 {
2896 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2897 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2898
2899 if (packed_dest_p || packed_src_p)
2900 {
2901 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2902 ? MODE_FLOAT : MODE_INT);
2903
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2906
2907 if (reg_mode != BLKmode)
2908 {
2909 rtx mem = assign_stack_temp (reg_mode,
2910 GET_MODE_SIZE (mode), 0);
2911 rtx cmem = adjust_address (mem, mode, 0);
2912
2913 cfun->cannot_inline
2914 = N_("function using short complex types cannot be inline");
2915
2916 if (packed_dest_p)
2917 {
2918 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2919 emit_move_insn_1 (cmem, y);
2920 return emit_move_insn_1 (sreg, mem);
2921 }
2922 else
2923 {
2924 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2925 emit_move_insn_1 (mem, sreg);
2926 return emit_move_insn_1 (x, cmem);
2927 }
2928 }
2929 }
2930 }
2931
2932 realpart_x = gen_realpart (submode, x);
2933 realpart_y = gen_realpart (submode, y);
2934 imagpart_x = gen_imagpart (submode, x);
2935 imagpart_y = gen_imagpart (submode, y);
2936
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
2941 if (x != y
2942 && ! (reload_in_progress || reload_completed)
2943 && (GET_CODE (realpart_x) == SUBREG
2944 || GET_CODE (imagpart_x) == SUBREG))
2945 {
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2947 }
2948
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (realpart_x, realpart_y));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (imagpart_x, imagpart_y));
2953 }
2954
2955 return get_last_insn ();
2956 }
2957
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
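  /* For example (illustrative), a DImode move on a 32-bit target that
     lacks a movdi pattern falls through to here and is emitted as two
     SImode subword moves.  */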
2961 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2962 {
2963 rtx last_insn = 0;
2964 rtx seq, inner;
2965 int need_clobber;
2966
2967 #ifdef PUSH_ROUNDING
2968
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x, GET_MODE (x)))
2972 {
2973 rtx temp;
2974 enum rtx_code code;
2975
2976 /* Do not use anti_adjust_stack, since we don't want to update
2977 stack_pointer_delta. */
2978 temp = expand_binop (Pmode,
2979 #ifdef STACK_GROWS_DOWNWARD
2980 sub_optab,
2981 #else
2982 add_optab,
2983 #endif
2984 stack_pointer_rtx,
2985 GEN_INT
2986 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2987 stack_pointer_rtx,
2988 0,
2989 OPTAB_LIB_WIDEN);
2990 if (temp != stack_pointer_rtx)
2991 emit_move_insn (stack_pointer_rtx, temp);
2992
2993 code = GET_CODE (XEXP (x, 0));
2994 /* Just hope that small offsets off SP are OK. */
2995 if (code == POST_INC)
2996 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2997 GEN_INT (-(HOST_WIDE_INT)
2998 GET_MODE_SIZE (GET_MODE (x))));
2999 else if (code == POST_DEC)
3000 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3001 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3002 else
3003 temp = stack_pointer_rtx;
3004
3005 x = change_address (x, VOIDmode, temp);
3006 }
3007 #endif
3008
3009 /* If we are in reload, see if either operand is a MEM whose address
3010 is scheduled for replacement. */
3011 if (reload_in_progress && GET_CODE (x) == MEM
3012 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3013 x = replace_equiv_address_nv (x, inner);
3014 if (reload_in_progress && GET_CODE (y) == MEM
3015 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3016 y = replace_equiv_address_nv (y, inner);
3017
3018 start_sequence ();
3019
3020 need_clobber = 0;
3021 for (i = 0;
3022 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3023 i++)
3024 {
3025 rtx xpart = operand_subword (x, i, 1, mode);
3026 rtx ypart = operand_subword (y, i, 1, mode);
3027
3028 /* If we can't get a part of Y, put Y into memory if it is a
3029 constant. Otherwise, force it into a register. If we still
3030 can't get a part of Y, abort. */
3031 if (ypart == 0 && CONSTANT_P (y))
3032 {
3033 y = force_const_mem (mode, y);
3034 ypart = operand_subword (y, i, 1, mode);
3035 }
3036 else if (ypart == 0)
3037 ypart = operand_subword_force (y, i, mode);
3038
3039 if (xpart == 0 || ypart == 0)
3040 abort ();
3041
3042 need_clobber |= (GET_CODE (xpart) == SUBREG);
3043
3044 last_insn = emit_move_insn (xpart, ypart);
3045 }
3046
3047 seq = gen_sequence ();
3048 end_sequence ();
3049
3050 /* Show the output dies here. This is necessary for SUBREGs
3051 of pseudos since we cannot track their lifetimes correctly;
3052 hard regs shouldn't appear here except as return values.
3053 We never want to emit such a clobber after reload. */
3054 if (x != y
3055 && ! (reload_in_progress || reload_completed)
3056 && need_clobber != 0)
3057 {
3058 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3059 }
3060
3061 emit_insn (seq);
3062
3063 return last_insn;
3064 }
3065 else
3066 abort ();
3067 }
3068 \f
3069 /* Pushing data onto the stack. */
3070
3071 /* Push a block of length SIZE (perhaps variable)
3072 and return an rtx to address the beginning of the block.
3073 Note that it is not possible for the value returned to be a QUEUED.
3074 The value may be virtual_outgoing_args_rtx.
3075
3076 EXTRA is the number of bytes of padding to push in addition to SIZE.
3077 BELOW nonzero means this padding comes at low addresses;
3078 otherwise, the padding comes at high addresses. */
3079
3080 rtx
3081 push_block (size, extra, below)
3082 rtx size;
3083 int extra, below;
3084 {
3085 rtx temp;
3086
3087 size = convert_modes (Pmode, ptr_mode, size, 1);
3088 if (CONSTANT_P (size))
3089 anti_adjust_stack (plus_constant (size, extra));
3090 else if (GET_CODE (size) == REG && extra == 0)
3091 anti_adjust_stack (size);
3092 else
3093 {
3094 temp = copy_to_mode_reg (Pmode, size);
3095 if (extra != 0)
3096 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3097 temp, 0, OPTAB_LIB_WIDEN);
3098 anti_adjust_stack (temp);
3099 }
3100
3101 #ifndef STACK_GROWS_DOWNWARD
3102 if (0)
3103 #else
3104 if (1)
3105 #endif
3106 {
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3110 }
3111 else
3112 {
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3119 else
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3122 }
3123
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3125 }
3126
3127
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3130
3131 static rtx
3132 get_push_address (size)
3133 int size;
3134 {
3135 rtx temp;
3136
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3141 else
3142 temp = stack_pointer_rtx;
3143
3144 return copy_to_reg (temp);
3145 }
3146
3147 #ifdef PUSH_ROUNDING
3148
3149 /* Emit single push insn. */
3150
3151 static void
3152 emit_single_push_insn (mode, x, type)
3153 rtx x;
3154 enum machine_mode mode;
3155 tree type;
3156 {
3157 rtx dest_addr;
3158 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3159 rtx dest;
3160 enum insn_code icode;
3161 insn_operand_predicate_fn pred;
3162
3163 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3164 /* If there is a push pattern, use it. Otherwise try the old way of
3165 throwing a MEM representing the push operation to the move expander. */
3166 icode = push_optab->handlers[(int) mode].insn_code;
3167 if (icode != CODE_FOR_nothing)
3168 {
3169 if (((pred = insn_data[(int) icode].operand[0].predicate)
3170 && !((*pred) (x, mode))))
3171 x = force_reg (mode, x);
3172 emit_insn (GEN_FCN (icode) (x));
3173 return;
3174 }
3175 if (GET_MODE_SIZE (mode) == rounded_size)
3176 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3177 else
3178 {
3179 #ifdef STACK_GROWS_DOWNWARD
3180 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3181 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3182 #else
3183 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3184 GEN_INT (rounded_size));
3185 #endif
3186 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3187 }
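  /* Note (added commentary): when PUSH_ROUNDING pads the push, i.e.
     rounded_size differs from GET_MODE_SIZE (mode), the PRE_MODIFY
     address built above both adjusts the stack pointer by the padded
     amount and addresses the new slot, instead of relying on the plain
     STACK_PUSH_CODE auto-modification used in the exact-size case.  */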
3188
3189 dest = gen_rtx_MEM (mode, dest_addr);
3190
3191 if (type != 0)
3192 {
3193 set_mem_attributes (dest, type, 1);
3194
3195 if (flag_optimize_sibling_calls)
3196 /* Function incoming arguments may overlap with sibling call
3197 outgoing arguments and we cannot allow reordering of reads
3198 from function arguments with stores to outgoing arguments
3199 of sibling calls. */
3200 set_mem_alias_set (dest, 0);
3201 }
3202 emit_move_insn (dest, x);
3203 }
3204 #endif
3205
3206 /* Generate code to push X onto the stack, assuming it has mode MODE and
3207 type TYPE.
3208 MODE is redundant except when X is a CONST_INT (since they don't
3209 carry mode info).
3210 SIZE is an rtx for the size of data to be copied (in bytes),
3211 needed only if X is BLKmode.
3212
3213 ALIGN (in bits) is maximum alignment we can assume.
3214
3215 If PARTIAL and REG are both nonzero, then copy that many of the first
3216 words of X into registers starting with REG, and push the rest of X.
3217 The amount of space pushed is decreased by PARTIAL words,
3218 rounded *down* to a multiple of PARM_BOUNDARY.
3219 REG must be a hard register in this case.
3220 If REG is zero but PARTIAL is not, take all other actions for an
3221 argument partially in registers, but do not actually load any
3222 registers.
3223
3224 EXTRA is the amount in bytes of extra space to leave next to this arg.
3225 This is ignored if an argument block has already been allocated.
3226
3227 On a machine that lacks real push insns, ARGS_ADDR is the address of
3228 the bottom of the argument block for this call. We use indexing off there
3229 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3230 argument block has not been preallocated.
3231
3232 ARGS_SO_FAR is the size of args previously pushed for this call.
3233
3234 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3235 for arguments passed in registers. If nonzero, it will be the number
3236 of bytes required. */
3237
3238 void
3239 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3240 args_addr, args_so_far, reg_parm_stack_space,
3241 alignment_pad)
3242 rtx x;
3243 enum machine_mode mode;
3244 tree type;
3245 rtx size;
3246 unsigned int align;
3247 int partial;
3248 rtx reg;
3249 int extra;
3250 rtx args_addr;
3251 rtx args_so_far;
3252 int reg_parm_stack_space;
3253 rtx alignment_pad;
3254 {
3255 rtx xinner;
3256 enum direction stack_direction
3257 #ifdef STACK_GROWS_DOWNWARD
3258 = downward;
3259 #else
3260 = upward;
3261 #endif
3262
3263 /* Decide where to pad the argument: `downward' for below,
3264 `upward' for above, or `none' for don't pad it.
3265 Default is below for small data on big-endian machines; else above. */
3266 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3267
3268 /* Invert direction if stack is post-decrement.
3269 FIXME: why? */
3270 if (STACK_PUSH_CODE == POST_DEC)
3271 if (where_pad != none)
3272 where_pad = (where_pad == downward ? upward : downward);
3273
3274 xinner = x = protect_from_queue (x, 0);
3275
3276 if (mode == BLKmode)
3277 {
3278 /* Copy a block into the stack, entirely or partially. */
3279
3280 rtx temp;
3281 int used = partial * UNITS_PER_WORD;
3282 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3283 int skip;
3284
3285 if (size == 0)
3286 abort ();
3287
3288 used -= offset;
3289
3290 /* USED is now the # of bytes we need not copy to the stack
3291 because registers will take care of them. */
3292
3293 if (partial != 0)
3294 xinner = adjust_address (xinner, BLKmode, used);
3295
3296 /* If the partial register-part of the arg counts in its stack size,
3297 skip the part of stack space corresponding to the registers.
3298 Otherwise, start copying to the beginning of the stack space,
3299 by setting SKIP to 0. */
3300 skip = (reg_parm_stack_space == 0) ? 0 : used;
3301
3302 #ifdef PUSH_ROUNDING
3303 /* Do it with several push insns if that doesn't take lots of insns
3304 and if there is no difficulty with push insns that skip bytes
3305 on the stack for alignment purposes. */
3306 if (args_addr == 0
3307 && PUSH_ARGS
3308 && GET_CODE (size) == CONST_INT
3309 && skip == 0
3310 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3311 /* Here we avoid the case of a structure whose weak alignment
3312 forces many pushes of a small amount of data,
3313 and such small pushes do rounding that causes trouble. */
3314 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3315 || align >= BIGGEST_ALIGNMENT
3316 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3317 == (align / BITS_PER_UNIT)))
3318 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3319 {
3320 /* Push padding now if padding above and stack grows down,
3321 or if padding below and stack grows up.
3322 But if space already allocated, this has already been done. */
3323 if (extra && args_addr == 0
3324 && where_pad != none && where_pad != stack_direction)
3325 anti_adjust_stack (GEN_INT (extra));
3326
3327 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3328
3329 if (current_function_check_memory_usage && ! in_check_memory_usage)
3330 {
3331 rtx temp;
3332
3333 in_check_memory_usage = 1;
3334 temp = get_push_address (INTVAL (size) - used);
3335 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3336 emit_library_call (chkr_copy_bitmap_libfunc,
3337 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3338 Pmode, XEXP (xinner, 0), Pmode,
3339 GEN_INT (INTVAL (size) - used),
3340 TYPE_MODE (sizetype));
3341 else
3342 emit_library_call (chkr_set_right_libfunc,
3343 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3344 Pmode, GEN_INT (INTVAL (size) - used),
3345 TYPE_MODE (sizetype),
3346 GEN_INT (MEMORY_USE_RW),
3347 TYPE_MODE (integer_type_node));
3348 in_check_memory_usage = 0;
3349 }
3350 }
3351 else
3352 #endif /* PUSH_ROUNDING */
3353 {
3354 rtx target;
3355
3356 /* Otherwise make space on the stack and copy the data
3357 to the address of that space. */
3358
3359 /* Deduct words put into registers from the size we must copy. */
3360 if (partial != 0)
3361 {
3362 if (GET_CODE (size) == CONST_INT)
3363 size = GEN_INT (INTVAL (size) - used);
3364 else
3365 size = expand_binop (GET_MODE (size), sub_optab, size,
3366 GEN_INT (used), NULL_RTX, 0,
3367 OPTAB_LIB_WIDEN);
3368 }
3369
3370 /* Get the address of the stack space.
3371 In this case, we do not deal with EXTRA separately.
3372 A single stack adjust will do. */
3373 if (! args_addr)
3374 {
3375 temp = push_block (size, extra, where_pad == downward);
3376 extra = 0;
3377 }
3378 else if (GET_CODE (args_so_far) == CONST_INT)
3379 temp = memory_address (BLKmode,
3380 plus_constant (args_addr,
3381 skip + INTVAL (args_so_far)));
3382 else
3383 temp = memory_address (BLKmode,
3384 plus_constant (gen_rtx_PLUS (Pmode,
3385 args_addr,
3386 args_so_far),
3387 skip));
3388 if (current_function_check_memory_usage && ! in_check_memory_usage)
3389 {
3390 in_check_memory_usage = 1;
3391 target = copy_to_reg (temp);
3392 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3393 emit_library_call (chkr_copy_bitmap_libfunc,
3394 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3395 target, Pmode,
3396 XEXP (xinner, 0), Pmode,
3397 size, TYPE_MODE (sizetype));
3398 else
3399 emit_library_call (chkr_set_right_libfunc,
3400 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3401 target, Pmode,
3402 size, TYPE_MODE (sizetype),
3403 GEN_INT (MEMORY_USE_RW),
3404 TYPE_MODE (integer_type_node));
3405 in_check_memory_usage = 0;
3406 }
3407
3408 target = gen_rtx_MEM (BLKmode, temp);
3409
3410 if (type != 0)
3411 {
3412 set_mem_attributes (target, type, 1);
3413 /* Function incoming arguments may overlap with sibling call
3414 outgoing arguments and we cannot allow reordering of reads
3415 from function arguments with stores to outgoing arguments
3416 of sibling calls. */
3417 set_mem_alias_set (target, 0);
3418 }
3419 else
3420 set_mem_align (target, align);
3421
3422 /* TEMP is the address of the block. Copy the data there. */
3423 if (GET_CODE (size) == CONST_INT
3424 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3425 {
3426 move_by_pieces (target, xinner, INTVAL (size), align);
3427 goto ret;
3428 }
3429 else
3430 {
3431 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3432 enum machine_mode mode;
3433
3434 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3435 mode != VOIDmode;
3436 mode = GET_MODE_WIDER_MODE (mode))
3437 {
3438 enum insn_code code = movstr_optab[(int) mode];
3439 insn_operand_predicate_fn pred;
3440
3441 if (code != CODE_FOR_nothing
3442 && ((GET_CODE (size) == CONST_INT
3443 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3444 <= (GET_MODE_MASK (mode) >> 1)))
3445 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3446 && (!(pred = insn_data[(int) code].operand[0].predicate)
3447 || ((*pred) (target, BLKmode)))
3448 && (!(pred = insn_data[(int) code].operand[1].predicate)
3449 || ((*pred) (xinner, BLKmode)))
3450 && (!(pred = insn_data[(int) code].operand[3].predicate)
3451 || ((*pred) (opalign, VOIDmode))))
3452 {
3453 rtx op2 = convert_to_mode (mode, size, 1);
3454 rtx last = get_last_insn ();
3455 rtx pat;
3456
3457 pred = insn_data[(int) code].operand[2].predicate;
3458 if (pred != 0 && ! (*pred) (op2, mode))
3459 op2 = copy_to_mode_reg (mode, op2);
3460
3461 pat = GEN_FCN ((int) code) (target, xinner,
3462 op2, opalign);
3463 if (pat)
3464 {
3465 emit_insn (pat);
3466 goto ret;
3467 }
3468 else
3469 delete_insns_since (last);
3470 }
3471 }
3472 }
3473
3474 if (!ACCUMULATE_OUTGOING_ARGS)
3475 {
3476 /* If the source is referenced relative to the stack pointer,
3477 copy it to another register to stabilize it. We do not need
3478 to do this if we know that we won't be changing sp. */
3479
3480 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3481 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3482 temp = copy_to_reg (temp);
3483 }
3484
3485 /* Make inhibit_defer_pop nonzero around the library call
3486 to force it to pop the bcopy-arguments right away. */
3487 NO_DEFER_POP;
3488 #ifdef TARGET_MEM_FUNCTIONS
3489 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3490 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3491 convert_to_mode (TYPE_MODE (sizetype),
3492 size, TREE_UNSIGNED (sizetype)),
3493 TYPE_MODE (sizetype));
3494 #else
3495 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3496 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3497 convert_to_mode (TYPE_MODE (integer_type_node),
3498 size,
3499 TREE_UNSIGNED (integer_type_node)),
3500 TYPE_MODE (integer_type_node));
3501 #endif
3502 OK_DEFER_POP;
3503 }
3504 }
3505 else if (partial > 0)
3506 {
3507 /* Scalar partly in registers. */
3508
3509 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3510 int i;
3511 int not_stack;
3512 /* # words of start of argument
3513 that we must make space for but need not store. */
3514 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3515 int args_offset = INTVAL (args_so_far);
3516 int skip;
3517
3518 /* Push padding now if padding above and stack grows down,
3519 or if padding below and stack grows up.
3520 But if space already allocated, this has already been done. */
3521 if (extra && args_addr == 0
3522 && where_pad != none && where_pad != stack_direction)
3523 anti_adjust_stack (GEN_INT (extra));
3524
3525 /* If we make space by pushing it, we might as well push
3526 the real data. Otherwise, we can leave OFFSET nonzero
3527 and leave the space uninitialized. */
3528 if (args_addr == 0)
3529 offset = 0;
3530
3531 /* Now NOT_STACK gets the number of words that we don't need to
3532 allocate on the stack. */
3533 not_stack = partial - offset;
3534
3535 /* If the partial register-part of the arg counts in its stack size,
3536 skip the part of stack space corresponding to the registers.
3537 Otherwise, start copying to the beginning of the stack space,
3538 by setting SKIP to 0. */
3539 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3540
3541 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3542 x = validize_mem (force_const_mem (mode, x));
3543
3544 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3545 SUBREGs of such registers are not allowed. */
3546 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3547 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3548 x = copy_to_reg (x);
3549
3550 /* Loop over all the words allocated on the stack for this arg. */
3551 /* We can do it by words, because any scalar bigger than a word
3552 has a size that is a multiple of a word. */
3553 #ifndef PUSH_ARGS_REVERSED
3554 for (i = not_stack; i < size; i++)
3555 #else
3556 for (i = size - 1; i >= not_stack; i--)
3557 #endif
3558 if (i >= not_stack + offset)
3559 emit_push_insn (operand_subword_force (x, i, mode),
3560 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3561 0, args_addr,
3562 GEN_INT (args_offset + ((i - not_stack + skip)
3563 * UNITS_PER_WORD)),
3564 reg_parm_stack_space, alignment_pad);
3565 }
3566 else
3567 {
3568 rtx addr;
3569 rtx target = NULL_RTX;
3570 rtx dest;
3571
3572 /* Push padding now if padding above and stack grows down,
3573 or if padding below and stack grows up.
3574 But if space already allocated, this has already been done. */
3575 if (extra && args_addr == 0
3576 && where_pad != none && where_pad != stack_direction)
3577 anti_adjust_stack (GEN_INT (extra));
3578
3579 #ifdef PUSH_ROUNDING
3580 if (args_addr == 0 && PUSH_ARGS)
3581 emit_single_push_insn (mode, x, type);
3582 else
3583 #endif
3584 {
3585 if (GET_CODE (args_so_far) == CONST_INT)
3586 addr
3587 = memory_address (mode,
3588 plus_constant (args_addr,
3589 INTVAL (args_so_far)));
3590 else
3591 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3592 args_so_far));
3593 target = addr;
3594 dest = gen_rtx_MEM (mode, addr);
3595 if (type != 0)
3596 {
3597 set_mem_attributes (dest, type, 1);
3598 /* Function incoming arguments may overlap with sibling call
3599 outgoing arguments and we cannot allow reordering of reads
3600 from function arguments with stores to outgoing arguments
3601 of sibling calls. */
3602 set_mem_alias_set (dest, 0);
3603 }
3604
3605 emit_move_insn (dest, x);
3606 }
3607
3608 if (current_function_check_memory_usage && ! in_check_memory_usage)
3609 {
3610 in_check_memory_usage = 1;
3611 if (target == 0)
3612 target = get_push_address (GET_MODE_SIZE (mode));
3613
3614 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3615 emit_library_call (chkr_copy_bitmap_libfunc,
3616 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3617 Pmode, XEXP (x, 0), Pmode,
3618 GEN_INT (GET_MODE_SIZE (mode)),
3619 TYPE_MODE (sizetype));
3620 else
3621 emit_library_call (chkr_set_right_libfunc,
3622 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3623 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3624 TYPE_MODE (sizetype),
3625 GEN_INT (MEMORY_USE_RW),
3626 TYPE_MODE (integer_type_node));
3627 in_check_memory_usage = 0;
3628 }
3629 }
3630
3631 ret:
3632 /* If part should go in registers, copy that part
3633 into the appropriate registers. Do this now, at the end,
3634 since mem-to-mem copies above may do function calls. */
3635 if (partial > 0 && reg != 0)
3636 {
3637 /* Handle calls that pass values in multiple non-contiguous locations.
3638 The Irix 6 ABI has examples of this. */
3639 if (GET_CODE (reg) == PARALLEL)
3640 emit_group_load (reg, x, -1); /* ??? size? */
3641 else
3642 move_block_to_reg (REGNO (reg), x, partial, mode);
3643 }
3644
3645 if (extra && args_addr == 0 && where_pad == stack_direction)
3646 anti_adjust_stack (GEN_INT (extra));
3647
3648 if (alignment_pad && args_addr == 0)
3649 anti_adjust_stack (alignment_pad);
3650 }
3651 \f
3652 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3653 operations. */
3654
3655 static rtx
3656 get_subtarget (x)
3657 rtx x;
3658 {
3659 return ((x == 0
3660 /* Only registers can be subtargets. */
3661 || GET_CODE (x) != REG
3662 /* If the register is readonly, it can't be set more than once. */
3663 || RTX_UNCHANGING_P (x)
3664 /* Don't use hard regs to avoid extending their life. */
3665 || REGNO (x) < FIRST_PSEUDO_REGISTER
3666 /* Avoid subtargets inside loops,
3667 since they hide some invariant expressions. */
3668 || preserve_subexpressions_p ())
3669 ? 0 : x);
3670 }
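
/* Editor's note (not part of the original source): get_subtarget returns
   X itself only when every disqualifying test above is false.  For
   example, a pseudo register created with

       rtx tmp = gen_reg_rtx (SImode);

   outside of a loop comes back unchanged, while a hard register, a
   read-only register, a MEM, or a null X yields 0, meaning the caller
   has no subtarget hint to pass down.  */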
3671
3672 /* Expand an assignment that stores the value of FROM into TO.
3673 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3674 (This may contain a QUEUED rtx;
3675 if the value is constant, this rtx is a constant.)
3676 Otherwise, the returned value is NULL_RTX.
3677
3678 SUGGEST_REG is no longer actually used.
3679 It used to mean, copy the value through a register
3680 and return that register, if that is possible.
3681 We now use WANT_VALUE to decide whether to do this. */
3682
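/* Illustrative sketch (editor's addition, not in the original source):
   for a front-end statement such as "x = y", with TO and FROM the
   corresponding trees, a caller that does not need the stored value
   would write

       expand_assignment (to, from, 0, 0);

   whereas expanding "z = (x = y)" would pass want_value == 1 and use
   the returned rtx as the value of the inner assignment.  The fourth
   argument (suggest_reg) is ignored, as noted above.  */
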
3683 rtx
3684 expand_assignment (to, from, want_value, suggest_reg)
3685 tree to, from;
3686 int want_value;
3687 int suggest_reg ATTRIBUTE_UNUSED;
3688 {
3689 rtx to_rtx = 0;
3690 rtx result;
3691
3692 /* Don't crash if the lhs of the assignment was erroneous. */
3693
3694 if (TREE_CODE (to) == ERROR_MARK)
3695 {
3696 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3697 return want_value ? result : NULL_RTX;
3698 }
3699
3700 /* Assignment of a structure component needs special treatment
3701 if the structure component's rtx is not simply a MEM.
3702 Assignment of an array element at a constant index, and assignment of
3703 an array element in an unaligned packed structure field, have the same
3704 problem. */
3705
3706 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3707 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3708 {
3709 enum machine_mode mode1;
3710 HOST_WIDE_INT bitsize, bitpos;
3711 rtx orig_to_rtx;
3712 tree offset;
3713 int unsignedp;
3714 int volatilep = 0;
3715 tree tem;
3716
3717 push_temp_slots ();
3718 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3719 &unsignedp, &volatilep);
3720
3721 /* If we are going to use store_bit_field and extract_bit_field,
3722 make sure to_rtx will be safe for multiple use. */
3723
3724 if (mode1 == VOIDmode && want_value)
3725 tem = stabilize_reference (tem);
3726
3727 orig_to_rtx = to_rtx
3728 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3729 if (offset != 0)
3730 {
3731 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3732
3733 if (GET_CODE (to_rtx) != MEM)
3734 abort ();
3735
3736 if (GET_MODE (offset_rtx) != ptr_mode)
3737 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3738
3739 #ifdef POINTERS_EXTEND_UNSIGNED
3740 if (GET_MODE (offset_rtx) != Pmode)
3741 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3742 #endif
3743
3744 /* A constant address in TO_RTX can have VOIDmode; we must not try
3745 to call force_reg for that case. Avoid that case. */
3746 if (GET_CODE (to_rtx) == MEM
3747 && GET_MODE (to_rtx) == BLKmode
3748 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3749 && bitsize > 0
3750 && (bitpos % bitsize) == 0
3751 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3752 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3753 {
3754 rtx temp
3755 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3756
3757 if (GET_CODE (XEXP (temp, 0)) == REG)
3758 to_rtx = temp;
3759 else
3760 to_rtx = (replace_equiv_address
3761 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3762 XEXP (temp, 0))));
3763 bitpos = 0;
3764 }
3765
3766 to_rtx = offset_address (to_rtx, offset_rtx,
3767 highest_pow2_factor (offset));
3768 }
3769
3770 if (GET_CODE (to_rtx) == MEM)
3771 {
3772 tree old_expr = MEM_EXPR (to_rtx);
3773
3774 /* If the field is at offset zero, we could have been given the
3775 DECL_RTX of the parent struct. Don't munge it. */
3776 to_rtx = shallow_copy_rtx (to_rtx);
3777
3778 set_mem_attributes (to_rtx, to, 0);
3779
3780 /* If we changed MEM_EXPR, that means we're now referencing
3781 the COMPONENT_REF, which means that MEM_OFFSET must be
3782 relative to that field. But we've not yet reflected BITPOS
3783 in TO_RTX. This will be done in store_field. Adjust for
3784 that by biasing MEM_OFFSET by -bitpos. */
3785 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3786 && (bitpos / BITS_PER_UNIT) != 0)
3787 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3788 - (bitpos / BITS_PER_UNIT)));
3789 }
3790
3791 /* Deal with volatile and readonly fields. The former is only done
3792 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3793 if (volatilep && GET_CODE (to_rtx) == MEM)
3794 {
3795 if (to_rtx == orig_to_rtx)
3796 to_rtx = copy_rtx (to_rtx);
3797 MEM_VOLATILE_P (to_rtx) = 1;
3798 }
3799
3800 if (TREE_CODE (to) == COMPONENT_REF
3801 && TREE_READONLY (TREE_OPERAND (to, 1)))
3802 {
3803 if (to_rtx == orig_to_rtx)
3804 to_rtx = copy_rtx (to_rtx);
3805 RTX_UNCHANGING_P (to_rtx) = 1;
3806 }
3807
3808 if (! can_address_p (to))
3809 {
3810 if (to_rtx == orig_to_rtx)
3811 to_rtx = copy_rtx (to_rtx);
3812 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3813 }
3814
3815 /* Check the access. */
3816 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3817 {
3818 rtx to_addr;
3819 int size;
3820 int best_mode_size;
3821 enum machine_mode best_mode;
3822
3823 best_mode = get_best_mode (bitsize, bitpos,
3824 TYPE_ALIGN (TREE_TYPE (tem)),
3825 mode1, volatilep);
3826 if (best_mode == VOIDmode)
3827 best_mode = QImode;
3828
3829 best_mode_size = GET_MODE_BITSIZE (best_mode);
3830 to_addr = plus_constant (XEXP (to_rtx, 0), bitpos / BITS_PER_UNIT);
3831 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3832 size *= GET_MODE_SIZE (best_mode);
3833
3834 /* Check the access right of the pointer. */
3835 in_check_memory_usage = 1;
3836 if (size)
3837 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3838 VOIDmode, 3, to_addr, Pmode,
3839 GEN_INT (size), TYPE_MODE (sizetype),
3840 GEN_INT (MEMORY_USE_WO),
3841 TYPE_MODE (integer_type_node));
3842 in_check_memory_usage = 0;
3843 }
3844
3845 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3846 (want_value
3847 /* Spurious cast for HPUX compiler. */
3848 ? ((enum machine_mode)
3849 TYPE_MODE (TREE_TYPE (to)))
3850 : VOIDmode),
3851 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3852
3853 preserve_temp_slots (result);
3854 free_temp_slots ();
3855 pop_temp_slots ();
3856
3857 /* If the value is meaningful, convert RESULT to the proper mode.
3858 Otherwise, return nothing. */
3859 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3860 TYPE_MODE (TREE_TYPE (from)),
3861 result,
3862 TREE_UNSIGNED (TREE_TYPE (to)))
3863 : NULL_RTX);
3864 }
3865
3866 /* If the rhs is a function call and its value is not an aggregate,
3867 call the function before we start to compute the lhs.
3868 This is needed for correct code for cases such as
3869 val = setjmp (buf) on machines where reference to val
3870 requires loading up part of an address in a separate insn.
3871
3872 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3873 since it might be a promoted variable where the zero- or sign- extension
3874 needs to be done. Handling this in the normal way is safe because no
3875 computation is done before the call. */
3876 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3877 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3878 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3879 && GET_CODE (DECL_RTL (to)) == REG))
3880 {
3881 rtx value;
3882
3883 push_temp_slots ();
3884 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3885 if (to_rtx == 0)
3886 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3887
3888 /* Handle calls that return values in multiple non-contiguous locations.
3889 The Irix 6 ABI has examples of this. */
3890 if (GET_CODE (to_rtx) == PARALLEL)
3891 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3892 else if (GET_MODE (to_rtx) == BLKmode)
3893 emit_block_move (to_rtx, value, expr_size (from));
3894 else
3895 {
3896 #ifdef POINTERS_EXTEND_UNSIGNED
3897 if (POINTER_TYPE_P (TREE_TYPE (to))
3898 && GET_MODE (to_rtx) != GET_MODE (value))
3899 value = convert_memory_address (GET_MODE (to_rtx), value);
3900 #endif
3901 emit_move_insn (to_rtx, value);
3902 }
3903 preserve_temp_slots (to_rtx);
3904 free_temp_slots ();
3905 pop_temp_slots ();
3906 return want_value ? to_rtx : NULL_RTX;
3907 }
3908
3909 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3910 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3911
3912 if (to_rtx == 0)
3913 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3914
3915 /* Don't move directly into a return register. */
3916 if (TREE_CODE (to) == RESULT_DECL
3917 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3918 {
3919 rtx temp;
3920
3921 push_temp_slots ();
3922 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3923
3924 if (GET_CODE (to_rtx) == PARALLEL)
3925 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3926 else
3927 emit_move_insn (to_rtx, temp);
3928
3929 preserve_temp_slots (to_rtx);
3930 free_temp_slots ();
3931 pop_temp_slots ();
3932 return want_value ? to_rtx : NULL_RTX;
3933 }
3934
3935 /* In case we are returning the contents of an object which overlaps
3936 the place the value is being stored, use a safe function when copying
3937 a value through a pointer into a structure value return block. */
3938 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3939 && current_function_returns_struct
3940 && !current_function_returns_pcc_struct)
3941 {
3942 rtx from_rtx, size;
3943
3944 push_temp_slots ();
3945 size = expr_size (from);
3946 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3947 EXPAND_MEMORY_USE_DONT);
3948
3949 /* Copy the rights of the bitmap. */
3950 if (current_function_check_memory_usage)
3951 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3952 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3953 XEXP (from_rtx, 0), Pmode,
3954 convert_to_mode (TYPE_MODE (sizetype),
3955 size, TREE_UNSIGNED (sizetype)),
3956 TYPE_MODE (sizetype));
3957
3958 #ifdef TARGET_MEM_FUNCTIONS
3959 emit_library_call (memmove_libfunc, LCT_NORMAL,
3960 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3961 XEXP (from_rtx, 0), Pmode,
3962 convert_to_mode (TYPE_MODE (sizetype),
3963 size, TREE_UNSIGNED (sizetype)),
3964 TYPE_MODE (sizetype));
3965 #else
3966 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3967 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3968 XEXP (to_rtx, 0), Pmode,
3969 convert_to_mode (TYPE_MODE (integer_type_node),
3970 size, TREE_UNSIGNED (integer_type_node)),
3971 TYPE_MODE (integer_type_node));
3972 #endif
3973
3974 preserve_temp_slots (to_rtx);
3975 free_temp_slots ();
3976 pop_temp_slots ();
3977 return want_value ? to_rtx : NULL_RTX;
3978 }
3979
3980 /* Compute FROM and store the value in the rtx we got. */
3981
3982 push_temp_slots ();
3983 result = store_expr (from, to_rtx, want_value);
3984 preserve_temp_slots (result);
3985 free_temp_slots ();
3986 pop_temp_slots ();
3987 return want_value ? result : NULL_RTX;
3988 }
3989
3990 /* Generate code for computing expression EXP,
3991 and storing the value into TARGET.
3992 TARGET may contain a QUEUED rtx.
3993
3994 If WANT_VALUE is nonzero, return a copy of the value
3995 not in TARGET, so that we can be sure to use the proper
3996 value in a containing expression even if TARGET has something
3997 else stored in it. If possible, we copy the value through a pseudo
3998 and return that pseudo. Or, if the value is constant, we try to
3999 return the constant. In some cases, we return a pseudo
4000 copied *from* TARGET.
4001
4002 If the mode is BLKmode then we may return TARGET itself.
4003 It turns out that in BLKmode it doesn't cause a problem,
4004 because C has no operators that could combine two different
4005 assignments into the same BLKmode object with different values
4006 with no sequence point. Will other languages need this to
4007 be more thorough?
4008
4009 If WANT_VALUE is 0, we return NULL, to make sure
4010 to catch quickly any cases where the caller uses the value
4011 and fails to set WANT_VALUE. */
4012
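/* Illustrative sketch (editor's addition, not in the original source):
   expand_assignment above ends with exactly this pattern, which shows
   the intended use of WANT_VALUE:

       result = store_expr (from, to_rtx, want_value);
       ...
       return want_value ? result : NULL_RTX;

   With want_value == 0 only the store is emitted and NULL_RTX comes
   back; with want_value == 1 an rtx (often a pseudo or a constant) is
   returned that remains valid even if TARGET is later overwritten.  */
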
4013 rtx
4014 store_expr (exp, target, want_value)
4015 tree exp;
4016 rtx target;
4017 int want_value;
4018 {
4019 rtx temp;
4020 int dont_return_target = 0;
4021 int dont_store_target = 0;
4022
4023 if (TREE_CODE (exp) == COMPOUND_EXPR)
4024 {
4025 /* Perform first part of compound expression, then assign from second
4026 part. */
4027 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4028 emit_queue ();
4029 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4030 }
4031 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4032 {
4033 /* For conditional expression, get safe form of the target. Then
4034 test the condition, doing the appropriate assignment on either
4035 side. This avoids the creation of unnecessary temporaries.
4036 For non-BLKmode, it is more efficient not to do this. */
4037
4038 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4039
4040 emit_queue ();
4041 target = protect_from_queue (target, 1);
4042
4043 do_pending_stack_adjust ();
4044 NO_DEFER_POP;
4045 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4046 start_cleanup_deferral ();
4047 store_expr (TREE_OPERAND (exp, 1), target, 0);
4048 end_cleanup_deferral ();
4049 emit_queue ();
4050 emit_jump_insn (gen_jump (lab2));
4051 emit_barrier ();
4052 emit_label (lab1);
4053 start_cleanup_deferral ();
4054 store_expr (TREE_OPERAND (exp, 2), target, 0);
4055 end_cleanup_deferral ();
4056 emit_queue ();
4057 emit_label (lab2);
4058 OK_DEFER_POP;
4059
4060 return want_value ? target : NULL_RTX;
4061 }
4062 else if (queued_subexp_p (target))
4063 /* If target contains a postincrement, let's not risk
4064 using it as the place to generate the rhs. */
4065 {
4066 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4067 {
4068 /* Expand EXP into a new pseudo. */
4069 temp = gen_reg_rtx (GET_MODE (target));
4070 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4071 }
4072 else
4073 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4074
4075 /* If target is volatile, ANSI requires accessing the value
4076 *from* the target, if it is accessed. So make that happen.
4077 In no case return the target itself. */
4078 if (! MEM_VOLATILE_P (target) && want_value)
4079 dont_return_target = 1;
4080 }
4081 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4082 && GET_MODE (target) != BLKmode)
4083 /* If target is in memory and caller wants value in a register instead,
4084 arrange that. Pass TARGET as target for expand_expr so that,
4085 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4086 We know expand_expr will not use the target in that case.
4087 Don't do this if TARGET is volatile because we are supposed
4088 to write it and then read it. */
4089 {
4090 temp = expand_expr (exp, target, GET_MODE (target), 0);
4091 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4092 {
4093 /* If TEMP is already in the desired TARGET, only copy it from
4094 memory and don't store it there again. */
4095 if (temp == target
4096 || (rtx_equal_p (temp, target)
4097 && ! side_effects_p (temp) && ! side_effects_p (target)))
4098 dont_store_target = 1;
4099 temp = copy_to_reg (temp);
4100 }
4101 dont_return_target = 1;
4102 }
4103 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4104 /* If this is a scalar in a register that is stored in a wider mode
4105 than the declared mode, compute the result into its declared mode
4106 and then convert to the wider mode. Our value is the computed
4107 expression. */
4108 {
4109 /* If we don't want a value, we can do the conversion inside EXP,
4110 which will often result in some optimizations. Do the conversion
4111 in two steps: first change the signedness, if needed, then
4112 the extend. But don't do this if the type of EXP is a subtype
4113 of something else since then the conversion might involve
4114 more than just converting modes. */
4115 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4116 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4117 {
4118 if (TREE_UNSIGNED (TREE_TYPE (exp))
4119 != SUBREG_PROMOTED_UNSIGNED_P (target))
4120 exp
4121 = convert
4122 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4123 TREE_TYPE (exp)),
4124 exp);
4125
4126 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4127 SUBREG_PROMOTED_UNSIGNED_P (target)),
4128 exp);
4129 }
4130
4131 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4132
4133 /* If TEMP is a volatile MEM and we want a result value, make
4134 the access now so it gets done only once. Likewise if
4135 it contains TARGET. */
4136 if (GET_CODE (temp) == MEM && want_value
4137 && (MEM_VOLATILE_P (temp)
4138 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4139 temp = copy_to_reg (temp);
4140
4141 /* If TEMP is a VOIDmode constant, use convert_modes to make
4142 sure that we properly convert it. */
4143 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4144 {
4145 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4146 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4147 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4148 GET_MODE (target), temp,
4149 SUBREG_PROMOTED_UNSIGNED_P (target));
4150 }
4151
4152 convert_move (SUBREG_REG (target), temp,
4153 SUBREG_PROMOTED_UNSIGNED_P (target));
4154
4155 /* If we promoted a constant, change the mode back down to match
4156 target. Otherwise, the caller might get confused by a result whose
4157 mode is larger than expected. */
4158
4159 if (want_value && GET_MODE (temp) != GET_MODE (target)
4160 && GET_MODE (temp) != VOIDmode)
4161 {
4162 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4163 SUBREG_PROMOTED_VAR_P (temp) = 1;
4164 SUBREG_PROMOTED_UNSIGNED_P (temp)
4165 = SUBREG_PROMOTED_UNSIGNED_P (target);
4166 }
4167
4168 return want_value ? temp : NULL_RTX;
4169 }
4170 else
4171 {
4172 temp = expand_expr (exp, target, GET_MODE (target), 0);
4173 /* Return TARGET if it's a specified hardware register.
4174 If TARGET is a volatile mem ref, either return TARGET
4175 or return a reg copied *from* TARGET; ANSI requires this.
4176
4177 Otherwise, if TEMP is not TARGET, return TEMP
4178 if it is constant (for efficiency),
4179 or if we really want the correct value. */
4180 if (!(target && GET_CODE (target) == REG
4181 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4182 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4183 && ! rtx_equal_p (temp, target)
4184 && (CONSTANT_P (temp) || want_value))
4185 dont_return_target = 1;
4186 }
4187
4188 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4189 the same as that of TARGET, adjust the constant. This is needed, for
4190 example, in case it is a CONST_DOUBLE and we want only a word-sized
4191 value. */
4192 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4193 && TREE_CODE (exp) != ERROR_MARK
4194 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4195 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4196 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4197
4198 if (current_function_check_memory_usage
4199 && GET_CODE (target) == MEM
4200 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4201 {
4202 in_check_memory_usage = 1;
4203 if (GET_CODE (temp) == MEM)
4204 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4205 VOIDmode, 3, XEXP (target, 0), Pmode,
4206 XEXP (temp, 0), Pmode,
4207 expr_size (exp), TYPE_MODE (sizetype));
4208 else
4209 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4210 VOIDmode, 3, XEXP (target, 0), Pmode,
4211 expr_size (exp), TYPE_MODE (sizetype),
4212 GEN_INT (MEMORY_USE_WO),
4213 TYPE_MODE (integer_type_node));
4214 in_check_memory_usage = 0;
4215 }
4216
4217 /* If value was not generated in the target, store it there.
4218 Convert the value to TARGET's type first if necessary. */
4219 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4220 one or both of them are volatile memory refs, we have to distinguish
4221 two cases:
4222 - expand_expr has used TARGET. In this case, we must not generate
4223 another copy. This can be detected by TARGET being equal according
4224 to == .
4225 - expand_expr has not used TARGET - that means that the source just
4226 happens to have the same RTX form. Since temp will have been created
4227 by expand_expr, it will compare unequal according to == .
4228 We must generate a copy in this case, to reach the correct number
4229 of volatile memory references. */
4230
4231 if ((! rtx_equal_p (temp, target)
4232 || (temp != target && (side_effects_p (temp)
4233 || side_effects_p (target))))
4234 && TREE_CODE (exp) != ERROR_MARK
4235 && ! dont_store_target)
4236 {
4237 target = protect_from_queue (target, 1);
4238 if (GET_MODE (temp) != GET_MODE (target)
4239 && GET_MODE (temp) != VOIDmode)
4240 {
4241 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4242 if (dont_return_target)
4243 {
4244 /* In this case, we will return TEMP,
4245 so make sure it has the proper mode.
4246 But don't forget to store the value into TARGET. */
4247 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4248 emit_move_insn (target, temp);
4249 }
4250 else
4251 convert_move (target, temp, unsignedp);
4252 }
4253
4254 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4255 {
4256 /* Handle copying a string constant into an array.
4257 The string constant may be shorter than the array.
4258 So copy just the string's actual length, and clear the rest. */
4259 rtx size;
4260 rtx addr;
4261
4262 /* Get the size of the data type of the string,
4263 which is actually the size of the target. */
4264 size = expr_size (exp);
4265 if (GET_CODE (size) == CONST_INT
4266 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4267 emit_block_move (target, temp, size);
4268 else
4269 {
4270 /* Compute the size of the data to copy from the string. */
4271 tree copy_size
4272 = size_binop (MIN_EXPR,
4273 make_tree (sizetype, size),
4274 size_int (TREE_STRING_LENGTH (exp)));
4275 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4276 VOIDmode, 0);
4277 rtx label = 0;
4278
4279 /* Copy that much. */
4280 emit_block_move (target, temp, copy_size_rtx);
4281
4282 /* Figure out how much is left in TARGET that we have to clear.
4283 Do all calculations in ptr_mode. */
4284
4285 addr = XEXP (target, 0);
4286 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4287
4288 if (GET_CODE (copy_size_rtx) == CONST_INT)
4289 {
4290 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4291 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4292 }
4293 else
4294 {
4295 addr = force_reg (ptr_mode, addr);
4296 addr = expand_binop (ptr_mode, add_optab, addr,
4297 copy_size_rtx, NULL_RTX, 0,
4298 OPTAB_LIB_WIDEN);
4299
4300 size = expand_binop (ptr_mode, sub_optab, size,
4301 copy_size_rtx, NULL_RTX, 0,
4302 OPTAB_LIB_WIDEN);
4303
4304 label = gen_label_rtx ();
4305 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4306 GET_MODE (size), 0, label);
4307 }
4308
4309 if (size != const0_rtx)
4310 {
4311 rtx dest = gen_rtx_MEM (BLKmode, addr);
4312
4313 MEM_COPY_ATTRIBUTES (dest, target);
4314
4315 /* The residual likely does not have the same alignment
4316 as the original target. While we could compute the
4317 alignment of the residual, it hardly seems worth
4318 the effort. */
4319 set_mem_align (dest, BITS_PER_UNIT);
4320
4321 /* Be sure we can write on ADDR. */
4322 in_check_memory_usage = 1;
4323 if (current_function_check_memory_usage)
4324 emit_library_call (chkr_check_addr_libfunc,
4325 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4326 addr, Pmode,
4327 size, TYPE_MODE (sizetype),
4328 GEN_INT (MEMORY_USE_WO),
4329 TYPE_MODE (integer_type_node));
4330 in_check_memory_usage = 0;
4331 clear_storage (dest, size);
4332 }
4333
4334 if (label)
4335 emit_label (label);
4336 }
4337 }
4338 /* Handle calls that return values in multiple non-contiguous locations.
4339 The Irix 6 ABI has examples of this. */
4340 else if (GET_CODE (target) == PARALLEL)
4341 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4342 else if (GET_MODE (temp) == BLKmode)
4343 emit_block_move (target, temp, expr_size (exp));
4344 else
4345 emit_move_insn (target, temp);
4346 }
4347
4348 /* If we don't want a value, return NULL_RTX. */
4349 if (! want_value)
4350 return NULL_RTX;
4351
4352 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4353 ??? The latter test doesn't seem to make sense. */
4354 else if (dont_return_target && GET_CODE (temp) != MEM)
4355 return temp;
4356
4357 /* Return TARGET itself if it is a hard register. */
4358 else if (want_value && GET_MODE (target) != BLKmode
4359 && ! (GET_CODE (target) == REG
4360 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4361 return copy_to_reg (target);
4362
4363 else
4364 return target;
4365 }
4366 \f
4367 /* Return 1 if EXP just contains zeros. */
4368
4369 static int
4370 is_zeros_p (exp)
4371 tree exp;
4372 {
4373 tree elt;
4374
4375 switch (TREE_CODE (exp))
4376 {
4377 case CONVERT_EXPR:
4378 case NOP_EXPR:
4379 case NON_LVALUE_EXPR:
4380 case VIEW_CONVERT_EXPR:
4381 return is_zeros_p (TREE_OPERAND (exp, 0));
4382
4383 case INTEGER_CST:
4384 return integer_zerop (exp);
4385
4386 case COMPLEX_CST:
4387 return
4388 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4389
4390 case REAL_CST:
4391 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4392
4393 case CONSTRUCTOR:
4394 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4395 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4396 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4397 if (! is_zeros_p (TREE_VALUE (elt)))
4398 return 0;
4399
4400 return 1;
4401
4402 default:
4403 return 0;
4404 }
4405 }
4406
4407 /* Return 1 if EXP contains mostly (3/4) zeros. */
4408
4409 static int
4410 mostly_zeros_p (exp)
4411 tree exp;
4412 {
4413 if (TREE_CODE (exp) == CONSTRUCTOR)
4414 {
4415 int elts = 0, zeros = 0;
4416 tree elt = CONSTRUCTOR_ELTS (exp);
4417 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4418 {
4419 /* If there are no ranges of true bits, it is all zero. */
4420 return elt == NULL_TREE;
4421 }
4422 for (; elt; elt = TREE_CHAIN (elt))
4423 {
4424 /* We do not handle the case where the index is a RANGE_EXPR,
4425 so the statistic will be somewhat inaccurate.
4426 We do make a more accurate count in store_constructor itself,
4427 so since this function is only used for nested array elements,
4428 this should be close enough. */
4429 if (mostly_zeros_p (TREE_VALUE (elt)))
4430 zeros++;
4431 elts++;
4432 }
4433
4434 return 4 * zeros >= 3 * elts;
4435 }
4436
4437 return is_zeros_p (exp);
4438 }
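
/* Worked example (editor's addition, not in the original source) of the
   3/4 threshold above: a CONSTRUCTOR with elts == 8 of which zeros == 6
   satisfies 4 * 6 >= 3 * 8 (24 >= 24) and so counts as mostly zero,
   letting store_constructor clear the whole object first; with only 5
   zero elements, 20 < 24 and it does not.  */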
4439 \f
4440 /* Helper function for store_constructor.
4441 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4442 TYPE is the type of the CONSTRUCTOR, not the element type.
4443 CLEARED is as for store_constructor.
4444 ALIAS_SET is the alias set to use for any stores.
4445
4446 This provides a recursive shortcut back to store_constructor when it isn't
4447 necessary to go through store_field. This is so that we can pass through
4448 the cleared field to let store_constructor know that we may not have to
4449 clear a substructure if the outer structure has already been cleared. */
4450
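/* Editor's sketch (not part of the original source): the shortcut below
   applies when EXP is itself a CONSTRUCTOR stored at a byte-aligned
   position of a MEM (or at position 0 of a register).  For instance,
   when initializing

       struct outer { struct inner { int a, b; } i; int j; };

   with { { 0, 0 }, 5 }, the nested { 0, 0 } re-enters store_constructor
   with CLEARED still set, so no redundant clearing is emitted for the
   inner struct; any other case falls through to store_field.  */
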
4451 static void
4452 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4453 alias_set)
4454 rtx target;
4455 unsigned HOST_WIDE_INT bitsize;
4456 HOST_WIDE_INT bitpos;
4457 enum machine_mode mode;
4458 tree exp, type;
4459 int cleared;
4460 int alias_set;
4461 {
4462 if (TREE_CODE (exp) == CONSTRUCTOR
4463 && bitpos % BITS_PER_UNIT == 0
4464 /* If we have a non-zero bitpos for a register target, then we just
4465 let store_field do the bitfield handling. This is unlikely to
4466 generate unnecessary clear instructions anyway. */
4467 && (bitpos == 0 || GET_CODE (target) == MEM))
4468 {
4469 if (GET_CODE (target) == MEM)
4470 target
4471 = adjust_address (target,
4472 GET_MODE (target) == BLKmode
4473 || 0 != (bitpos
4474 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4475 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4476
4477
4478 /* Update the alias set, if required. */
4479 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4480 && MEM_ALIAS_SET (target) != 0)
4481 {
4482 target = copy_rtx (target);
4483 set_mem_alias_set (target, alias_set);
4484 }
4485
4486 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4487 }
4488 else
4489 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4490 alias_set);
4491 }
4492
4493 /* Store the value of constructor EXP into the rtx TARGET.
4494 TARGET is either a REG or a MEM; we know it cannot conflict, since
4495 safe_from_p has been called.
4496 CLEARED is true if TARGET is known to have been zero'd.
4497 SIZE is the number of bytes of TARGET we are allowed to modify: this
4498 may not be the same as the size of EXP if we are assigning to a field
4499 which has been packed to exclude padding bits. */
4500
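/* Editor's sketch (not part of the original source): for a RECORD_TYPE
   target the code below first decides whether to clear the whole object.
   Assuming a 16-byte struct with four int fields whose constructor
   supplies only one nonzero field, the constructor has fewer elements
   than the structure has fields, so a single clear_storage (target,
   GEN_INT (16)) is emitted and only the nonzero field is stored
   afterwards; the is_zeros_p test then skips fields whose initializer
   is zero.  */
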
4501 static void
4502 store_constructor (exp, target, cleared, size)
4503 tree exp;
4504 rtx target;
4505 int cleared;
4506 HOST_WIDE_INT size;
4507 {
4508 tree type = TREE_TYPE (exp);
4509 #ifdef WORD_REGISTER_OPERATIONS
4510 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4511 #endif
4512
4513 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4514 || TREE_CODE (type) == QUAL_UNION_TYPE)
4515 {
4516 tree elt;
4517
4518 /* We either clear the aggregate or indicate the value is dead. */
4519 if ((TREE_CODE (type) == UNION_TYPE
4520 || TREE_CODE (type) == QUAL_UNION_TYPE)
4521 && ! cleared
4522 && ! CONSTRUCTOR_ELTS (exp))
4523 /* If the constructor is empty, clear the union. */
4524 {
4525 clear_storage (target, expr_size (exp));
4526 cleared = 1;
4527 }
4528
4529 /* If we are building a static constructor into a register,
4530 set the initial value as zero so we can fold the value into
4531 a constant. But if more than one register is involved,
4532 this probably loses. */
4533 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4534 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4535 {
4536 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4537 cleared = 1;
4538 }
4539
4540 /* If the constructor has fewer fields than the structure
4541 or if we are initializing the structure to mostly zeros,
4542 clear the whole structure first. Don't do this if TARGET is a
4543 register whose mode size isn't equal to SIZE since clear_storage
4544 can't handle this case. */
4545 else if (! cleared && size > 0
4546 && ((list_length (CONSTRUCTOR_ELTS (exp))
4547 != fields_length (type))
4548 || mostly_zeros_p (exp))
4549 && (GET_CODE (target) != REG
4550 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4551 == size)))
4552 {
4553 clear_storage (target, GEN_INT (size));
4554 cleared = 1;
4555 }
4556
4557 if (! cleared)
4558 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4559
4560 /* Store each element of the constructor into
4561 the corresponding field of TARGET. */
4562
4563 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4564 {
4565 tree field = TREE_PURPOSE (elt);
4566 tree value = TREE_VALUE (elt);
4567 enum machine_mode mode;
4568 HOST_WIDE_INT bitsize;
4569 HOST_WIDE_INT bitpos = 0;
4570 int unsignedp;
4571 tree offset;
4572 rtx to_rtx = target;
4573
4574 /* Just ignore missing fields.
4575 We cleared the whole structure, above,
4576 if any fields are missing. */
4577 if (field == 0)
4578 continue;
4579
4580 if (cleared && is_zeros_p (value))
4581 continue;
4582
4583 if (host_integerp (DECL_SIZE (field), 1))
4584 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4585 else
4586 bitsize = -1;
4587
4588 unsignedp = TREE_UNSIGNED (field);
4589 mode = DECL_MODE (field);
4590 if (DECL_BIT_FIELD (field))
4591 mode = VOIDmode;
4592
4593 offset = DECL_FIELD_OFFSET (field);
4594 if (host_integerp (offset, 0)
4595 && host_integerp (bit_position (field), 0))
4596 {
4597 bitpos = int_bit_position (field);
4598 offset = 0;
4599 }
4600 else
4601 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4602
4603 if (offset)
4604 {
4605 rtx offset_rtx;
4606
4607 if (contains_placeholder_p (offset))
4608 offset = build (WITH_RECORD_EXPR, sizetype,
4609 offset, make_tree (TREE_TYPE (exp), target));
4610
4611 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4612 if (GET_CODE (to_rtx) != MEM)
4613 abort ();
4614
4615 if (GET_MODE (offset_rtx) != ptr_mode)
4616 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4617
4618 #ifdef POINTERS_EXTEND_UNSIGNED
4619 if (GET_MODE (offset_rtx) != Pmode)
4620 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4621 #endif
4622
4623 to_rtx = offset_address (to_rtx, offset_rtx,
4624 highest_pow2_factor (offset));
4625 }
4626
4627 if (TREE_READONLY (field))
4628 {
4629 if (GET_CODE (to_rtx) == MEM)
4630 to_rtx = copy_rtx (to_rtx);
4631
4632 RTX_UNCHANGING_P (to_rtx) = 1;
4633 }
4634
4635 #ifdef WORD_REGISTER_OPERATIONS
4636 /* If this initializes a field that is smaller than a word, at the
4637 start of a word, try to widen it to a full word.
4638 This special case allows us to output C++ member function
4639 initializations in a form that the optimizers can understand. */
4640 if (GET_CODE (target) == REG
4641 && bitsize < BITS_PER_WORD
4642 && bitpos % BITS_PER_WORD == 0
4643 && GET_MODE_CLASS (mode) == MODE_INT
4644 && TREE_CODE (value) == INTEGER_CST
4645 && exp_size >= 0
4646 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4647 {
4648 tree type = TREE_TYPE (value);
4649
4650 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4651 {
4652 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4653 value = convert (type, value);
4654 }
4655
4656 if (BYTES_BIG_ENDIAN)
4657 value
4658 = fold (build (LSHIFT_EXPR, type, value,
4659 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4660 bitsize = BITS_PER_WORD;
4661 mode = word_mode;
4662 }
4663 #endif
4664
4665 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4666 && DECL_NONADDRESSABLE_P (field))
4667 {
4668 to_rtx = copy_rtx (to_rtx);
4669 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4670 }
4671
4672 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4673 value, type, cleared,
4674 get_alias_set (TREE_TYPE (field)));
4675 }
4676 }
4677 else if (TREE_CODE (type) == ARRAY_TYPE)
4678 {
4679 tree elt;
4680 int i;
4681 int need_to_clear;
4682 tree domain = TYPE_DOMAIN (type);
4683 tree elttype = TREE_TYPE (type);
4684 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4685 && TYPE_MAX_VALUE (domain)
4686 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4687 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4688 HOST_WIDE_INT minelt = 0;
4689 HOST_WIDE_INT maxelt = 0;
4690
4691 /* If we have constant bounds for the range of the type, get them. */
4692 if (const_bounds_p)
4693 {
4694 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4695 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4696 }
4697
4698 /* If the constructor has fewer elements than the array,
4699 clear the whole array first. Similarly if this is
4700 a static constructor of a non-BLKmode object. */
4701 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4702 need_to_clear = 1;
4703 else
4704 {
4705 HOST_WIDE_INT count = 0, zero_count = 0;
4706 need_to_clear = ! const_bounds_p;
4707
4708 /* This loop is a more accurate version of the loop in
4709 mostly_zeros_p (it handles RANGE_EXPR in an index).
4710 It is also needed to check for missing elements. */
4711 for (elt = CONSTRUCTOR_ELTS (exp);
4712 elt != NULL_TREE && ! need_to_clear;
4713 elt = TREE_CHAIN (elt))
4714 {
4715 tree index = TREE_PURPOSE (elt);
4716 HOST_WIDE_INT this_node_count;
4717
4718 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4719 {
4720 tree lo_index = TREE_OPERAND (index, 0);
4721 tree hi_index = TREE_OPERAND (index, 1);
4722
4723 if (! host_integerp (lo_index, 1)
4724 || ! host_integerp (hi_index, 1))
4725 {
4726 need_to_clear = 1;
4727 break;
4728 }
4729
4730 this_node_count = (tree_low_cst (hi_index, 1)
4731 - tree_low_cst (lo_index, 1) + 1);
4732 }
4733 else
4734 this_node_count = 1;
4735
4736 count += this_node_count;
4737 if (mostly_zeros_p (TREE_VALUE (elt)))
4738 zero_count += this_node_count;
4739 }
4740
4741 /* Clear the entire array first if there are any missing elements,
4742 or if the incidence of zero elements is >= 75%. */
4743 if (! need_to_clear
4744 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4745 need_to_clear = 1;
4746 }
4747
4748 if (need_to_clear && size > 0)
4749 {
4750 if (! cleared)
4751 clear_storage (target, GEN_INT (size));
4752 cleared = 1;
4753 }
4754 else if (REG_P (target))
4755 /* Inform later passes that the old value is dead. */
4756 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4757
4758 /* Store each element of the constructor into
4759 the corresponding element of TARGET, determined
4760 by counting the elements. */
4761 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4762 elt;
4763 elt = TREE_CHAIN (elt), i++)
4764 {
4765 enum machine_mode mode;
4766 HOST_WIDE_INT bitsize;
4767 HOST_WIDE_INT bitpos;
4768 int unsignedp;
4769 tree value = TREE_VALUE (elt);
4770 tree index = TREE_PURPOSE (elt);
4771 rtx xtarget = target;
4772
4773 if (cleared && is_zeros_p (value))
4774 continue;
4775
4776 unsignedp = TREE_UNSIGNED (elttype);
4777 mode = TYPE_MODE (elttype);
4778 if (mode == BLKmode)
4779 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4780 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4781 : -1);
4782 else
4783 bitsize = GET_MODE_BITSIZE (mode);
4784
4785 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4786 {
4787 tree lo_index = TREE_OPERAND (index, 0);
4788 tree hi_index = TREE_OPERAND (index, 1);
4789 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4790 struct nesting *loop;
4791 HOST_WIDE_INT lo, hi, count;
4792 tree position;
4793
4794 /* If the range is constant and "small", unroll the loop. */
4795 if (const_bounds_p
4796 && host_integerp (lo_index, 0)
4797 && host_integerp (hi_index, 0)
4798 && (lo = tree_low_cst (lo_index, 0),
4799 hi = tree_low_cst (hi_index, 0),
4800 count = hi - lo + 1,
4801 (GET_CODE (target) != MEM
4802 || count <= 2
4803 || (host_integerp (TYPE_SIZE (elttype), 1)
4804 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4805 <= 40 * 8)))))
4806 {
4807 lo -= minelt; hi -= minelt;
4808 for (; lo <= hi; lo++)
4809 {
4810 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4811
4812 if (GET_CODE (target) == MEM
4813 && !MEM_KEEP_ALIAS_SET_P (target)
4814 && TYPE_NONALIASED_COMPONENT (type))
4815 {
4816 target = copy_rtx (target);
4817 MEM_KEEP_ALIAS_SET_P (target) = 1;
4818 }
4819
4820 store_constructor_field
4821 (target, bitsize, bitpos, mode, value, type, cleared,
4822 get_alias_set (elttype));
4823 }
4824 }
4825 else
4826 {
4827 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4828 loop_top = gen_label_rtx ();
4829 loop_end = gen_label_rtx ();
4830
4831 unsignedp = TREE_UNSIGNED (domain);
4832
4833 index = build_decl (VAR_DECL, NULL_TREE, domain);
4834
4835 index_r
4836 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4837 &unsignedp, 0));
4838 SET_DECL_RTL (index, index_r);
4839 if (TREE_CODE (value) == SAVE_EXPR
4840 && SAVE_EXPR_RTL (value) == 0)
4841 {
4842 /* Make sure value gets expanded once before the
4843 loop. */
4844 expand_expr (value, const0_rtx, VOIDmode, 0);
4845 emit_queue ();
4846 }
4847 store_expr (lo_index, index_r, 0);
4848 loop = expand_start_loop (0);
4849
4850 /* Assign value to element index. */
4851 position
4852 = convert (ssizetype,
4853 fold (build (MINUS_EXPR, TREE_TYPE (index),
4854 index, TYPE_MIN_VALUE (domain))));
4855 position = size_binop (MULT_EXPR, position,
4856 convert (ssizetype,
4857 TYPE_SIZE_UNIT (elttype)));
4858
4859 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4860 xtarget = offset_address (target, pos_rtx,
4861 highest_pow2_factor (position));
4862 xtarget = adjust_address (xtarget, mode, 0);
4863 if (TREE_CODE (value) == CONSTRUCTOR)
4864 store_constructor (value, xtarget, cleared,
4865 bitsize / BITS_PER_UNIT);
4866 else
4867 store_expr (value, xtarget, 0);
4868
4869 expand_exit_loop_if_false (loop,
4870 build (LT_EXPR, integer_type_node,
4871 index, hi_index));
4872
4873 expand_increment (build (PREINCREMENT_EXPR,
4874 TREE_TYPE (index),
4875 index, integer_one_node), 0, 0);
4876 expand_end_loop ();
4877 emit_label (loop_end);
4878 }
4879 }
4880 else if ((index != 0 && ! host_integerp (index, 0))
4881 || ! host_integerp (TYPE_SIZE (elttype), 1))
4882 {
4883 tree position;
4884
4885 if (index == 0)
4886 index = ssize_int (1);
4887
4888 if (minelt)
4889 index = convert (ssizetype,
4890 fold (build (MINUS_EXPR, TREE_TYPE (index),
4891 index, TYPE_MIN_VALUE (domain))));
4892
4893 position = size_binop (MULT_EXPR, index,
4894 convert (ssizetype,
4895 TYPE_SIZE_UNIT (elttype)));
4896 xtarget = offset_address (target,
4897 expand_expr (position, 0, VOIDmode, 0),
4898 highest_pow2_factor (position));
4899 xtarget = adjust_address (xtarget, mode, 0);
4900 store_expr (value, xtarget, 0);
4901 }
4902 else
4903 {
4904 if (index != 0)
4905 bitpos = ((tree_low_cst (index, 0) - minelt)
4906 * tree_low_cst (TYPE_SIZE (elttype), 1));
4907 else
4908 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4909
4910 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4911 && TYPE_NONALIASED_COMPONENT (type))
4912 {
4913 target = copy_rtx (target);
4914 MEM_KEEP_ALIAS_SET_P (target) = 1;
4915 }
4916
4917 store_constructor_field (target, bitsize, bitpos, mode, value,
4918 type, cleared, get_alias_set (elttype));
4919
4920 }
4921 }
4922 }
4923
4924 /* Set constructor assignments. */
4925 else if (TREE_CODE (type) == SET_TYPE)
4926 {
4927 tree elt = CONSTRUCTOR_ELTS (exp);
4928 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4929 tree domain = TYPE_DOMAIN (type);
4930 tree domain_min, domain_max, bitlength;
4931
4932 /* The default implementation strategy is to extract the constant
4933 parts of the constructor, use that to initialize the target,
4934 and then "or" in whatever non-constant ranges we need in addition.
4935
4936 If a large set is all zero or all ones, it is
4937 probably better to set it using memset (if available) or bzero.
4938 Also, if a large set has just a single range, it may also be
4939 better to first clear the set (using bzero/memset),
4940 and then set the bits we want. */
4941
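/* Editor's sketch (not part of the original source): for a small set
   such as a Pascal-style "set of 0..31" whose constant members are
   {3, 10, 11, 12}, nbits is 32 and the first branch below applies, so
   (assuming a 32-bit set word) the whole bit pattern is accumulated in
   WORD from the buffer filled by get_set_constructor_bits and stored
   with a single emit_move_insn; only non-constant ranges then need the
   memset or __setbits calls in the loop further down.  */
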
4942 /* Check for all zeros. */
4943 if (elt == NULL_TREE && size > 0)
4944 {
4945 if (!cleared)
4946 clear_storage (target, GEN_INT (size));
4947 return;
4948 }
4949
4950 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4951 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4952 bitlength = size_binop (PLUS_EXPR,
4953 size_diffop (domain_max, domain_min),
4954 ssize_int (1));
4955
4956 nbits = tree_low_cst (bitlength, 1);
4957
4958 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4959 are "complicated" (more than one range), initialize (the
4960 constant parts) by copying from a constant. */
4961 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4962 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4963 {
4964 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4965 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4966 char *bit_buffer = (char *) alloca (nbits);
4967 HOST_WIDE_INT word = 0;
4968 unsigned int bit_pos = 0;
4969 unsigned int ibit = 0;
4970 unsigned int offset = 0; /* In bytes from beginning of set. */
4971
4972 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4973 for (;;)
4974 {
4975 if (bit_buffer[ibit])
4976 {
4977 if (BYTES_BIG_ENDIAN)
4978 word |= (1 << (set_word_size - 1 - bit_pos));
4979 else
4980 word |= 1 << bit_pos;
4981 }
4982
4983 bit_pos++; ibit++;
4984 if (bit_pos >= set_word_size || ibit == nbits)
4985 {
4986 if (word != 0 || ! cleared)
4987 {
4988 rtx datum = GEN_INT (word);
4989 rtx to_rtx;
4990
4991 /* The assumption here is that it is safe to use
4992 XEXP if the set is multi-word, but not if
4993 it's single-word. */
4994 if (GET_CODE (target) == MEM)
4995 to_rtx = adjust_address (target, mode, offset);
4996 else if (offset == 0)
4997 to_rtx = target;
4998 else
4999 abort ();
5000 emit_move_insn (to_rtx, datum);
5001 }
5002
5003 if (ibit == nbits)
5004 break;
5005 word = 0;
5006 bit_pos = 0;
5007 offset += set_word_size / BITS_PER_UNIT;
5008 }
5009 }
5010 }
5011 else if (!cleared)
5012 /* Don't bother clearing storage if the set is all ones. */
5013 if (TREE_CHAIN (elt) != NULL_TREE
5014 || (TREE_PURPOSE (elt) == NULL_TREE
5015 ? nbits != 1
5016 : ( ! host_integerp (TREE_VALUE (elt), 0)
5017 || ! host_integerp (TREE_PURPOSE (elt), 0)
5018 || (tree_low_cst (TREE_VALUE (elt), 0)
5019 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5020 != (HOST_WIDE_INT) nbits))))
5021 clear_storage (target, expr_size (exp));
5022
5023 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5024 {
5025 /* Start of range of element or NULL. */
5026 tree startbit = TREE_PURPOSE (elt);
5027 /* End of range of element, or element value. */
5028 tree endbit = TREE_VALUE (elt);
5029 #ifdef TARGET_MEM_FUNCTIONS
5030 HOST_WIDE_INT startb, endb;
5031 #endif
5032 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5033
5034 bitlength_rtx = expand_expr (bitlength,
5035 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5036
5037 /* Handle non-range tuple element like [ expr ]. */
5038 if (startbit == NULL_TREE)
5039 {
5040 startbit = save_expr (endbit);
5041 endbit = startbit;
5042 }
5043
5044 startbit = convert (sizetype, startbit);
5045 endbit = convert (sizetype, endbit);
5046 if (! integer_zerop (domain_min))
5047 {
5048 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5049 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5050 }
5051 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5052 EXPAND_CONST_ADDRESS);
5053 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5054 EXPAND_CONST_ADDRESS);
5055
5056 if (REG_P (target))
5057 {
5058 targetx
5059 = assign_temp
5060 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5061 TYPE_QUAL_CONST)),
5062 0, 1, 1);
5063 emit_move_insn (targetx, target);
5064 }
5065
5066 else if (GET_CODE (target) == MEM)
5067 targetx = target;
5068 else
5069 abort ();
5070
5071 #ifdef TARGET_MEM_FUNCTIONS
5072 /* Optimization: If startbit and endbit are
5073 constants divisible by BITS_PER_UNIT,
5074 call memset instead. */
5075 if (TREE_CODE (startbit) == INTEGER_CST
5076 && TREE_CODE (endbit) == INTEGER_CST
5077 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5078 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5079 {
5080 emit_library_call (memset_libfunc, LCT_NORMAL,
5081 VOIDmode, 3,
5082 plus_constant (XEXP (targetx, 0),
5083 startb / BITS_PER_UNIT),
5084 Pmode,
5085 constm1_rtx, TYPE_MODE (integer_type_node),
5086 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5087 TYPE_MODE (sizetype));
5088 }
5089 else
5090 #endif
5091 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5092 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5093 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5094 startbit_rtx, TYPE_MODE (sizetype),
5095 endbit_rtx, TYPE_MODE (sizetype));
5096
5097 if (REG_P (target))
5098 emit_move_insn (target, targetx);
5099 }
5100 }
5101
5102 else
5103 abort ();
5104 }
5105
5106 /* Store the value of EXP (an expression tree)
5107 into a subfield of TARGET which has mode MODE and occupies
5108 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5109 If MODE is VOIDmode, it means that we are storing into a bit-field.
5110
5111 If VALUE_MODE is VOIDmode, return nothing in particular.
5112 UNSIGNEDP is not used in this case.
5113
5114 Otherwise, return an rtx for the value stored. This rtx
5115 has mode VALUE_MODE if that is convenient to do.
5116 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5117
5118 TYPE is the type of the underlying object.
5119
5120 ALIAS_SET is the alias set for the destination. This value will
5121 (in general) be different from that for TARGET, since TARGET is a
5122 reference to the containing structure. */
5123
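/* Illustrative sketch (editor's addition, not in the original source):
   storing into a 3-bit bit-field that starts 5 bits into its containing
   word reaches this function with bitsize == 3, bitpos == 5 and
   mode == VOIDmode (the bit-field case described above), so the store
   goes through store_bit_field rather than an ordinary memory
   reference.  A normally aligned int member in memory would typically
   arrive with mode == SImode instead and be handled by the plain
   adjust_address / store_expr path at the end of the function.  */
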
5124 static rtx
5125 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5126 alias_set)
5127 rtx target;
5128 HOST_WIDE_INT bitsize;
5129 HOST_WIDE_INT bitpos;
5130 enum machine_mode mode;
5131 tree exp;
5132 enum machine_mode value_mode;
5133 int unsignedp;
5134 tree type;
5135 int alias_set;
5136 {
5137 HOST_WIDE_INT width_mask = 0;
5138
5139 if (TREE_CODE (exp) == ERROR_MARK)
5140 return const0_rtx;
5141
5142 /* If we have nothing to store, do nothing unless the expression has
5143 side-effects. */
5144 if (bitsize == 0)
5145 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5146 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5147 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5148
5149 /* If we are storing into an unaligned field of an aligned union that is
5150 in a register, we may have the mode of TARGET being an integer mode but
5151 MODE == BLKmode. In that case, get an aligned object whose size and
5152 alignment are the same as TARGET and store TARGET into it (we can avoid
5153 the store if the field being stored is the entire width of TARGET). Then
5154 call ourselves recursively to store the field into a BLKmode version of
5155 that object. Finally, load from the object into TARGET. This is not
5156 very efficient in general, but should only be slightly more expensive
5157 than the otherwise-required unaligned accesses. Perhaps this can be
5158 cleaned up later. */
5159
5160 if (mode == BLKmode
5161 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5162 {
5163 rtx object
5164 = assign_temp
5165 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5166 0, 1, 1);
5167 rtx blk_object = copy_rtx (object);
5168
5169 PUT_MODE (blk_object, BLKmode);
5170
5171 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5172 emit_move_insn (object, target);
5173
5174 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5175 alias_set);
5176
5177 emit_move_insn (target, object);
5178
5179 /* We want to return the BLKmode version of the data. */
5180 return blk_object;
5181 }
5182
5183 if (GET_CODE (target) == CONCAT)
5184 {
5185 /* We're storing into a struct containing a single __complex. */
5186
5187 if (bitpos != 0)
5188 abort ();
5189 return store_expr (exp, target, 0);
5190 }
5191
5192 /* If the structure is in a register or if the component
5193 is a bit field, we cannot use addressing to access it.
5194 Use bit-field techniques or SUBREG to store in it. */
5195
5196 if (mode == VOIDmode
5197 || (mode != BLKmode && ! direct_store[(int) mode]
5198 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5199 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5200 || GET_CODE (target) == REG
5201 || GET_CODE (target) == SUBREG
5202 /* If the field isn't aligned enough to store as an ordinary memref,
5203 store it as a bit field. */
5204 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5205 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5206 || bitpos % GET_MODE_ALIGNMENT (mode)))
5207 /* If the RHS and field are a constant size and the size of the
5208 RHS isn't the same size as the bitfield, we must use bitfield
5209 operations. */
5210 || (bitsize >= 0
5211 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5212 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5213 {
5214 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5215
5216 /* If BITSIZE is narrower than the size of the type of EXP
5217 we will be narrowing TEMP. Normally, what's wanted are the
5218 low-order bits. However, if EXP's type is a record and this is
5219 a big-endian machine, we want the upper BITSIZE bits. */
5220 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5221 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5222 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5223 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5224 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5225 - bitsize),
5226 temp, 1);
5227
5228 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5229 MODE. */
5230 if (mode != VOIDmode && mode != BLKmode
5231 && mode != TYPE_MODE (TREE_TYPE (exp)))
5232 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5233
5234 /* If the modes of TARGET and TEMP are both BLKmode, both
5235 must be in memory and BITPOS must be aligned on a byte
5236 boundary. If so, we simply do a block copy. */
5237 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5238 {
5239 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5240 || bitpos % BITS_PER_UNIT != 0)
5241 abort ();
5242
5243 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5244 emit_block_move (target, temp,
5245 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5246 / BITS_PER_UNIT));
5247
5248 return value_mode == VOIDmode ? const0_rtx : target;
5249 }
5250
5251 /* Store the value in the bitfield. */
5252 store_bit_field (target, bitsize, bitpos, mode, temp,
5253 int_size_in_bytes (type));
5254
5255 if (value_mode != VOIDmode)
5256 {
5257 /* The caller wants an rtx for the value.
5258 If possible, avoid refetching from the bitfield itself. */
5259 if (width_mask != 0
5260 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5261 {
5262 tree count;
5263 enum machine_mode tmode;
5264
5265 if (unsignedp)
5266 return expand_and (temp,
5267 GEN_INT
5268 (trunc_int_for_mode
5269 (width_mask,
5270 GET_MODE (temp) == VOIDmode
5271 ? value_mode
5272 : GET_MODE (temp))), NULL_RTX);
5273
5274 tmode = GET_MODE (temp);
5275 if (tmode == VOIDmode)
5276 tmode = value_mode;
5277 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5278 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5279 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5280 }
5281
5282 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5283 NULL_RTX, value_mode, VOIDmode,
5284 int_size_in_bytes (type));
5285 }
5286 return const0_rtx;
5287 }
5288 else
5289 {
5290 rtx addr = XEXP (target, 0);
5291 rtx to_rtx = target;
5292
5293 /* If a value is wanted, it must be the lhs;
5294 so make the address stable for multiple use. */
5295
5296 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5297 && ! CONSTANT_ADDRESS_P (addr)
5298 /* A frame-pointer reference is already stable. */
5299 && ! (GET_CODE (addr) == PLUS
5300 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5301 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5302 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5303 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5304
5305 /* Now build a reference to just the desired component. */
5306
5307 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5308
5309 if (to_rtx == target)
5310 to_rtx = copy_rtx (to_rtx);
5311
5312 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5313 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5314 set_mem_alias_set (to_rtx, alias_set);
5315
5316 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5317 }
5318 }
5319 \f
5320 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5321 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5322 codes and find the ultimate containing object, which we return.
5323
5324 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5325 bit position, and *PUNSIGNEDP to the signedness of the field.
5326 If the position of the field is variable, we store a tree
5327 giving the variable offset (in units) in *POFFSET.
5328 This offset is in addition to the bit position.
5329 If the position is not variable, we store 0 in *POFFSET.
5330
5331 If any of the extraction expressions is volatile,
5332 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5333
5334 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5335 is a mode that can be used to access the field. In that case, *PBITSIZE
5336 is redundant.
5337
5338 If the field describes a variable-sized object, *PMODE is set to
5339 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5340 this case, but the address of the object can be found. */
5341
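/* Worked example (editor's addition, not in the original source):
   assuming "struct s { char c; int i; } x;" on a target with 32-bit
   ints and the usual padding, a COMPONENT_REF for "x.i" comes back
   from this function as the VAR_DECL for "x" with *PBITSIZE == 32,
   *PBITPOS == 32, *POFFSET == 0 (the position is constant),
   *PMODE == SImode and *PUNSIGNEDP == 0.  A bit-field member would
   instead set *PMODE to VOIDmode, as described above.  */
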
5342 tree
5343 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5344 punsignedp, pvolatilep)
5345 tree exp;
5346 HOST_WIDE_INT *pbitsize;
5347 HOST_WIDE_INT *pbitpos;
5348 tree *poffset;
5349 enum machine_mode *pmode;
5350 int *punsignedp;
5351 int *pvolatilep;
5352 {
5353 tree size_tree = 0;
5354 enum machine_mode mode = VOIDmode;
5355 tree offset = size_zero_node;
5356 tree bit_offset = bitsize_zero_node;
5357 tree placeholder_ptr = 0;
5358 tree tem;
5359
5360 /* First get the mode, signedness, and size. We do this from just the
5361 outermost expression. */
5362 if (TREE_CODE (exp) == COMPONENT_REF)
5363 {
5364 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5365 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5366 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5367
5368 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5369 }
5370 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5371 {
5372 size_tree = TREE_OPERAND (exp, 1);
5373 *punsignedp = TREE_UNSIGNED (exp);
5374 }
5375 else
5376 {
5377 mode = TYPE_MODE (TREE_TYPE (exp));
5378 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5379
5380 if (mode == BLKmode)
5381 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5382 else
5383 *pbitsize = GET_MODE_BITSIZE (mode);
5384 }
5385
5386 if (size_tree != 0)
5387 {
5388 if (! host_integerp (size_tree, 1))
5389 mode = BLKmode, *pbitsize = -1;
5390 else
5391 *pbitsize = tree_low_cst (size_tree, 1);
5392 }
5393
5394 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5395 and find the ultimate containing object. */
5396 while (1)
5397 {
5398 if (TREE_CODE (exp) == BIT_FIELD_REF)
5399 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5400 else if (TREE_CODE (exp) == COMPONENT_REF)
5401 {
5402 tree field = TREE_OPERAND (exp, 1);
5403 tree this_offset = DECL_FIELD_OFFSET (field);
5404
5405 /* If this field hasn't been filled in yet, don't go
5406 past it. This should only happen when folding expressions
5407 made during type construction. */
5408 if (this_offset == 0)
5409 break;
5410 else if (! TREE_CONSTANT (this_offset)
5411 && contains_placeholder_p (this_offset))
5412 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5413
5414 offset = size_binop (PLUS_EXPR, offset, this_offset);
5415 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5416 DECL_FIELD_BIT_OFFSET (field));
5417
5418 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5419 }
5420
5421 else if (TREE_CODE (exp) == ARRAY_REF
5422 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5423 {
5424 tree index = TREE_OPERAND (exp, 1);
5425 tree array = TREE_OPERAND (exp, 0);
5426 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5427 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5428 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5429
5430 /* We assume all arrays have sizes that are a multiple of a byte.
5431 First subtract the lower bound, if any, in the type of the
5432 index, then convert to sizetype and multiply by the size of the
5433 array element. */
5434 if (low_bound != 0 && ! integer_zerop (low_bound))
5435 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5436 index, low_bound));
5437
5438 /* If the index has a self-referential type, wrap it in a
5439 WITH_RECORD_EXPR referring to this reference; if the component
5440 size is self-referential, wrap it in one referring to the array. */
5441 if (! TREE_CONSTANT (index)
5442 && contains_placeholder_p (index))
5443 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5444 if (! TREE_CONSTANT (unit_size)
5445 && contains_placeholder_p (unit_size))
5446 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5447
5448 offset = size_binop (PLUS_EXPR, offset,
5449 size_binop (MULT_EXPR,
5450 convert (sizetype, index),
5451 unit_size));
5452 }
5453
5454 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5455 {
5456 tree new = find_placeholder (exp, &placeholder_ptr);
5457
5458 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5459 We might have been called from tree optimization where we
5460 haven't set up an object yet. */
5461 if (new == 0)
5462 break;
5463 else
5464 exp = new;
5465
5466 continue;
5467 }
5468 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5469 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5470 && ! ((TREE_CODE (exp) == NOP_EXPR
5471 || TREE_CODE (exp) == CONVERT_EXPR)
5472 && (TYPE_MODE (TREE_TYPE (exp))
5473 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5474 break;
5475
5476 /* If any reference in the chain is volatile, the effect is volatile. */
5477 if (TREE_THIS_VOLATILE (exp))
5478 *pvolatilep = 1;
5479
5480 exp = TREE_OPERAND (exp, 0);
5481 }
5482
5483 /* If OFFSET is constant, see if we can return the whole thing as a
5484 constant bit position. Otherwise, split it up. */
5485 if (host_integerp (offset, 0)
5486 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5487 bitsize_unit_node))
5488 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5489 && host_integerp (tem, 0))
5490 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5491 else
5492 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5493
5494 *pmode = mode;
5495 return exp;
5496 }
5497
5498 /* Return 1 if T is an expression that get_inner_reference handles. */
5499
5500 int
5501 handled_component_p (t)
5502 tree t;
5503 {
5504 switch (TREE_CODE (t))
5505 {
5506 case BIT_FIELD_REF:
5507 case COMPONENT_REF:
5508 case ARRAY_REF:
5509 case ARRAY_RANGE_REF:
5510 case NON_LVALUE_EXPR:
5511 case VIEW_CONVERT_EXPR:
5512 return 1;
5513
5514 case NOP_EXPR:
5515 case CONVERT_EXPR:
5516 return (TYPE_MODE (TREE_TYPE (t))
5517 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5518
5519 default:
5520 return 0;
5521 }
5522 }
5523
5524 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5525
5526 static enum memory_use_mode
5527 get_memory_usage_from_modifier (modifier)
5528 enum expand_modifier modifier;
5529 {
5530 switch (modifier)
5531 {
5532 case EXPAND_NORMAL:
5533 case EXPAND_SUM:
5534 return MEMORY_USE_RO;
5535 break;
5536 case EXPAND_MEMORY_USE_WO:
5537 return MEMORY_USE_WO;
5538 break;
5539 case EXPAND_MEMORY_USE_RW:
5540 return MEMORY_USE_RW;
5541 break;
5542 case EXPAND_MEMORY_USE_DONT:
5543 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5544 MEMORY_USE_DONT, because they are modifiers to a call of
5545 expand_expr in the ADDR_EXPR case of expand_expr. */
5546 case EXPAND_CONST_ADDRESS:
5547 case EXPAND_INITIALIZER:
5548 return MEMORY_USE_DONT;
5549 case EXPAND_MEMORY_USE_BAD:
5550 default:
5551 abort ();
5552 }
5553 }
5554 \f
5555 /* Given an rtx VALUE that may contain additions and multiplications, return
5556 an equivalent value that just refers to a register, memory, or constant.
5557 This is done by generating instructions to perform the arithmetic and
5558 returning a pseudo-register containing the value.
5559
5560 The returned value may be a REG, SUBREG, MEM or constant. */
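/* For instance (an illustrative sketch): given VALUE == (plus (reg 60)
   (const_int 4)), we emit the addition via expand_binop and return the
   resulting pseudo (or TARGET, if expand_binop chose to use it).  */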
5561
5562 rtx
5563 force_operand (value, target)
5564 rtx value, target;
5565 {
5566 optab binoptab = 0;
5567 /* Use a temporary to force order of execution of calls to
5568 `force_operand'. */
5569 rtx tmp;
5570 rtx op2;
5571 /* Use subtarget as the target for operand 0 of a binary operation. */
5572 rtx subtarget = get_subtarget (target);
5573
5574 /* Check for a PIC address load. */
5575 if (flag_pic
5576 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5577 && XEXP (value, 0) == pic_offset_table_rtx
5578 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5579 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5580 || GET_CODE (XEXP (value, 1)) == CONST))
5581 {
5582 if (!subtarget)
5583 subtarget = gen_reg_rtx (GET_MODE (value));
5584 emit_move_insn (subtarget, value);
5585 return subtarget;
5586 }
5587
5588 if (GET_CODE (value) == PLUS)
5589 binoptab = add_optab;
5590 else if (GET_CODE (value) == MINUS)
5591 binoptab = sub_optab;
5592 else if (GET_CODE (value) == MULT)
5593 {
5594 op2 = XEXP (value, 1);
5595 if (!CONSTANT_P (op2)
5596 && !(GET_CODE (op2) == REG && op2 != subtarget))
5597 subtarget = 0;
5598 tmp = force_operand (XEXP (value, 0), subtarget);
5599 return expand_mult (GET_MODE (value), tmp,
5600 force_operand (op2, NULL_RTX),
5601 target, 1);
5602 }
5603
5604 if (binoptab)
5605 {
5606 op2 = XEXP (value, 1);
5607 if (!CONSTANT_P (op2)
5608 && !(GET_CODE (op2) == REG && op2 != subtarget))
5609 subtarget = 0;
5610 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5611 {
5612 binoptab = add_optab;
5613 op2 = negate_rtx (GET_MODE (value), op2);
5614 }
5615
5616 /* Check for an addition with OP2 a constant integer and our first
5617 operand a PLUS of a virtual register and something else. In that
5618 case, we want to emit the sum of the virtual register and the
5619 constant first and then add the other value. This allows virtual
5620 register instantiation to simply modify the constant rather than
5621 creating another one around this addition. */
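/* For example (illustrative): for (plus (plus (reg virtual-stack-vars)
   (reg 65)) (const_int 4)) we first form virtual-stack-vars + 4 and
   then add (reg 65), so instantiation can fold the 4 into the
   frame-pointer offset.  */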
5622 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5623 && GET_CODE (XEXP (value, 0)) == PLUS
5624 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5625 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5626 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5627 {
5628 rtx temp = expand_binop (GET_MODE (value), binoptab,
5629 XEXP (XEXP (value, 0), 0), op2,
5630 subtarget, 0, OPTAB_LIB_WIDEN);
5631 return expand_binop (GET_MODE (value), binoptab, temp,
5632 force_operand (XEXP (XEXP (value, 0), 1), 0),
5633 target, 0, OPTAB_LIB_WIDEN);
5634 }
5635
5636 tmp = force_operand (XEXP (value, 0), subtarget);
5637 return expand_binop (GET_MODE (value), binoptab, tmp,
5638 force_operand (op2, NULL_RTX),
5639 target, 0, OPTAB_LIB_WIDEN);
5640 /* We give UNSIGNEDP = 0 to expand_binop
5641 because the only operations we are expanding here are signed ones. */
5642 }
5643
5644 #ifdef INSN_SCHEDULING
5645 /* On machines that have insn scheduling, we want all memory references to be
5646 explicit, so we need to deal with paradoxical SUBREGs of a MEM here. */
5647 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5648 && (GET_MODE_SIZE (GET_MODE (value))
5649 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5650 value
5651 = simplify_gen_subreg (GET_MODE (value),
5652 force_reg (GET_MODE (SUBREG_REG (value)),
5653 force_operand (SUBREG_REG (value),
5654 NULL_RTX)),
5655 GET_MODE (SUBREG_REG (value)),
5656 SUBREG_BYTE (value));
5657 #endif
5658
5659 return value;
5660 }
5661 \f
5662 /* Subroutine of expand_expr: return nonzero iff there is no way that
5663 EXP can reference X, which is being modified. TOP_P is nonzero if this
5664 call is going to be used to determine whether we need a temporary
5665 for EXP, as opposed to a recursive call to this function.
5666
5667 It is always safe for this routine to return zero since it merely
5668 searches for optimization opportunities. */
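/* For instance (a sketch of the intent): if X is a MEM and EXP contains
   a CALL_EXPR, we return 0, since the call is assumed to clobber all of
   memory; a pseudo register in X, by contrast, is safe from the call.
   See the CALL_EXPR case below.  */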
5669
5670 int
5671 safe_from_p (x, exp, top_p)
5672 rtx x;
5673 tree exp;
5674 int top_p;
5675 {
5676 rtx exp_rtl = 0;
5677 int i, nops;
5678 static tree save_expr_list;
5679
5680 if (x == 0
5681 /* If EXP has varying size, we MUST use a target since we currently
5682 have no way of allocating temporaries of variable size
5683 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5684 So we assume here that something at a higher level has prevented a
5685 clash. This is somewhat bogus, but the best we can do. Only
5686 do this when X is BLKmode and when we are at the top level. */
5687 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5688 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5689 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5690 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5691 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5692 != INTEGER_CST)
5693 && GET_MODE (x) == BLKmode)
5694 /* If X is in the outgoing argument area, it is always safe. */
5695 || (GET_CODE (x) == MEM
5696 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5697 || (GET_CODE (XEXP (x, 0)) == PLUS
5698 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5699 return 1;
5700
5701 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5702 find the underlying pseudo. */
5703 if (GET_CODE (x) == SUBREG)
5704 {
5705 x = SUBREG_REG (x);
5706 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5707 return 0;
5708 }
5709
5710 /* A SAVE_EXPR might appear many times in the expression passed to the
5711 top-level safe_from_p call, and if it has a complex subexpression,
5712 examining it multiple times could result in a combinatorial explosion.
5713 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5714 with optimization took about 28 minutes to compile -- even though it was
5715 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5716 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5717 we have processed. Note that the only test of top_p was above. */
5718
5719 if (top_p)
5720 {
5721 int rtn;
5722 tree t;
5723
5724 save_expr_list = 0;
5725
5726 rtn = safe_from_p (x, exp, 0);
5727
5728 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5729 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5730
5731 return rtn;
5732 }
5733
5734 /* Now look at our tree code and possibly recurse. */
5735 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5736 {
5737 case 'd':
5738 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5739 break;
5740
5741 case 'c':
5742 return 1;
5743
5744 case 'x':
5745 if (TREE_CODE (exp) == TREE_LIST)
5746 return ((TREE_VALUE (exp) == 0
5747 || safe_from_p (x, TREE_VALUE (exp), 0))
5748 && (TREE_CHAIN (exp) == 0
5749 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5750 else if (TREE_CODE (exp) == ERROR_MARK)
5751 return 1; /* An already-visited SAVE_EXPR? */
5752 else
5753 return 0;
5754
5755 case '1':
5756 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5757
5758 case '2':
5759 case '<':
5760 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5761 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5762
5763 case 'e':
5764 case 'r':
5765 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5766 the expression. If it is set, we conflict iff we are that rtx or
5767 both are in memory. Otherwise, we check all operands of the
5768 expression recursively. */
5769
5770 switch (TREE_CODE (exp))
5771 {
5772 case ADDR_EXPR:
5773 /* If the operand is static or we are static, we can't conflict.
5774 Likewise if we don't conflict with the operand at all. */
5775 if (staticp (TREE_OPERAND (exp, 0))
5776 || TREE_STATIC (exp)
5777 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5778 return 1;
5779
5780 /* Otherwise, the only way this can conflict is if we are taking
5781 the address of a DECL whose address is part of X, which is
5782 very rare. */
5783 exp = TREE_OPERAND (exp, 0);
5784 if (DECL_P (exp))
5785 {
5786 if (!DECL_RTL_SET_P (exp)
5787 || GET_CODE (DECL_RTL (exp)) != MEM)
5788 return 0;
5789 else
5790 exp_rtl = XEXP (DECL_RTL (exp), 0);
5791 }
5792 break;
5793
5794 case INDIRECT_REF:
5795 if (GET_CODE (x) == MEM
5796 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5797 get_alias_set (exp)))
5798 return 0;
5799 break;
5800
5801 case CALL_EXPR:
5802 /* Assume that the call will clobber all hard registers and
5803 all of memory. */
5804 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5805 || GET_CODE (x) == MEM)
5806 return 0;
5807 break;
5808
5809 case RTL_EXPR:
5810 /* If a sequence exists, we would have to scan every instruction
5811 in the sequence to see if it was safe. This is probably not
5812 worthwhile. */
5813 if (RTL_EXPR_SEQUENCE (exp))
5814 return 0;
5815
5816 exp_rtl = RTL_EXPR_RTL (exp);
5817 break;
5818
5819 case WITH_CLEANUP_EXPR:
5820 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5821 break;
5822
5823 case CLEANUP_POINT_EXPR:
5824 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5825
5826 case SAVE_EXPR:
5827 exp_rtl = SAVE_EXPR_RTL (exp);
5828 if (exp_rtl)
5829 break;
5830
5831 /* If we've already scanned this, don't do it again. Otherwise,
5832 show we've scanned it and record for clearing the flag if we're
5833 going on. */
5834 if (TREE_PRIVATE (exp))
5835 return 1;
5836
5837 TREE_PRIVATE (exp) = 1;
5838 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5839 {
5840 TREE_PRIVATE (exp) = 0;
5841 return 0;
5842 }
5843
5844 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5845 return 1;
5846
5847 case BIND_EXPR:
5848 /* The only operand we look at is operand 1. The rest aren't
5849 part of the expression. */
5850 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5851
5852 case METHOD_CALL_EXPR:
5853 /* This takes an rtx argument, but shouldn't appear here. */
5854 abort ();
5855
5856 default:
5857 break;
5858 }
5859
5860 /* If we have an rtx, we do not need to scan our operands. */
5861 if (exp_rtl)
5862 break;
5863
5864 nops = first_rtl_op (TREE_CODE (exp));
5865 for (i = 0; i < nops; i++)
5866 if (TREE_OPERAND (exp, i) != 0
5867 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5868 return 0;
5869
5870 /* If this is a language-specific tree code, it may require
5871 special handling. */
5872 if ((unsigned int) TREE_CODE (exp)
5873 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5874 && !(*lang_hooks.safe_from_p) (x, exp))
5875 return 0;
5876 }
5877
5878 /* If we have an rtl, find any enclosed object. Then see if we conflict
5879 with it. */
5880 if (exp_rtl)
5881 {
5882 if (GET_CODE (exp_rtl) == SUBREG)
5883 {
5884 exp_rtl = SUBREG_REG (exp_rtl);
5885 if (GET_CODE (exp_rtl) == REG
5886 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5887 return 0;
5888 }
5889
5890 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5891 are memory and they conflict. */
5892 return ! (rtx_equal_p (x, exp_rtl)
5893 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5894 && true_dependence (exp_rtl, GET_MODE (x), x,
5895 rtx_addr_varies_p)));
5896 }
5897
5898 /* If we reach here, it is safe. */
5899 return 1;
5900 }
5901
5902 /* Subroutine of expand_expr: return rtx if EXP is a
5903 variable or parameter; else return 0. */
5904
5905 static rtx
5906 var_rtx (exp)
5907 tree exp;
5908 {
5909 STRIP_NOPS (exp);
5910 switch (TREE_CODE (exp))
5911 {
5912 case PARM_DECL:
5913 case VAR_DECL:
5914 return DECL_RTL (exp);
5915 default:
5916 return 0;
5917 }
5918 }
5919
5920 #ifdef MAX_INTEGER_COMPUTATION_MODE
5921
5922 void
5923 check_max_integer_computation_mode (exp)
5924 tree exp;
5925 {
5926 enum tree_code code;
5927 enum machine_mode mode;
5928
5929 /* Strip any NOPs that don't change the mode. */
5930 STRIP_NOPS (exp);
5931 code = TREE_CODE (exp);
5932
5933 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5934 if (code == NOP_EXPR
5935 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5936 return;
5937
5938 /* First check the type of the overall operation. We need only look at
5939 unary, binary and relational operations. */
5940 if (TREE_CODE_CLASS (code) == '1'
5941 || TREE_CODE_CLASS (code) == '2'
5942 || TREE_CODE_CLASS (code) == '<')
5943 {
5944 mode = TYPE_MODE (TREE_TYPE (exp));
5945 if (GET_MODE_CLASS (mode) == MODE_INT
5946 && mode > MAX_INTEGER_COMPUTATION_MODE)
5947 internal_error ("unsupported wide integer operation");
5948 }
5949
5950 /* Check operand of a unary op. */
5951 if (TREE_CODE_CLASS (code) == '1')
5952 {
5953 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5954 if (GET_MODE_CLASS (mode) == MODE_INT
5955 && mode > MAX_INTEGER_COMPUTATION_MODE)
5956 internal_error ("unsupported wide integer operation");
5957 }
5958
5959 /* Check operands of a binary/comparison op. */
5960 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5961 {
5962 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5963 if (GET_MODE_CLASS (mode) == MODE_INT
5964 && mode > MAX_INTEGER_COMPUTATION_MODE)
5965 internal_error ("unsupported wide integer operation");
5966
5967 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5968 if (GET_MODE_CLASS (mode) == MODE_INT
5969 && mode > MAX_INTEGER_COMPUTATION_MODE)
5970 internal_error ("unsupported wide integer operation");
5971 }
5972 }
5973 #endif
5974 \f
5975 /* Return the highest power of two that EXP is known to be a multiple of.
5976 This is used in updating alignment of MEMs in array references. */
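/* A worked example (illustrative): for an offset of the form I * 12 + 4,
   where I is a variable, the MULT_EXPR yields 1 * 4 = 4, the constant 4
   yields 4, and the PLUS_EXPR takes MIN (4, 4); so we return 4 and the
   reference is known to be 4-byte aligned.  */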
5977
5978 static HOST_WIDE_INT
5979 highest_pow2_factor (exp)
5980 tree exp;
5981 {
5982 HOST_WIDE_INT c0, c1;
5983
5984 switch (TREE_CODE (exp))
5985 {
5986 case INTEGER_CST:
5987 /* If the integer is expressible in a HOST_WIDE_INT, we can find the
5988 lowest bit that's a one. If the result is zero, pessimize by
5989 returning 1. This is overly conservative, but such things should not
5990 happen in the offset expressions that we are called with. */
5991 if (host_integerp (exp, 0))
5992 {
5993 c0 = tree_low_cst (exp, 0);
5994 c0 = c0 < 0 ? - c0 : c0;
5995 return c0 != 0 ? c0 & -c0 : 1;
5996 }
5997 break;
5998
5999 case PLUS_EXPR: case MINUS_EXPR:
6000 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6001 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6002 return MIN (c0, c1);
6003
6004 case MULT_EXPR:
6005 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6006 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6007 return c0 * c1;
6008
6009 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6010 case CEIL_DIV_EXPR:
6011 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6012 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6013 return MAX (1, c0 / c1);
6014
6015 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6016 case COMPOUND_EXPR: case SAVE_EXPR: case WITH_RECORD_EXPR:
6017 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6018
6019 case COND_EXPR:
6020 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6021 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6022 return MIN (c0, c1);
6023
6024 default:
6025 break;
6026 }
6027
6028 return 1;
6029 }
6030 \f
6031 /* Return an object on the placeholder list that matches EXP, a
6032 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6033 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6034 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6035 is a location which initially points to a starting location in the
6036 placeholder list (zero means start of the list) and where a pointer into
6037 the placeholder list at which the object is found is placed. */
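/* Roughly speaking: the WITH_RECORD_EXPR case of expand_expr below pushes
   its object onto placeholder_list, so a PLACEHOLDER_EXPR occurring inside
   a self-referential size ends up replaced by (a reference into) that
   object; get_inner_reference above relies on the same mechanism.  */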
6038
6039 tree
6040 find_placeholder (exp, plist)
6041 tree exp;
6042 tree *plist;
6043 {
6044 tree type = TREE_TYPE (exp);
6045 tree placeholder_expr;
6046
6047 for (placeholder_expr
6048 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6049 placeholder_expr != 0;
6050 placeholder_expr = TREE_CHAIN (placeholder_expr))
6051 {
6052 tree need_type = TYPE_MAIN_VARIANT (type);
6053 tree elt;
6054
6055 /* Find the outermost reference that is of the type we want. If none,
6056 see if any object has a type that is a pointer to the type we
6057 want. */
6058 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6059 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6060 || TREE_CODE (elt) == COND_EXPR)
6061 ? TREE_OPERAND (elt, 1)
6062 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6063 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6064 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6065 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6066 ? TREE_OPERAND (elt, 0) : 0))
6067 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6068 {
6069 if (plist)
6070 *plist = placeholder_expr;
6071 return elt;
6072 }
6073
6074 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6075 elt
6076 = ((TREE_CODE (elt) == COMPOUND_EXPR
6077 || TREE_CODE (elt) == COND_EXPR)
6078 ? TREE_OPERAND (elt, 1)
6079 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6080 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6081 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6082 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6083 ? TREE_OPERAND (elt, 0) : 0))
6084 if (POINTER_TYPE_P (TREE_TYPE (elt))
6085 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6086 == need_type))
6087 {
6088 if (plist)
6089 *plist = placeholder_expr;
6090 return build1 (INDIRECT_REF, need_type, elt);
6091 }
6092 }
6093
6094 return 0;
6095 }
6096 \f
6097 /* expand_expr: generate code for computing expression EXP.
6098 An rtx for the computed value is returned. The value is never null.
6099 In the case of a void EXP, const0_rtx is returned.
6100
6101 The value may be stored in TARGET if TARGET is nonzero.
6102 TARGET is just a suggestion; callers must assume that
6103 the rtx returned may not be the same as TARGET.
6104
6105 If TARGET is CONST0_RTX, it means that the value will be ignored.
6106
6107 If TMODE is not VOIDmode, it suggests generating the
6108 result in mode TMODE. But this is done only when convenient.
6109 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6110 TMODE is just a suggestion; callers must assume that
6111 the rtx returned may not have mode TMODE.
6112
6113 Note that TARGET may have neither TMODE nor MODE. In that case, it
6114 probably will not be used.
6115
6116 If MODIFIER is EXPAND_SUM then when EXP is an addition
6117 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6118 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6119 products as above, or REG or MEM, or constant.
6120 Ordinarily in such cases we would output mul or add instructions
6121 and then return a pseudo reg containing the sum.
6122
6123 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6124 it also marks a label as absolutely required (it can't be dead).
6125 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6126 This is used for outputting expressions used in initializers.
6127
6128 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6129 with a constant address even if that address is not normally legitimate.
6130 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
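/* As a concrete illustration of EXPAND_SUM (a sketch only): expanding
   P + 4, where P lives in (reg 61), may simply return
   (plus (reg 61) (const_int 4)) instead of emitting an add, leaving the
   caller (e.g. a memory_address computation) to decide what to do with
   the sum.  */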
6131
6132 rtx
6133 expand_expr (exp, target, tmode, modifier)
6134 tree exp;
6135 rtx target;
6136 enum machine_mode tmode;
6137 enum expand_modifier modifier;
6138 {
6139 rtx op0, op1, temp;
6140 tree type = TREE_TYPE (exp);
6141 int unsignedp = TREE_UNSIGNED (type);
6142 enum machine_mode mode;
6143 enum tree_code code = TREE_CODE (exp);
6144 optab this_optab;
6145 rtx subtarget, original_target;
6146 int ignore;
6147 tree context;
6148 /* Used by check-memory-usage to make modifier read only. */
6149 enum expand_modifier ro_modifier;
6150
6151 /* Handle ERROR_MARK before anybody tries to access its type. */
6152 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6153 {
6154 op0 = CONST0_RTX (tmode);
6155 if (op0 != 0)
6156 return op0;
6157 return const0_rtx;
6158 }
6159
6160 mode = TYPE_MODE (type);
6161 /* Use subtarget as the target for operand 0 of a binary operation. */
6162 subtarget = get_subtarget (target);
6163 original_target = target;
6164 ignore = (target == const0_rtx
6165 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6166 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6167 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6168 && TREE_CODE (type) == VOID_TYPE));
6169
6170 /* Make a read-only version of the modifier. */
6171 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6172 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6173 ro_modifier = modifier;
6174 else
6175 ro_modifier = EXPAND_NORMAL;
6176
6177 /* If we are going to ignore this result, we need only do something
6178 if there is a side-effect somewhere in the expression. If there
6179 is, short-circuit the most common cases here. Note that we must
6180 not call expand_expr with anything but const0_rtx in case this
6181 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
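/* (For instance, an expression statement such as "a + b;" with no side
   effects is not expanded at all; we just return const0_rtx.)  */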
6182
6183 if (ignore)
6184 {
6185 if (! TREE_SIDE_EFFECTS (exp))
6186 return const0_rtx;
6187
6188 /* Ensure we reference a volatile object even if value is ignored, but
6189 don't do this if all we are doing is taking its address. */
6190 if (TREE_THIS_VOLATILE (exp)
6191 && TREE_CODE (exp) != FUNCTION_DECL
6192 && mode != VOIDmode && mode != BLKmode
6193 && modifier != EXPAND_CONST_ADDRESS)
6194 {
6195 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6196 if (GET_CODE (temp) == MEM)
6197 temp = copy_to_reg (temp);
6198 return const0_rtx;
6199 }
6200
6201 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6202 || code == INDIRECT_REF || code == BUFFER_REF)
6203 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6204 VOIDmode, ro_modifier);
6205 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6206 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6207 {
6208 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6209 ro_modifier);
6210 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6211 ro_modifier);
6212 return const0_rtx;
6213 }
6214 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6215 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6216 /* If the second operand has no side effects, just evaluate
6217 the first. */
6218 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6219 VOIDmode, ro_modifier);
6220 else if (code == BIT_FIELD_REF)
6221 {
6222 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6223 ro_modifier);
6224 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6225 ro_modifier);
6226 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6227 ro_modifier);
6228 return const0_rtx;
6229 }
6230 ;
6231 target = 0;
6232 }
6233
6234 #ifdef MAX_INTEGER_COMPUTATION_MODE
6235 /* Only check stuff here if the mode we want is different from the mode
6236 of the expression; if it's the same, check_max_integer_computation_mode
6237 will handle it. Do we really need to check this stuff at all? */
6238
6239 if (target
6240 && GET_MODE (target) != mode
6241 && TREE_CODE (exp) != INTEGER_CST
6242 && TREE_CODE (exp) != PARM_DECL
6243 && TREE_CODE (exp) != ARRAY_REF
6244 && TREE_CODE (exp) != ARRAY_RANGE_REF
6245 && TREE_CODE (exp) != COMPONENT_REF
6246 && TREE_CODE (exp) != BIT_FIELD_REF
6247 && TREE_CODE (exp) != INDIRECT_REF
6248 && TREE_CODE (exp) != CALL_EXPR
6249 && TREE_CODE (exp) != VAR_DECL
6250 && TREE_CODE (exp) != RTL_EXPR)
6251 {
6252 enum machine_mode mode = GET_MODE (target);
6253
6254 if (GET_MODE_CLASS (mode) == MODE_INT
6255 && mode > MAX_INTEGER_COMPUTATION_MODE)
6256 internal_error ("unsupported wide integer operation");
6257 }
6258
6259 if (tmode != mode
6260 && TREE_CODE (exp) != INTEGER_CST
6261 && TREE_CODE (exp) != PARM_DECL
6262 && TREE_CODE (exp) != ARRAY_REF
6263 && TREE_CODE (exp) != ARRAY_RANGE_REF
6264 && TREE_CODE (exp) != COMPONENT_REF
6265 && TREE_CODE (exp) != BIT_FIELD_REF
6266 && TREE_CODE (exp) != INDIRECT_REF
6267 && TREE_CODE (exp) != VAR_DECL
6268 && TREE_CODE (exp) != CALL_EXPR
6269 && TREE_CODE (exp) != RTL_EXPR
6270 && GET_MODE_CLASS (tmode) == MODE_INT
6271 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6272 internal_error ("unsupported wide integer operation");
6273
6274 check_max_integer_computation_mode (exp);
6275 #endif
6276
6277 /* If will do cse, generate all results into pseudo registers
6278 since 1) that allows cse to find more things
6279 and 2) otherwise cse could produce an insn the machine
6280 cannot support. */
6281
6282 if (! cse_not_expected && mode != BLKmode && target
6283 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6284 target = subtarget;
6285
6286 switch (code)
6287 {
6288 case LABEL_DECL:
6289 {
6290 tree function = decl_function_context (exp);
6291 /* Handle using a label in a containing function. */
6292 if (function != current_function_decl
6293 && function != inline_function_decl && function != 0)
6294 {
6295 struct function *p = find_function_data (function);
6296 p->expr->x_forced_labels
6297 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6298 p->expr->x_forced_labels);
6299 }
6300 else
6301 {
6302 if (modifier == EXPAND_INITIALIZER)
6303 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6304 label_rtx (exp),
6305 forced_labels);
6306 }
6307
6308 temp = gen_rtx_MEM (FUNCTION_MODE,
6309 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6310 if (function != current_function_decl
6311 && function != inline_function_decl && function != 0)
6312 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6313 return temp;
6314 }
6315
6316 case PARM_DECL:
6317 if (DECL_RTL (exp) == 0)
6318 {
6319 error_with_decl (exp, "prior parameter's size depends on `%s'");
6320 return CONST0_RTX (mode);
6321 }
6322
6323 /* ... fall through ... */
6324
6325 case VAR_DECL:
6326 /* If a static var's type was incomplete when the decl was written,
6327 but the type is complete now, lay out the decl now. */
6328 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6329 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6330 {
6331 rtx value = DECL_RTL_IF_SET (exp);
6332
6333 layout_decl (exp, 0);
6334
6335 /* If the RTL was already set, update its mode and memory
6336 attributes. */
6337 if (value != 0)
6338 {
6339 PUT_MODE (value, DECL_MODE (exp));
6340 SET_DECL_RTL (exp, 0);
6341 set_mem_attributes (value, exp, 1);
6342 SET_DECL_RTL (exp, value);
6343 }
6344 }
6345
6346 /* Although static-storage variables start off initialized, according to
6347 ANSI C, a memcpy could overwrite them with uninitialized values. So
6348 we check them too. This also lets us check for read-only variables
6349 accessed via a non-const declaration, in case it won't be detected
6350 any other way (e.g., in an embedded system or OS kernel without
6351 memory protection).
6352
6353 Aggregates are not checked here; they're handled elsewhere. */
6354 if (cfun && current_function_check_memory_usage
6355 && code == VAR_DECL
6356 && GET_CODE (DECL_RTL (exp)) == MEM
6357 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6358 {
6359 enum memory_use_mode memory_usage;
6360 memory_usage = get_memory_usage_from_modifier (modifier);
6361
6362 in_check_memory_usage = 1;
6363 if (memory_usage != MEMORY_USE_DONT)
6364 emit_library_call (chkr_check_addr_libfunc,
6365 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6366 XEXP (DECL_RTL (exp), 0), Pmode,
6367 GEN_INT (int_size_in_bytes (type)),
6368 TYPE_MODE (sizetype),
6369 GEN_INT (memory_usage),
6370 TYPE_MODE (integer_type_node));
6371 in_check_memory_usage = 0;
6372 }
6373
6374 /* ... fall through ... */
6375
6376 case FUNCTION_DECL:
6377 case RESULT_DECL:
6378 if (DECL_RTL (exp) == 0)
6379 abort ();
6380
6381 /* Ensure the variable is marked as used even if it doesn't go through
6382 a parser. If it hasn't been used yet, write out an external
6383 definition. */
6384 if (! TREE_USED (exp))
6385 {
6386 assemble_external (exp);
6387 TREE_USED (exp) = 1;
6388 }
6389
6390 /* Show we haven't gotten RTL for this yet. */
6391 temp = 0;
6392
6393 /* Handle variables inherited from containing functions. */
6394 context = decl_function_context (exp);
6395
6396 /* We treat inline_function_decl as an alias for the current function
6397 because that is the inline function whose vars, types, etc.
6398 are being merged into the current function.
6399 See expand_inline_function. */
6400
6401 if (context != 0 && context != current_function_decl
6402 && context != inline_function_decl
6403 /* If var is static, we don't need a static chain to access it. */
6404 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6405 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6406 {
6407 rtx addr;
6408
6409 /* Mark as non-local and addressable. */
6410 DECL_NONLOCAL (exp) = 1;
6411 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6412 abort ();
6413 mark_addressable (exp);
6414 if (GET_CODE (DECL_RTL (exp)) != MEM)
6415 abort ();
6416 addr = XEXP (DECL_RTL (exp), 0);
6417 if (GET_CODE (addr) == MEM)
6418 addr
6419 = replace_equiv_address (addr,
6420 fix_lexical_addr (XEXP (addr, 0), exp));
6421 else
6422 addr = fix_lexical_addr (addr, exp);
6423
6424 temp = replace_equiv_address (DECL_RTL (exp), addr);
6425 }
6426
6427 /* This is the case of an array whose size is to be determined
6428 from its initializer, while the initializer is still being parsed.
6429 See expand_decl. */
6430
6431 else if (GET_CODE (DECL_RTL (exp)) == MEM
6432 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6433 temp = validize_mem (DECL_RTL (exp));
6434
6435 /* If DECL_RTL is memory, we are in the normal case and either
6436 the address is not valid or it is not a register and -fforce-addr
6437 is specified, get the address into a register. */
6438
6439 else if (GET_CODE (DECL_RTL (exp)) == MEM
6440 && modifier != EXPAND_CONST_ADDRESS
6441 && modifier != EXPAND_SUM
6442 && modifier != EXPAND_INITIALIZER
6443 && (! memory_address_p (DECL_MODE (exp),
6444 XEXP (DECL_RTL (exp), 0))
6445 || (flag_force_addr
6446 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6447 temp = replace_equiv_address (DECL_RTL (exp),
6448 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6449
6450 /* If we got something, return it. But first, set the alignment
6451 if the address is a register. */
6452 if (temp != 0)
6453 {
6454 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6455 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6456
6457 return temp;
6458 }
6459
6460 /* If the mode of DECL_RTL does not match that of the decl, it
6461 must be a promoted value. We return a SUBREG of the wanted mode,
6462 but mark it so that we know that it was already extended. */
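/* (Illustrative: on a target whose PROMOTE_MODE widens QImode to SImode,
   a `char' local may live in an SImode pseudo; we then return a QImode
   SUBREG of that pseudo with SUBREG_PROMOTED_VAR_P set.)  */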
6463
6464 if (GET_CODE (DECL_RTL (exp)) == REG
6465 && GET_MODE (DECL_RTL (exp)) != mode)
6466 {
6467 /* Get the signedness used for this variable. Ensure we get the
6468 same mode we got when the variable was declared. */
6469 if (GET_MODE (DECL_RTL (exp))
6470 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6471 abort ();
6472
6473 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6474 SUBREG_PROMOTED_VAR_P (temp) = 1;
6475 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6476 return temp;
6477 }
6478
6479 return DECL_RTL (exp);
6480
6481 case INTEGER_CST:
6482 return immed_double_const (TREE_INT_CST_LOW (exp),
6483 TREE_INT_CST_HIGH (exp), mode);
6484
6485 case CONST_DECL:
6486 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6487 EXPAND_MEMORY_USE_BAD);
6488
6489 case REAL_CST:
6490 /* If optimized, generate immediate CONST_DOUBLE
6491 which will be turned into memory by reload if necessary.
6492
6493 We used to force a register so that loop.c could see it. But
6494 this does not allow gen_* patterns to perform optimizations with
6495 the constants. It also produces two insns in cases like "x = 1.0;".
6496 On most machines, floating-point constants are not permitted in
6497 many insns, so we'd end up copying it to a register in any case.
6498
6499 Now, we do the copying in expand_binop, if appropriate. */
6500 return immed_real_const (exp);
6501
6502 case COMPLEX_CST:
6503 case STRING_CST:
6504 if (! TREE_CST_RTL (exp))
6505 output_constant_def (exp, 1);
6506
6507 /* TREE_CST_RTL probably contains a constant address.
6508 On RISC machines where a constant address isn't valid,
6509 make some insns to get that address into a register. */
6510 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6511 && modifier != EXPAND_CONST_ADDRESS
6512 && modifier != EXPAND_INITIALIZER
6513 && modifier != EXPAND_SUM
6514 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6515 || (flag_force_addr
6516 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6517 return replace_equiv_address (TREE_CST_RTL (exp),
6518 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6519 return TREE_CST_RTL (exp);
6520
6521 case EXPR_WITH_FILE_LOCATION:
6522 {
6523 rtx to_return;
6524 const char *saved_input_filename = input_filename;
6525 int saved_lineno = lineno;
6526 input_filename = EXPR_WFL_FILENAME (exp);
6527 lineno = EXPR_WFL_LINENO (exp);
6528 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6529 emit_line_note (input_filename, lineno);
6530 /* Possibly avoid switching back and forth here. */
6531 to_return = expand_expr (EXPR_WFL_NODE (exp),
6532 target || ! ignore ? target : const0_rtx,
6533 tmode, modifier);
6534 input_filename = saved_input_filename;
6535 lineno = saved_lineno;
6536 return to_return;
6537 }
6538
6539 case SAVE_EXPR:
6540 context = decl_function_context (exp);
6541
6542 /* If this SAVE_EXPR was at global context, assume we are an
6543 initialization function and move it into our context. */
6544 if (context == 0)
6545 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6546
6547 /* We treat inline_function_decl as an alias for the current function
6548 because that is the inline function whose vars, types, etc.
6549 are being merged into the current function.
6550 See expand_inline_function. */
6551 if (context == current_function_decl || context == inline_function_decl)
6552 context = 0;
6553
6554 /* If this is non-local, handle it. */
6555 if (context)
6556 {
6557 /* The following call just exists to abort if the context is
6558 not of a containing function. */
6559 find_function_data (context);
6560
6561 temp = SAVE_EXPR_RTL (exp);
6562 if (temp && GET_CODE (temp) == REG)
6563 {
6564 put_var_into_stack (exp);
6565 temp = SAVE_EXPR_RTL (exp);
6566 }
6567 if (temp == 0 || GET_CODE (temp) != MEM)
6568 abort ();
6569 return
6570 replace_equiv_address (temp,
6571 fix_lexical_addr (XEXP (temp, 0), exp));
6572 }
6573 if (SAVE_EXPR_RTL (exp) == 0)
6574 {
6575 if (mode == VOIDmode)
6576 temp = const0_rtx;
6577 else
6578 temp = assign_temp (build_qualified_type (type,
6579 (TYPE_QUALS (type)
6580 | TYPE_QUAL_CONST)),
6581 3, 0, 0);
6582
6583 SAVE_EXPR_RTL (exp) = temp;
6584 if (!optimize && GET_CODE (temp) == REG)
6585 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6586 save_expr_regs);
6587
6588 /* If the mode of TEMP does not match that of the expression, it
6589 must be a promoted value. We pass store_expr a SUBREG of the
6590 wanted mode but mark it so that we know that it was already
6591 extended. Note that `unsignedp' was modified above in
6592 this case. */
6593
6594 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6595 {
6596 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6597 SUBREG_PROMOTED_VAR_P (temp) = 1;
6598 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6599 }
6600
6601 if (temp == const0_rtx)
6602 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6603 EXPAND_MEMORY_USE_BAD);
6604 else
6605 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6606
6607 TREE_USED (exp) = 1;
6608 }
6609
6610 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6611 must be a promoted value. We return a SUBREG of the wanted mode,
6612 but mark it so that we know that it was already extended. */
6613
6614 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6615 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6616 {
6617 /* Compute the signedness and make the proper SUBREG. */
6618 promote_mode (type, mode, &unsignedp, 0);
6619 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6620 SUBREG_PROMOTED_VAR_P (temp) = 1;
6621 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6622 return temp;
6623 }
6624
6625 return SAVE_EXPR_RTL (exp);
6626
6627 case UNSAVE_EXPR:
6628 {
6629 rtx temp;
6630 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6631 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6632 return temp;
6633 }
6634
6635 case PLACEHOLDER_EXPR:
6636 {
6637 tree old_list = placeholder_list;
6638 tree placeholder_expr = 0;
6639
6640 exp = find_placeholder (exp, &placeholder_expr);
6641 if (exp == 0)
6642 abort ();
6643
6644 placeholder_list = TREE_CHAIN (placeholder_expr);
6645 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6646 placeholder_list = old_list;
6647 return temp;
6648 }
6649
6650 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6651 abort ();
6652
6653 case WITH_RECORD_EXPR:
6654 /* Put the object on the placeholder list, expand our first operand,
6655 and pop the list. */
6656 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6657 placeholder_list);
6658 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6659 tmode, ro_modifier);
6660 placeholder_list = TREE_CHAIN (placeholder_list);
6661 return target;
6662
6663 case GOTO_EXPR:
6664 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6665 expand_goto (TREE_OPERAND (exp, 0));
6666 else
6667 expand_computed_goto (TREE_OPERAND (exp, 0));
6668 return const0_rtx;
6669
6670 case EXIT_EXPR:
6671 expand_exit_loop_if_false (NULL,
6672 invert_truthvalue (TREE_OPERAND (exp, 0)));
6673 return const0_rtx;
6674
6675 case LABELED_BLOCK_EXPR:
6676 if (LABELED_BLOCK_BODY (exp))
6677 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6678 /* Should perhaps use expand_label, but this is simpler and safer. */
6679 do_pending_stack_adjust ();
6680 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6681 return const0_rtx;
6682
6683 case EXIT_BLOCK_EXPR:
6684 if (EXIT_BLOCK_RETURN (exp))
6685 sorry ("returned value in block_exit_expr");
6686 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6687 return const0_rtx;
6688
6689 case LOOP_EXPR:
6690 push_temp_slots ();
6691 expand_start_loop (1);
6692 expand_expr_stmt (TREE_OPERAND (exp, 0));
6693 expand_end_loop ();
6694 pop_temp_slots ();
6695
6696 return const0_rtx;
6697
6698 case BIND_EXPR:
6699 {
6700 tree vars = TREE_OPERAND (exp, 0);
6701 int vars_need_expansion = 0;
6702
6703 /* Need to open a binding contour here because
6704 if there are any cleanups they must be contained here. */
6705 expand_start_bindings (2);
6706
6707 /* Mark the corresponding BLOCK for output in its proper place. */
6708 if (TREE_OPERAND (exp, 2) != 0
6709 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6710 insert_block (TREE_OPERAND (exp, 2));
6711
6712 /* If VARS have not yet been expanded, expand them now. */
6713 while (vars)
6714 {
6715 if (!DECL_RTL_SET_P (vars))
6716 {
6717 vars_need_expansion = 1;
6718 expand_decl (vars);
6719 }
6720 expand_decl_init (vars);
6721 vars = TREE_CHAIN (vars);
6722 }
6723
6724 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6725
6726 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6727
6728 return temp;
6729 }
6730
6731 case RTL_EXPR:
6732 if (RTL_EXPR_SEQUENCE (exp))
6733 {
6734 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6735 abort ();
6736 emit_insns (RTL_EXPR_SEQUENCE (exp));
6737 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6738 }
6739 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6740 free_temps_for_rtl_expr (exp);
6741 return RTL_EXPR_RTL (exp);
6742
6743 case CONSTRUCTOR:
6744 /* If we don't need the result, just ensure we evaluate any
6745 subexpressions. */
6746 if (ignore)
6747 {
6748 tree elt;
6749 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6750 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6751 EXPAND_MEMORY_USE_BAD);
6752 return const0_rtx;
6753 }
6754
6755 /* All elts simple constants => refer to a constant in memory. But
6756 if this is a non-BLKmode mode, let it store a field at a time
6757 since that should make a CONST_INT or CONST_DOUBLE when we
6758 fold. Likewise, if we have a target we can use, it is best to
6759 store directly into the target unless the type is large enough
6760 that memcpy will be used. If we are making an initializer and
6761 all operands are constant, put it in memory as well. */
6762 else if ((TREE_STATIC (exp)
6763 && ((mode == BLKmode
6764 && ! (target != 0 && safe_from_p (target, exp, 1)))
6765 || TREE_ADDRESSABLE (exp)
6766 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6767 && (! MOVE_BY_PIECES_P
6768 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6769 TYPE_ALIGN (type)))
6770 && ! mostly_zeros_p (exp))))
6771 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6772 {
6773 rtx constructor = output_constant_def (exp, 1);
6774
6775 if (modifier != EXPAND_CONST_ADDRESS
6776 && modifier != EXPAND_INITIALIZER
6777 && modifier != EXPAND_SUM)
6778 constructor = validize_mem (constructor);
6779
6780 return constructor;
6781 }
6782 else
6783 {
6784 /* Handle calls that pass values in multiple non-contiguous
6785 locations. The Irix 6 ABI has examples of this. */
6786 if (target == 0 || ! safe_from_p (target, exp, 1)
6787 || GET_CODE (target) == PARALLEL)
6788 target
6789 = assign_temp (build_qualified_type (type,
6790 (TYPE_QUALS (type)
6791 | (TREE_READONLY (exp)
6792 * TYPE_QUAL_CONST))),
6793 TREE_ADDRESSABLE (exp), 1, 1);
6794
6795 store_constructor (exp, target, 0,
6796 int_size_in_bytes (TREE_TYPE (exp)));
6797 return target;
6798 }
6799
6800 case INDIRECT_REF:
6801 {
6802 tree exp1 = TREE_OPERAND (exp, 0);
6803 tree index;
6804 tree string = string_constant (exp1, &index);
6805
6806 /* Try to optimize reads from const strings. */
6807 if (string
6808 && TREE_CODE (string) == STRING_CST
6809 && TREE_CODE (index) == INTEGER_CST
6810 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6811 && GET_MODE_CLASS (mode) == MODE_INT
6812 && GET_MODE_SIZE (mode) == 1
6813 && modifier != EXPAND_MEMORY_USE_WO)
6814 return
6815 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6816
6817 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6818 op0 = memory_address (mode, op0);
6819
6820 if (cfun && current_function_check_memory_usage
6821 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6822 {
6823 enum memory_use_mode memory_usage;
6824 memory_usage = get_memory_usage_from_modifier (modifier);
6825
6826 if (memory_usage != MEMORY_USE_DONT)
6827 {
6828 in_check_memory_usage = 1;
6829 emit_library_call (chkr_check_addr_libfunc,
6830 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6831 Pmode, GEN_INT (int_size_in_bytes (type)),
6832 TYPE_MODE (sizetype),
6833 GEN_INT (memory_usage),
6834 TYPE_MODE (integer_type_node));
6835 in_check_memory_usage = 0;
6836 }
6837 }
6838
6839 temp = gen_rtx_MEM (mode, op0);
6840 set_mem_attributes (temp, exp, 0);
6841
6842 /* If we are writing to this object and its type is a record with
6843 readonly fields, we must mark it as readonly so it will
6844 conflict with readonly references to those fields. */
6845 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6846 RTX_UNCHANGING_P (temp) = 1;
6847
6848 return temp;
6849 }
6850
6851 case ARRAY_REF:
6852 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6853 abort ();
6854
6855 {
6856 tree array = TREE_OPERAND (exp, 0);
6857 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6858 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6859 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6860 HOST_WIDE_INT i;
6861
6862 /* Optimize the special-case of a zero lower bound.
6863
6864 We convert the low_bound to sizetype to avoid some problems
6865 with constant folding. (E.g. suppose the lower bound is 1,
6866 and its mode is QI. Without the conversion, (ARRAY
6867 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6868 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6869
6870 if (! integer_zerop (low_bound))
6871 index = size_diffop (index, convert (sizetype, low_bound));
6872
6873 /* Fold an expression like: "foo"[2].
6874 This is not done in fold so it won't happen inside &.
6875 Don't fold if this is for wide characters since it's too
6876 difficult to do correctly and this is a very rare case. */
6877
6878 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6879 && TREE_CODE (array) == STRING_CST
6880 && TREE_CODE (index) == INTEGER_CST
6881 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6882 && GET_MODE_CLASS (mode) == MODE_INT
6883 && GET_MODE_SIZE (mode) == 1)
6884 return
6885 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6886
6887 /* If this is a constant index into a constant array,
6888 just get the value from the array. Handle both the cases when
6889 we have an explicit constructor and when our operand is a variable
6890 that was declared const. */
6891
6892 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6893 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6894 && TREE_CODE (index) == INTEGER_CST
6895 && 0 > compare_tree_int (index,
6896 list_length (CONSTRUCTOR_ELTS
6897 (TREE_OPERAND (exp, 0)))))
6898 {
6899 tree elem;
6900
6901 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6902 i = TREE_INT_CST_LOW (index);
6903 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6904 ;
6905
6906 if (elem)
6907 return expand_expr (fold (TREE_VALUE (elem)), target,
6908 tmode, ro_modifier);
6909 }
6910
6911 else if (optimize >= 1
6912 && modifier != EXPAND_CONST_ADDRESS
6913 && modifier != EXPAND_INITIALIZER
6914 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6915 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6916 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6917 {
6918 if (TREE_CODE (index) == INTEGER_CST)
6919 {
6920 tree init = DECL_INITIAL (array);
6921
6922 if (TREE_CODE (init) == CONSTRUCTOR)
6923 {
6924 tree elem;
6925
6926 for (elem = CONSTRUCTOR_ELTS (init);
6927 (elem
6928 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6929 elem = TREE_CHAIN (elem))
6930 ;
6931
6932 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6933 return expand_expr (fold (TREE_VALUE (elem)), target,
6934 tmode, ro_modifier);
6935 }
6936 else if (TREE_CODE (init) == STRING_CST
6937 && 0 > compare_tree_int (index,
6938 TREE_STRING_LENGTH (init)))
6939 {
6940 tree type = TREE_TYPE (TREE_TYPE (init));
6941 enum machine_mode mode = TYPE_MODE (type);
6942
6943 if (GET_MODE_CLASS (mode) == MODE_INT
6944 && GET_MODE_SIZE (mode) == 1)
6945 return (GEN_INT
6946 (TREE_STRING_POINTER
6947 (init)[TREE_INT_CST_LOW (index)]));
6948 }
6949 }
6950 }
6951 }
6952 /* Fall through. */
6953
6954 case COMPONENT_REF:
6955 case BIT_FIELD_REF:
6956 case ARRAY_RANGE_REF:
6957 /* If the operand is a CONSTRUCTOR, we can just extract the
6958 appropriate field if it is present. Don't do this if we have
6959 already written the data since we want to refer to that copy
6960 and varasm.c assumes that's what we'll do. */
6961 if (code == COMPONENT_REF
6962 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6963 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6964 {
6965 tree elt;
6966
6967 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6968 elt = TREE_CHAIN (elt))
6969 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6970 /* We can normally use the value of the field in the
6971 CONSTRUCTOR. However, if this is a bitfield in
6972 an integral mode that we can fit in a HOST_WIDE_INT,
6973 we must mask only the number of bits in the bitfield,
6974 since this is done implicitly by the constructor. If
6975 the bitfield does not meet either of those conditions,
6976 we can't do this optimization. */
6977 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6978 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6979 == MODE_INT)
6980 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6981 <= HOST_BITS_PER_WIDE_INT))))
6982 {
6983 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6984 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6985 {
6986 HOST_WIDE_INT bitsize
6987 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6988
6989 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6990 {
6991 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6992 op0 = expand_and (op0, op1, target);
6993 }
6994 else
6995 {
6996 enum machine_mode imode
6997 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6998 tree count
6999 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7000 0);
7001
7002 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7003 target, 0);
7004 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7005 target, 0);
7006 }
7007 }
7008
7009 return op0;
7010 }
7011 }
7012
7013 {
7014 enum machine_mode mode1;
7015 HOST_WIDE_INT bitsize, bitpos;
7016 tree offset;
7017 int volatilep = 0;
7018 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7019 &mode1, &unsignedp, &volatilep);
7020 rtx orig_op0;
7021
7022 /* If we got back the original object, something is wrong. Perhaps
7023 we are evaluating an expression too early. In any event, don't
7024 infinitely recurse. */
7025 if (tem == exp)
7026 abort ();
7027
7028 /* If TEM's type is a union of variable size, pass TARGET to the inner
7029 computation, since it will need a temporary and TARGET is known
7030 to suffice. This occurs in unchecked conversion in Ada. */
7031
7032 orig_op0 = op0
7033 = expand_expr (tem,
7034 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7035 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7036 != INTEGER_CST)
7037 ? target : NULL_RTX),
7038 VOIDmode,
7039 (modifier == EXPAND_INITIALIZER
7040 || modifier == EXPAND_CONST_ADDRESS)
7041 ? modifier : EXPAND_NORMAL);
7042
7043 /* If this is a constant, put it into a register if it is a
7044 legitimate constant and OFFSET is 0; otherwise put it in memory. */
7045 if (CONSTANT_P (op0))
7046 {
7047 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7048 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7049 && offset == 0)
7050 op0 = force_reg (mode, op0);
7051 else
7052 op0 = validize_mem (force_const_mem (mode, op0));
7053 }
7054
7055 if (offset != 0)
7056 {
7057 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7058
7059 /* If this object is in a register, put it into memory.
7060 This case can't occur in C, but can in Ada if we have
7061 unchecked conversion of an expression from a scalar type to
7062 an array or record type. */
7063 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7064 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7065 {
7066 /* If the operand is a SAVE_EXPR, we can deal with this by
7067 forcing the SAVE_EXPR into memory. */
7068 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7069 {
7070 put_var_into_stack (TREE_OPERAND (exp, 0));
7071 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7072 }
7073 else
7074 {
7075 tree nt
7076 = build_qualified_type (TREE_TYPE (tem),
7077 (TYPE_QUALS (TREE_TYPE (tem))
7078 | TYPE_QUAL_CONST));
7079 rtx memloc = assign_temp (nt, 1, 1, 1);
7080
7081 emit_move_insn (memloc, op0);
7082 op0 = memloc;
7083 }
7084 }
7085
7086 if (GET_CODE (op0) != MEM)
7087 abort ();
7088
7089 if (GET_MODE (offset_rtx) != ptr_mode)
7090 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7091
7092 #ifdef POINTERS_EXTEND_UNSIGNED
7093 if (GET_MODE (offset_rtx) != Pmode)
7094 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7095 #endif
7096
7097 /* A constant address in OP0 can have VOIDmode; we must not try
7098 to call force_reg in that case, so avoid it. */
7099 if (GET_CODE (op0) == MEM
7100 && GET_MODE (op0) == BLKmode
7101 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7102 && bitsize != 0
7103 && (bitpos % bitsize) == 0
7104 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7105 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7106 {
7107 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7108
7109 if (GET_CODE (XEXP (temp, 0)) == REG)
7110 op0 = temp;
7111 else
7112 op0 = (replace_equiv_address
7113 (op0,
7114 force_reg (GET_MODE (XEXP (temp, 0)),
7115 XEXP (temp, 0))));
7116 bitpos = 0;
7117 }
7118
7119 op0 = offset_address (op0, offset_rtx,
7120 highest_pow2_factor (offset));
7121 }
7122
7123 /* Don't forget about volatility even if this is a bitfield. */
7124 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7125 {
7126 if (op0 == orig_op0)
7127 op0 = copy_rtx (op0);
7128
7129 MEM_VOLATILE_P (op0) = 1;
7130 }
7131
7132 /* Check the access. */
7133 if (cfun != 0 && current_function_check_memory_usage
7134 && GET_CODE (op0) == MEM)
7135 {
7136 enum memory_use_mode memory_usage;
7137 memory_usage = get_memory_usage_from_modifier (modifier);
7138
7139 if (memory_usage != MEMORY_USE_DONT)
7140 {
7141 rtx to;
7142 int size;
7143
7144 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7145 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7146
7147 /* Check the access right of the pointer. */
7148 in_check_memory_usage = 1;
7149 if (size > BITS_PER_UNIT)
7150 emit_library_call (chkr_check_addr_libfunc,
7151 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7152 Pmode, GEN_INT (size / BITS_PER_UNIT),
7153 TYPE_MODE (sizetype),
7154 GEN_INT (memory_usage),
7155 TYPE_MODE (integer_type_node));
7156 in_check_memory_usage = 0;
7157 }
7158 }
7159
7160 /* In cases where an aligned union has an unaligned object
7161 as a field, we might be extracting a BLKmode value from
7162 an integer-mode (e.g., SImode) object. Handle this case
7163 by doing the extract into an object as wide as the field
7164 (which we know to be the width of a basic mode), then
7165 storing into memory, and changing the mode to BLKmode. */
7166 if (mode1 == VOIDmode
7167 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7168 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7169 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7170 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7171 && modifier != EXPAND_CONST_ADDRESS
7172 && modifier != EXPAND_INITIALIZER)
7173 /* If the field isn't aligned enough to fetch as a memref,
7174 fetch it as a bit field. */
7175 || (mode1 != BLKmode
7176 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7177 && ((TYPE_ALIGN (TREE_TYPE (tem))
7178 < GET_MODE_ALIGNMENT (mode))
7179 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7180 /* If the type and the field are a constant size and the
7181 size of the type isn't the same size as the bitfield,
7182 we must use bitfield operations. */
7183 || (bitsize >= 0
7184 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7185 == INTEGER_CST)
7186 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7187 bitsize)))
7188 {
7189 enum machine_mode ext_mode = mode;
7190
7191 if (ext_mode == BLKmode
7192 && ! (target != 0 && GET_CODE (op0) == MEM
7193 && GET_CODE (target) == MEM
7194 && bitpos % BITS_PER_UNIT == 0))
7195 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7196
7197 if (ext_mode == BLKmode)
7198 {
7199 /* In this case, BITPOS must start at a byte boundary and
7200 TARGET, if specified, must be a MEM. */
7201 if (GET_CODE (op0) != MEM
7202 || (target != 0 && GET_CODE (target) != MEM)
7203 || bitpos % BITS_PER_UNIT != 0)
7204 abort ();
7205
7206 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7207 if (target == 0)
7208 target = assign_temp (type, 0, 1, 1);
7209
7210 emit_block_move (target, op0,
7211 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7212 / BITS_PER_UNIT));
7213
7214 return target;
7215 }
7216
7217 op0 = validize_mem (op0);
7218
7219 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7220 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7221
7222 op0 = extract_bit_field (op0, bitsize, bitpos,
7223 unsignedp, target, ext_mode, ext_mode,
7224 int_size_in_bytes (TREE_TYPE (tem)));
7225
7226 /* If the result is a record type and BITSIZE is narrower than
7227 the mode of OP0, an integral mode, and this is a big endian
7228 machine, we must put the field into the high-order bits. */
7229 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7230 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7231 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7232 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7233 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7234 - bitsize),
7235 op0, 1);
7236
7237 if (mode == BLKmode)
7238 {
7239 rtx new = assign_temp (build_qualified_type
7240 (type_for_mode (ext_mode, 0),
7241 TYPE_QUAL_CONST), 0, 1, 1);
7242
7243 emit_move_insn (new, op0);
7244 op0 = copy_rtx (new);
7245 PUT_MODE (op0, BLKmode);
7246 set_mem_attributes (op0, exp, 1);
7247 }
7248
7249 return op0;
7250 }
7251
7252 /* If the result is BLKmode, use that to access the object
7253 now as well. */
7254 if (mode == BLKmode)
7255 mode1 = BLKmode;
7256
7257 /* Get a reference to just this component. */
7258 if (modifier == EXPAND_CONST_ADDRESS
7259 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7260 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7261 else
7262 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7263
7264 if (op0 == orig_op0)
7265 op0 = copy_rtx (op0);
7266
7267 set_mem_attributes (op0, exp, 0);
7268 if (GET_CODE (XEXP (op0, 0)) == REG)
7269 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7270
7271 MEM_VOLATILE_P (op0) |= volatilep;
7272 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7273 || modifier == EXPAND_CONST_ADDRESS
7274 || modifier == EXPAND_INITIALIZER)
7275 return op0;
7276 else if (target == 0)
7277 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7278
7279 convert_move (target, op0, unsignedp);
7280 return target;
7281 }
7282
7283 case VTABLE_REF:
7284 {
7285 rtx insn, before = get_last_insn (), vtbl_ref;
7286
7287 /* Evaluate the interior expression. */
7288 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7289 tmode, modifier);
7290
7291 /* Get or create an instruction off which to hang a note. */
7292 if (REG_P (subtarget))
7293 {
7294 target = subtarget;
7295 insn = get_last_insn ();
7296 if (insn == before)
7297 abort ();
7298 if (! INSN_P (insn))
7299 insn = prev_nonnote_insn (insn);
7300 }
7301 else
7302 {
7303 target = gen_reg_rtx (GET_MODE (subtarget));
7304 insn = emit_move_insn (target, subtarget);
7305 }
7306
7307 /* Collect the data for the note. */
7308 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7309 vtbl_ref = plus_constant (vtbl_ref,
7310 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7311 /* Discard the initial CONST that was added. */
7312 vtbl_ref = XEXP (vtbl_ref, 0);
7313
7314 REG_NOTES (insn)
7315 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7316
7317 return target;
7318 }
7319
7320 /* Intended for a reference to a buffer of a file-object in Pascal.
7321 But it's not certain that a special tree code will really be
7322 necessary for these. INDIRECT_REF might work for them. */
7323 case BUFFER_REF:
7324 abort ();
7325
7326 case IN_EXPR:
7327 {
7328 /* Pascal set IN expression.
7329
7330 Algorithm:
7331 rlo = set_low - (set_low%bits_per_word);
7332 the_word = set [ (index - rlo)/bits_per_word ];
7333 bit_index = index % bits_per_word;
7334 bitmask = 1 << bit_index;
7335 return !!(the_word & bitmask); */
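	 /* A hypothetical trace of the algorithm above, assuming 8-bit units:
	    with set_low = 10 and index = 37, rlo = 10 - (10 % 8) = 8,
	    the_word = set[(37 - 8) / 8] = set[3], bit_index = 37 % 8 = 5,
	    and bitmask = 1 << 5 = 0x20, so bit 5 of set[3] is tested.  */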
7336
7337 tree set = TREE_OPERAND (exp, 0);
7338 tree index = TREE_OPERAND (exp, 1);
7339 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7340 tree set_type = TREE_TYPE (set);
7341 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7342 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7343 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7344 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7345 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7346 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7347 rtx setaddr = XEXP (setval, 0);
7348 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7349 rtx rlow;
7350 rtx diff, quo, rem, addr, bit, result;
7351
7352 /* If domain is empty, answer is no. Likewise if index is constant
7353 and out of bounds. */
7354 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7355 && TREE_CODE (set_low_bound) == INTEGER_CST
7356 && tree_int_cst_lt (set_high_bound, set_low_bound))
7357 || (TREE_CODE (index) == INTEGER_CST
7358 && TREE_CODE (set_low_bound) == INTEGER_CST
7359 && tree_int_cst_lt (index, set_low_bound))
7360 || (TREE_CODE (set_high_bound) == INTEGER_CST
7361 && TREE_CODE (index) == INTEGER_CST
7362 && tree_int_cst_lt (set_high_bound, index))))
7363 return const0_rtx;
7364
7365 if (target == 0)
7366 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7367
7368 /* If we get here, we have to generate the code for both cases
7369 (in range and out of range). */
7370
7371 op0 = gen_label_rtx ();
7372 op1 = gen_label_rtx ();
7373
7374 if (! (GET_CODE (index_val) == CONST_INT
7375 && GET_CODE (lo_r) == CONST_INT))
7376 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7377 GET_MODE (index_val), iunsignedp, op1);
7378
7379 if (! (GET_CODE (index_val) == CONST_INT
7380 && GET_CODE (hi_r) == CONST_INT))
7381 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7382 GET_MODE (index_val), iunsignedp, op1);
7383
7384 /* Calculate the element number of bit zero in the first word
7385 of the set. */
7386 if (GET_CODE (lo_r) == CONST_INT)
7387 rlow = GEN_INT (INTVAL (lo_r)
7388 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7389 else
7390 rlow = expand_binop (index_mode, and_optab, lo_r,
7391 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7392 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7393
7394 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7395 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7396
7397 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7398 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7399 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7400 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7401
7402 addr = memory_address (byte_mode,
7403 expand_binop (index_mode, add_optab, diff,
7404 setaddr, NULL_RTX, iunsignedp,
7405 OPTAB_LIB_WIDEN));
7406
7407 /* Extract the bit we want to examine. */
7408 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7409 gen_rtx_MEM (byte_mode, addr),
7410 make_tree (TREE_TYPE (index), rem),
7411 NULL_RTX, 1);
7412 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7413 GET_MODE (target) == byte_mode ? target : 0,
7414 1, OPTAB_LIB_WIDEN);
7415
7416 if (result != target)
7417 convert_move (target, result, 1);
7418
7419 /* Output the code to handle the out-of-range case. */
7420 emit_jump (op0);
7421 emit_label (op1);
7422 emit_move_insn (target, const0_rtx);
7423 emit_label (op0);
7424 return target;
7425 }
7426
7427 case WITH_CLEANUP_EXPR:
7428 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7429 {
7430 WITH_CLEANUP_EXPR_RTL (exp)
7431 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7432 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7433
7434 /* That's it for this cleanup. */
7435 TREE_OPERAND (exp, 1) = 0;
7436 }
7437 return WITH_CLEANUP_EXPR_RTL (exp);
7438
7439 case CLEANUP_POINT_EXPR:
7440 {
7441 /* Start a new binding layer that will keep track of all cleanup
7442 actions to be performed. */
7443 expand_start_bindings (2);
7444
7445 target_temp_slot_level = temp_slot_level;
7446
7447 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7448 /* If we're going to use this value, load it up now. */
7449 if (! ignore)
7450 op0 = force_not_mem (op0);
7451 preserve_temp_slots (op0);
7452 expand_end_bindings (NULL_TREE, 0, 0);
7453 }
7454 return op0;
7455
7456 case CALL_EXPR:
7457 /* Check for a built-in function. */
7458 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7459 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7460 == FUNCTION_DECL)
7461 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7462 {
7463 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7464 == BUILT_IN_FRONTEND)
7465 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7466 else
7467 return expand_builtin (exp, target, subtarget, tmode, ignore);
7468 }
7469
7470 return expand_call (exp, target, ignore);
7471
7472 case NON_LVALUE_EXPR:
7473 case NOP_EXPR:
7474 case CONVERT_EXPR:
7475 case REFERENCE_EXPR:
7476 if (TREE_OPERAND (exp, 0) == error_mark_node)
7477 return const0_rtx;
7478
7479 if (TREE_CODE (type) == UNION_TYPE)
7480 {
7481 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7482
7483 /* If both input and output are BLKmode, this conversion isn't doing
7484 anything except possibly changing memory attributes. */
7485 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7486 {
7487 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7488 modifier);
7489
7490 result = copy_rtx (result);
7491 set_mem_attributes (result, exp, 0);
7492 return result;
7493 }
7494
7495 if (target == 0)
7496 target = assign_temp (type, 0, 1, 1);
7497
7498 if (GET_CODE (target) == MEM)
7499 /* Store data into beginning of memory target. */
7500 store_expr (TREE_OPERAND (exp, 0),
7501 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7502
7503 else if (GET_CODE (target) == REG)
7504 /* Store this field into a union of the proper type. */
7505 store_field (target,
7506 MIN ((int_size_in_bytes (TREE_TYPE
7507 (TREE_OPERAND (exp, 0)))
7508 * BITS_PER_UNIT),
7509 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7510 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7511 VOIDmode, 0, type, 0);
7512 else
7513 abort ();
7514
7515 /* Return the entire union. */
7516 return target;
7517 }
7518
7519 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7520 {
7521 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7522 ro_modifier);
7523
7524 /* If the signedness of the conversion differs and OP0 is
7525 a promoted SUBREG, clear that indication since we now
7526 have to do the proper extension. */
7527 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7528 && GET_CODE (op0) == SUBREG)
7529 SUBREG_PROMOTED_VAR_P (op0) = 0;
7530
7531 return op0;
7532 }
7533
7534 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7535 if (GET_MODE (op0) == mode)
7536 return op0;
7537
7538 /* If OP0 is a constant, just convert it into the proper mode. */
7539 if (CONSTANT_P (op0))
7540 return
7541 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7542 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7543
7544 if (modifier == EXPAND_INITIALIZER)
7545 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7546
7547 if (target == 0)
7548 return
7549 convert_to_mode (mode, op0,
7550 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7551 else
7552 convert_move (target, op0,
7553 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7554 return target;
7555
7556 case VIEW_CONVERT_EXPR:
7557 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, ro_modifier);
7558
7559 /* If the input and output modes are both the same, we are done.
7560 Otherwise, if neither mode is BLKmode and both are within a word, we
7561 can use gen_lowpart. If neither is true, make sure the operand is
7562 in memory and convert the MEM to the new mode. */
7563 if (TYPE_MODE (type) == GET_MODE (op0))
7564 ;
7565 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7566 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7567 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7568 op0 = gen_lowpart (TYPE_MODE (type), op0);
7569 else if (GET_CODE (op0) != MEM)
7570 {
7571 /* If the operand is not a MEM, force it into memory. Since we
7572 are going to be changing the mode of the MEM, don't call
7573 force_const_mem for constants because we don't allow pool
7574 constants to change mode. */
7575 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7576
7577 if (TREE_ADDRESSABLE (exp))
7578 abort ();
7579
7580 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7581 target
7582 = assign_stack_temp_for_type
7583 (TYPE_MODE (inner_type),
7584 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7585
7586 emit_move_insn (target, op0);
7587 op0 = target;
7588 }
7589
7590 /* At this point, OP0 is in the correct mode. If the output type is such
7591 that the operand is known to be aligned, indicate that it is.
7592 Otherwise, we need only be concerned about alignment for non-BLKmode
7593 results. */
7594 if (GET_CODE (op0) == MEM)
7595 {
7596 op0 = copy_rtx (op0);
7597
7598 if (TYPE_ALIGN_OK (type))
7599 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7600 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7601 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7602 {
7603 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7604 HOST_WIDE_INT temp_size = MAX (int_size_in_bytes (inner_type),
7605 GET_MODE_SIZE (TYPE_MODE (type)));
7606 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7607 temp_size, 0, type);
7608 rtx new_with_op0_mode = copy_rtx (new);
7609
7610 if (TREE_ADDRESSABLE (exp))
7611 abort ();
7612
7613 PUT_MODE (new_with_op0_mode, GET_MODE (op0));
7614 if (GET_MODE (op0) == BLKmode)
7615 emit_block_move (new_with_op0_mode, op0,
7616 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7617 else
7618 emit_move_insn (new_with_op0_mode, op0);
7619
7620 op0 = new;
7621 }
7622
7623 PUT_MODE (op0, TYPE_MODE (type));
7624 }
7625
7626 return op0;
7627
7628 case PLUS_EXPR:
7629 /* We come here from MINUS_EXPR when the second operand is a
7630 constant. */
7631 plus_expr:
7632 this_optab = ! unsignedp && flag_trapv
7633 && (GET_MODE_CLASS(mode) == MODE_INT)
7634 ? addv_optab : add_optab;
7635
7636 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7637 something else, make sure we add the register to the constant and
7638 then to the other thing. This case can occur during strength
7639 reduction and doing it this way will produce better code if the
7640 frame pointer or argument pointer is eliminated.
7641
7642 fold-const.c will ensure that the constant is always in the inner
7643 PLUS_EXPR, so the only case we need to do anything about is if
7644 sp, ap, or fp is our second argument, in which case we must swap
7645 the innermost first argument and our second argument. */
7646
7647 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7648 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7649 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7650 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7651 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7652 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7653 {
7654 tree t = TREE_OPERAND (exp, 1);
7655
7656 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7657 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7658 }
7659
7660 /* If the result is to be ptr_mode and we are adding an integer to
7661 something, we might be forming a constant. So try to use
7662 plus_constant. If it produces a sum and we can't accept it,
7663 use force_operand. This allows P = &ARR[const] to generate
7664 efficient code on machines where a SYMBOL_REF is not a valid
7665 address.
7666
7667 If this is an EXPAND_SUM call, always return the sum. */
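	/* A hypothetical example: for P = &arr[4] with 4-byte elements, the
	   inner sum is (SYMBOL_REF arr) plus the constant 16; plus_constant
	   folds the offset into the address instead of emitting a separate
	   add insn.  */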
7668 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7669 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7670 {
7671 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7672 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7673 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7674 {
7675 rtx constant_part;
7676
7677 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7678 EXPAND_SUM);
7679 /* Use immed_double_const to ensure that the constant is
7680 truncated according to the mode of OP1, then sign extended
7681 to a HOST_WIDE_INT. Using the constant directly can result
7682 in non-canonical RTL in a 64x32 cross compile. */
7683 constant_part
7684 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7685 (HOST_WIDE_INT) 0,
7686 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7687 op1 = plus_constant (op1, INTVAL (constant_part));
7688 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7689 op1 = force_operand (op1, target);
7690 return op1;
7691 }
7692
7693 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7694 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7695 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7696 {
7697 rtx constant_part;
7698
7699 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7700 EXPAND_SUM);
7701 if (! CONSTANT_P (op0))
7702 {
7703 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7704 VOIDmode, modifier);
7705 /* Don't go to both_summands if modifier
7706 says it's not right to return a PLUS. */
7707 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7708 goto binop2;
7709 goto both_summands;
7710 }
7711 /* Use immed_double_const to ensure that the constant is
7712 truncated according to the mode of OP1, then sign extended
7713 to a HOST_WIDE_INT. Using the constant directly can result
7714 in non-canonical RTL in a 64x32 cross compile. */
7715 constant_part
7716 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7717 (HOST_WIDE_INT) 0,
7718 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7719 op0 = plus_constant (op0, INTVAL (constant_part));
7720 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7721 op0 = force_operand (op0, target);
7722 return op0;
7723 }
7724 }
7725
7726 /* No sense saving up arithmetic to be done
7727 if it's all in the wrong mode to form part of an address.
7728 And force_operand won't know whether to sign-extend or
7729 zero-extend. */
7730 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7731 || mode != ptr_mode)
7732 goto binop;
7733
7734 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7735 subtarget = 0;
7736
7737 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7738 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7739
7740 both_summands:
7741 /* Make sure any term that's a sum with a constant comes last. */
7742 if (GET_CODE (op0) == PLUS
7743 && CONSTANT_P (XEXP (op0, 1)))
7744 {
7745 temp = op0;
7746 op0 = op1;
7747 op1 = temp;
7748 }
7749 /* If adding to a sum including a constant,
7750 associate it to put the constant outside. */
7751 if (GET_CODE (op1) == PLUS
7752 && CONSTANT_P (XEXP (op1, 1)))
7753 {
7754 rtx constant_term = const0_rtx;
7755
7756 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7757 if (temp != 0)
7758 op0 = temp;
7759 /* Ensure that MULT comes first if there is one. */
7760 else if (GET_CODE (op0) == MULT)
7761 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7762 else
7763 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7764
7765 /* Let's also eliminate constants from op0 if possible. */
7766 op0 = eliminate_constant_term (op0, &constant_term);
7767
7768 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7769 their sum should be a constant. Form it into OP1, since the
7770 result we want will then be OP0 + OP1. */
7771
7772 temp = simplify_binary_operation (PLUS, mode, constant_term,
7773 XEXP (op1, 1));
7774 if (temp != 0)
7775 op1 = temp;
7776 else
7777 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7778 }
7779
7780 /* Put a constant term last and put a multiplication first. */
7781 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7782 temp = op1, op1 = op0, op0 = temp;
7783
7784 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7785 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7786
7787 case MINUS_EXPR:
7788 /* For initializers, we are allowed to return a MINUS of two
7789 symbolic constants. Here we handle all cases when both operands
7790 are constant. */
7791 /* Handle difference of two symbolic constants,
7792 for the sake of an initializer. */
7793 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7794 && really_constant_p (TREE_OPERAND (exp, 0))
7795 && really_constant_p (TREE_OPERAND (exp, 1)))
7796 {
7797 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7798 VOIDmode, ro_modifier);
7799 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7800 VOIDmode, ro_modifier);
7801
7802 /* If the last operand is a CONST_INT, use plus_constant of
7803 the negated constant. Else make the MINUS. */
7804 if (GET_CODE (op1) == CONST_INT)
7805 return plus_constant (op0, - INTVAL (op1));
7806 else
7807 return gen_rtx_MINUS (mode, op0, op1);
7808 }
7809 /* Convert A - const to A + (-const). */
7810 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7811 {
7812 tree negated = fold (build1 (NEGATE_EXPR, type,
7813 TREE_OPERAND (exp, 1)));
7814
7815 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7816 /* If we can't negate the constant in TYPE, leave it alone and
7817 expand_binop will negate it for us. We used to try to do it
7818 here in the signed version of TYPE, but that doesn't work
7819 on POINTER_TYPEs. */;
7820 else
7821 {
7822 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7823 goto plus_expr;
7824 }
7825 }
7826 this_optab = ! unsignedp && flag_trapv
7827 && (GET_MODE_CLASS(mode) == MODE_INT)
7828 ? subv_optab : sub_optab;
7829 goto binop;
7830
7831 case MULT_EXPR:
7832 /* If first operand is constant, swap them.
7833 Thus the following special case checks need only
7834 check the second operand. */
7835 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7836 {
7837 tree t1 = TREE_OPERAND (exp, 0);
7838 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7839 TREE_OPERAND (exp, 1) = t1;
7840 }
7841
7842 /* Attempt to return something suitable for generating an
7843 indexed address, for machines that support that. */
7844
7845 if (modifier == EXPAND_SUM && mode == ptr_mode
7846 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7847 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7848 {
7849 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7850 EXPAND_SUM);
7851
7852 /* Apply distributive law if OP0 is x+c. */
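	  /* E.g. (hypothetically) if OP0 is (fp + 8) and the multiplier is 4,
	     this returns (fp * 4) + 32, keeping the constant part where it can
	     still be folded into an address.  */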
7853 if (GET_CODE (op0) == PLUS
7854 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7855 return
7856 gen_rtx_PLUS
7857 (mode,
7858 gen_rtx_MULT
7859 (mode, XEXP (op0, 0),
7860 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7861 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7862 * INTVAL (XEXP (op0, 1))));
7863
7864 if (GET_CODE (op0) != REG)
7865 op0 = force_operand (op0, NULL_RTX);
7866 if (GET_CODE (op0) != REG)
7867 op0 = copy_to_mode_reg (mode, op0);
7868
7869 return
7870 gen_rtx_MULT (mode, op0,
7871 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7872 }
7873
7874 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7875 subtarget = 0;
7876
7877 /* Check for multiplying things that have been extended
7878 from a narrower type. If this machine supports multiplying
7879 in that narrower type with a result in the desired type,
7880 do it that way, and avoid the explicit type-conversion. */
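      /* For instance, on a hypothetical target providing a widening multiply
	 pattern such as "mulhisi3", (int) (short) a * (int) (short) b can be
	 done as a single HImode-by-HImode multiply with an SImode result,
	 instead of two extensions followed by a full SImode multiply.  */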
7881 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7882 && TREE_CODE (type) == INTEGER_TYPE
7883 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7884 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7885 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7886 && int_fits_type_p (TREE_OPERAND (exp, 1),
7887 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7888 /* Don't use a widening multiply if a shift will do. */
7889 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7890 > HOST_BITS_PER_WIDE_INT)
7891 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7892 ||
7893 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7894 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7895 ==
7896 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7897 /* If both operands are extended, they must either both
7898 be zero-extended or both be sign-extended. */
7899 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7900 ==
7901 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7902 {
7903 enum machine_mode innermode
7904 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7905 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7906 ? smul_widen_optab : umul_widen_optab);
7907 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7908 ? umul_widen_optab : smul_widen_optab);
7909 if (mode == GET_MODE_WIDER_MODE (innermode))
7910 {
7911 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7912 {
7913 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7914 NULL_RTX, VOIDmode, 0);
7915 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7916 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7917 VOIDmode, 0);
7918 else
7919 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7920 NULL_RTX, VOIDmode, 0);
7921 goto binop2;
7922 }
7923 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7924 && innermode == word_mode)
7925 {
7926 rtx htem;
7927 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7928 NULL_RTX, VOIDmode, 0);
7929 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7930 op1 = convert_modes (innermode, mode,
7931 expand_expr (TREE_OPERAND (exp, 1),
7932 NULL_RTX, VOIDmode, 0),
7933 unsignedp);
7934 else
7935 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7936 NULL_RTX, VOIDmode, 0);
7937 temp = expand_binop (mode, other_optab, op0, op1, target,
7938 unsignedp, OPTAB_LIB_WIDEN);
7939 htem = expand_mult_highpart_adjust (innermode,
7940 gen_highpart (innermode, temp),
7941 op0, op1,
7942 gen_highpart (innermode, temp),
7943 unsignedp);
7944 emit_move_insn (gen_highpart (innermode, temp), htem);
7945 return temp;
7946 }
7947 }
7948 }
7949 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7950 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7951 return expand_mult (mode, op0, op1, target, unsignedp);
7952
7953 case TRUNC_DIV_EXPR:
7954 case FLOOR_DIV_EXPR:
7955 case CEIL_DIV_EXPR:
7956 case ROUND_DIV_EXPR:
7957 case EXACT_DIV_EXPR:
7958 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7959 subtarget = 0;
7960 /* Possible optimization: compute the dividend with EXPAND_SUM
7961 then, if the divisor is constant, we can optimize the case
7962 where some terms of the dividend have coefficients divisible by it. */
7963 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7964 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7965 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7966
7967 case RDIV_EXPR:
7968 /* Emit a/b as a*(1/b). Later CSE may manage to share the reciprocal,
7969 saving an expensive divide. If not, combine will rebuild the
7970 original computation. */
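      /* A sketch of the intended win: given x/d and y/d within reach of CSE,
	 rewriting them as x*(1/d) and y*(1/d) lets the reciprocal 1/d be
	 computed once, leaving only a single expensive divide.  */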
7971 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7972 && !real_onep (TREE_OPERAND (exp, 0)))
7973 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7974 build (RDIV_EXPR, type,
7975 build_real (type, dconst1),
7976 TREE_OPERAND (exp, 1))),
7977 target, tmode, unsignedp);
7978 this_optab = sdiv_optab;
7979 goto binop;
7980
7981 case TRUNC_MOD_EXPR:
7982 case FLOOR_MOD_EXPR:
7983 case CEIL_MOD_EXPR:
7984 case ROUND_MOD_EXPR:
7985 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7986 subtarget = 0;
7987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7988 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7989 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7990
7991 case FIX_ROUND_EXPR:
7992 case FIX_FLOOR_EXPR:
7993 case FIX_CEIL_EXPR:
7994 abort (); /* Not used for C. */
7995
7996 case FIX_TRUNC_EXPR:
7997 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7998 if (target == 0)
7999 target = gen_reg_rtx (mode);
8000 expand_fix (target, op0, unsignedp);
8001 return target;
8002
8003 case FLOAT_EXPR:
8004 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8005 if (target == 0)
8006 target = gen_reg_rtx (mode);
8007 /* expand_float can't figure out what to do if FROM has VOIDmode.
8008 So give it the correct mode. With -O, cse will optimize this. */
8009 if (GET_MODE (op0) == VOIDmode)
8010 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8011 op0);
8012 expand_float (target, op0,
8013 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8014 return target;
8015
8016 case NEGATE_EXPR:
8017 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8018 temp = expand_unop (mode,
8019 ! unsignedp && flag_trapv
8020 && (GET_MODE_CLASS(mode) == MODE_INT)
8021 ? negv_optab : neg_optab, op0, target, 0);
8022 if (temp == 0)
8023 abort ();
8024 return temp;
8025
8026 case ABS_EXPR:
8027 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8028
8029 /* Handle complex values specially. */
8030 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8031 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8032 return expand_complex_abs (mode, op0, target, unsignedp);
8033
8034 /* Unsigned abs is simply the operand. Testing here means we don't
8035 risk generating incorrect code below. */
8036 if (TREE_UNSIGNED (type))
8037 return op0;
8038
8039 return expand_abs (mode, op0, target, unsignedp,
8040 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8041
8042 case MAX_EXPR:
8043 case MIN_EXPR:
8044 target = original_target;
8045 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8046 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8047 || GET_MODE (target) != mode
8048 || (GET_CODE (target) == REG
8049 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8050 target = gen_reg_rtx (mode);
8051 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8052 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8053
8054 /* First try to do it with a special MIN or MAX instruction.
8055 If that does not win, use a conditional jump to select the proper
8056 value. */
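      /* The conditional-jump fallback below amounts to, sketched for
	 MAX_EXPR:
	     target = op0;
	     if (target >= op1) goto done;
	     target = op1;
	   done:  */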
8057 this_optab = (TREE_UNSIGNED (type)
8058 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8059 : (code == MIN_EXPR ? smin_optab : smax_optab));
8060
8061 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8062 OPTAB_WIDEN);
8063 if (temp != 0)
8064 return temp;
8065
8066 /* At this point, a MEM target is no longer useful; we will get better
8067 code without it. */
8068
8069 if (GET_CODE (target) == MEM)
8070 target = gen_reg_rtx (mode);
8071
8072 if (target != op0)
8073 emit_move_insn (target, op0);
8074
8075 op0 = gen_label_rtx ();
8076
8077 /* If this mode is an integer too wide to compare properly,
8078 compare word by word. Rely on cse to optimize constant cases. */
8079 if (GET_MODE_CLASS (mode) == MODE_INT
8080 && ! can_compare_p (GE, mode, ccp_jump))
8081 {
8082 if (code == MAX_EXPR)
8083 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8084 target, op1, NULL_RTX, op0);
8085 else
8086 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8087 op1, target, NULL_RTX, op0);
8088 }
8089 else
8090 {
8091 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8092 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8093 unsignedp, mode, NULL_RTX, NULL_RTX,
8094 op0);
8095 }
8096 emit_move_insn (target, op1);
8097 emit_label (op0);
8098 return target;
8099
8100 case BIT_NOT_EXPR:
8101 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8102 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8103 if (temp == 0)
8104 abort ();
8105 return temp;
8106
8107 case FFS_EXPR:
8108 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8109 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8110 if (temp == 0)
8111 abort ();
8112 return temp;
8113
8114 /* ??? Can optimize bitwise operations with one arg constant.
8115 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8116 and (a bitwise1 b) bitwise2 b (etc)
8117 but that is probably not worth while. */
8118
8119 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8120 boolean values when we want in all cases to compute both of them. In
8121 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8122 as actual zero-or-1 values and then bitwise anding. In cases where
8123 there cannot be any side effects, better code would be made by
8124 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8125 how to recognize those cases. */
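  /* A sketch of the difference: for TRUTH_AND_EXPR both operands are
     expanded unconditionally, roughly t1 = (a != 0); t2 = (b != 0); t1 & t2,
     whereas TRUTH_ANDIF_EXPR branches around the evaluation of the second
     operand.  */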
8126
8127 case TRUTH_AND_EXPR:
8128 case BIT_AND_EXPR:
8129 this_optab = and_optab;
8130 goto binop;
8131
8132 case TRUTH_OR_EXPR:
8133 case BIT_IOR_EXPR:
8134 this_optab = ior_optab;
8135 goto binop;
8136
8137 case TRUTH_XOR_EXPR:
8138 case BIT_XOR_EXPR:
8139 this_optab = xor_optab;
8140 goto binop;
8141
8142 case LSHIFT_EXPR:
8143 case RSHIFT_EXPR:
8144 case LROTATE_EXPR:
8145 case RROTATE_EXPR:
8146 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8147 subtarget = 0;
8148 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8149 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8150 unsignedp);
8151
8152 /* Could determine the answer when only additive constants differ. Also,
8153 the addition of one can be handled by changing the condition. */
8154 case LT_EXPR:
8155 case LE_EXPR:
8156 case GT_EXPR:
8157 case GE_EXPR:
8158 case EQ_EXPR:
8159 case NE_EXPR:
8160 case UNORDERED_EXPR:
8161 case ORDERED_EXPR:
8162 case UNLT_EXPR:
8163 case UNLE_EXPR:
8164 case UNGT_EXPR:
8165 case UNGE_EXPR:
8166 case UNEQ_EXPR:
8167 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8168 if (temp != 0)
8169 return temp;
8170
8171 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8172 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8173 && original_target
8174 && GET_CODE (original_target) == REG
8175 && (GET_MODE (original_target)
8176 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8177 {
8178 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8179 VOIDmode, 0);
8180
8181 if (temp != original_target)
8182 temp = copy_to_reg (temp);
8183
8184 op1 = gen_label_rtx ();
8185 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8186 GET_MODE (temp), unsignedp, op1);
8187 emit_move_insn (temp, const1_rtx);
8188 emit_label (op1);
8189 return temp;
8190 }
8191
8192 /* If no set-flag instruction, must generate a conditional
8193 store into a temporary variable. Drop through
8194 and handle this like && and ||. */
8195
8196 case TRUTH_ANDIF_EXPR:
8197 case TRUTH_ORIF_EXPR:
8198 if (! ignore
8199 && (target == 0 || ! safe_from_p (target, exp, 1)
8200 /* Make sure we don't have a hard reg (such as the function's return
8201 value) live across basic blocks, if not optimizing. */
8202 || (!optimize && GET_CODE (target) == REG
8203 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8204 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8205
8206 if (target)
8207 emit_clr_insn (target);
8208
8209 op1 = gen_label_rtx ();
8210 jumpifnot (exp, op1);
8211
8212 if (target)
8213 emit_0_to_1_insn (target);
8214
8215 emit_label (op1);
8216 return ignore ? const0_rtx : target;
8217
8218 case TRUTH_NOT_EXPR:
8219 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8220 /* The parser is careful to generate TRUTH_NOT_EXPR
8221 only with operands that are always zero or one. */
8222 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8223 target, 1, OPTAB_LIB_WIDEN);
8224 if (temp == 0)
8225 abort ();
8226 return temp;
8227
8228 case COMPOUND_EXPR:
8229 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8230 emit_queue ();
8231 return expand_expr (TREE_OPERAND (exp, 1),
8232 (ignore ? const0_rtx : target),
8233 VOIDmode, 0);
8234
8235 case COND_EXPR:
8236 /* If we would have a "singleton" (see below) were it not for a
8237 conversion in each arm, bring that conversion back out. */
8238 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8239 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8240 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8241 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8242 {
8243 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8244 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8245
8246 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8247 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8248 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8249 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8250 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8251 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8252 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8253 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8254 return expand_expr (build1 (NOP_EXPR, type,
8255 build (COND_EXPR, TREE_TYPE (iftrue),
8256 TREE_OPERAND (exp, 0),
8257 iftrue, iffalse)),
8258 target, tmode, modifier);
8259 }
8260
8261 {
8262 /* Note that COND_EXPRs whose type is a structure or union
8263 are required to be constructed to contain assignments of
8264 a temporary variable, so that we can evaluate them here
8265 for side effect only. If type is void, we must do likewise. */
8266
8267 /* If an arm of the branch requires a cleanup,
8268 only that cleanup is performed. */
8269
8270 tree singleton = 0;
8271 tree binary_op = 0, unary_op = 0;
8272
8273 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8274 convert it to our mode, if necessary. */
8275 if (integer_onep (TREE_OPERAND (exp, 1))
8276 && integer_zerop (TREE_OPERAND (exp, 2))
8277 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8278 {
8279 if (ignore)
8280 {
8281 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8282 ro_modifier);
8283 return const0_rtx;
8284 }
8285
8286 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8287 if (GET_MODE (op0) == mode)
8288 return op0;
8289
8290 if (target == 0)
8291 target = gen_reg_rtx (mode);
8292 convert_move (target, op0, unsignedp);
8293 return target;
8294 }
8295
8296 /* Check for X ? A + B : A. If we have this, we can copy A to the
8297 output and conditionally add B. Similarly for unary operations.
8298 Don't do this if X has side-effects because those side effects
8299 might affect A or B and the "?" operation is a sequence point in
8300 ANSI. (operand_equal_p tests for side effects.) */
8301
8302 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8303 && operand_equal_p (TREE_OPERAND (exp, 2),
8304 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8305 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8306 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8307 && operand_equal_p (TREE_OPERAND (exp, 1),
8308 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8309 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8310 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8311 && operand_equal_p (TREE_OPERAND (exp, 2),
8312 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8313 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8314 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8315 && operand_equal_p (TREE_OPERAND (exp, 1),
8316 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8317 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8318
8319 /* If we are not to produce a result, we have no target. Otherwise,
8320 if a target was specified use it; it will not be used as an
8321 intermediate target unless it is safe. If no target, use a
8322 temporary. */
8323
8324 if (ignore)
8325 temp = 0;
8326 else if (original_target
8327 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8328 || (singleton && GET_CODE (original_target) == REG
8329 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8330 && original_target == var_rtx (singleton)))
8331 && GET_MODE (original_target) == mode
8332 #ifdef HAVE_conditional_move
8333 && (! can_conditionally_move_p (mode)
8334 || GET_CODE (original_target) == REG
8335 || TREE_ADDRESSABLE (type))
8336 #endif
8337 && (GET_CODE (original_target) != MEM
8338 || TREE_ADDRESSABLE (type)))
8339 temp = original_target;
8340 else if (TREE_ADDRESSABLE (type))
8341 abort ();
8342 else
8343 temp = assign_temp (type, 0, 0, 1);
8344
8345 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8346 do the test of X as a store-flag operation, do this as
8347 A + ((X != 0) << log C). Similarly for other simple binary
8348 operators. Only do for C == 1 if BRANCH_COST is low. */
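	/* A hypothetical instance: X ? A + 4 : A becomes
	   A + ((X != 0) << 2), trading the conditional branch for a
	   store-flag and a shift.  */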
8349 if (temp && singleton && binary_op
8350 && (TREE_CODE (binary_op) == PLUS_EXPR
8351 || TREE_CODE (binary_op) == MINUS_EXPR
8352 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8353 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8354 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8355 : integer_onep (TREE_OPERAND (binary_op, 1)))
8356 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8357 {
8358 rtx result;
8359 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8360 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8361 ? addv_optab : add_optab)
8362 : TREE_CODE (binary_op) == MINUS_EXPR
8363 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8364 ? subv_optab : sub_optab)
8365 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8366 : xor_optab);
8367
8368 /* If we had X ? A : A + 1, do this as A + (X == 0).
8369
8370 We have to invert the truth value here and then put it
8371 back later if do_store_flag fails. We cannot simply copy
8372 TREE_OPERAND (exp, 0) to another variable and modify that
8373 because invert_truthvalue can modify the tree pointed to
8374 by its argument. */
8375 if (singleton == TREE_OPERAND (exp, 1))
8376 TREE_OPERAND (exp, 0)
8377 = invert_truthvalue (TREE_OPERAND (exp, 0));
8378
8379 result = do_store_flag (TREE_OPERAND (exp, 0),
8380 (safe_from_p (temp, singleton, 1)
8381 ? temp : NULL_RTX),
8382 mode, BRANCH_COST <= 1);
8383
8384 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8385 result = expand_shift (LSHIFT_EXPR, mode, result,
8386 build_int_2 (tree_log2
8387 (TREE_OPERAND
8388 (binary_op, 1)),
8389 0),
8390 (safe_from_p (temp, singleton, 1)
8391 ? temp : NULL_RTX), 0);
8392
8393 if (result)
8394 {
8395 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8396 return expand_binop (mode, boptab, op1, result, temp,
8397 unsignedp, OPTAB_LIB_WIDEN);
8398 }
8399 else if (singleton == TREE_OPERAND (exp, 1))
8400 TREE_OPERAND (exp, 0)
8401 = invert_truthvalue (TREE_OPERAND (exp, 0));
8402 }
8403
8404 do_pending_stack_adjust ();
8405 NO_DEFER_POP;
8406 op0 = gen_label_rtx ();
8407
8408 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8409 {
8410 if (temp != 0)
8411 {
8412 /* If the target conflicts with the other operand of the
8413 binary op, we can't use it. Also, we can't use the target
8414 if it is a hard register, because evaluating the condition
8415 might clobber it. */
8416 if ((binary_op
8417 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8418 || (GET_CODE (temp) == REG
8419 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8420 temp = gen_reg_rtx (mode);
8421 store_expr (singleton, temp, 0);
8422 }
8423 else
8424 expand_expr (singleton,
8425 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8426 if (singleton == TREE_OPERAND (exp, 1))
8427 jumpif (TREE_OPERAND (exp, 0), op0);
8428 else
8429 jumpifnot (TREE_OPERAND (exp, 0), op0);
8430
8431 start_cleanup_deferral ();
8432 if (binary_op && temp == 0)
8433 /* Just touch the other operand. */
8434 expand_expr (TREE_OPERAND (binary_op, 1),
8435 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8436 else if (binary_op)
8437 store_expr (build (TREE_CODE (binary_op), type,
8438 make_tree (type, temp),
8439 TREE_OPERAND (binary_op, 1)),
8440 temp, 0);
8441 else
8442 store_expr (build1 (TREE_CODE (unary_op), type,
8443 make_tree (type, temp)),
8444 temp, 0);
8445 op1 = op0;
8446 }
8447 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8448 comparison operator. If we have one of these cases, set the
8449 output to A, branch on A (cse will merge these two references),
8450 then set the output to FOO. */
8451 else if (temp
8452 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8453 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8454 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8455 TREE_OPERAND (exp, 1), 0)
8456 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8457 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8458 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8459 {
8460 if (GET_CODE (temp) == REG
8461 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8462 temp = gen_reg_rtx (mode);
8463 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8464 jumpif (TREE_OPERAND (exp, 0), op0);
8465
8466 start_cleanup_deferral ();
8467 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8468 op1 = op0;
8469 }
8470 else if (temp
8471 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8472 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8473 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8474 TREE_OPERAND (exp, 2), 0)
8475 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8476 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8477 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8478 {
8479 if (GET_CODE (temp) == REG
8480 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8481 temp = gen_reg_rtx (mode);
8482 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8483 jumpifnot (TREE_OPERAND (exp, 0), op0);
8484
8485 start_cleanup_deferral ();
8486 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8487 op1 = op0;
8488 }
8489 else
8490 {
8491 op1 = gen_label_rtx ();
8492 jumpifnot (TREE_OPERAND (exp, 0), op0);
8493
8494 start_cleanup_deferral ();
8495
8496 /* One branch of the cond can be void, if it never returns. For
8497 example A ? throw : E */
8498 if (temp != 0
8499 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8500 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8501 else
8502 expand_expr (TREE_OPERAND (exp, 1),
8503 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8504 end_cleanup_deferral ();
8505 emit_queue ();
8506 emit_jump_insn (gen_jump (op1));
8507 emit_barrier ();
8508 emit_label (op0);
8509 start_cleanup_deferral ();
8510 if (temp != 0
8511 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8512 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8513 else
8514 expand_expr (TREE_OPERAND (exp, 2),
8515 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8516 }
8517
8518 end_cleanup_deferral ();
8519
8520 emit_queue ();
8521 emit_label (op1);
8522 OK_DEFER_POP;
8523
8524 return temp;
8525 }
8526
8527 case TARGET_EXPR:
8528 {
8529 /* Something needs to be initialized, but we didn't know
8530 where that thing was when building the tree. For example,
8531 it could be the return value of a function, or a parameter
8532 to a function which is laid down on the stack, or a temporary
8533 variable which must be passed by reference.
8534
8535 We guarantee that the expression will either be constructed
8536 or copied into our original target. */
8537
8538 tree slot = TREE_OPERAND (exp, 0);
8539 tree cleanups = NULL_TREE;
8540 tree exp1;
8541
8542 if (TREE_CODE (slot) != VAR_DECL)
8543 abort ();
8544
8545 if (! ignore)
8546 target = original_target;
8547
8548 /* Set this here so that if we get a target that refers to a
8549 register variable that's already been used, put_reg_into_stack
8550 knows that it should fix up those uses. */
8551 TREE_USED (slot) = 1;
8552
8553 if (target == 0)
8554 {
8555 if (DECL_RTL_SET_P (slot))
8556 {
8557 target = DECL_RTL (slot);
8558 /* If we have already expanded the slot, don't do
8559 it again. (mrs) */
8560 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8561 return target;
8562 }
8563 else
8564 {
8565 target = assign_temp (type, 2, 0, 1);
8566 /* All temp slots at this level must not conflict. */
8567 preserve_temp_slots (target);
8568 SET_DECL_RTL (slot, target);
8569 if (TREE_ADDRESSABLE (slot))
8570 put_var_into_stack (slot);
8571
8572 /* Since SLOT is not known to the called function
8573 to belong to its stack frame, we must build an explicit
8574 cleanup. This case occurs when we must build up a reference
8575 to pass the reference as an argument. In this case,
8576 it is very likely that such a reference need not be
8577 built here. */
8578
8579 if (TREE_OPERAND (exp, 2) == 0)
8580 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8581 cleanups = TREE_OPERAND (exp, 2);
8582 }
8583 }
8584 else
8585 {
8586 /* This case does occur, when expanding a parameter which
8587 needs to be constructed on the stack. The target
8588 is the actual stack address that we want to initialize.
8589 The function we call will perform the cleanup in this case. */
8590
8591 /* If we have already assigned it space, use that space,
8592 not the target that we were passed in, as our target
8593 parameter is only a hint. */
8594 if (DECL_RTL_SET_P (slot))
8595 {
8596 target = DECL_RTL (slot);
8597 /* If we have already expanded the slot, don't do
8598 it again. (mrs) */
8599 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8600 return target;
8601 }
8602 else
8603 {
8604 SET_DECL_RTL (slot, target);
8605 /* If we must have an addressable slot, then make sure that
8606 the RTL that we just stored in slot is OK. */
8607 if (TREE_ADDRESSABLE (slot))
8608 put_var_into_stack (slot);
8609 }
8610 }
8611
8612 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8613 /* Mark it as expanded. */
8614 TREE_OPERAND (exp, 1) = NULL_TREE;
8615
8616 store_expr (exp1, target, 0);
8617
8618 expand_decl_cleanup (NULL_TREE, cleanups);
8619
8620 return target;
8621 }
8622
8623 case INIT_EXPR:
8624 {
8625 tree lhs = TREE_OPERAND (exp, 0);
8626 tree rhs = TREE_OPERAND (exp, 1);
8627
8628 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8629 return temp;
8630 }
8631
8632 case MODIFY_EXPR:
8633 {
8634 /* If lhs is complex, expand calls in rhs before computing it.
8635 That's so we don't compute a pointer and save it over a
8636 call. If lhs is simple, compute it first so we can give it
8637 as a target if the rhs is just a call. This avoids an
8638 extra temp and copy, and prevents a partial subsumption
8639 which makes bad code. Actually we could treat
8640 component_ref's of vars like vars. */
8641
8642 tree lhs = TREE_OPERAND (exp, 0);
8643 tree rhs = TREE_OPERAND (exp, 1);
8644
8645 temp = 0;
8646
8647 /* Check for |= or &= of a bitfield of size one into another bitfield
8648 of size 1. In this case, (unless we need the result of the
8649 assignment) we can do this more efficiently with a
8650 test followed by an assignment, if necessary.
8651
8652 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8653 things change so we do, this code should be enhanced to
8654 support it. */
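	/* E.g. (a hypothetical case): for s.a |= t.b with both fields one bit
	   wide, the code below jumps past the store when t.b is clear and
	   otherwise stores the constant 1 into s.a, avoiding a
	   read-modify-write of s.a.  */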
8655 if (ignore
8656 && TREE_CODE (lhs) == COMPONENT_REF
8657 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8658 || TREE_CODE (rhs) == BIT_AND_EXPR)
8659 && TREE_OPERAND (rhs, 0) == lhs
8660 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8661 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8662 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8663 {
8664 rtx label = gen_label_rtx ();
8665
8666 do_jump (TREE_OPERAND (rhs, 1),
8667 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8668 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8669 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8670 (TREE_CODE (rhs) == BIT_IOR_EXPR
8671 ? integer_one_node
8672 : integer_zero_node)),
8673 0, 0);
8674 do_pending_stack_adjust ();
8675 emit_label (label);
8676 return const0_rtx;
8677 }
8678
8679 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8680
8681 return temp;
8682 }
8683
8684 case RETURN_EXPR:
8685 if (!TREE_OPERAND (exp, 0))
8686 expand_null_return ();
8687 else
8688 expand_return (TREE_OPERAND (exp, 0));
8689 return const0_rtx;
8690
8691 case PREINCREMENT_EXPR:
8692 case PREDECREMENT_EXPR:
8693 return expand_increment (exp, 0, ignore);
8694
8695 case POSTINCREMENT_EXPR:
8696 case POSTDECREMENT_EXPR:
8697 /* Faster to treat as pre-increment if result is not used. */
8698 return expand_increment (exp, ! ignore, ignore);
8699
8700 case ADDR_EXPR:
8701 /* Are we taking the address of a nested function? */
8702 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8703 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8704 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8705 && ! TREE_STATIC (exp))
8706 {
8707 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8708 op0 = force_operand (op0, target);
8709 }
8710 /* If we are taking the address of something erroneous, just
8711 return a zero. */
8712 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8713 return const0_rtx;
8714 /* If we are taking the address of a constant and are at the
8715 top level, we have to use output_constant_def since we can't
8716 call force_const_mem at top level. */
8717 else if (cfun == 0
8718 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8719 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8720 == 'c')))
8721 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8722 else
8723 {
8724 /* We make sure to pass const0_rtx down if we came in with
8725 ignore set, to avoid doing the cleanups twice. */
8726 op0 = expand_expr (TREE_OPERAND (exp, 0),
8727 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8728 (modifier == EXPAND_INITIALIZER
8729 ? modifier : EXPAND_CONST_ADDRESS));
8730
8731 /* If we are going to ignore the result, OP0 will have been set
8732 to const0_rtx, so just return it. Don't get confused and
8733 think we are taking the address of the constant. */
8734 if (ignore)
8735 return op0;
8736
8737 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8738 clever and returns a REG when given a MEM. */
8739 op0 = protect_from_queue (op0, 1);
8740
8741 /* We would like the object in memory. If it is a constant, we can
8742 have it be statically allocated into memory. For a non-constant,
8743 we need to allocate some memory and store the value into it. */
8744
8745 if (CONSTANT_P (op0))
8746 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8747 op0);
8748 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8749 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8750 || GET_CODE (op0) == PARALLEL)
8751 {
8752 /* If this object is in a register, it can't be BLKmode. */
8753 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8754 tree nt = build_qualified_type (inner_type,
8755 (TYPE_QUALS (inner_type)
8756 | TYPE_QUAL_CONST));
8757 rtx memloc = assign_temp (nt, 1, 1, 1);
8758
8759 if (GET_CODE (op0) == PARALLEL)
8760 /* Handle calls that pass values in multiple non-contiguous
8761 locations. The Irix 6 ABI has examples of this. */
8762 emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
8763 else
8764 emit_move_insn (memloc, op0);
8765
8766 op0 = memloc;
8767 }
8768
8769 if (GET_CODE (op0) != MEM)
8770 abort ();
8771
8772 mark_temp_addr_taken (op0);
8773 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8774 {
8775 op0 = XEXP (op0, 0);
8776 #ifdef POINTERS_EXTEND_UNSIGNED
8777 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8778 && mode == ptr_mode)
8779 op0 = convert_memory_address (ptr_mode, op0);
8780 #endif
8781 return op0;
8782 }
8783
8784 /* If OP0 is not aligned at least as much as the type requires, we
8785 need to make a temporary, copy OP0 to it, and take the address of
8786 the temporary. We want to use the alignment of the type, not of
8787 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8788 the test for BLKmode means that can't happen. The test for
8789 BLKmode is because we never make mis-aligned MEMs with
8790 non-BLKmode.
8791
8792 We don't need to do this at all if the machine doesn't have
8793 strict alignment. */
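 /* For instance (illustrative), on a strict-alignment target, taking
    the address of a BLKmode field of a packed structure: the field
    may sit at a byte-aligned address while its type wants word
    alignment, so we copy it into a suitably aligned stack temporary
    and hand out the temporary's address instead.  */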
8794 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8795 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8796 > MEM_ALIGN (op0))
8797 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8798 {
8799 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8800 rtx new
8801 = assign_stack_temp_for_type
8802 (TYPE_MODE (inner_type),
8803 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8804 : int_size_in_bytes (inner_type),
8805 1, build_qualified_type (inner_type,
8806 (TYPE_QUALS (inner_type)
8807 | TYPE_QUAL_CONST)));
8808
8809 if (TYPE_ALIGN_OK (inner_type))
8810 abort ();
8811
8812 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8813 op0 = new;
8814 }
8815
8816 op0 = force_operand (XEXP (op0, 0), target);
8817 }
8818
8819 if (flag_force_addr && GET_CODE (op0) != REG)
8820 op0 = force_reg (Pmode, op0);
8821
8822 if (GET_CODE (op0) == REG
8823 && ! REG_USERVAR_P (op0))
8824 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8825
8826 #ifdef POINTERS_EXTEND_UNSIGNED
8827 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8828 && mode == ptr_mode)
8829 op0 = convert_memory_address (ptr_mode, op0);
8830 #endif
8831
8832 return op0;
8833
8834 case ENTRY_VALUE_EXPR:
8835 abort ();
8836
8837 /* COMPLEX type for Extended Pascal & Fortran */
8838 case COMPLEX_EXPR:
8839 {
8840 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8841 rtx insns;
8842
8843 /* Get the rtx code of the operands. */
8844 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8845 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8846
8847 if (! target)
8848 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8849
8850 start_sequence ();
8851
8852 /* Move the real (op0) and imaginary (op1) parts to their location. */
8853 emit_move_insn (gen_realpart (mode, target), op0);
8854 emit_move_insn (gen_imagpart (mode, target), op1);
8855
8856 insns = get_insns ();
8857 end_sequence ();
8858
8859 /* Complex construction should appear as a single unit. */
8860 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8861 each with a separate pseudo as destination.
8862 It's not correct for flow to treat them as a unit. */
8863 if (GET_CODE (target) != CONCAT)
8864 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8865 else
8866 emit_insns (insns);
8867
8868 return target;
8869 }
8870
8871 case REALPART_EXPR:
8872 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8873 return gen_realpart (mode, op0);
8874
8875 case IMAGPART_EXPR:
8876 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8877 return gen_imagpart (mode, op0);
8878
8879 case CONJ_EXPR:
8880 {
8881 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8882 rtx imag_t;
8883 rtx insns;
8884
8885 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8886
8887 if (! target)
8888 target = gen_reg_rtx (mode);
8889
8890 start_sequence ();
8891
8892 /* Store the realpart and the negated imagpart to target. */
8893 emit_move_insn (gen_realpart (partmode, target),
8894 gen_realpart (partmode, op0));
8895
8896 imag_t = gen_imagpart (partmode, target);
8897 temp = expand_unop (partmode,
8898 ! unsignedp && flag_trapv
8899 && (GET_MODE_CLASS(partmode) == MODE_INT)
8900 ? negv_optab : neg_optab,
8901 gen_imagpart (partmode, op0), imag_t, 0);
8902 if (temp != imag_t)
8903 emit_move_insn (imag_t, temp);
8904
8905 insns = get_insns ();
8906 end_sequence ();
8907
8908 /* Conjugate should appear as a single unit.
8909 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8910 each with a separate pseudo as destination.
8911 It's not correct for flow to treat them as a unit. */
8912 if (GET_CODE (target) != CONCAT)
8913 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8914 else
8915 emit_insns (insns);
8916
8917 return target;
8918 }
8919
8920 case TRY_CATCH_EXPR:
8921 {
8922 tree handler = TREE_OPERAND (exp, 1);
8923
8924 expand_eh_region_start ();
8925
8926 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8927
8928 expand_eh_region_end_cleanup (handler);
8929
8930 return op0;
8931 }
8932
8933 case TRY_FINALLY_EXPR:
8934 {
8935 tree try_block = TREE_OPERAND (exp, 0);
8936 tree finally_block = TREE_OPERAND (exp, 1);
8937 rtx finally_label = gen_label_rtx ();
8938 rtx done_label = gen_label_rtx ();
8939 rtx return_link = gen_reg_rtx (Pmode);
8940 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8941 (tree) finally_label, (tree) return_link);
8942 TREE_SIDE_EFFECTS (cleanup) = 1;
8943
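 /* An illustrative outline of the code emitted below, ignoring any
    exception-handling edges: the try-block body; then the cleanup
    run by expand_end_bindings, which sets RETURN_LINK to the address
    of the point just past its own jump and jumps to FINALLY_LABEL;
    then a jump to DONE_LABEL; then FINALLY_LABEL: the finally-block
    body followed by an indirect jump through RETURN_LINK; and
    finally DONE_LABEL.  The casts to tree above are how the label
    and link-register rtxes are carried in the operands of the
    GOTO_SUBROUTINE_EXPR; see that case below.  */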
8944 /* Start a new binding layer that will keep track of all cleanup
8945 actions to be performed. */
8946 expand_start_bindings (2);
8947
8948 target_temp_slot_level = temp_slot_level;
8949
8950 expand_decl_cleanup (NULL_TREE, cleanup);
8951 op0 = expand_expr (try_block, target, tmode, modifier);
8952
8953 preserve_temp_slots (op0);
8954 expand_end_bindings (NULL_TREE, 0, 0);
8955 emit_jump (done_label);
8956 emit_label (finally_label);
8957 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8958 emit_indirect_jump (return_link);
8959 emit_label (done_label);
8960 return op0;
8961 }
8962
8963 case GOTO_SUBROUTINE_EXPR:
8964 {
8965 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8966 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8967 rtx return_address = gen_label_rtx ();
8968 emit_move_insn (return_link,
8969 gen_rtx_LABEL_REF (Pmode, return_address));
8970 emit_jump (subr);
8971 emit_label (return_address);
8972 return const0_rtx;
8973 }
8974
8975 case VA_ARG_EXPR:
8976 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8977
8978 case EXC_PTR_EXPR:
8979 return get_exception_pointer (cfun);
8980
8981 case FDESC_EXPR:
8982 /* Function descriptors are not valid except as
8983 initialization constants, and should not be expanded. */
8984 abort ();
8985
8986 default:
8987 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8988 }
8989
8990 /* Here to do an ordinary binary operator, generating an instruction
8991 from the optab already placed in `this_optab'. */
8992 binop:
8993 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8994 subtarget = 0;
8995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8996 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8997 binop2:
8998 temp = expand_binop (mode, this_optab, op0, op1, target,
8999 unsignedp, OPTAB_LIB_WIDEN);
9000 if (temp == 0)
9001 abort ();
9002 return temp;
9003 }
9004 \f
9005 /* Return the tree node if ARG corresponds to a string constant, or zero
9006 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9007 in bytes within the string that ARG is accessing. The type of the
9008 offset will be `sizetype'. */
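 /* For example (illustrative), if ARG is the folded form of
    "hello" + 2, i.e. a PLUS_EXPR of an ADDR_EXPR of the STRING_CST
    and the constant 2, we return the STRING_CST and set *PTR_OFFSET
    to the sizetype constant 2.  */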
9009
9010 tree
9011 string_constant (arg, ptr_offset)
9012 tree arg;
9013 tree *ptr_offset;
9014 {
9015 STRIP_NOPS (arg);
9016
9017 if (TREE_CODE (arg) == ADDR_EXPR
9018 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9019 {
9020 *ptr_offset = size_zero_node;
9021 return TREE_OPERAND (arg, 0);
9022 }
9023 else if (TREE_CODE (arg) == PLUS_EXPR)
9024 {
9025 tree arg0 = TREE_OPERAND (arg, 0);
9026 tree arg1 = TREE_OPERAND (arg, 1);
9027
9028 STRIP_NOPS (arg0);
9029 STRIP_NOPS (arg1);
9030
9031 if (TREE_CODE (arg0) == ADDR_EXPR
9032 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9033 {
9034 *ptr_offset = convert (sizetype, arg1);
9035 return TREE_OPERAND (arg0, 0);
9036 }
9037 else if (TREE_CODE (arg1) == ADDR_EXPR
9038 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9039 {
9040 *ptr_offset = convert (sizetype, arg0);
9041 return TREE_OPERAND (arg1, 0);
9042 }
9043 }
9044
9045 return 0;
9046 }
9047 \f
9048 /* Expand code for a post- or pre- increment or decrement
9049 and return the RTX for the result.
9050 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9051
9052 static rtx
9053 expand_increment (exp, post, ignore)
9054 tree exp;
9055 int post, ignore;
9056 {
9057 rtx op0, op1;
9058 rtx temp, value;
9059 tree incremented = TREE_OPERAND (exp, 0);
9060 optab this_optab = add_optab;
9061 int icode;
9062 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9063 int op0_is_copy = 0;
9064 int single_insn = 0;
9065 /* 1 means we can't store into OP0 directly,
9066 because it is a subreg narrower than a word,
9067 and we don't dare clobber the rest of the word. */
9068 int bad_subreg = 0;
9069
9070 /* Stabilize any component ref that might need to be
9071 evaluated more than once below. */
9072 if (!post
9073 || TREE_CODE (incremented) == BIT_FIELD_REF
9074 || (TREE_CODE (incremented) == COMPONENT_REF
9075 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9076 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9077 incremented = stabilize_reference (incremented);
9078 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9079 ones into save exprs so that they don't accidentally get evaluated
9080 more than once by the code below. */
9081 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9082 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9083 incremented = save_expr (incremented);
9084
9085 /* Compute the operands as RTX.
9086 Note whether OP0 is the actual lvalue or a copy of it:
9087 I believe it is a copy iff it is a register or subreg
9088 and insns were generated in computing it. */
9089
9090 temp = get_last_insn ();
9091 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9092
9093 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9094 in place but instead must do sign- or zero-extension during assignment,
9095 so we copy it into a new register and let the code below use it as
9096 a copy.
9097
9098 Note that we can safely modify this SUBREG since it is known not to be
9099 shared (it was made by the expand_expr call above). */
9100
9101 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9102 {
9103 if (post)
9104 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9105 else
9106 bad_subreg = 1;
9107 }
9108 else if (GET_CODE (op0) == SUBREG
9109 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9110 {
9111 /* We cannot increment this SUBREG in place. If we are
9112 post-incrementing, get a copy of the old value. Otherwise,
9113 just mark that we cannot increment in place. */
9114 if (post)
9115 op0 = copy_to_reg (op0);
9116 else
9117 bad_subreg = 1;
9118 }
9119
9120 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9121 && temp != get_last_insn ());
9122 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9123 EXPAND_MEMORY_USE_BAD);
9124
9125 /* Decide whether incrementing or decrementing. */
9126 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9127 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9128 this_optab = sub_optab;
9129
9130 /* Convert decrement by a constant into a negative increment. */
9131 if (this_optab == sub_optab
9132 && GET_CODE (op1) == CONST_INT)
9133 {
9134 op1 = GEN_INT (-INTVAL (op1));
9135 this_optab = add_optab;
9136 }
9137
9138 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9139 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9140
9141 /* For a preincrement, see if we can do this with a single instruction. */
9142 if (!post)
9143 {
9144 icode = (int) this_optab->handlers[(int) mode].insn_code;
9145 if (icode != (int) CODE_FOR_nothing
9146 /* Make sure that OP0 is valid for operands 0 and 1
9147 of the insn we want to queue. */
9148 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9149 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9150 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9151 single_insn = 1;
9152 }
9153
9154 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9155 then we cannot just increment OP0. We must therefore contrive to
9156 increment the original value. Then, for postincrement, we can return
9157 OP0 since it is a copy of the old value. For preincrement, expand here
9158 unless we can do it with a single insn.
9159
9160 Likewise if storing directly into OP0 would clobber high bits
9161 we need to preserve (bad_subreg). */
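 /* For example (illustrative), a pre-increment of a bit-field, or of
    a variable promoted to a wider SUBREG, cannot be done in place;
    in such cases we simply build the equivalent assignment
    `INCREMENTED = INCREMENTED +/- increment' below and expand it.  */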
9162 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9163 {
9164 /* This is the easiest way to increment the value wherever it is.
9165 Problems with multiple evaluation of INCREMENTED are prevented
9166 because either (1) it is a component_ref or preincrement,
9167 in which case it was stabilized above, or (2) it is an array_ref
9168 with constant index in an array in a register, which is
9169 safe to reevaluate. */
9170 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9171 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9172 ? MINUS_EXPR : PLUS_EXPR),
9173 TREE_TYPE (exp),
9174 incremented,
9175 TREE_OPERAND (exp, 1));
9176
9177 while (TREE_CODE (incremented) == NOP_EXPR
9178 || TREE_CODE (incremented) == CONVERT_EXPR)
9179 {
9180 newexp = convert (TREE_TYPE (incremented), newexp);
9181 incremented = TREE_OPERAND (incremented, 0);
9182 }
9183
9184 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9185 return post ? op0 : temp;
9186 }
9187
9188 if (post)
9189 {
9190 /* We have a true reference to the value in OP0.
9191 If there is an insn to add or subtract in this mode, queue it.
9192 Queueing the increment insn avoids the register shuffling
9193 that often results if we must increment now and first save
9194 the old value for subsequent use. */
9195
9196 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9197 op0 = stabilize (op0);
9198 #endif
9199
9200 icode = (int) this_optab->handlers[(int) mode].insn_code;
9201 if (icode != (int) CODE_FOR_nothing
9202 /* Make sure that OP0 is valid for operands 0 and 1
9203 of the insn we want to queue. */
9204 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9205 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9206 {
9207 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9208 op1 = force_reg (mode, op1);
9209
9210 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9211 }
9212 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9213 {
9214 rtx addr = (general_operand (XEXP (op0, 0), mode)
9215 ? force_reg (Pmode, XEXP (op0, 0))
9216 : copy_to_reg (XEXP (op0, 0)));
9217 rtx temp, result;
9218
9219 op0 = replace_equiv_address (op0, addr);
9220 temp = force_reg (GET_MODE (op0), op0);
9221 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9222 op1 = force_reg (mode, op1);
9223
9224 /* The increment queue is LIFO, thus we have to `queue'
9225 the instructions in reverse order. */
9226 enqueue_insn (op0, gen_move_insn (op0, temp));
9227 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9228 return result;
9229 }
9230 }
9231
9232 /* Preincrement, or we can't increment with one simple insn. */
9233 if (post)
9234 /* Save a copy of the value before inc or dec, to return it later. */
9235 temp = value = copy_to_reg (op0);
9236 else
9237 /* Arrange to return the incremented value. */
9238 /* Copy the rtx because expand_binop will protect from the queue,
9239 and the results of that would be invalid for us to return
9240 if our caller does emit_queue before using our result. */
9241 temp = copy_rtx (value = op0);
9242
9243 /* Increment however we can. */
9244 op1 = expand_binop (mode, this_optab, value, op1,
9245 current_function_check_memory_usage ? NULL_RTX : op0,
9246 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9247 /* Make sure the value is stored into OP0. */
9248 if (op1 != op0)
9249 emit_move_insn (op0, op1);
9250
9251 return temp;
9252 }
9253 \f
9254 /* At the start of a function, record that we have no previously-pushed
9255 arguments waiting to be popped. */
9256
9257 void
9258 init_pending_stack_adjust ()
9259 {
9260 pending_stack_adjust = 0;
9261 }
9262
9263 /* When exiting from function, if safe, clear out any pending stack adjust
9264 so the adjustment won't get done.
9265
9266 Note, if the current function calls alloca, then it must have a
9267 frame pointer regardless of the value of flag_omit_frame_pointer. */
9268
9269 void
9270 clear_pending_stack_adjust ()
9271 {
9272 #ifdef EXIT_IGNORE_STACK
9273 if (optimize > 0
9274 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9275 && EXIT_IGNORE_STACK
9276 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9277 && ! flag_inline_functions)
9278 {
9279 stack_pointer_delta -= pending_stack_adjust,
9280 pending_stack_adjust = 0;
9281 }
9282 #endif
9283 }
9284
9285 /* Pop any previously-pushed arguments that have not been popped yet. */
9286
9287 void
9288 do_pending_stack_adjust ()
9289 {
9290 if (inhibit_defer_pop == 0)
9291 {
9292 if (pending_stack_adjust != 0)
9293 adjust_stack (GEN_INT (pending_stack_adjust));
9294 pending_stack_adjust = 0;
9295 }
9296 }
9297 \f
9298 /* Expand conditional expressions. */
9299
9300 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9301 LABEL is an rtx of code CODE_LABEL, in this function and all the
9302 functions here. */
9303
9304 void
9305 jumpifnot (exp, label)
9306 tree exp;
9307 rtx label;
9308 {
9309 do_jump (exp, label, NULL_RTX);
9310 }
9311
9312 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9313
9314 void
9315 jumpif (exp, label)
9316 tree exp;
9317 rtx label;
9318 {
9319 do_jump (exp, NULL_RTX, label);
9320 }
9321
9322 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9323 the result is zero, or IF_TRUE_LABEL if the result is one.
9324 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9325 meaning fall through in that case.
9326
9327 do_jump always does any pending stack adjust except when it does not
9328 actually perform a jump. An example where there is no jump
9329 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9330
9331 This function is responsible for optimizing cases such as
9332 &&, || and comparison operators in EXP. */
9333
9334 void
9335 do_jump (exp, if_false_label, if_true_label)
9336 tree exp;
9337 rtx if_false_label, if_true_label;
9338 {
9339 enum tree_code code = TREE_CODE (exp);
9340 /* Some cases need to create a label to jump to
9341 in order to properly fall through.
9342 These cases set DROP_THROUGH_LABEL nonzero. */
9343 rtx drop_through_label = 0;
9344 rtx temp;
9345 int i;
9346 tree type;
9347 enum machine_mode mode;
9348
9349 #ifdef MAX_INTEGER_COMPUTATION_MODE
9350 check_max_integer_computation_mode (exp);
9351 #endif
9352
9353 emit_queue ();
9354
9355 switch (code)
9356 {
9357 case ERROR_MARK:
9358 break;
9359
9360 case INTEGER_CST:
9361 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9362 if (temp)
9363 emit_jump (temp);
9364 break;
9365
9366 #if 0
9367 /* This is not true with #pragma weak */
9368 case ADDR_EXPR:
9369 /* The address of something can never be zero. */
9370 if (if_true_label)
9371 emit_jump (if_true_label);
9372 break;
9373 #endif
9374
9375 case NOP_EXPR:
9376 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9377 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9378 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9379 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9380 goto normal;
9381 case CONVERT_EXPR:
9382 /* If we are narrowing the operand, we have to do the compare in the
9383 narrower mode. */
9384 if ((TYPE_PRECISION (TREE_TYPE (exp))
9385 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9386 goto normal;
9387 case NON_LVALUE_EXPR:
9388 case REFERENCE_EXPR:
9389 case ABS_EXPR:
9390 case NEGATE_EXPR:
9391 case LROTATE_EXPR:
9392 case RROTATE_EXPR:
9393 /* These cannot change zero->non-zero or vice versa. */
9394 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9395 break;
9396
9397 case WITH_RECORD_EXPR:
9398 /* Put the object on the placeholder list, recurse through our first
9399 operand, and pop the list. */
9400 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9401 placeholder_list);
9402 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9403 placeholder_list = TREE_CHAIN (placeholder_list);
9404 break;
9405
9406 #if 0
9407 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9408 a test and can be longer if the test is eliminated. */
9409 case PLUS_EXPR:
9410 /* Reduce to minus. */
9411 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9412 TREE_OPERAND (exp, 0),
9413 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9414 TREE_OPERAND (exp, 1))));
9415 /* Process as MINUS. */
9416 #endif
9417
9418 case MINUS_EXPR:
9419 /* Non-zero iff operands of minus differ. */
9420 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9421 TREE_OPERAND (exp, 0),
9422 TREE_OPERAND (exp, 1)),
9423 NE, NE, if_false_label, if_true_label);
9424 break;
9425
9426 case BIT_AND_EXPR:
9427 /* If we are AND'ing with a small constant, do this comparison in the
9428 smallest type that fits. If the machine doesn't have comparisons
9429 that small, it will be converted back to the wider comparison.
9430 This helps if we are testing the sign bit of a narrower object.
9431 combine can't do this for us because it can't know whether a
9432 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
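 /* For instance (illustrative), `if (x & 0x80)' with X an int can be
    tested as a QImode comparison of the low byte against zero when
    byte accesses are not slow, since bit 7 fits in a QImode value.  */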
9433
9434 if (! SLOW_BYTE_ACCESS
9435 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9436 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9437 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9438 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9439 && (type = type_for_mode (mode, 1)) != 0
9440 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9441 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9442 != CODE_FOR_nothing))
9443 {
9444 do_jump (convert (type, exp), if_false_label, if_true_label);
9445 break;
9446 }
9447 goto normal;
9448
9449 case TRUTH_NOT_EXPR:
9450 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9451 break;
9452
9453 case TRUTH_ANDIF_EXPR:
9454 if (if_false_label == 0)
9455 if_false_label = drop_through_label = gen_label_rtx ();
9456 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9457 start_cleanup_deferral ();
9458 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9459 end_cleanup_deferral ();
9460 break;
9461
9462 case TRUTH_ORIF_EXPR:
9463 if (if_true_label == 0)
9464 if_true_label = drop_through_label = gen_label_rtx ();
9465 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9466 start_cleanup_deferral ();
9467 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9468 end_cleanup_deferral ();
9469 break;
9470
9471 case COMPOUND_EXPR:
9472 push_temp_slots ();
9473 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9474 preserve_temp_slots (NULL_RTX);
9475 free_temp_slots ();
9476 pop_temp_slots ();
9477 emit_queue ();
9478 do_pending_stack_adjust ();
9479 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9480 break;
9481
9482 case COMPONENT_REF:
9483 case BIT_FIELD_REF:
9484 case ARRAY_REF:
9485 case ARRAY_RANGE_REF:
9486 {
9487 HOST_WIDE_INT bitsize, bitpos;
9488 int unsignedp;
9489 enum machine_mode mode;
9490 tree type;
9491 tree offset;
9492 int volatilep = 0;
9493
9494 /* Get description of this reference. We don't actually care
9495 about the underlying object here. */
9496 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9497 &unsignedp, &volatilep);
9498
9499 type = type_for_size (bitsize, unsignedp);
9500 if (! SLOW_BYTE_ACCESS
9501 && type != 0 && bitsize >= 0
9502 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9503 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9504 != CODE_FOR_nothing))
9505 {
9506 do_jump (convert (type, exp), if_false_label, if_true_label);
9507 break;
9508 }
9509 goto normal;
9510 }
9511
9512 case COND_EXPR:
9513 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9514 if (integer_onep (TREE_OPERAND (exp, 1))
9515 && integer_zerop (TREE_OPERAND (exp, 2)))
9516 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9517
9518 else if (integer_zerop (TREE_OPERAND (exp, 1))
9519 && integer_onep (TREE_OPERAND (exp, 2)))
9520 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9521
9522 else
9523 {
9524 rtx label1 = gen_label_rtx ();
9525 drop_through_label = gen_label_rtx ();
9526
9527 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9528
9529 start_cleanup_deferral ();
9530 /* Now the THEN-expression. */
9531 do_jump (TREE_OPERAND (exp, 1),
9532 if_false_label ? if_false_label : drop_through_label,
9533 if_true_label ? if_true_label : drop_through_label);
9534 /* In case the do_jump just above never jumps. */
9535 do_pending_stack_adjust ();
9536 emit_label (label1);
9537
9538 /* Now the ELSE-expression. */
9539 do_jump (TREE_OPERAND (exp, 2),
9540 if_false_label ? if_false_label : drop_through_label,
9541 if_true_label ? if_true_label : drop_through_label);
9542 end_cleanup_deferral ();
9543 }
9544 break;
9545
9546 case EQ_EXPR:
9547 {
9548 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9549
9550 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9551 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9552 {
9553 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9554 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
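 /* Decompose the complex equality into a conjunction: the real
    parts must be equal and the imaginary parts must be equal.  */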
9555 do_jump
9556 (fold
9557 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9558 fold (build (EQ_EXPR, TREE_TYPE (exp),
9559 fold (build1 (REALPART_EXPR,
9560 TREE_TYPE (inner_type),
9561 exp0)),
9562 fold (build1 (REALPART_EXPR,
9563 TREE_TYPE (inner_type),
9564 exp1)))),
9565 fold (build (EQ_EXPR, TREE_TYPE (exp),
9566 fold (build1 (IMAGPART_EXPR,
9567 TREE_TYPE (inner_type),
9568 exp0)),
9569 fold (build1 (IMAGPART_EXPR,
9570 TREE_TYPE (inner_type),
9571 exp1)))))),
9572 if_false_label, if_true_label);
9573 }
9574
9575 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9576 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9577
9578 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9579 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9580 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9581 else
9582 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9583 break;
9584 }
9585
9586 case NE_EXPR:
9587 {
9588 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9589
9590 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9591 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9592 {
9593 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9594 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
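 /* Decompose the complex inequality into a disjunction: the values
    differ if either the real parts or the imaginary parts differ.  */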
9595 do_jump
9596 (fold
9597 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9598 fold (build (NE_EXPR, TREE_TYPE (exp),
9599 fold (build1 (REALPART_EXPR,
9600 TREE_TYPE (inner_type),
9601 exp0)),
9602 fold (build1 (REALPART_EXPR,
9603 TREE_TYPE (inner_type),
9604 exp1)))),
9605 fold (build (NE_EXPR, TREE_TYPE (exp),
9606 fold (build1 (IMAGPART_EXPR,
9607 TREE_TYPE (inner_type),
9608 exp0)),
9609 fold (build1 (IMAGPART_EXPR,
9610 TREE_TYPE (inner_type),
9611 exp1)))))),
9612 if_false_label, if_true_label);
9613 }
9614
9615 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9616 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9617
9618 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9619 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9620 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9621 else
9622 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9623 break;
9624 }
9625
9626 case LT_EXPR:
9627 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9628 if (GET_MODE_CLASS (mode) == MODE_INT
9629 && ! can_compare_p (LT, mode, ccp_jump))
9630 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9631 else
9632 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9633 break;
9634
9635 case LE_EXPR:
9636 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9637 if (GET_MODE_CLASS (mode) == MODE_INT
9638 && ! can_compare_p (LE, mode, ccp_jump))
9639 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9640 else
9641 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9642 break;
9643
9644 case GT_EXPR:
9645 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9646 if (GET_MODE_CLASS (mode) == MODE_INT
9647 && ! can_compare_p (GT, mode, ccp_jump))
9648 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9649 else
9650 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9651 break;
9652
9653 case GE_EXPR:
9654 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9655 if (GET_MODE_CLASS (mode) == MODE_INT
9656 && ! can_compare_p (GE, mode, ccp_jump))
9657 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9658 else
9659 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9660 break;
9661
9662 case UNORDERED_EXPR:
9663 case ORDERED_EXPR:
9664 {
9665 enum rtx_code cmp, rcmp;
9666 int do_rev;
9667
9668 if (code == UNORDERED_EXPR)
9669 cmp = UNORDERED, rcmp = ORDERED;
9670 else
9671 cmp = ORDERED, rcmp = UNORDERED;
9672 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9673
9674 do_rev = 0;
9675 if (! can_compare_p (cmp, mode, ccp_jump)
9676 && (can_compare_p (rcmp, mode, ccp_jump)
9677 /* If the target doesn't provide either UNORDERED or ORDERED
9678 comparisons, canonicalize on UNORDERED for the library. */
9679 || rcmp == UNORDERED))
9680 do_rev = 1;
9681
9682 if (! do_rev)
9683 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9684 else
9685 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9686 }
9687 break;
9688
9689 {
9690 enum rtx_code rcode1;
9691 enum tree_code tcode2;
9692
9693 case UNLT_EXPR:
9694 rcode1 = UNLT;
9695 tcode2 = LT_EXPR;
9696 goto unordered_bcc;
9697 case UNLE_EXPR:
9698 rcode1 = UNLE;
9699 tcode2 = LE_EXPR;
9700 goto unordered_bcc;
9701 case UNGT_EXPR:
9702 rcode1 = UNGT;
9703 tcode2 = GT_EXPR;
9704 goto unordered_bcc;
9705 case UNGE_EXPR:
9706 rcode1 = UNGE;
9707 tcode2 = GE_EXPR;
9708 goto unordered_bcc;
9709 case UNEQ_EXPR:
9710 rcode1 = UNEQ;
9711 tcode2 = EQ_EXPR;
9712 goto unordered_bcc;
9713
9714 unordered_bcc:
9715 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9716 if (can_compare_p (rcode1, mode, ccp_jump))
9717 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9718 if_true_label);
9719 else
9720 {
9721 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9722 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9723 tree cmp0, cmp1;
9724
9725 /* If the target doesn't support combined unordered
9726 compares, decompose into UNORDERED + comparison. */
9727 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9728 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9729 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9730 do_jump (exp, if_false_label, if_true_label);
9731 }
9732 }
9733 break;
9734
9735 /* Special case:
9736 __builtin_expect (<test>, 0) and
9737 __builtin_expect (<test>, 1)
9738
9739 We need to do this here, so that <test> is not converted to a SCC
9740 operation on machines that use condition code registers and COMPARE
9741 like the PowerPC, and then the jump is done based on whether the SCC
9742 operation produced a 1 or 0. */
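 /* For example (illustrative), `if (__builtin_expect (x == 0, 0))'
    should expand straight into a compare-and-branch on X, with the
    expected outcome recorded for branch prediction, rather than into
    a store-flag of X == 0 whose result is then compared against
    zero.  */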
9743 case CALL_EXPR:
9744 /* Check for a built-in function. */
9745 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9746 {
9747 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9748 tree arglist = TREE_OPERAND (exp, 1);
9749
9750 if (TREE_CODE (fndecl) == FUNCTION_DECL
9751 && DECL_BUILT_IN (fndecl)
9752 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9753 && arglist != NULL_TREE
9754 && TREE_CHAIN (arglist) != NULL_TREE)
9755 {
9756 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9757 if_true_label);
9758
9759 if (seq != NULL_RTX)
9760 {
9761 emit_insn (seq);
9762 return;
9763 }
9764 }
9765 }
9766 /* fall through and generate the normal code. */
9767
9768 default:
9769 normal:
9770 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9771 #if 0
9772 /* This is not needed any more and causes poor code since it causes
9773 comparisons and tests from non-SI objects to have different code
9774 sequences. */
9775 /* Copy to register to avoid generating bad insns by cse
9776 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9777 if (!cse_not_expected && GET_CODE (temp) == MEM)
9778 temp = copy_to_reg (temp);
9779 #endif
9780 do_pending_stack_adjust ();
9781 /* Do any postincrements in the expression that was tested. */
9782 emit_queue ();
9783
9784 if (GET_CODE (temp) == CONST_INT
9785 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9786 || GET_CODE (temp) == LABEL_REF)
9787 {
9788 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9789 if (target)
9790 emit_jump (target);
9791 }
9792 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9793 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9794 /* Note swapping the labels gives us not-equal. */
9795 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9796 else if (GET_MODE (temp) != VOIDmode)
9797 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9798 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9799 GET_MODE (temp), NULL_RTX,
9800 if_false_label, if_true_label);
9801 else
9802 abort ();
9803 }
9804
9805 if (drop_through_label)
9806 {
9807 /* If do_jump produces code that might be jumped around,
9808 do any stack adjusts from that code, before the place
9809 where control merges in. */
9810 do_pending_stack_adjust ();
9811 emit_label (drop_through_label);
9812 }
9813 }
9814 \f
9815 /* Given a comparison expression EXP for values too wide to be compared
9816 with one insn, test the comparison and jump to the appropriate label.
9817 The code of EXP is ignored; we always test GT if SWAP is 0,
9818 and LT if SWAP is 1. */
9819
9820 static void
9821 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9822 tree exp;
9823 int swap;
9824 rtx if_false_label, if_true_label;
9825 {
9826 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9827 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9828 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9829 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9830
9831 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9832 }
9833
9834 /* Compare OP0 with OP1, word at a time, in mode MODE.
9835 UNSIGNEDP says to do unsigned comparison.
9836 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9837
9838 void
9839 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9840 enum machine_mode mode;
9841 int unsignedp;
9842 rtx op0, op1;
9843 rtx if_false_label, if_true_label;
9844 {
9845 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9846 rtx drop_through_label = 0;
9847 int i;
9848
9849 if (! if_true_label || ! if_false_label)
9850 drop_through_label = gen_label_rtx ();
9851 if (! if_true_label)
9852 if_true_label = drop_through_label;
9853 if (! if_false_label)
9854 if_false_label = drop_through_label;
9855
9856 /* Compare a word at a time, high order first. */
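 /* E.g. (illustrative) for DImode operands on a 32-bit target: jump
    to IF_TRUE_LABEL if the high words already compare greater, jump
    to IF_FALSE_LABEL if they are unequal (hence less), and only fall
    through to the low words when the high words are equal.  */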
9857 for (i = 0; i < nwords; i++)
9858 {
9859 rtx op0_word, op1_word;
9860
9861 if (WORDS_BIG_ENDIAN)
9862 {
9863 op0_word = operand_subword_force (op0, i, mode);
9864 op1_word = operand_subword_force (op1, i, mode);
9865 }
9866 else
9867 {
9868 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9869 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9870 }
9871
9872 /* All but the high-order word must be compared as unsigned. */
9873 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9874 (unsignedp || i > 0), word_mode, NULL_RTX,
9875 NULL_RTX, if_true_label);
9876
9877 /* Consider lower words only if these are equal. */
9878 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9879 NULL_RTX, NULL_RTX, if_false_label);
9880 }
9881
9882 if (if_false_label)
9883 emit_jump (if_false_label);
9884 if (drop_through_label)
9885 emit_label (drop_through_label);
9886 }
9887
9888 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9889 with one insn, test the comparison and jump to the appropriate label. */
9890
9891 static void
9892 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9893 tree exp;
9894 rtx if_false_label, if_true_label;
9895 {
9896 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9897 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9898 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9899 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9900 int i;
9901 rtx drop_through_label = 0;
9902
9903 if (! if_false_label)
9904 drop_through_label = if_false_label = gen_label_rtx ();
9905
9906 for (i = 0; i < nwords; i++)
9907 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9908 operand_subword_force (op1, i, mode),
9909 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9910 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9911
9912 if (if_true_label)
9913 emit_jump (if_true_label);
9914 if (drop_through_label)
9915 emit_label (drop_through_label);
9916 }
9917 \f
9918 /* Jump according to whether OP0 is 0.
9919 We assume that OP0 has an integer mode that is too wide
9920 for the available compare insns. */
9921
9922 void
9923 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9924 rtx op0;
9925 rtx if_false_label, if_true_label;
9926 {
9927 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9928 rtx part;
9929 int i;
9930 rtx drop_through_label = 0;
9931
9932 /* The fastest way of doing this comparison on almost any machine is to
9933 "or" all the words and compare the result. If all have to be loaded
9934 from memory and this is a very wide item, it's possible this may
9935 be slower, but that's highly unlikely. */
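 /* E.g. (illustrative) for a DImode value on a 32-bit target this
    ORs the two SImode words into one register and then compares that
    single register against zero.  */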
9936
9937 part = gen_reg_rtx (word_mode);
9938 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9939 for (i = 1; i < nwords && part != 0; i++)
9940 part = expand_binop (word_mode, ior_optab, part,
9941 operand_subword_force (op0, i, GET_MODE (op0)),
9942 part, 1, OPTAB_WIDEN);
9943
9944 if (part != 0)
9945 {
9946 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9947 NULL_RTX, if_false_label, if_true_label);
9948
9949 return;
9950 }
9951
9952 /* If we couldn't do the "or" simply, do this with a series of compares. */
9953 if (! if_false_label)
9954 drop_through_label = if_false_label = gen_label_rtx ();
9955
9956 for (i = 0; i < nwords; i++)
9957 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9958 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9959 if_false_label, NULL_RTX);
9960
9961 if (if_true_label)
9962 emit_jump (if_true_label);
9963
9964 if (drop_through_label)
9965 emit_label (drop_through_label);
9966 }
9967 \f
9968 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9969 (including code to compute the values to be compared)
9970 and set (CC0) according to the result.
9971 The decision as to signed or unsigned comparison must be made by the caller.
9972
9973 We force a stack adjustment unless there are currently
9974 things pushed on the stack that aren't yet used.
9975
9976 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9977 compared. */
9978
9979 rtx
9980 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9981 rtx op0, op1;
9982 enum rtx_code code;
9983 int unsignedp;
9984 enum machine_mode mode;
9985 rtx size;
9986 {
9987 rtx tem;
9988
9989 /* If one operand is constant, make it the second one. Only do this
9990 if the other operand is not constant as well. */
9991
9992 if (swap_commutative_operands_p (op0, op1))
9993 {
9994 tem = op0;
9995 op0 = op1;
9996 op1 = tem;
9997 code = swap_condition (code);
9998 }
9999
10000 if (flag_force_mem)
10001 {
10002 op0 = force_not_mem (op0);
10003 op1 = force_not_mem (op1);
10004 }
10005
10006 do_pending_stack_adjust ();
10007
10008 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10009 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10010 return tem;
10011
10012 #if 0
10013 /* There's no need to do this now that combine.c can eliminate lots of
10014 sign extensions. This can be less efficient in certain cases on other
10015 machines. */
10016
10017 /* If this is a signed equality comparison, we can do it as an
10018 unsigned comparison since zero-extension is cheaper than sign
10019 extension and comparisons with zero are done as unsigned. This is
10020 the case even on machines that can do fast sign extension, since
10021 zero-extension is easier to combine with other operations than
10022 sign-extension is. If we are comparing against a constant, we must
10023 convert it to what it would look like unsigned. */
10024 if ((code == EQ || code == NE) && ! unsignedp
10025 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10026 {
10027 if (GET_CODE (op1) == CONST_INT
10028 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10029 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10030 unsignedp = 1;
10031 }
10032 #endif
10033
10034 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10035
10036 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10037 }
10038
10039 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10040 The decision as to signed or unsigned comparison must be made by the caller.
10041
10042 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10043 compared. */
10044
10045 void
10046 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10047 if_false_label, if_true_label)
10048 rtx op0, op1;
10049 enum rtx_code code;
10050 int unsignedp;
10051 enum machine_mode mode;
10052 rtx size;
10053 rtx if_false_label, if_true_label;
10054 {
10055 rtx tem;
10056 int dummy_true_label = 0;
10057
10058 /* Reverse the comparison if that is safe and we want to jump if it is
10059 false. */
10060 if (! if_true_label && ! FLOAT_MODE_P (mode))
10061 {
10062 if_true_label = if_false_label;
10063 if_false_label = 0;
10064 code = reverse_condition (code);
10065 }
10066
10067 /* If one operand is constant, make it the second one. Only do this
10068 if the other operand is not constant as well. */
10069
10070 if (swap_commutative_operands_p (op0, op1))
10071 {
10072 tem = op0;
10073 op0 = op1;
10074 op1 = tem;
10075 code = swap_condition (code);
10076 }
10077
10078 if (flag_force_mem)
10079 {
10080 op0 = force_not_mem (op0);
10081 op1 = force_not_mem (op1);
10082 }
10083
10084 do_pending_stack_adjust ();
10085
10086 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10087 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10088 {
10089 if (tem == const_true_rtx)
10090 {
10091 if (if_true_label)
10092 emit_jump (if_true_label);
10093 }
10094 else
10095 {
10096 if (if_false_label)
10097 emit_jump (if_false_label);
10098 }
10099 return;
10100 }
10101
10102 #if 0
10103 /* There's no need to do this now that combine.c can eliminate lots of
10104 sign extensions. This can be less efficient in certain cases on other
10105 machines. */
10106
10107 /* If this is a signed equality comparison, we can do it as an
10108 unsigned comparison since zero-extension is cheaper than sign
10109 extension and comparisons with zero are done as unsigned. This is
10110 the case even on machines that can do fast sign extension, since
10111 zero-extension is easier to combine with other operations than
10112 sign-extension is. If we are comparing against a constant, we must
10113 convert it to what it would look like unsigned. */
10114 if ((code == EQ || code == NE) && ! unsignedp
10115 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10116 {
10117 if (GET_CODE (op1) == CONST_INT
10118 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10119 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10120 unsignedp = 1;
10121 }
10122 #endif
10123
10124 if (! if_true_label)
10125 {
10126 dummy_true_label = 1;
10127 if_true_label = gen_label_rtx ();
10128 }
10129
10130 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10131 if_true_label);
10132
10133 if (if_false_label)
10134 emit_jump (if_false_label);
10135 if (dummy_true_label)
10136 emit_label (if_true_label);
10137 }
10138
10139 /* Generate code for a comparison expression EXP (including code to compute
10140 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10141 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10142 generated code will drop through.
10143 SIGNED_CODE should be the rtx operation for this comparison for
10144 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10145
10146 We force a stack adjustment unless there are currently
10147 things pushed on the stack that aren't yet used. */
10148
10149 static void
10150 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10151 if_true_label)
10152 tree exp;
10153 enum rtx_code signed_code, unsigned_code;
10154 rtx if_false_label, if_true_label;
10155 {
10156 rtx op0, op1;
10157 tree type;
10158 enum machine_mode mode;
10159 int unsignedp;
10160 enum rtx_code code;
10161
10162 /* Don't crash if the comparison was erroneous. */
10163 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10164 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10165 return;
10166
10167 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10168 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10169 return;
10170
10171 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10172 mode = TYPE_MODE (type);
10173 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10174 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10175 || (GET_MODE_BITSIZE (mode)
10176 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10177 1)))))))
10178 {
10179 /* op0 might have been replaced by a promoted constant, in which
10180 case the type of the second argument should be used. */
10181 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10182 mode = TYPE_MODE (type);
10183 }
10184 unsignedp = TREE_UNSIGNED (type);
10185 code = unsignedp ? unsigned_code : signed_code;
10186
10187 #ifdef HAVE_canonicalize_funcptr_for_compare
10188 /* If function pointers need to be "canonicalized" before they can
10189 be reliably compared, then canonicalize them. */
10190 if (HAVE_canonicalize_funcptr_for_compare
10191 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10192 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10193 == FUNCTION_TYPE))
10194 {
10195 rtx new_op0 = gen_reg_rtx (mode);
10196
10197 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10198 op0 = new_op0;
10199 }
10200
10201 if (HAVE_canonicalize_funcptr_for_compare
10202 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10203 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10204 == FUNCTION_TYPE))
10205 {
10206 rtx new_op1 = gen_reg_rtx (mode);
10207
10208 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10209 op1 = new_op1;
10210 }
10211 #endif
10212
10213 /* Do any postincrements in the expression that was tested. */
10214 emit_queue ();
10215
10216 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10217 ((mode == BLKmode)
10218 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10219 if_false_label, if_true_label);
10220 }
10221 \f
10222 /* Generate code to calculate EXP using a store-flag instruction
10223 and return an rtx for the result. EXP is either a comparison
10224 or a TRUTH_NOT_EXPR whose operand is a comparison.
10225
10226 If TARGET is nonzero, store the result there if convenient.
10227
10228 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10229 cheap.
10230
10231 Return zero if there is no suitable set-flag instruction
10232 available on this machine.
10233
10234 Once expand_expr has been called on the arguments of the comparison,
10235 we are committed to doing the store flag, since it is not safe to
10236 re-evaluate the expression. We emit the store-flag insn by calling
10237 emit_store_flag, but only expand the arguments if we have a reason
10238 to believe that emit_store_flag will be successful. If we think that
10239 it will, but it isn't, we have to simulate the store-flag with a
10240 set/jump/set sequence. */
10241
10242 static rtx
10243 do_store_flag (exp, target, mode, only_cheap)
10244 tree exp;
10245 rtx target;
10246 enum machine_mode mode;
10247 int only_cheap;
10248 {
10249 enum rtx_code code;
10250 tree arg0, arg1, type;
10251 tree tem;
10252 enum machine_mode operand_mode;
10253 int invert = 0;
10254 int unsignedp;
10255 rtx op0, op1;
10256 enum insn_code icode;
10257 rtx subtarget = target;
10258 rtx result, label;
10259
10260 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10261 result at the end. We can't simply invert the test since it would
10262 have already been inverted if it were valid. This case occurs for
10263 some floating-point comparisons. */
10264
10265 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10266 invert = 1, exp = TREE_OPERAND (exp, 0);
10267
10268 arg0 = TREE_OPERAND (exp, 0);
10269 arg1 = TREE_OPERAND (exp, 1);
10270
10271 /* Don't crash if the comparison was erroneous. */
10272 if (arg0 == error_mark_node || arg1 == error_mark_node)
10273 return const0_rtx;
10274
10275 type = TREE_TYPE (arg0);
10276 operand_mode = TYPE_MODE (type);
10277 unsignedp = TREE_UNSIGNED (type);
10278
10279 /* We won't bother with BLKmode store-flag operations because it would mean
10280 passing a lot of information to emit_store_flag. */
10281 if (operand_mode == BLKmode)
10282 return 0;
10283
10284 /* We won't bother with store-flag operations involving function pointers
10285 when function pointers must be canonicalized before comparisons. */
10286 #ifdef HAVE_canonicalize_funcptr_for_compare
10287 if (HAVE_canonicalize_funcptr_for_compare
10288 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10289 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10290 == FUNCTION_TYPE))
10291 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10292 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10293 == FUNCTION_TYPE))))
10294 return 0;
10295 #endif
10296
10297 STRIP_NOPS (arg0);
10298 STRIP_NOPS (arg1);
10299
10300 /* Get the rtx comparison code to use. We know that EXP is a comparison
10301 operation of some type. Some comparisons against 1 and -1 can be
10302 converted to comparisons with zero. Do so here so that the tests
10303 below will be aware that we have a comparison with zero. These
10304 tests will not catch constants in the first operand, but constants
10305 are rarely passed as the first operand. */
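 /* For example (illustrative): `x < 1' becomes `x <= 0' and
    `x >= 1' becomes `x > 0'; for signed X, `x <= -1' becomes
    `x < 0' and `x > -1' becomes `x >= 0'.  */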
10306
10307 switch (TREE_CODE (exp))
10308 {
10309 case EQ_EXPR:
10310 code = EQ;
10311 break;
10312 case NE_EXPR:
10313 code = NE;
10314 break;
10315 case LT_EXPR:
10316 if (integer_onep (arg1))
10317 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10318 else
10319 code = unsignedp ? LTU : LT;
10320 break;
10321 case LE_EXPR:
10322 if (! unsignedp && integer_all_onesp (arg1))
10323 arg1 = integer_zero_node, code = LT;
10324 else
10325 code = unsignedp ? LEU : LE;
10326 break;
10327 case GT_EXPR:
10328 if (! unsignedp && integer_all_onesp (arg1))
10329 arg1 = integer_zero_node, code = GE;
10330 else
10331 code = unsignedp ? GTU : GT;
10332 break;
10333 case GE_EXPR:
10334 if (integer_onep (arg1))
10335 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10336 else
10337 code = unsignedp ? GEU : GE;
10338 break;
10339
10340 case UNORDERED_EXPR:
10341 code = UNORDERED;
10342 break;
10343 case ORDERED_EXPR:
10344 code = ORDERED;
10345 break;
10346 case UNLT_EXPR:
10347 code = UNLT;
10348 break;
10349 case UNLE_EXPR:
10350 code = UNLE;
10351 break;
10352 case UNGT_EXPR:
10353 code = UNGT;
10354 break;
10355 case UNGE_EXPR:
10356 code = UNGE;
10357 break;
10358 case UNEQ_EXPR:
10359 code = UNEQ;
10360 break;
10361
10362 default:
10363 abort ();
10364 }
10365
10366 /* Put a constant second. */
10367 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10368 {
10369 tem = arg0; arg0 = arg1; arg1 = tem;
10370 code = swap_condition (code);
10371 }
10372
10373 /* If this is an equality or inequality test of a single bit, we can
10374 do this by shifting the bit being tested to the low-order bit and
10375 masking the result with the constant 1. If the condition was EQ,
10376 we xor it with 1. This does not require an scc insn and is faster
10377 than an scc insn even if we have it. */
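 /* For example (illustrative), `(x & 8) != 0' becomes
    `(x >> 3) & 1' and `(x & 8) == 0' becomes `((x >> 3) ^ 1) & 1';
    the trailing AND is omitted when the tested bit is the sign bit
    of its type.  */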
10378
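/* For example, (x & 8) != 0 is expanded as (x >> 3) & 1, and
   (x & 8) == 0 as ((x >> 3) ^ 1) & 1.  When the bit being tested is
   the sign bit, the final AND is omitted entirely.  */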
10379 if ((code == NE || code == EQ)
10380 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10381 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10382 {
10383 tree inner = TREE_OPERAND (arg0, 0);
10384 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10385 int ops_unsignedp;
10386
10387 /* If INNER is a right shift by a constant and the shift count plus BITNUM
10388 does not overflow, adjust BITNUM and INNER accordingly. */
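/* For instance, a test of bit 0 of (y >> 4) is really a test of
   bit 4 of y, so we test bit 4 of y directly.  */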
10389
10390 if (TREE_CODE (inner) == RSHIFT_EXPR
10391 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10392 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10393 && bitnum < TYPE_PRECISION (type)
10394 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10395 TYPE_PRECISION (type) - bitnum))
10396 {
10397 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10398 inner = TREE_OPERAND (inner, 0);
10399 }
10400
10401 /* If we are going to be able to omit the AND below, we must do our
10402 operations as unsigned. If we must use the AND, we have a choice.
10403 Normally unsigned is faster, but on some machines signed is. */
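/* When the tested bit is the sign bit, a logical (unsigned) shift leaves
   exactly 0 or 1 in the result, whereas an arithmetic shift would leave
   0 or -1; that is why the shift must be unsigned if the AND is to be
   omitted.  */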
10404 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10405 #ifdef LOAD_EXTEND_OP
10406 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10407 #else
10408 : 1
10409 #endif
10410 );
10411
10412 if (! get_subtarget (subtarget)
10413 || GET_MODE (subtarget) != operand_mode
10414 || ! safe_from_p (subtarget, inner, 1))
10415 subtarget = 0;
10416
10417 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10418
10419 if (bitnum != 0)
10420 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10421 size_int (bitnum), subtarget, ops_unsignedp);
10422
10423 if (GET_MODE (op0) != mode)
10424 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10425
10426 if ((code == EQ && ! invert) || (code == NE && invert))
10427 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10428 ops_unsignedp, OPTAB_LIB_WIDEN);
10429
10430 /* Put the AND last so it can combine with more things. */
10431 if (bitnum != TYPE_PRECISION (type) - 1)
10432 op0 = expand_and (op0, const1_rtx, subtarget);
10433
10434 return op0;
10435 }
10436
10437 /* Now see if we are likely to be able to do this. Return if not. */
10438 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10439 return 0;
10440
10441 icode = setcc_gen_code[(int) code];
10442 if (icode == CODE_FOR_nothing
10443 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10444 {
10445 /* We can only do this if it is one of the special cases that
10446 can be handled without an scc insn. */
10447 if ((code == LT && integer_zerop (arg1))
10448 || (! only_cheap && code == GE && integer_zerop (arg1)))
10449 ;
10450 else if (BRANCH_COST >= 0
10451 && ! only_cheap && (code == NE || code == EQ)
10452 && TREE_CODE (type) != REAL_TYPE
10453 && ((abs_optab->handlers[(int) operand_mode].insn_code
10454 != CODE_FOR_nothing)
10455 || (ffs_optab->handlers[(int) operand_mode].insn_code
10456 != CODE_FOR_nothing)))
10457 ;
10458 else
10459 return 0;
10460 }
10461
10462 if (! get_subtarget (target)
10463 || GET_MODE (subtarget) != operand_mode
10464 || ! safe_from_p (subtarget, arg1, 1))
10465 subtarget = 0;
10466
10467 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10468 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10469
10470 if (target == 0)
10471 target = gen_reg_rtx (mode);
10472
10473 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10474 because, if emit_store_flag does anything, it will succeed and
10475 OP0 and OP1 will not be used subsequently. */
10476
10477 result = emit_store_flag (target, code,
10478 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10479 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10480 operand_mode, unsignedp, 1);
10481
10482 if (result)
10483 {
10484 if (invert)
10485 result = expand_binop (mode, xor_optab, result, const1_rtx,
10486 result, 0, OPTAB_LIB_WIDEN);
10487 return result;
10488 }
10489
10490 /* If this failed, we have to do this with set/compare/jump/set code. */
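/* The sequence emitted below is, in effect,
     target = 1; if (op0 <code> op1) goto label; target = 0; label:
   with the two constants interchanged when INVERT is set.  */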
10491 if (GET_CODE (target) != REG
10492 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10493 target = gen_reg_rtx (GET_MODE (target));
10494
10495 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10496 result = compare_from_rtx (op0, op1, code, unsignedp,
10497 operand_mode, NULL_RTX);
10498 if (GET_CODE (result) == CONST_INT)
10499 return (((result == const0_rtx && ! invert)
10500 || (result != const0_rtx && invert))
10501 ? const0_rtx : const1_rtx);
10502
10503 label = gen_label_rtx ();
10504 if (bcc_gen_fctn[(int) code] == 0)
10505 abort ();
10506
10507 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10508 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10509 emit_label (label);
10510
10511 return target;
10512 }
10513 \f
10514
10515 /* Stubs in case we haven't got a casesi insn. */
10516 #ifndef HAVE_casesi
10517 # define HAVE_casesi 0
10518 # define gen_casesi(a, b, c, d, e) (0)
10519 # define CODE_FOR_casesi CODE_FOR_nothing
10520 #endif
10521
10522 /* If the machine does not have a case insn that compares the bounds,
10523 this means extra overhead for dispatch tables, which raises the
10524 threshold for using them. */
10525 #ifndef CASE_VALUES_THRESHOLD
10526 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10527 #endif /* CASE_VALUES_THRESHOLD */
10528
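/* Return the smallest number of different case values for which it is
   best to use a jump table rather than a tree of conditional branches.  */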
10529 unsigned int
10530 case_values_threshold ()
10531 {
10532 return CASE_VALUES_THRESHOLD;
10533 }
10534
10535 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10536 0 otherwise (i.e. if there is no casesi instruction). */
10537 int
10538 try_casesi (index_type, index_expr, minval, range,
10539 table_label, default_label)
10540 tree index_type, index_expr, minval, range;
10541 rtx table_label ATTRIBUTE_UNUSED;
10542 rtx default_label;
10543 {
10544 enum machine_mode index_mode = SImode;
10545 int index_bits = GET_MODE_BITSIZE (index_mode);
10546 rtx op1, op2, index;
10547 enum machine_mode op_mode;
10548
10549 if (! HAVE_casesi)
10550 return 0;
10551
10552 /* Convert the index to SImode. */
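/* If the index is wider than SImode, subtract MINVAL and check the
   bounds while still in the wide mode; truncating first could map an
   out-of-range value onto a valid table entry.  */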
10553 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10554 {
10555 enum machine_mode omode = TYPE_MODE (index_type);
10556 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10557
10558 /* We must handle the endpoints in the original mode. */
10559 index_expr = build (MINUS_EXPR, index_type,
10560 index_expr, minval);
10561 minval = integer_zero_node;
10562 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10563 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10564 omode, 1, default_label);
10565 /* Now we can safely truncate. */
10566 index = convert_to_mode (index_mode, index, 0);
10567 }
10568 else
10569 {
10570 if (TYPE_MODE (index_type) != index_mode)
10571 {
10572 index_expr = convert (type_for_size (index_bits, 0),
10573 index_expr);
10574 index_type = TREE_TYPE (index_expr);
10575 }
10576
10577 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10578 }
10579 emit_queue ();
10580 index = protect_from_queue (index, 0);
10581 do_pending_stack_adjust ();
10582
10583 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10584 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10585 (index, op_mode))
10586 index = copy_to_mode_reg (op_mode, index);
10587
10588 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10589
10590 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10591 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10592 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10593 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10594 (op1, op_mode))
10595 op1 = copy_to_mode_reg (op_mode, op1);
10596
10597 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10598
10599 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10600 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10601 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10602 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10603 (op2, op_mode))
10604 op2 = copy_to_mode_reg (op_mode, op2);
10605
10606 emit_jump_insn (gen_casesi (index, op1, op2,
10607 table_label, default_label));
10608 return 1;
10609 }
10610
10611 /* Attempt to generate a tablejump instruction; same concept as try_casesi above. Stubs first, in case we haven't got a tablejump insn. */
10612 #ifndef HAVE_tablejump
10613 #define HAVE_tablejump 0
10614 #define gen_tablejump(x, y) (0)
10615 #endif
10616
10617 /* Subroutine of the next function.
10618
10619 INDEX is the value being switched on, with the lowest value
10620 in the table already subtracted.
10621 MODE is its expected mode (needed if INDEX is constant).
10622 RANGE is the length of the jump table.
10623 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10624
10625 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10626 index value is out of range. */
10627
10628 static void
10629 do_tablejump (index, mode, range, table_label, default_label)
10630 rtx index, range, table_label, default_label;
10631 enum machine_mode mode;
10632 {
10633 rtx temp, vector;
10634
10635 /* Do an unsigned comparison (in the proper mode) between the index
10636 expression and the value which represents the length of the range.
10637 Since we just finished subtracting the lower bound of the range
10638 from the index expression, this comparison allows us to simultaneously
10639 check that the original index expression value is both greater than
10640 or equal to the minimum value of the range and less than or equal to
10641 the maximum value of the range. */
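/* For example, assuming the usual caller in stmt.c, a switch over case
   values 3 through 7 reaches here with RANGE equal to 4 and with 3
   already subtracted from INDEX; an original value of 2 wraps around to
   a huge unsigned INDEX and 8 becomes 5, so both compare above 4 and
   branch to DEFAULT_LABEL.  */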
10642
10643 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10644 default_label);
10645
10646 /* If index is in range, it must fit in Pmode.
10647 Convert to Pmode so we can index with it. */
10648 if (mode != Pmode)
10649 index = convert_to_mode (Pmode, index, 1);
10650
10651 /* Don't let a MEM slip through, because then the INDEX that comes
10652 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10653 and break_out_memory_refs will go to work on it and mess it up. */
10654 #ifdef PIC_CASE_VECTOR_ADDRESS
10655 if (flag_pic && GET_CODE (index) != REG)
10656 index = copy_to_mode_reg (Pmode, index);
10657 #endif
10658
10659 /* If flag_force_addr were to affect this address
10660 it could interfere with the tricky assumptions made
10661 about addresses that contain label-refs,
10662 which may be valid only very near the tablejump itself. */
10663 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10664 GET_MODE_SIZE, because this indicates how large insns are. The other
10665 uses should all be Pmode, because they are addresses. This code
10666 could fail if addresses and insns are not the same size. */
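/* The address computed below is TABLE_LABEL + INDEX * (size of one
   dispatch-table entry).  */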
10667 index = gen_rtx_PLUS (Pmode,
10668 gen_rtx_MULT (Pmode, index,
10669 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10670 gen_rtx_LABEL_REF (Pmode, table_label));
10671 #ifdef PIC_CASE_VECTOR_ADDRESS
10672 if (flag_pic)
10673 index = PIC_CASE_VECTOR_ADDRESS (index);
10674 else
10675 #endif
10676 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10677 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10678 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10679 RTX_UNCHANGING_P (vector) = 1;
10680 convert_move (temp, vector, 0);
10681
10682 emit_jump_insn (gen_tablejump (temp, table_label));
10683
10684 /* If we are generating PIC code or if the table is PC-relative, the
10685 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10686 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10687 emit_barrier ();
10688 }
10689
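/* Attempt to expand the dispatch as a tablejump.  INDEX_EXPR is the value
   being switched on and MINVAL and RANGE describe the case values, as for
   try_casesi above.  Return 1 if a tablejump was emitted, 0 if the machine
   has no tablejump insn.  */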
10690 int
10691 try_tablejump (index_type, index_expr, minval, range,
10692 table_label, default_label)
10693 tree index_type, index_expr, minval, range;
10694 rtx table_label, default_label;
10695 {
10696 rtx index;
10697
10698 if (! HAVE_tablejump)
10699 return 0;
10700
10701 index_expr = fold (build (MINUS_EXPR, index_type,
10702 convert (index_type, index_expr),
10703 convert (index_type, minval)));
10704 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10705 emit_queue ();
10706 index = protect_from_queue (index, 0);
10707 do_pending_stack_adjust ();
10708
10709 do_tablejump (index, TYPE_MODE (index_type),
10710 convert_modes (TYPE_MODE (index_type),
10711 TYPE_MODE (TREE_TYPE (range)),
10712 expand_expr (range, NULL_RTX,
10713 VOIDmode, 0),
10714 TREE_UNSIGNED (TREE_TYPE (range))),
10715 table_label, default_label);
10716 return 1;
10717 }