1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "intl.h"
43 #include "tm_p.h"
44
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
47
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
50
51 #ifdef PUSH_ROUNDING
52
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
55 #endif
56
57 #endif
58
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
62 #else
63 #define STACK_PUSH_CODE PRE_INC
64 #endif
65 #endif
66
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
70 #endif
71
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
74 codes that safe_from_p needs to know about. Since safe_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
78 parameter. */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
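/* Illustrative sketch only: a front end might install the hook roughly as
   below.  EXAMPLE_LANG_CODE, the helper name, and the convention that a
   nonzero return means "safe" are assumptions made for the example; they
   are not defined in this file.  */
#if 0
static int
example_lang_safe_from_p (x, exp)
     rtx x ATTRIBUTE_UNUSED;
     tree exp;
{
  if (TREE_CODE (exp) == EXAMPLE_LANG_CODE)
    return 0;			/* Assume this language code is unsafe.  */
  return 1;			/* Leave everything else to safe_from_p.  */
}

/* In the front end's initialization code:
   lang_safe_from_p = example_lang_safe_from_p;  */
#endif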
80
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
88
89 /* Don't check memory usage, since code is being emitted to check memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
93
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
96
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces
100 {
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
112 };
113
114 /* This structure is used by store_by_pieces to describe the store or clear to
115 be performed. */
116
117 struct store_by_pieces
118 {
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
126 PTR constfundata;
127 int reverse;
128 };
129
130 extern struct obstack permanent_obstack;
131
132 static rtx get_push_address PARAMS ((int));
133
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
137 unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 unsigned int));
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
145 unsigned int));
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
147 enum machine_mode,
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
155 int));
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
157 HOST_WIDE_INT));
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
176
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
180
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
183
184 /* If a memory-to-memory move would take MOVE_RATIO or more simple
185 move-instruction sequences, we will do a movstr or libcall instead. */
186
187 #ifndef MOVE_RATIO
188 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
189 #define MOVE_RATIO 2
190 #else
191 /* If we are optimizing for space (-Os), cut down the default move ratio. */
192 #define MOVE_RATIO (optimize_size ? 3 : 15)
193 #endif
194 #endif
195
196 /* This macro is used to determine whether move_by_pieces should be called
197 to perform a structure copy. */
198 #ifndef MOVE_BY_PIECES_P
199 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
201 #endif
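/* Worked example (illustrative numbers only): with MOVE_RATIO of 15 and
   word-aligned operands on a 32-bit target, a 32-byte copy needs
   32 / 4 = 8 SImode moves, so MOVE_BY_PIECES_P is true and move_by_pieces
   is used; a 256-byte copy would need 64 moves, so a movstr pattern or a
   library call is used instead.  */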
202
203 /* This array records the insn_code of insns to perform block moves. */
204 enum insn_code movstr_optab[NUM_MACHINE_MODES];
205
206 /* This array records the insn_code of insns to perform block clears. */
207 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
208
209 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
210
211 #ifndef SLOW_UNALIGNED_ACCESS
212 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
213 #endif
214 \f
215 /* This is run once per compilation to set up which modes can be used
216 directly in memory and to initialize the block move optab. */
217
218 void
219 init_expr_once ()
220 {
221 rtx insn, pat;
222 enum machine_mode mode;
223 int num_clobbers;
224 rtx mem, mem1;
225
226 start_sequence ();
227
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
233
234 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
235 pat = PATTERN (insn);
236
237 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
238 mode = (enum machine_mode) ((int) mode + 1))
239 {
240 int regno;
241 rtx reg;
242
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
246
247 /* See if there is some register that can be used in this mode and
248 directly loaded or stored from memory. */
249
250 if (mode != VOIDmode && mode != BLKmode)
251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
252 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
253 regno++)
254 {
255 if (! HARD_REGNO_MODE_OK (regno, mode))
256 continue;
257
258 reg = gen_rtx_REG (mode, regno);
259
260 SET_SRC (pat) = mem;
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
264
265 SET_SRC (pat) = mem1;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
269
270 SET_SRC (pat) = reg;
271 SET_DEST (pat) = mem;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
274
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem1;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
279 }
280 }
281
282 end_sequence ();
283 }
284
285 /* This is run at the start of compiling a function. */
286
287 void
288 init_expr ()
289 {
290 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
291
292 pending_chain = 0;
293 pending_stack_adjust = 0;
294 stack_pointer_delta = 0;
295 inhibit_defer_pop = 0;
296 saveregs_value = 0;
297 apply_args_value = 0;
298 forced_labels = 0;
299 }
300
301 void
302 mark_expr_status (p)
303 struct expr_status *p;
304 {
305 if (p == NULL)
306 return;
307
308 ggc_mark_rtx (p->x_saveregs_value);
309 ggc_mark_rtx (p->x_apply_args_value);
310 ggc_mark_rtx (p->x_forced_labels);
311 }
312
313 void
314 free_expr_status (f)
315 struct function *f;
316 {
317 free (f->expr);
318 f->expr = NULL;
319 }
320
321 /* Small sanity check that the queue is empty at the end of a function. */
322
323 void
324 finish_expr_for_function ()
325 {
326 if (pending_chain)
327 abort ();
328 }
329 \f
330 /* Manage the queue of increment instructions to be output
331 for POSTINCREMENT_EXPR expressions, etc. */
332
333 /* Queue up to increment (or change) VAR later. BODY says how:
334 BODY should be the same thing you would pass to emit_insn
335 to increment right away. It will go to emit_insn later on.
336
337 The value is a QUEUED expression to be used in place of VAR
338 where you want to guarantee the pre-incrementation value of VAR. */
339
340 static rtx
341 enqueue_insn (var, body)
342 rtx var, body;
343 {
344 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
345 body, pending_chain);
346 return pending_chain;
347 }
348
349 /* Use protect_from_queue to convert a QUEUED expression
350 into something that you can put immediately into an instruction.
351 If the queued incrementation has not happened yet,
352 protect_from_queue returns the variable itself.
353 If the incrementation has happened, protect_from_queue returns a temp
354 that contains a copy of the old value of the variable.
355
356 Any time an rtx which might possibly be a QUEUED is to be put
357 into an instruction, it must be passed through protect_from_queue first.
358 QUEUED expressions are not meaningful in instructions.
359
360 Do not pass a value through protect_from_queue and then hold
361 on to it for a while before putting it in an instruction!
362 If the queue is flushed in between, incorrect code will result. */
363
364 rtx
365 protect_from_queue (x, modify)
366 register rtx x;
367 int modify;
368 {
369 register RTX_CODE code = GET_CODE (x);
370
371 #if 0 /* A QUEUED can hang around after the queue is forced out. */
372 /* Shortcut for most common case. */
373 if (pending_chain == 0)
374 return x;
375 #endif
376
377 if (code != QUEUED)
378 {
379 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
380 use of autoincrement. Make a copy of the contents of the memory
381 location rather than a copy of the address, but not if the value is
382 of mode BLKmode. Don't modify X in place since it might be
383 shared. */
384 if (code == MEM && GET_MODE (x) != BLKmode
385 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
386 {
387 rtx y = XEXP (x, 0);
388 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
389
390 if (QUEUED_INSN (y))
391 {
392 rtx temp = gen_reg_rtx (GET_MODE (x));
393
394 emit_insn_before (gen_move_insn (temp, new),
395 QUEUED_INSN (y));
396 return temp;
397 }
398
399 /* Copy the address into a pseudo, so that the returned value
400 remains correct across calls to emit_queue. */
401 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
402 }
403
404 /* Otherwise, recursively protect the subexpressions of all
405 the kinds of rtx's that can contain a QUEUED. */
406 if (code == MEM)
407 {
408 rtx tem = protect_from_queue (XEXP (x, 0), 0);
409 if (tem != XEXP (x, 0))
410 {
411 x = copy_rtx (x);
412 XEXP (x, 0) = tem;
413 }
414 }
415 else if (code == PLUS || code == MULT)
416 {
417 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
418 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
419 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
420 {
421 x = copy_rtx (x);
422 XEXP (x, 0) = new0;
423 XEXP (x, 1) = new1;
424 }
425 }
426 return x;
427 }
428 /* If the increment has not happened, use the variable itself. Copy it
429 into a new pseudo so that the value remains correct across calls to
430 emit_queue. */
431 if (QUEUED_INSN (x) == 0)
432 return copy_to_reg (QUEUED_VAR (x));
433 /* If the increment has happened and a pre-increment copy exists,
434 use that copy. */
435 if (QUEUED_COPY (x) != 0)
436 return QUEUED_COPY (x);
437 /* The increment has happened but we haven't set up a pre-increment copy.
438 Set one up now, and use it. */
439 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
440 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
441 QUEUED_INSN (x));
442 return QUEUED_COPY (x);
443 }
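/* Illustrative sketch of the discipline described above (the helper and
   its operands are hypothetical): pass every possibly-QUEUED rtx through
   protect_from_queue immediately before use, and flush the queue
   afterward with emit_queue.  */
#if 0
static void
example_use_of_queue (op0, op1)
     rtx op0, op1;
{
  op0 = protect_from_queue (op0, 1);	/* OP0 will be modified.  */
  op1 = protect_from_queue (op1, 0);	/* OP1 is only read.  */
  emit_move_insn (op0, op1);		/* Use the results right away.  */
  emit_queue ();			/* Now perform the queued increments.  */
}
#endif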
444
445 /* Return nonzero if X contains a QUEUED expression:
446 if it contains anything that will be altered by a queued increment.
447 We handle only combinations of MEM, PLUS, MINUS and MULT operators
448 since memory addresses generally contain only those. */
449
450 int
451 queued_subexp_p (x)
452 rtx x;
453 {
454 register enum rtx_code code = GET_CODE (x);
455 switch (code)
456 {
457 case QUEUED:
458 return 1;
459 case MEM:
460 return queued_subexp_p (XEXP (x, 0));
461 case MULT:
462 case PLUS:
463 case MINUS:
464 return (queued_subexp_p (XEXP (x, 0))
465 || queued_subexp_p (XEXP (x, 1)));
466 default:
467 return 0;
468 }
469 }
470
471 /* Perform all the pending incrementations. */
472
473 void
474 emit_queue ()
475 {
476 register rtx p;
477 while ((p = pending_chain))
478 {
479 rtx body = QUEUED_BODY (p);
480
481 if (GET_CODE (body) == SEQUENCE)
482 {
483 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
484 emit_insn (QUEUED_BODY (p));
485 }
486 else
487 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
488 pending_chain = QUEUED_NEXT (p);
489 }
490 }
491 \f
492 /* Copy data from FROM to TO, where the machine modes are not the same.
493 Both modes may be integer, or both may be floating.
494 UNSIGNEDP should be nonzero if FROM is an unsigned type.
495 This causes zero-extension instead of sign-extension. */
496
497 void
498 convert_move (to, from, unsignedp)
499 register rtx to, from;
500 int unsignedp;
501 {
502 enum machine_mode to_mode = GET_MODE (to);
503 enum machine_mode from_mode = GET_MODE (from);
504 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
505 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
506 enum insn_code code;
507 rtx libcall;
508
509 /* rtx code for making an equivalent value. */
510 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
511
512 to = protect_from_queue (to, 1);
513 from = protect_from_queue (from, 0);
514
515 if (to_real != from_real)
516 abort ();
517
518 /* If FROM is a SUBREG that indicates that we have already done at least
519 the required extension, strip it. We don't handle such SUBREGs as
520 TO here. */
521
522 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
523 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
524 >= GET_MODE_SIZE (to_mode))
525 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
526 from = gen_lowpart (to_mode, from), from_mode = to_mode;
527
528 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
529 abort ();
530
531 if (to_mode == from_mode
532 || (from_mode == VOIDmode && CONSTANT_P (from)))
533 {
534 emit_move_insn (to, from);
535 return;
536 }
537
538 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
539 {
540 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
541 abort ();
542
543 if (VECTOR_MODE_P (to_mode))
544 from = gen_rtx_SUBREG (to_mode, from, 0);
545 else
546 to = gen_rtx_SUBREG (from_mode, to, 0);
547
548 emit_move_insn (to, from);
549 return;
550 }
551
552 if (to_real != from_real)
553 abort ();
554
555 if (to_real)
556 {
557 rtx value, insns;
558
559 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
560 {
561 /* Try converting directly if the insn is supported. */
562 if ((code = can_extend_p (to_mode, from_mode, 0))
563 != CODE_FOR_nothing)
564 {
565 emit_unop_insn (code, to, from, UNKNOWN);
566 return;
567 }
568 }
569
570 #ifdef HAVE_trunchfqf2
571 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
572 {
573 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
574 return;
575 }
576 #endif
577 #ifdef HAVE_trunctqfqf2
578 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
579 {
580 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
581 return;
582 }
583 #endif
584 #ifdef HAVE_truncsfqf2
585 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
586 {
587 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
588 return;
589 }
590 #endif
591 #ifdef HAVE_truncdfqf2
592 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
593 {
594 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
595 return;
596 }
597 #endif
598 #ifdef HAVE_truncxfqf2
599 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
600 {
601 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
602 return;
603 }
604 #endif
605 #ifdef HAVE_trunctfqf2
606 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
607 {
608 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
609 return;
610 }
611 #endif
612
613 #ifdef HAVE_trunctqfhf2
614 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
615 {
616 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
617 return;
618 }
619 #endif
620 #ifdef HAVE_truncsfhf2
621 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
622 {
623 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
624 return;
625 }
626 #endif
627 #ifdef HAVE_truncdfhf2
628 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
629 {
630 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
631 return;
632 }
633 #endif
634 #ifdef HAVE_truncxfhf2
635 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
636 {
637 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641 #ifdef HAVE_trunctfhf2
642 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
643 {
644 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
645 return;
646 }
647 #endif
648
649 #ifdef HAVE_truncsftqf2
650 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
651 {
652 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
653 return;
654 }
655 #endif
656 #ifdef HAVE_truncdftqf2
657 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
658 {
659 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
660 return;
661 }
662 #endif
663 #ifdef HAVE_truncxftqf2
664 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
665 {
666 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
667 return;
668 }
669 #endif
670 #ifdef HAVE_trunctftqf2
671 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
672 {
673 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
674 return;
675 }
676 #endif
677
678 #ifdef HAVE_truncdfsf2
679 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
680 {
681 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
682 return;
683 }
684 #endif
685 #ifdef HAVE_truncxfsf2
686 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
687 {
688 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
689 return;
690 }
691 #endif
692 #ifdef HAVE_trunctfsf2
693 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
694 {
695 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
696 return;
697 }
698 #endif
699 #ifdef HAVE_truncxfdf2
700 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
701 {
702 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
703 return;
704 }
705 #endif
706 #ifdef HAVE_trunctfdf2
707 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
708 {
709 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
710 return;
711 }
712 #endif
713
714 libcall = (rtx) 0;
715 switch (from_mode)
716 {
717 case SFmode:
718 switch (to_mode)
719 {
720 case DFmode:
721 libcall = extendsfdf2_libfunc;
722 break;
723
724 case XFmode:
725 libcall = extendsfxf2_libfunc;
726 break;
727
728 case TFmode:
729 libcall = extendsftf2_libfunc;
730 break;
731
732 default:
733 break;
734 }
735 break;
736
737 case DFmode:
738 switch (to_mode)
739 {
740 case SFmode:
741 libcall = truncdfsf2_libfunc;
742 break;
743
744 case XFmode:
745 libcall = extenddfxf2_libfunc;
746 break;
747
748 case TFmode:
749 libcall = extenddftf2_libfunc;
750 break;
751
752 default:
753 break;
754 }
755 break;
756
757 case XFmode:
758 switch (to_mode)
759 {
760 case SFmode:
761 libcall = truncxfsf2_libfunc;
762 break;
763
764 case DFmode:
765 libcall = truncxfdf2_libfunc;
766 break;
767
768 default:
769 break;
770 }
771 break;
772
773 case TFmode:
774 switch (to_mode)
775 {
776 case SFmode:
777 libcall = trunctfsf2_libfunc;
778 break;
779
780 case DFmode:
781 libcall = trunctfdf2_libfunc;
782 break;
783
784 default:
785 break;
786 }
787 break;
788
789 default:
790 break;
791 }
792
793 if (libcall == (rtx) 0)
794 /* This conversion is not implemented yet. */
795 abort ();
796
797 start_sequence ();
798 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
799 1, from, from_mode);
800 insns = get_insns ();
801 end_sequence ();
802 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
803 from));
804 return;
805 }
806
807 /* Now both modes are integers. */
808
809 /* Handle expanding beyond a word. */
810 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
811 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
812 {
813 rtx insns;
814 rtx lowpart;
815 rtx fill_value;
816 rtx lowfrom;
817 int i;
818 enum machine_mode lowpart_mode;
819 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
820
821 /* Try converting directly if the insn is supported. */
822 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
823 != CODE_FOR_nothing)
824 {
825 /* If FROM is a SUBREG, put it into a register. Do this
826 so that we always generate the same set of insns for
827 better cse'ing; if an intermediate assignment occurred,
828 we won't be doing the operation directly on the SUBREG. */
829 if (optimize > 0 && GET_CODE (from) == SUBREG)
830 from = force_reg (from_mode, from);
831 emit_unop_insn (code, to, from, equiv_code);
832 return;
833 }
834 /* Next, try converting via full word. */
835 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
836 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
837 != CODE_FOR_nothing))
838 {
839 if (GET_CODE (to) == REG)
840 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
841 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
842 emit_unop_insn (code, to,
843 gen_lowpart (word_mode, to), equiv_code);
844 return;
845 }
846
847 /* No special multiword conversion insn; do it by hand. */
848 start_sequence ();
849
850 /* Since we will turn this into a no conflict block, we must ensure
851 that the source does not overlap the target. */
852
853 if (reg_overlap_mentioned_p (to, from))
854 from = force_reg (from_mode, from);
855
856 /* Get a copy of FROM widened to a word, if necessary. */
857 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
858 lowpart_mode = word_mode;
859 else
860 lowpart_mode = from_mode;
861
862 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
863
864 lowpart = gen_lowpart (lowpart_mode, to);
865 emit_move_insn (lowpart, lowfrom);
866
867 /* Compute the value to put in each remaining word. */
868 if (unsignedp)
869 fill_value = const0_rtx;
870 else
871 {
872 #ifdef HAVE_slt
873 if (HAVE_slt
874 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
875 && STORE_FLAG_VALUE == -1)
876 {
877 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
878 lowpart_mode, 0, 0);
879 fill_value = gen_reg_rtx (word_mode);
880 emit_insn (gen_slt (fill_value));
881 }
882 else
883 #endif
884 {
885 fill_value
886 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
887 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
888 NULL_RTX, 0);
889 fill_value = convert_to_mode (word_mode, fill_value, 1);
890 }
891 }
892
893 /* Fill the remaining words. */
894 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
895 {
896 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
897 rtx subword = operand_subword (to, index, 1, to_mode);
898
899 if (subword == 0)
900 abort ();
901
902 if (fill_value != subword)
903 emit_move_insn (subword, fill_value);
904 }
905
906 insns = get_insns ();
907 end_sequence ();
908
909 emit_no_conflict_block (insns, to, from, NULL_RTX,
910 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
911 return;
912 }
913
914 /* Truncating multi-word to a word or less. */
915 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
916 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
917 {
918 if (!((GET_CODE (from) == MEM
919 && ! MEM_VOLATILE_P (from)
920 && direct_load[(int) to_mode]
921 && ! mode_dependent_address_p (XEXP (from, 0)))
922 || GET_CODE (from) == REG
923 || GET_CODE (from) == SUBREG))
924 from = force_reg (from_mode, from);
925 convert_move (to, gen_lowpart (word_mode, from), 0);
926 return;
927 }
928
929 /* Handle pointer conversion. */ /* SPEE 900220. */
930 if (to_mode == PQImode)
931 {
932 if (from_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
934
935 #ifdef HAVE_truncqipqi2
936 if (HAVE_truncqipqi2)
937 {
938 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
939 return;
940 }
941 #endif /* HAVE_truncqipqi2 */
942 abort ();
943 }
944
945 if (from_mode == PQImode)
946 {
947 if (to_mode != QImode)
948 {
949 from = convert_to_mode (QImode, from, unsignedp);
950 from_mode = QImode;
951 }
952 else
953 {
954 #ifdef HAVE_extendpqiqi2
955 if (HAVE_extendpqiqi2)
956 {
957 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
958 return;
959 }
960 #endif /* HAVE_extendpqiqi2 */
961 abort ();
962 }
963 }
964
965 if (to_mode == PSImode)
966 {
967 if (from_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
969
970 #ifdef HAVE_truncsipsi2
971 if (HAVE_truncsipsi2)
972 {
973 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
974 return;
975 }
976 #endif /* HAVE_truncsipsi2 */
977 abort ();
978 }
979
980 if (from_mode == PSImode)
981 {
982 if (to_mode != SImode)
983 {
984 from = convert_to_mode (SImode, from, unsignedp);
985 from_mode = SImode;
986 }
987 else
988 {
989 #ifdef HAVE_extendpsisi2
990 if (! unsignedp && HAVE_extendpsisi2)
991 {
992 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
993 return;
994 }
995 #endif /* HAVE_extendpsisi2 */
996 #ifdef HAVE_zero_extendpsisi2
997 if (unsignedp && HAVE_zero_extendpsisi2)
998 {
999 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_zero_extendpsisi2 */
1003 abort ();
1004 }
1005 }
1006
1007 if (to_mode == PDImode)
1008 {
1009 if (from_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1011
1012 #ifdef HAVE_truncdipdi2
1013 if (HAVE_truncdipdi2)
1014 {
1015 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1016 return;
1017 }
1018 #endif /* HAVE_truncdipdi2 */
1019 abort ();
1020 }
1021
1022 if (from_mode == PDImode)
1023 {
1024 if (to_mode != DImode)
1025 {
1026 from = convert_to_mode (DImode, from, unsignedp);
1027 from_mode = DImode;
1028 }
1029 else
1030 {
1031 #ifdef HAVE_extendpdidi2
1032 if (HAVE_extendpdidi2)
1033 {
1034 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1035 return;
1036 }
1037 #endif /* HAVE_extendpdidi2 */
1038 abort ();
1039 }
1040 }
1041
1042 /* Now follow all the conversions between integers
1043 no more than a word long. */
1044
1045 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1046 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1048 GET_MODE_BITSIZE (from_mode)))
1049 {
1050 if (!((GET_CODE (from) == MEM
1051 && ! MEM_VOLATILE_P (from)
1052 && direct_load[(int) to_mode]
1053 && ! mode_dependent_address_p (XEXP (from, 0)))
1054 || GET_CODE (from) == REG
1055 || GET_CODE (from) == SUBREG))
1056 from = force_reg (from_mode, from);
1057 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1058 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1059 from = copy_to_reg (from);
1060 emit_move_insn (to, gen_lowpart (to_mode, from));
1061 return;
1062 }
1063
1064 /* Handle extension. */
1065 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1066 {
1067 /* Convert directly if that works. */
1068 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1069 != CODE_FOR_nothing)
1070 {
1071 emit_unop_insn (code, to, from, equiv_code);
1072 return;
1073 }
1074 else
1075 {
1076 enum machine_mode intermediate;
1077 rtx tmp;
1078 tree shift_amount;
1079
1080 /* Search for a mode to convert via. */
1081 for (intermediate = from_mode; intermediate != VOIDmode;
1082 intermediate = GET_MODE_WIDER_MODE (intermediate))
1083 if (((can_extend_p (to_mode, intermediate, unsignedp)
1084 != CODE_FOR_nothing)
1085 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1086 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1087 GET_MODE_BITSIZE (intermediate))))
1088 && (can_extend_p (intermediate, from_mode, unsignedp)
1089 != CODE_FOR_nothing))
1090 {
1091 convert_move (to, convert_to_mode (intermediate, from,
1092 unsignedp), unsignedp);
1093 return;
1094 }
1095
1096 /* No suitable intermediate mode.
1097 Generate what we need with shifts. */
1098 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1099 - GET_MODE_BITSIZE (from_mode), 0);
1100 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1101 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1102 to, unsignedp);
1103 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1104 to, unsignedp);
1105 if (tmp != to)
1106 emit_move_insn (to, tmp);
1107 return;
1108 }
1109 }
1110
1111 /* Support special truncate insns for certain modes. */
1112
1113 if (from_mode == DImode && to_mode == SImode)
1114 {
1115 #ifdef HAVE_truncdisi2
1116 if (HAVE_truncdisi2)
1117 {
1118 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1119 return;
1120 }
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1124 }
1125
1126 if (from_mode == DImode && to_mode == HImode)
1127 {
1128 #ifdef HAVE_truncdihi2
1129 if (HAVE_truncdihi2)
1130 {
1131 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1132 return;
1133 }
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1137 }
1138
1139 if (from_mode == DImode && to_mode == QImode)
1140 {
1141 #ifdef HAVE_truncdiqi2
1142 if (HAVE_truncdiqi2)
1143 {
1144 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1145 return;
1146 }
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1150 }
1151
1152 if (from_mode == SImode && to_mode == HImode)
1153 {
1154 #ifdef HAVE_truncsihi2
1155 if (HAVE_truncsihi2)
1156 {
1157 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1158 return;
1159 }
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1163 }
1164
1165 if (from_mode == SImode && to_mode == QImode)
1166 {
1167 #ifdef HAVE_truncsiqi2
1168 if (HAVE_truncsiqi2)
1169 {
1170 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1171 return;
1172 }
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1176 }
1177
1178 if (from_mode == HImode && to_mode == QImode)
1179 {
1180 #ifdef HAVE_trunchiqi2
1181 if (HAVE_trunchiqi2)
1182 {
1183 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1184 return;
1185 }
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1189 }
1190
1191 if (from_mode == TImode && to_mode == DImode)
1192 {
1193 #ifdef HAVE_trunctidi2
1194 if (HAVE_trunctidi2)
1195 {
1196 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1197 return;
1198 }
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1202 }
1203
1204 if (from_mode == TImode && to_mode == SImode)
1205 {
1206 #ifdef HAVE_trunctisi2
1207 if (HAVE_trunctisi2)
1208 {
1209 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1210 return;
1211 }
1212 #endif
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 return;
1215 }
1216
1217 if (from_mode == TImode && to_mode == HImode)
1218 {
1219 #ifdef HAVE_trunctihi2
1220 if (HAVE_trunctihi2)
1221 {
1222 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1223 return;
1224 }
1225 #endif
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 return;
1228 }
1229
1230 if (from_mode == TImode && to_mode == QImode)
1231 {
1232 #ifdef HAVE_trunctiqi2
1233 if (HAVE_trunctiqi2)
1234 {
1235 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1236 return;
1237 }
1238 #endif
1239 convert_move (to, force_reg (from_mode, from), unsignedp);
1240 return;
1241 }
1242
1243 /* Handle truncation of volatile memrefs, and so on;
1244 the things that couldn't be truncated directly,
1245 and for which there was no special instruction. */
1246 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1247 {
1248 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1249 emit_move_insn (to, temp);
1250 return;
1251 }
1252
1253 /* Mode combination is not recognized. */
1254 abort ();
1255 }
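/* Illustrative sketch (assumes the target supports both SImode and DImode):
   widen an SImode value into a fresh DImode pseudo, sign-extending because
   UNSIGNEDP is zero.  The helper is hypothetical.  */
#if 0
static rtx
example_widen_si_to_di (si_val)
     rtx si_val;
{
  rtx di_reg = gen_reg_rtx (DImode);
  convert_move (di_reg, si_val, 0);
  return di_reg;
}
#endif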
1256
1257 /* Return an rtx for a value that would result
1258 from converting X to mode MODE.
1259 Both X and MODE may be floating, or both integer.
1260 UNSIGNEDP is nonzero if X is an unsigned value.
1261 This can be done by referring to a part of X in place
1262 or by copying to a new temporary with conversion.
1263
1264 This function *must not* call protect_from_queue
1265 except when putting X into an insn (in which case convert_move does it). */
1266
1267 rtx
1268 convert_to_mode (mode, x, unsignedp)
1269 enum machine_mode mode;
1270 rtx x;
1271 int unsignedp;
1272 {
1273 return convert_modes (mode, VOIDmode, x, unsignedp);
1274 }
1275
1276 /* Return an rtx for a value that would result
1277 from converting X from mode OLDMODE to mode MODE.
1278 Both modes may be floating, or both integer.
1279 UNSIGNEDP is nonzero if X is an unsigned value.
1280
1281 This can be done by referring to a part of X in place
1282 or by copying to a new temporary with conversion.
1283
1284 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1285
1286 This function *must not* call protect_from_queue
1287 except when putting X into an insn (in which case convert_move does it). */
1288
1289 rtx
1290 convert_modes (mode, oldmode, x, unsignedp)
1291 enum machine_mode mode, oldmode;
1292 rtx x;
1293 int unsignedp;
1294 {
1295 register rtx temp;
1296
1297 /* If FROM is a SUBREG that indicates that we have already done at least
1298 the required extension, strip it. */
1299
1300 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1301 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1302 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1303 x = gen_lowpart (mode, x);
1304
1305 if (GET_MODE (x) != VOIDmode)
1306 oldmode = GET_MODE (x);
1307
1308 if (mode == oldmode)
1309 return x;
1310
1311 /* There is one case that we must handle specially: If we are converting
1312 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1313 we are to interpret the constant as unsigned, gen_lowpart will do
1314 the wrong if the constant appears negative. What we want to do is
1315 make the high-order word of the constant zero, not all ones. */
1316
1317 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1319 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1320 {
1321 HOST_WIDE_INT val = INTVAL (x);
1322
1323 if (oldmode != VOIDmode
1324 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1325 {
1326 int width = GET_MODE_BITSIZE (oldmode);
1327
1328 /* We need to zero extend VAL. */
1329 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1330 }
1331
1332 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1333 }
1334
1335 /* We can do this with a gen_lowpart if both desired and current modes
1336 are integer, and this is either a constant integer, a register, or a
1337 non-volatile MEM. Except for the constant case where MODE is no
1338 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1339
1340 if ((GET_CODE (x) == CONST_INT
1341 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1342 || (GET_MODE_CLASS (mode) == MODE_INT
1343 && GET_MODE_CLASS (oldmode) == MODE_INT
1344 && (GET_CODE (x) == CONST_DOUBLE
1345 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1346 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1347 && direct_load[(int) mode])
1348 || (GET_CODE (x) == REG
1349 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1350 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1351 {
1352 /* ?? If we don't know OLDMODE, we have to assume here that
1353 X does not need sign- or zero-extension. This may not be
1354 the case, but it's the best we can do. */
1355 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1356 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1357 {
1358 HOST_WIDE_INT val = INTVAL (x);
1359 int width = GET_MODE_BITSIZE (oldmode);
1360
1361 /* We must sign or zero-extend in this case. Start by
1362 zero-extending, then sign extend if we need to. */
1363 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1364 if (! unsignedp
1365 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1366 val |= (HOST_WIDE_INT) (-1) << width;
1367
1368 return GEN_INT (trunc_int_for_mode (val, mode));
1369 }
1370
1371 return gen_lowpart (mode, x);
1372 }
1373
1374 temp = gen_reg_rtx (mode);
1375 convert_move (temp, x, unsignedp);
1376 return temp;
1377 }
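/* Worked example of the CONST_INT handling above (illustrative): the call
   convert_modes (HImode, QImode, GEN_INT (-1), 1) first masks the value
   with (1 << 8) - 1 and returns (const_int 255); with UNSIGNEDP zero the
   sign bit is propagated instead and (const_int -1) is returned.  */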
1378 \f
1379 /* This macro is used to determine the largest unit size that
1380 move_by_pieces can use. */
1381
1382 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1383 move efficiently, as opposed to MOVE_MAX which is the maximum
1384 number of bytes we can move with a single instruction. */
1385
1386 #ifndef MOVE_MAX_PIECES
1387 #define MOVE_MAX_PIECES MOVE_MAX
1388 #endif
1389
1390 /* Generate several move instructions to copy LEN bytes
1391 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1392 The caller must pass FROM and TO
1393 through protect_from_queue before calling.
1394
1395 When TO is NULL, emit_single_push_insn is used to push FROM
1396 onto the stack.
1397
1398 ALIGN is maximum alignment we can assume. */
1399
1400 void
1401 move_by_pieces (to, from, len, align)
1402 rtx to, from;
1403 unsigned HOST_WIDE_INT len;
1404 unsigned int align;
1405 {
1406 struct move_by_pieces data;
1407 rtx to_addr, from_addr = XEXP (from, 0);
1408 unsigned int max_size = MOVE_MAX_PIECES + 1;
1409 enum machine_mode mode = VOIDmode, tmode;
1410 enum insn_code icode;
1411
1412 data.offset = 0;
1413 data.from_addr = from_addr;
1414 if (to)
1415 {
1416 to_addr = XEXP (to, 0);
1417 data.to = to;
1418 data.autinc_to
1419 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1420 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1421 data.reverse
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1423 }
1424 else
1425 {
1426 to_addr = NULL_RTX;
1427 data.to = NULL_RTX;
1428 data.autinc_to = 1;
1429 #ifdef STACK_GROWS_DOWNWARD
1430 data.reverse = 1;
1431 #else
1432 data.reverse = 0;
1433 #endif
1434 }
1435 data.to_addr = to_addr;
1436 data.from = from;
1437 data.autinc_from
1438 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1439 || GET_CODE (from_addr) == POST_INC
1440 || GET_CODE (from_addr) == POST_DEC);
1441
1442 data.explicit_inc_from = 0;
1443 data.explicit_inc_to = 0;
1444 if (data.reverse) data.offset = len;
1445 data.len = len;
1446
1447 /* If copying requires more than two move insns,
1448 copy addresses to registers (to make displacements shorter)
1449 and use post-increment if available. */
1450 if (!(data.autinc_from && data.autinc_to)
1451 && move_by_pieces_ninsns (len, align) > 2)
1452 {
1453 /* Find the mode of the largest move... */
1454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1456 if (GET_MODE_SIZE (tmode) < max_size)
1457 mode = tmode;
1458
1459 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1460 {
1461 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1462 data.autinc_from = 1;
1463 data.explicit_inc_from = -1;
1464 }
1465 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1466 {
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 data.autinc_from = 1;
1469 data.explicit_inc_from = 1;
1470 }
1471 if (!data.autinc_from && CONSTANT_P (from_addr))
1472 data.from_addr = copy_addr_to_reg (from_addr);
1473 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1474 {
1475 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1476 data.autinc_to = 1;
1477 data.explicit_inc_to = -1;
1478 }
1479 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1480 {
1481 data.to_addr = copy_addr_to_reg (to_addr);
1482 data.autinc_to = 1;
1483 data.explicit_inc_to = 1;
1484 }
1485 if (!data.autinc_to && CONSTANT_P (to_addr))
1486 data.to_addr = copy_addr_to_reg (to_addr);
1487 }
1488
1489 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1490 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1491 align = MOVE_MAX * BITS_PER_UNIT;
1492
1493 /* First move what we can in the largest integer mode, then go to
1494 successively smaller modes. */
1495
1496 while (max_size > 1)
1497 {
1498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1500 if (GET_MODE_SIZE (tmode) < max_size)
1501 mode = tmode;
1502
1503 if (mode == VOIDmode)
1504 break;
1505
1506 icode = mov_optab->handlers[(int) mode].insn_code;
1507 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1508 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1509
1510 max_size = GET_MODE_SIZE (mode);
1511 }
1512
1513 /* The code above should have handled everything. */
1514 if (data.len > 0)
1515 abort ();
1516 }
1517
1518 /* Return number of insns required to move L bytes by pieces.
1519 ALIGN (in bits) is maximum alignment we can assume. */
1520
1521 static unsigned HOST_WIDE_INT
1522 move_by_pieces_ninsns (l, align)
1523 unsigned HOST_WIDE_INT l;
1524 unsigned int align;
1525 {
1526 unsigned HOST_WIDE_INT n_insns = 0;
1527 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1528
1529 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1530 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1531 align = MOVE_MAX * BITS_PER_UNIT;
1532
1533 while (max_size > 1)
1534 {
1535 enum machine_mode mode = VOIDmode, tmode;
1536 enum insn_code icode;
1537
1538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1540 if (GET_MODE_SIZE (tmode) < max_size)
1541 mode = tmode;
1542
1543 if (mode == VOIDmode)
1544 break;
1545
1546 icode = mov_optab->handlers[(int) mode].insn_code;
1547 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1548 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1549
1550 max_size = GET_MODE_SIZE (mode);
1551 }
1552
1553 if (l)
1554 abort ();
1555 return n_insns;
1556 }
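/* Worked example (illustrative, assuming MOVE_MAX == 4 and alignment good
   enough for SImode): for L == 11 the loop counts 11 / 4 = 2 SImode moves
   (3 bytes left), then 3 / 2 = 1 HImode move (1 byte left), then one final
   QImode move, for a total of 4 insns.  */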
1557
1558 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1559 with move instructions for mode MODE. GENFUN is the gen_... function
1560 to make a move insn for that mode. DATA has all the other info. */
1561
1562 static void
1563 move_by_pieces_1 (genfun, mode, data)
1564 rtx (*genfun) PARAMS ((rtx, ...));
1565 enum machine_mode mode;
1566 struct move_by_pieces *data;
1567 {
1568 unsigned int size = GET_MODE_SIZE (mode);
1569 rtx to1 = NULL_RTX, from1;
1570
1571 while (data->len >= size)
1572 {
1573 if (data->reverse)
1574 data->offset -= size;
1575
1576 if (data->to)
1577 {
1578 if (data->autinc_to)
1579 {
1580 to1 = replace_equiv_address (data->to, data->to_addr);
1581 to1 = adjust_address (to1, mode, 0);
1582 }
1583 else
1584 to1 = adjust_address (data->to, mode, data->offset);
1585 }
1586
1587 if (data->autinc_from)
1588 {
1589 from1 = replace_equiv_address (data->from, data->from_addr);
1590 from1 = adjust_address (from1, mode, 0);
1591 }
1592 else
1593 from1 = adjust_address (data->from, mode, data->offset);
1594
1595 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1596 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1597 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1598 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1599
1600 if (data->to)
1601 emit_insn ((*genfun) (to1, from1));
1602 else
1603 emit_single_push_insn (mode, from1, NULL);
1604
1605 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1606 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1607 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1608 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1609
1610 if (! data->reverse)
1611 data->offset += size;
1612
1613 data->len -= size;
1614 }
1615 }
1616 \f
1617 /* Emit code to move a block Y to a block X.
1618 This may be done with string-move instructions,
1619 with multiple scalar move instructions, or with a library call.
1620
1621 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1622 with mode BLKmode.
1623 SIZE is an rtx that says how long they are.
1624 ALIGN is the maximum alignment we can assume they have.
1625
1626 Return the address of the new block, if memcpy is called and returns it,
1627 0 otherwise. */
1628
1629 rtx
1630 emit_block_move (x, y, size, align)
1631 rtx x, y;
1632 rtx size;
1633 unsigned int align;
1634 {
1635 rtx retval = 0;
1636 #ifdef TARGET_MEM_FUNCTIONS
1637 static tree fn;
1638 tree call_expr, arg_list;
1639 #endif
1640
1641 if (GET_MODE (x) != BLKmode)
1642 abort ();
1643
1644 if (GET_MODE (y) != BLKmode)
1645 abort ();
1646
1647 x = protect_from_queue (x, 1);
1648 y = protect_from_queue (y, 0);
1649 size = protect_from_queue (size, 0);
1650
1651 if (GET_CODE (x) != MEM)
1652 abort ();
1653 if (GET_CODE (y) != MEM)
1654 abort ();
1655 if (size == 0)
1656 abort ();
1657
1658 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1659 move_by_pieces (x, y, INTVAL (size), align);
1660 else
1661 {
1662 /* Try the most limited insn first, because there's no point
1663 including more than one in the machine description unless
1664 the more limited one has some advantage. */
1665
1666 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1667 enum machine_mode mode;
1668
1669 /* Since this is a move insn, we don't care about volatility. */
1670 volatile_ok = 1;
1671
1672 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1673 mode = GET_MODE_WIDER_MODE (mode))
1674 {
1675 enum insn_code code = movstr_optab[(int) mode];
1676 insn_operand_predicate_fn pred;
1677
1678 if (code != CODE_FOR_nothing
1679 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1680 here because if SIZE is less than the mode mask, as it is
1681 returned by the macro, it will definitely be less than the
1682 actual mode mask. */
1683 && ((GET_CODE (size) == CONST_INT
1684 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1685 <= (GET_MODE_MASK (mode) >> 1)))
1686 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1687 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1688 || (*pred) (x, BLKmode))
1689 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1690 || (*pred) (y, BLKmode))
1691 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1692 || (*pred) (opalign, VOIDmode)))
1693 {
1694 rtx op2;
1695 rtx last = get_last_insn ();
1696 rtx pat;
1697
1698 op2 = convert_to_mode (mode, size, 1);
1699 pred = insn_data[(int) code].operand[2].predicate;
1700 if (pred != 0 && ! (*pred) (op2, mode))
1701 op2 = copy_to_mode_reg (mode, op2);
1702
1703 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1704 if (pat)
1705 {
1706 emit_insn (pat);
1707 volatile_ok = 0;
1708 return 0;
1709 }
1710 else
1711 delete_insns_since (last);
1712 }
1713 }
1714
1715 volatile_ok = 0;
1716
1717 /* X, Y, or SIZE may have been passed through protect_from_queue.
1718
1719 It is unsafe to save the value generated by protect_from_queue
1720 and reuse it later. Consider what happens if emit_queue is
1721 called before the return value from protect_from_queue is used.
1722
1723 Expansion of the CALL_EXPR below will call emit_queue before
1724 we are finished emitting RTL for argument setup. So if we are
1725 not careful we could get the wrong value for an argument.
1726
1727 To avoid this problem we go ahead and emit code to copy X, Y &
1728 SIZE into new pseudos. We can then place those new pseudos
1729 into an RTL_EXPR and use them later, even after a call to
1730 emit_queue.
1731
1732 Note this is not strictly needed for library calls since they
1733 do not call emit_queue before loading their arguments. However,
1734 we may need to have library calls call emit_queue in the future
1735 since failing to do so could cause problems for targets which
1736 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1737 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1738 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1739
1740 #ifdef TARGET_MEM_FUNCTIONS
1741 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1742 #else
1743 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1744 TREE_UNSIGNED (integer_type_node));
1745 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1746 #endif
1747
1748 #ifdef TARGET_MEM_FUNCTIONS
1749 /* It is incorrect to use the libcall calling conventions to call
1750 memcpy in this context.
1751
1752 This could be a user call to memcpy and the user may wish to
1753 examine the return value from memcpy.
1754
1755 For targets where libcalls and normal calls have different conventions
1756 for returning pointers, we could end up generating incorrect code.
1757
1758 So instead of using a libcall sequence we build up a suitable
1759 CALL_EXPR and expand the call in the normal fashion. */
1760 if (fn == NULL_TREE)
1761 {
1762 tree fntype;
1763
1764 /* This was copied from except.c; I don't know if all this is
1765 necessary in this context or not. */
1766 fn = get_identifier ("memcpy");
1767 fntype = build_pointer_type (void_type_node);
1768 fntype = build_function_type (fntype, NULL_TREE);
1769 fn = build_decl (FUNCTION_DECL, fn, fntype);
1770 ggc_add_tree_root (&fn, 1);
1771 DECL_EXTERNAL (fn) = 1;
1772 TREE_PUBLIC (fn) = 1;
1773 DECL_ARTIFICIAL (fn) = 1;
1774 TREE_NOTHROW (fn) = 1;
1775 make_decl_rtl (fn, NULL);
1776 assemble_external (fn);
1777 }
1778
1779 /* We need to make an argument list for the function call.
1780
1781 memcpy has three arguments: the first two are void * addresses and
1782 the last is a size_t byte count for the copy. */
1783 arg_list
1784 = build_tree_list (NULL_TREE,
1785 make_tree (build_pointer_type (void_type_node), x));
1786 TREE_CHAIN (arg_list)
1787 = build_tree_list (NULL_TREE,
1788 make_tree (build_pointer_type (void_type_node), y));
1789 TREE_CHAIN (TREE_CHAIN (arg_list))
1790 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1791 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1792
1793 /* Now we have to build up the CALL_EXPR itself. */
1794 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1795 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1796 call_expr, arg_list, NULL_TREE);
1797 TREE_SIDE_EFFECTS (call_expr) = 1;
1798
1799 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1800 #else
1801 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1802 VOIDmode, 3, y, Pmode, x, Pmode,
1803 convert_to_mode (TYPE_MODE (integer_type_node), size,
1804 TREE_UNSIGNED (integer_type_node)),
1805 TYPE_MODE (integer_type_node));
1806 #endif
1807 }
1808
1809 return retval;
1810 }
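/* Illustrative sketch (the helper is hypothetical): copy a BLKmode object
   of SIZE bytes, promising emit_block_move only word alignment.  The
   return value is non-null only when the memcpy CALL_EXPR path is taken.  */
#if 0
static rtx
example_copy_block (dst, src, size)
     rtx dst, src, size;
{
  return emit_block_move (dst, src, size, (unsigned int) BITS_PER_WORD);
}
#endif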
1811 \f
1812 /* Copy all or part of a value X into registers starting at REGNO.
1813 The number of registers to be filled is NREGS. */
1814
1815 void
1816 move_block_to_reg (regno, x, nregs, mode)
1817 int regno;
1818 rtx x;
1819 int nregs;
1820 enum machine_mode mode;
1821 {
1822 int i;
1823 #ifdef HAVE_load_multiple
1824 rtx pat;
1825 rtx last;
1826 #endif
1827
1828 if (nregs == 0)
1829 return;
1830
1831 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1832 x = validize_mem (force_const_mem (mode, x));
1833
1834 /* See if the machine can do this with a load multiple insn. */
1835 #ifdef HAVE_load_multiple
1836 if (HAVE_load_multiple)
1837 {
1838 last = get_last_insn ();
1839 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1840 GEN_INT (nregs));
1841 if (pat)
1842 {
1843 emit_insn (pat);
1844 return;
1845 }
1846 else
1847 delete_insns_since (last);
1848 }
1849 #endif
1850
1851 for (i = 0; i < nregs; i++)
1852 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1853 operand_subword_force (x, i, mode));
1854 }
1855
1856 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1857 The number of registers to be filled is NREGS. SIZE indicates the number
1858 of bytes in the object X. */
1859
1860 void
1861 move_block_from_reg (regno, x, nregs, size)
1862 int regno;
1863 rtx x;
1864 int nregs;
1865 int size;
1866 {
1867 int i;
1868 #ifdef HAVE_store_multiple
1869 rtx pat;
1870 rtx last;
1871 #endif
1872 enum machine_mode mode;
1873
1874 if (nregs == 0)
1875 return;
1876
1877 /* If SIZE is that of a mode no bigger than a word, just use that
1878 mode's store operation. */
1879 if (size <= UNITS_PER_WORD
1880 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1881 {
1882 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1883 return;
1884 }
1885
1886 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1887 to the left before storing to memory. Note that the previous test
1888 doesn't handle all cases (e.g. SIZE == 3). */
1889 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1890 {
1891 rtx tem = operand_subword (x, 0, 1, BLKmode);
1892 rtx shift;
1893
1894 if (tem == 0)
1895 abort ();
1896
1897 shift = expand_shift (LSHIFT_EXPR, word_mode,
1898 gen_rtx_REG (word_mode, regno),
1899 build_int_2 ((UNITS_PER_WORD - size)
1900 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1901 emit_move_insn (tem, shift);
1902 return;
1903 }
1904
1905 /* See if the machine can do this with a store multiple insn. */
1906 #ifdef HAVE_store_multiple
1907 if (HAVE_store_multiple)
1908 {
1909 last = get_last_insn ();
1910 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1911 GEN_INT (nregs));
1912 if (pat)
1913 {
1914 emit_insn (pat);
1915 return;
1916 }
1917 else
1918 delete_insns_since (last);
1919 }
1920 #endif
1921
1922 for (i = 0; i < nregs; i++)
1923 {
1924 rtx tem = operand_subword (x, i, 1, BLKmode);
1925
1926 if (tem == 0)
1927 abort ();
1928
1929 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1930 }
1931 }
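/* For illustration only, assume a big-endian target with UNITS_PER_WORD == 4
   and BITS_PER_UNIT == 8 (these particular values are not required by the
   code).  Storing a 3-byte BLKmode value with move_block_from_reg takes the
   shift branch, since there is no 3-byte integer mode:

       shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT
             = (4 - 3) * 8
             = 8

   so the register is shifted left by one byte before the word-sized store,
   leaving the three meaningful bytes in the high-order end of the word,
   which is where a big-endian block expects them.  */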
1932
1933 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1934 registers represented by a PARALLEL. SSIZE represents the total size of
1935 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1936 SRC in bits. */
1937 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1938 the balance will be in what would be the low-order memory addresses, i.e.
1939 left justified for big endian, right justified for little endian. This
1940 happens to be true for the targets currently using this support. If this
1941 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1942 would be needed. */
1943
1944 void
1945 emit_group_load (dst, orig_src, ssize, align)
1946 rtx dst, orig_src;
1947 unsigned int align;
1948 int ssize;
1949 {
1950 rtx *tmps, src;
1951 int start, i;
1952
1953 if (GET_CODE (dst) != PARALLEL)
1954 abort ();
1955
1956 /* Check for a NULL entry, used to indicate that the parameter goes
1957 both on the stack and in registers. */
1958 if (XEXP (XVECEXP (dst, 0, 0), 0))
1959 start = 0;
1960 else
1961 start = 1;
1962
1963 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1964
1965 /* Process the pieces. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1967 {
1968 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1969 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1970 unsigned int bytelen = GET_MODE_SIZE (mode);
1971 int shift = 0;
1972
1973 /* Handle trailing fragments that run over the size of the struct. */
1974 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1975 {
1976 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1977 bytelen = ssize - bytepos;
1978 if (bytelen <= 0)
1979 abort ();
1980 }
1981
1982 /* If we won't be loading directly from memory, protect the real source
1983 from strange tricks we might play; but make sure that the source can
1984 be loaded directly into the destination. */
1985 src = orig_src;
1986 if (GET_CODE (orig_src) != MEM
1987 && (!CONSTANT_P (orig_src)
1988 || (GET_MODE (orig_src) != mode
1989 && GET_MODE (orig_src) != VOIDmode)))
1990 {
1991 if (GET_MODE (orig_src) == VOIDmode)
1992 src = gen_reg_rtx (mode);
1993 else
1994 src = gen_reg_rtx (GET_MODE (orig_src));
1995 emit_move_insn (src, orig_src);
1996 }
1997
1998 /* Optimize the access just a bit. */
1999 if (GET_CODE (src) == MEM
2000 && align >= GET_MODE_ALIGNMENT (mode)
2001 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2002 && bytelen == GET_MODE_SIZE (mode))
2003 {
2004 tmps[i] = gen_reg_rtx (mode);
2005 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2006 }
2007 else if (GET_CODE (src) == CONCAT)
2008 {
2009 if (bytepos == 0
2010 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2011 tmps[i] = XEXP (src, 0);
2012 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2013 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2014 tmps[i] = XEXP (src, 1);
2015 else
2016 abort ();
2017 }
2018 else if (CONSTANT_P (src)
2019 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2020 tmps[i] = src;
2021 else
2022 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2023 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2024 mode, mode, align, ssize);
2025
2026 if (BYTES_BIG_ENDIAN && shift)
2027 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2028 tmps[i], 0, OPTAB_WIDEN);
2029 }
2030
2031 emit_queue ();
2032
2033 /* Copy the extracted pieces into the proper (probable) hard regs. */
2034 for (i = start; i < XVECLEN (dst, 0); i++)
2035 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2036 }
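/* As a concrete, purely hypothetical picture of what DST looks like here:
   a 16-byte structure returned in two DImode registers might be described
   by a PARALLEL along the lines of

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   where each CONST_INT is the byte offset of the piece within SRC, exactly
   the values read back above as BYTEPOS; the register numbers and modes
   are of course target dependent.  */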
2037
2038 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2039 registers represented by a PARALLEL. SSIZE represents the total size of
2040 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2041
2042 void
2043 emit_group_store (orig_dst, src, ssize, align)
2044 rtx orig_dst, src;
2045 int ssize;
2046 unsigned int align;
2047 {
2048 rtx *tmps, dst;
2049 int start, i;
2050
2051 if (GET_CODE (src) != PARALLEL)
2052 abort ();
2053
2054 /* Check for a NULL entry, used to indicate that the parameter goes
2055 both on the stack and in registers. */
2056 if (XEXP (XVECEXP (src, 0, 0), 0))
2057 start = 0;
2058 else
2059 start = 1;
2060
2061 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2062
2063 /* Copy the (probable) hard regs into pseudos. */
2064 for (i = start; i < XVECLEN (src, 0); i++)
2065 {
2066 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2067 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2068 emit_move_insn (tmps[i], reg);
2069 }
2070 emit_queue ();
2071
2072 /* If we won't be storing directly into memory, protect the real destination
2073 from strange tricks we might play. */
2074 dst = orig_dst;
2075 if (GET_CODE (dst) == PARALLEL)
2076 {
2077 rtx temp;
2078
2079 /* We can get a PARALLEL dst if there is a conditional expression in
2080 a return statement. In that case, the dst and src are the same,
2081 so no action is necessary. */
2082 if (rtx_equal_p (dst, src))
2083 return;
2084
2085 /* It is unclear if we can ever reach here, but we may as well handle
2086 it. Allocate a temporary, and split this into a store/load to/from
2087 the temporary. */
2088
2089 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2090 emit_group_store (temp, src, ssize, align);
2091 emit_group_load (dst, temp, ssize, align);
2092 return;
2093 }
2094 else if (GET_CODE (dst) != MEM)
2095 {
2096 dst = gen_reg_rtx (GET_MODE (orig_dst));
2097 /* Make life a bit easier for combine. */
2098 emit_move_insn (dst, const0_rtx);
2099 }
2100
2101 /* Process the pieces. */
2102 for (i = start; i < XVECLEN (src, 0); i++)
2103 {
2104 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2105 enum machine_mode mode = GET_MODE (tmps[i]);
2106 unsigned int bytelen = GET_MODE_SIZE (mode);
2107
2108 /* Handle trailing fragments that run over the size of the struct. */
2109 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2110 {
2111 if (BYTES_BIG_ENDIAN)
2112 {
2113 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2114 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2115 tmps[i], 0, OPTAB_WIDEN);
2116 }
2117 bytelen = ssize - bytepos;
2118 }
2119
2120 /* Optimize the access just a bit. */
2121 if (GET_CODE (dst) == MEM
2122 && align >= GET_MODE_ALIGNMENT (mode)
2123 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2124 && bytelen == GET_MODE_SIZE (mode))
2125 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2126 else
2127 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2128 mode, tmps[i], align, ssize);
2129 }
2130
2131 emit_queue ();
2132
2133 /* Copy from the pseudo into the (probable) hard reg. */
2134 if (GET_CODE (dst) == REG)
2135 emit_move_insn (orig_dst, dst);
2136 }
2137
2138 /* Generate code to copy a BLKmode object of TYPE out of a
2139 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2140 is null, a stack temporary is created. TGTBLK is returned.
2141
2142 The primary purpose of this routine is to handle functions
2143 that return BLKmode structures in registers. Some machines
2144 (the PA for example) want to return all small structures
2145 in registers regardless of the structure's alignment. */
2146
2147 rtx
2148 copy_blkmode_from_reg (tgtblk, srcreg, type)
2149 rtx tgtblk;
2150 rtx srcreg;
2151 tree type;
2152 {
2153 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2154 rtx src = NULL, dst = NULL;
2155 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2156 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2157
2158 if (tgtblk == 0)
2159 {
2160 tgtblk = assign_temp (build_qualified_type (type,
2161 (TYPE_QUALS (type)
2162 | TYPE_QUAL_CONST)),
2163 0, 1, 1);
2164 preserve_temp_slots (tgtblk);
2165 }
2166
2167 /* This code assumes srcreg is at least a full word. If it isn't,
2168 copy it into a new pseudo which is a full word. */
2169 if (GET_MODE (srcreg) != BLKmode
2170 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2171 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2172
2173 /* Structures whose size is not a multiple of a word are aligned
2174 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2175 machine, this means we must skip the empty high order bytes when
2176 calculating the bit offset. */
2177 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2178 big_endian_correction
2179 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2180
2181 /* Copy the structure BITSIZE bits at a time.
2182
2183 We could probably emit more efficient code for machines which do not use
2184 strict alignment, but it doesn't seem worth the effort at the current
2185 time. */
2186 for (bitpos = 0, xbitpos = big_endian_correction;
2187 bitpos < bytes * BITS_PER_UNIT;
2188 bitpos += bitsize, xbitpos += bitsize)
2189 {
2190 /* We need a new source operand each time xbitpos is on a
2191 word boundary and when xbitpos == big_endian_correction
2192 (the first time through). */
2193 if (xbitpos % BITS_PER_WORD == 0
2194 || xbitpos == big_endian_correction)
2195 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2196 GET_MODE (srcreg));
2197
2198 /* We need a new destination operand each time bitpos is on
2199 a word boundary. */
2200 if (bitpos % BITS_PER_WORD == 0)
2201 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2202
2203 /* Use xbitpos for the source extraction (right justified) and
2204 bitpos for the destination store (left justified). */
2205 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2206 extract_bit_field (src, bitsize,
2207 xbitpos % BITS_PER_WORD, 1,
2208 NULL_RTX, word_mode, word_mode,
2209 bitsize, BITS_PER_WORD),
2210 bitsize, BITS_PER_WORD);
2211 }
2212
2213 return tgtblk;
2214 }
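/* A small worked example of the big-endian correction above, assuming
   BITS_PER_WORD == 32, UNITS_PER_WORD == 4 and a 3-byte structure (the
   numbers are only illustrative): bytes % UNITS_PER_WORD == 3, so

       big_endian_correction = 32 - 3 * BITS_PER_UNIT = 8,

   and the loop starts with xbitpos == 8 while bitpos == 0.  The extraction
   therefore skips the empty high-order byte of the source register, and the
   corresponding bits are stored at the start of TGTBLK.  */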
2215
2216 /* Add a USE expression for REG to the (possibly empty) list pointed
2217 to by CALL_FUSAGE. REG must denote a hard register. */
2218
2219 void
2220 use_reg (call_fusage, reg)
2221 rtx *call_fusage, reg;
2222 {
2223 if (GET_CODE (reg) != REG
2224 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2225 abort ();
2226
2227 *call_fusage
2228 = gen_rtx_EXPR_LIST (VOIDmode,
2229 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2230 }
2231
2232 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2233 starting at REGNO. All of these registers must be hard registers. */
2234
2235 void
2236 use_regs (call_fusage, regno, nregs)
2237 rtx *call_fusage;
2238 int regno;
2239 int nregs;
2240 {
2241 int i;
2242
2243 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2244 abort ();
2245
2246 for (i = 0; i < nregs; i++)
2247 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2248 }
2249
2250 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2251 PARALLEL REGS. This is for calls that pass values in multiple
2252 non-contiguous locations. The Irix 6 ABI has examples of this. */
2253
2254 void
2255 use_group_regs (call_fusage, regs)
2256 rtx *call_fusage;
2257 rtx regs;
2258 {
2259 int i;
2260
2261 for (i = 0; i < XVECLEN (regs, 0); i++)
2262 {
2263 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2264
2265 /* A NULL entry means the parameter goes both on the stack and in
2266 registers. This can also be a MEM for targets that pass values
2267 partially on the stack and partially in registers. */
2268 if (reg != 0 && GET_CODE (reg) == REG)
2269 use_reg (call_fusage, reg);
2270 }
2271 }
2272 \f
2273
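/* Determine whether the LEN bytes generated by CONSTFUN can be stored
   to memory using several move instructions.  CONSTFUNDATA is a pointer
   which will be passed as argument in every CONSTFUN call; ALIGN is the
   maximum alignment we can assume.  Return nonzero if a call to
   store_by_pieces should succeed.  */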
2274 int
2275 can_store_by_pieces (len, constfun, constfundata, align)
2276 unsigned HOST_WIDE_INT len;
2277 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2278 PTR constfundata;
2279 unsigned int align;
2280 {
2281 unsigned HOST_WIDE_INT max_size, l;
2282 HOST_WIDE_INT offset = 0;
2283 enum machine_mode mode, tmode;
2284 enum insn_code icode;
2285 int reverse;
2286 rtx cst;
2287
2288 if (! MOVE_BY_PIECES_P (len, align))
2289 return 0;
2290
2291 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2292 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2293 align = MOVE_MAX * BITS_PER_UNIT;
2294
2295 /* We would first store what we can in the largest integer mode, then go to
2296 successively smaller modes. */
2297
2298 for (reverse = 0;
2299 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2300 reverse++)
2301 {
2302 l = len;
2303 mode = VOIDmode;
2304 max_size = MOVE_MAX_PIECES + 1;
2305 while (max_size > 1)
2306 {
2307 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2308 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2309 if (GET_MODE_SIZE (tmode) < max_size)
2310 mode = tmode;
2311
2312 if (mode == VOIDmode)
2313 break;
2314
2315 icode = mov_optab->handlers[(int) mode].insn_code;
2316 if (icode != CODE_FOR_nothing
2317 && align >= GET_MODE_ALIGNMENT (mode))
2318 {
2319 unsigned int size = GET_MODE_SIZE (mode);
2320
2321 while (l >= size)
2322 {
2323 if (reverse)
2324 offset -= size;
2325
2326 cst = (*constfun) (constfundata, offset, mode);
2327 if (!LEGITIMATE_CONSTANT_P (cst))
2328 return 0;
2329
2330 if (!reverse)
2331 offset += size;
2332
2333 l -= size;
2334 }
2335 }
2336
2337 max_size = GET_MODE_SIZE (mode);
2338 }
2339
2340 /* The code above should have handled everything. */
2341 if (l != 0)
2342 abort ();
2343 }
2344
2345 return 1;
2346 }
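/* A minimal sketch of a CONSTFUN suitable for the routine above and for
   store_by_pieces below; the name and the byte-replication strategy are
   only illustrative and are not defined elsewhere in this file.  DATA is
   taken to point at the single byte to replicate across each piece:

       static rtx
       example_fill_byte (data, offset, mode)
            PTR data;
            HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
            enum machine_mode mode;
       {
         unsigned HOST_WIDE_INT c = *(unsigned char *) data;
         unsigned HOST_WIDE_INT val = 0;
         unsigned int i;

         for (i = 0; i < GET_MODE_SIZE (mode); i++)
           val = (val << BITS_PER_UNIT) | c;

         return GEN_INT (trunc_int_for_mode (val, mode));
       }

   A caller would first ask can_store_by_pieces (len, example_fill_byte,
   &byte, align) and, only if that returns nonzero, emit the stores with
   store_by_pieces (to, len, example_fill_byte, &byte, align).  */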
2347
2348 /* Generate several move instructions to store LEN bytes generated by
2349 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2350 pointer which will be passed as argument in every CONSTFUN call.
2351 ALIGN is maximum alignment we can assume. */
2352
2353 void
2354 store_by_pieces (to, len, constfun, constfundata, align)
2355 rtx to;
2356 unsigned HOST_WIDE_INT len;
2357 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2358 PTR constfundata;
2359 unsigned int align;
2360 {
2361 struct store_by_pieces data;
2362
2363 if (! MOVE_BY_PIECES_P (len, align))
2364 abort ();
2365 to = protect_from_queue (to, 1);
2366 data.constfun = constfun;
2367 data.constfundata = constfundata;
2368 data.len = len;
2369 data.to = to;
2370 store_by_pieces_1 (&data, align);
2371 }
2372
2373 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2374 rtx with BLKmode). The caller must pass TO through protect_from_queue
2375 before calling. ALIGN is maximum alignment we can assume. */
2376
2377 static void
2378 clear_by_pieces (to, len, align)
2379 rtx to;
2380 unsigned HOST_WIDE_INT len;
2381 unsigned int align;
2382 {
2383 struct store_by_pieces data;
2384
2385 data.constfun = clear_by_pieces_1;
2386 data.constfundata = NULL;
2387 data.len = len;
2388 data.to = to;
2389 store_by_pieces_1 (&data, align);
2390 }
2391
2392 /* Callback routine for clear_by_pieces.
2393 Return const0_rtx unconditionally. */
2394
2395 static rtx
2396 clear_by_pieces_1 (data, offset, mode)
2397 PTR data ATTRIBUTE_UNUSED;
2398 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2399 enum machine_mode mode ATTRIBUTE_UNUSED;
2400 {
2401 return const0_rtx;
2402 }
2403
2404 /* Subroutine of clear_by_pieces and store_by_pieces.
2405 Generate several move instructions to store LEN bytes of block TO. (A MEM
2406 rtx with BLKmode). The caller must pass TO through protect_from_queue
2407 before calling. ALIGN is maximum alignment we can assume. */
2408
2409 static void
2410 store_by_pieces_1 (data, align)
2411 struct store_by_pieces *data;
2412 unsigned int align;
2413 {
2414 rtx to_addr = XEXP (data->to, 0);
2415 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2416 enum machine_mode mode = VOIDmode, tmode;
2417 enum insn_code icode;
2418
2419 data->offset = 0;
2420 data->to_addr = to_addr;
2421 data->autinc_to
2422 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2423 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2424
2425 data->explicit_inc_to = 0;
2426 data->reverse
2427 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2428 if (data->reverse)
2429 data->offset = data->len;
2430
2431 /* If storing requires more than two move insns,
2432 copy addresses to registers (to make displacements shorter)
2433 and use post-increment if available. */
2434 if (!data->autinc_to
2435 && move_by_pieces_ninsns (data->len, align) > 2)
2436 {
2437 /* Determine the main mode we'll be using. */
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2441 mode = tmode;
2442
2443 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2444 {
2445 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2446 data->autinc_to = 1;
2447 data->explicit_inc_to = -1;
2448 }
2449
2450 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2451 && ! data->autinc_to)
2452 {
2453 data->to_addr = copy_addr_to_reg (to_addr);
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = 1;
2456 }
2457
2458 if (! data->autinc_to && CONSTANT_P (to_addr))
2459 data->to_addr = copy_addr_to_reg (to_addr);
2460 }
2461
2462 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2463 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2464 align = MOVE_MAX * BITS_PER_UNIT;
2465
2466 /* First store what we can in the largest integer mode, then go to
2467 successively smaller modes. */
2468
2469 while (max_size > 1)
2470 {
2471 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2472 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2473 if (GET_MODE_SIZE (tmode) < max_size)
2474 mode = tmode;
2475
2476 if (mode == VOIDmode)
2477 break;
2478
2479 icode = mov_optab->handlers[(int) mode].insn_code;
2480 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2481 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2482
2483 max_size = GET_MODE_SIZE (mode);
2484 }
2485
2486 /* The code above should have handled everything. */
2487 if (data->len != 0)
2488 abort ();
2489 }
2490
2491 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2492 with move instructions for mode MODE. GENFUN is the gen_... function
2493 to make a move insn for that mode. DATA has all the other info. */
2494
2495 static void
2496 store_by_pieces_2 (genfun, mode, data)
2497 rtx (*genfun) PARAMS ((rtx, ...));
2498 enum machine_mode mode;
2499 struct store_by_pieces *data;
2500 {
2501 unsigned int size = GET_MODE_SIZE (mode);
2502 rtx to1, cst;
2503
2504 while (data->len >= size)
2505 {
2506 if (data->reverse)
2507 data->offset -= size;
2508
2509 if (data->autinc_to)
2510 {
2511 to1 = replace_equiv_address (data->to, data->to_addr);
2512 to1 = adjust_address (to1, mode, 0);
2513 }
2514 else
2515 to1 = adjust_address (data->to, mode, data->offset);
2516
2517 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2518 emit_insn (gen_add2_insn (data->to_addr,
2519 GEN_INT (-(HOST_WIDE_INT) size)));
2520
2521 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2522 emit_insn ((*genfun) (to1, cst));
2523
2524 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2525 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2526
2527 if (! data->reverse)
2528 data->offset += size;
2529
2530 data->len -= size;
2531 }
2532 }
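/* Schematically, with a post-increment address (explicit_inc_to > 0) and
   4-byte pieces, each iteration of the loop above emits a pair of insns of
   roughly this shape (register numbers made up for illustration):

       (set (mem:SI (reg:SI 99)) (const_int ...))   ;; (*genfun) (to1, cst)
       (set (reg:SI 99) (plus:SI (reg:SI 99) (const_int 4)))

   while with explicit_inc_to < 0 the address decrement is emitted before
   the store instead.  */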
2533 \f
2534 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2535 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2536
2537 If we call a function that returns the length of the block, return it. */
2538
2539 rtx
2540 clear_storage (object, size, align)
2541 rtx object;
2542 rtx size;
2543 unsigned int align;
2544 {
2545 #ifdef TARGET_MEM_FUNCTIONS
2546 static tree fn;
2547 tree call_expr, arg_list;
2548 #endif
2549 rtx retval = 0;
2550
2551 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2552 just move a zero. Otherwise, do this a piece at a time. */
2553 if (GET_MODE (object) != BLKmode
2554 && GET_CODE (size) == CONST_INT
2555 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2556 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2557 else
2558 {
2559 object = protect_from_queue (object, 1);
2560 size = protect_from_queue (size, 0);
2561
2562 if (GET_CODE (size) == CONST_INT
2563 && MOVE_BY_PIECES_P (INTVAL (size), align))
2564 clear_by_pieces (object, INTVAL (size), align);
2565 else
2566 {
2567 /* Try the most limited insn first, because there's no point
2568 including more than one in the machine description unless
2569 the more limited one has some advantage. */
2570
2571 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2572 enum machine_mode mode;
2573
2574 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2575 mode = GET_MODE_WIDER_MODE (mode))
2576 {
2577 enum insn_code code = clrstr_optab[(int) mode];
2578 insn_operand_predicate_fn pred;
2579
2580 if (code != CODE_FOR_nothing
2581 /* We don't need MODE to be narrower than
2582 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2583 the mode mask, as it is returned by the macro, it will
2584 definitely be less than the actual mode mask. */
2585 && ((GET_CODE (size) == CONST_INT
2586 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2587 <= (GET_MODE_MASK (mode) >> 1)))
2588 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2589 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2590 || (*pred) (object, BLKmode))
2591 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2592 || (*pred) (opalign, VOIDmode)))
2593 {
2594 rtx op1;
2595 rtx last = get_last_insn ();
2596 rtx pat;
2597
2598 op1 = convert_to_mode (mode, size, 1);
2599 pred = insn_data[(int) code].operand[1].predicate;
2600 if (pred != 0 && ! (*pred) (op1, mode))
2601 op1 = copy_to_mode_reg (mode, op1);
2602
2603 pat = GEN_FCN ((int) code) (object, op1, opalign);
2604 if (pat)
2605 {
2606 emit_insn (pat);
2607 return 0;
2608 }
2609 else
2610 delete_insns_since (last);
2611 }
2612 }
2613
2614 /* OBJECT or SIZE may have been passed through protect_from_queue.
2615
2616 It is unsafe to save the value generated by protect_from_queue
2617 and reuse it later. Consider what happens if emit_queue is
2618 called before the return value from protect_from_queue is used.
2619
2620 Expansion of the CALL_EXPR below will call emit_queue before
2621 we are finished emitting RTL for argument setup. So if we are
2622 not careful we could get the wrong value for an argument.
2623
2624 To avoid this problem we go ahead and emit code to copy OBJECT
2625 and SIZE into new pseudos. We can then place those new pseudos
2626 into an RTL_EXPR and use them later, even after a call to
2627 emit_queue.
2628
2629 Note this is not strictly needed for library calls since they
2630 do not call emit_queue before loading their arguments. However,
2631 we may need to have library calls call emit_queue in the future
2632 since failing to do so could cause problems for targets which
2633 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2634 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2635
2636 #ifdef TARGET_MEM_FUNCTIONS
2637 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2638 #else
2639 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2640 TREE_UNSIGNED (integer_type_node));
2641 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2642 #endif
2643
2644 #ifdef TARGET_MEM_FUNCTIONS
2645 /* It is incorrect to use the libcall calling conventions to call
2646 memset in this context.
2647
2648 This could be a user call to memset and the user may wish to
2649 examine the return value from memset.
2650
2651 For targets where libcalls and normal calls have different
2652 conventions for returning pointers, we could end up generating
2653 incorrect code.
2654
2655 So instead of using a libcall sequence we build up a suitable
2656 CALL_EXPR and expand the call in the normal fashion. */
2657 if (fn == NULL_TREE)
2658 {
2659 tree fntype;
2660
2661 /* This was copied from except.c; it is not clear whether all of
2662 it is necessary in this context. */
2663 fn = get_identifier ("memset");
2664 fntype = build_pointer_type (void_type_node);
2665 fntype = build_function_type (fntype, NULL_TREE);
2666 fn = build_decl (FUNCTION_DECL, fn, fntype);
2667 ggc_add_tree_root (&fn, 1);
2668 DECL_EXTERNAL (fn) = 1;
2669 TREE_PUBLIC (fn) = 1;
2670 DECL_ARTIFICIAL (fn) = 1;
2671 TREE_NOTHROW (fn) = 1;
2672 make_decl_rtl (fn, NULL);
2673 assemble_external (fn);
2674 }
2675
2676 /* We need to make an argument list for the function call.
2677
2678 memset has three arguments: the first is a void * address, the
2679 second an integer with the initialization value, and the last is
2680 a size_t byte count. */
2681 arg_list
2682 = build_tree_list (NULL_TREE,
2683 make_tree (build_pointer_type (void_type_node),
2684 object));
2685 TREE_CHAIN (arg_list)
2686 = build_tree_list (NULL_TREE,
2687 make_tree (integer_type_node, const0_rtx));
2688 TREE_CHAIN (TREE_CHAIN (arg_list))
2689 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2690 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2691
2692 /* Now we have to build up the CALL_EXPR itself. */
2693 call_expr = build1 (ADDR_EXPR,
2694 build_pointer_type (TREE_TYPE (fn)), fn);
2695 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2696 call_expr, arg_list, NULL_TREE);
2697 TREE_SIDE_EFFECTS (call_expr) = 1;
2698
2699 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2700 #else
2701 emit_library_call (bzero_libfunc, LCT_NORMAL,
2702 VOIDmode, 2, object, Pmode, size,
2703 TYPE_MODE (integer_type_node));
2704 #endif
2705 }
2706 }
2707
2708 return retval;
2709 }
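/* To summarize the strategy above with a concrete, illustrative case:
   clearing a 16-byte, word-aligned BLKmode object on a target where
   MOVE_BY_PIECES_P (16, align) holds simply becomes a handful of word
   stores of const0_rtx via clear_by_pieces; only when that test fails do
   we try the clrstrM expanders, and only when those fail do we fall back
   to the memset (or bzero) call built above.  */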
2710
2711 /* Generate code to copy Y into X.
2712 Both Y and X must have the same mode, except that
2713 Y can be a constant with VOIDmode.
2714 This mode cannot be BLKmode; use emit_block_move for that.
2715
2716 Return the last instruction emitted. */
2717
2718 rtx
2719 emit_move_insn (x, y)
2720 rtx x, y;
2721 {
2722 enum machine_mode mode = GET_MODE (x);
2723 rtx y_cst = NULL_RTX;
2724 rtx last_insn;
2725
2726 x = protect_from_queue (x, 1);
2727 y = protect_from_queue (y, 0);
2728
2729 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2730 abort ();
2731
2732 /* Never force constant_p_rtx to memory. */
2733 if (GET_CODE (y) == CONSTANT_P_RTX)
2734 ;
2735 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2736 {
2737 y_cst = y;
2738 y = force_const_mem (mode, y);
2739 }
2740
2741 /* If X or Y are memory references, verify that their addresses are valid
2742 for the machine. */
2743 if (GET_CODE (x) == MEM
2744 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2745 && ! push_operand (x, GET_MODE (x)))
2746 || (flag_force_addr
2747 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2748 x = validize_mem (x);
2749
2750 if (GET_CODE (y) == MEM
2751 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2752 || (flag_force_addr
2753 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2754 y = validize_mem (y);
2755
2756 if (mode == BLKmode)
2757 abort ();
2758
2759 last_insn = emit_move_insn_1 (x, y);
2760
2761 if (y_cst && GET_CODE (x) == REG)
2762 REG_NOTES (last_insn)
2763 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2764
2765 return last_insn;
2766 }
2767
2768 /* Low level part of emit_move_insn.
2769 Called just like emit_move_insn, but assumes X and Y
2770 are basically valid. */
2771
2772 rtx
2773 emit_move_insn_1 (x, y)
2774 rtx x, y;
2775 {
2776 enum machine_mode mode = GET_MODE (x);
2777 enum machine_mode submode;
2778 enum mode_class class = GET_MODE_CLASS (mode);
2779 unsigned int i;
2780
2781 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2782 abort ();
2783
2784 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2785 return
2786 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2787
2788 /* Expand complex moves by moving real part and imag part, if possible. */
2789 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2790 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2791 * BITS_PER_UNIT),
2792 (class == MODE_COMPLEX_INT
2793 ? MODE_INT : MODE_FLOAT),
2794 0))
2795 && (mov_optab->handlers[(int) submode].insn_code
2796 != CODE_FOR_nothing))
2797 {
2798 /* Don't split destination if it is a stack push. */
2799 int stack = push_operand (x, GET_MODE (x));
2800
2801 #ifdef PUSH_ROUNDING
2802 /* In case we output to the stack, but the size is smaller than the
2803 machine can push exactly, we need to use move instructions. */
2804 if (stack
2805 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2806 {
2807 rtx temp;
2808 int offset1, offset2;
2809
2810 /* Do not use anti_adjust_stack, since we don't want to update
2811 stack_pointer_delta. */
2812 temp = expand_binop (Pmode,
2813 #ifdef STACK_GROWS_DOWNWARD
2814 sub_optab,
2815 #else
2816 add_optab,
2817 #endif
2818 stack_pointer_rtx,
2819 GEN_INT
2820 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2821 stack_pointer_rtx,
2822 0,
2823 OPTAB_LIB_WIDEN);
2824 if (temp != stack_pointer_rtx)
2825 emit_move_insn (stack_pointer_rtx, temp);
2826 #ifdef STACK_GROWS_DOWNWARD
2827 offset1 = 0;
2828 offset2 = GET_MODE_SIZE (submode);
2829 #else
2830 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2831 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2832 + GET_MODE_SIZE (submode));
2833 #endif
2834 emit_move_insn (change_address (x, submode,
2835 gen_rtx_PLUS (Pmode,
2836 stack_pointer_rtx,
2837 GEN_INT (offset1))),
2838 gen_realpart (submode, y));
2839 emit_move_insn (change_address (x, submode,
2840 gen_rtx_PLUS (Pmode,
2841 stack_pointer_rtx,
2842 GEN_INT (offset2))),
2843 gen_imagpart (submode, y));
2844 }
2845 else
2846 #endif
2847 /* If this is a stack push, push the highpart first, so it
2848 will be in the argument order.
2849
2850 In that case, change_address is used only to convert
2851 the mode, not to change the address. */
2852 if (stack)
2853 {
2854 /* Note that the real part always precedes the imag part in memory
2855 regardless of machine's endianness. */
2856 #ifdef STACK_GROWS_DOWNWARD
2857 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2858 (gen_rtx_MEM (submode, XEXP (x, 0)),
2859 gen_imagpart (submode, y)));
2860 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2861 (gen_rtx_MEM (submode, XEXP (x, 0)),
2862 gen_realpart (submode, y)));
2863 #else
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_realpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_imagpart (submode, y)));
2870 #endif
2871 }
2872 else
2873 {
2874 rtx realpart_x, realpart_y;
2875 rtx imagpart_x, imagpart_y;
2876
2877 /* If this is a complex value with each part being smaller than a
2878 word, the usual calling sequence will likely pack the pieces into
2879 a single register. Unfortunately, SUBREG of hard registers only
2880 deals in terms of words, so we have a problem converting input
2881 arguments to the CONCAT of two registers that is used elsewhere
2882 for complex values. If this is before reload, we can copy it into
2883 memory and reload. FIXME, we should see about using extract and
2884 insert on integer registers, but complex short and complex char
2885 variables should be rarely used. */
2886 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2887 && (reload_in_progress | reload_completed) == 0)
2888 {
2889 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2890 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2891
2892 if (packed_dest_p || packed_src_p)
2893 {
2894 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2895 ? MODE_FLOAT : MODE_INT);
2896
2897 enum machine_mode reg_mode
2898 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2899
2900 if (reg_mode != BLKmode)
2901 {
2902 rtx mem = assign_stack_temp (reg_mode,
2903 GET_MODE_SIZE (mode), 0);
2904 rtx cmem = adjust_address (mem, mode, 0);
2905
2906 cfun->cannot_inline
2907 = N_("function using short complex types cannot be inline");
2908
2909 if (packed_dest_p)
2910 {
2911 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2912 emit_move_insn_1 (cmem, y);
2913 return emit_move_insn_1 (sreg, mem);
2914 }
2915 else
2916 {
2917 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2918 emit_move_insn_1 (mem, sreg);
2919 return emit_move_insn_1 (x, cmem);
2920 }
2921 }
2922 }
2923 }
2924
2925 realpart_x = gen_realpart (submode, x);
2926 realpart_y = gen_realpart (submode, y);
2927 imagpart_x = gen_imagpart (submode, x);
2928 imagpart_y = gen_imagpart (submode, y);
2929
2930 /* Show the output dies here. This is necessary for SUBREGs
2931 of pseudos since we cannot track their lifetimes correctly;
2932 hard regs shouldn't appear here except as return values.
2933 We never want to emit such a clobber after reload. */
2934 if (x != y
2935 && ! (reload_in_progress || reload_completed)
2936 && (GET_CODE (realpart_x) == SUBREG
2937 || GET_CODE (imagpart_x) == SUBREG))
2938 {
2939 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2940 }
2941
2942 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2943 (realpart_x, realpart_y));
2944 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2945 (imagpart_x, imagpart_y));
2946 }
2947
2948 return get_last_insn ();
2949 }
2950
2951 /* This will handle any multi-word mode that lacks a move_insn pattern.
2952 However, you will get better code if you define such patterns,
2953 even if they must turn into multiple assembler instructions. */
2954 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2955 {
2956 rtx last_insn = 0;
2957 rtx seq, inner;
2958 int need_clobber;
2959
2960 #ifdef PUSH_ROUNDING
2961
2962 /* If X is a push on the stack, do the push now and replace
2963 X with a reference to the stack pointer. */
2964 if (push_operand (x, GET_MODE (x)))
2965 {
2966 rtx temp;
2967 enum rtx_code code;
2968
2969 /* Do not use anti_adjust_stack, since we don't want to update
2970 stack_pointer_delta. */
2971 temp = expand_binop (Pmode,
2972 #ifdef STACK_GROWS_DOWNWARD
2973 sub_optab,
2974 #else
2975 add_optab,
2976 #endif
2977 stack_pointer_rtx,
2978 GEN_INT
2979 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2980 stack_pointer_rtx,
2981 0,
2982 OPTAB_LIB_WIDEN);
2983 if (temp != stack_pointer_rtx)
2984 emit_move_insn (stack_pointer_rtx, temp);
2985
2986 code = GET_CODE (XEXP (x, 0));
2987 /* Just hope that small offsets off SP are OK. */
2988 if (code == POST_INC)
2989 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2990 GEN_INT (-(HOST_WIDE_INT)
2991 GET_MODE_SIZE (GET_MODE (x))));
2992 else if (code == POST_DEC)
2993 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2994 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2995 else
2996 temp = stack_pointer_rtx;
2997
2998 x = change_address (x, VOIDmode, temp);
2999 }
3000 #endif
3001
3002 /* If we are in reload, see if either operand is a MEM whose address
3003 is scheduled for replacement. */
3004 if (reload_in_progress && GET_CODE (x) == MEM
3005 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3006 x = replace_equiv_address_nv (x, inner);
3007 if (reload_in_progress && GET_CODE (y) == MEM
3008 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3009 y = replace_equiv_address_nv (y, inner);
3010
3011 start_sequence ();
3012
3013 need_clobber = 0;
3014 for (i = 0;
3015 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3016 i++)
3017 {
3018 rtx xpart = operand_subword (x, i, 1, mode);
3019 rtx ypart = operand_subword (y, i, 1, mode);
3020
3021 /* If we can't get a part of Y, put Y into memory if it is a
3022 constant. Otherwise, force it into a register. If we still
3023 can't get a part of Y, abort. */
3024 if (ypart == 0 && CONSTANT_P (y))
3025 {
3026 y = force_const_mem (mode, y);
3027 ypart = operand_subword (y, i, 1, mode);
3028 }
3029 else if (ypart == 0)
3030 ypart = operand_subword_force (y, i, mode);
3031
3032 if (xpart == 0 || ypart == 0)
3033 abort ();
3034
3035 need_clobber |= (GET_CODE (xpart) == SUBREG);
3036
3037 last_insn = emit_move_insn (xpart, ypart);
3038 }
3039
3040 seq = gen_sequence ();
3041 end_sequence ();
3042
3043 /* Show the output dies here. This is necessary for SUBREGs
3044 of pseudos since we cannot track their lifetimes correctly;
3045 hard regs shouldn't appear here except as return values.
3046 We never want to emit such a clobber after reload. */
3047 if (x != y
3048 && ! (reload_in_progress || reload_completed)
3049 && need_clobber != 0)
3050 {
3051 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3052 }
3053
3054 emit_insn (seq);
3055
3056 return last_insn;
3057 }
3058 else
3059 abort ();
3060 }
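/* Illustrative example of the multi-word fallback above: moving a DImode
   value on a 32-bit target that has no movdi pattern (and no applicable
   complex-mode case) runs the word loop, emitting one SImode move for
   word 0 and one for word 1 of the operands via operand_subword, wrapped
   in a single SEQUENCE; if either destination word had to be expressed as
   a SUBREG of a pseudo, that sequence is preceded by a
   (clobber (reg:DI ...)) so the destination's lifetime is tracked
   correctly.  */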
3061 \f
3062 /* Pushing data onto the stack. */
3063
3064 /* Push a block of length SIZE (perhaps variable)
3065 and return an rtx to address the beginning of the block.
3066 Note that it is not possible for the value returned to be a QUEUED.
3067 The value may be virtual_outgoing_args_rtx.
3068
3069 EXTRA is the number of bytes of padding to push in addition to SIZE.
3070 BELOW nonzero means this padding comes at low addresses;
3071 otherwise, the padding comes at high addresses. */
3072
3073 rtx
3074 push_block (size, extra, below)
3075 rtx size;
3076 int extra, below;
3077 {
3078 register rtx temp;
3079
3080 size = convert_modes (Pmode, ptr_mode, size, 1);
3081 if (CONSTANT_P (size))
3082 anti_adjust_stack (plus_constant (size, extra));
3083 else if (GET_CODE (size) == REG && extra == 0)
3084 anti_adjust_stack (size);
3085 else
3086 {
3087 temp = copy_to_mode_reg (Pmode, size);
3088 if (extra != 0)
3089 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3090 temp, 0, OPTAB_LIB_WIDEN);
3091 anti_adjust_stack (temp);
3092 }
3093
3094 #ifndef STACK_GROWS_DOWNWARD
3095 #ifdef ARGS_GROW_DOWNWARD
3096 if (!ACCUMULATE_OUTGOING_ARGS)
3097 #else
3098 if (0)
3099 #endif
3100 #else
3101 if (1)
3102 #endif
3103 {
3104 /* Return the lowest stack address when STACK or ARGS grow downward and
3105 we are not accumulating outgoing arguments (the c4x port uses such
3106 conventions). */
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3110 }
3111 else
3112 {
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3119 else
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3122 }
3123
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3125 }
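/* A worked example, assuming STACK_GROWS_DOWNWARD and the constant
   arguments SIZE == 20, EXTRA == 4, BELOW != 0 (numbers chosen only for
   illustration): anti_adjust_stack is called with 24, dropping the stack
   pointer by 24 bytes, and the address returned is
   virtual_outgoing_args_rtx + 4, i.e. the start of the 20-byte block with
   the 4 padding bytes sitting below it.  */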
3126
3127
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3130
3131 static rtx
3132 get_push_address (size)
3133 int size;
3134 {
3135 register rtx temp;
3136
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3141 else
3142 temp = stack_pointer_rtx;
3143
3144 return copy_to_reg (temp);
3145 }
3146
3147 /* Emit a single push insn. */
3148 static void
3149 emit_single_push_insn (mode, x, type)
3150 rtx x;
3151 enum machine_mode mode;
3152 tree type;
3153 {
3154 #ifdef PUSH_ROUNDING
3155 rtx dest_addr;
3156 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3157 rtx dest;
3158
3159 if (GET_MODE_SIZE (mode) == rounded_size)
3160 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3161 else
3162 {
3163 #ifdef STACK_GROWS_DOWNWARD
3164 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3165 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3166 #else
3167 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3168 GEN_INT (rounded_size));
3169 #endif
3170 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3171 }
3172
3173 dest = gen_rtx_MEM (mode, dest_addr);
3174
3175 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3176
3177 if (type != 0)
3178 {
3179 set_mem_attributes (dest, type, 1);
3180 /* Function incoming arguments may overlap with sibling call
3181 outgoing arguments and we cannot allow reordering of reads
3182 from function arguments with stores to outgoing arguments
3183 of sibling calls. */
3184 set_mem_alias_set (dest, 0);
3185 }
3186 emit_move_insn (dest, x);
3187 #else
3188 abort ();
3189 #endif
3190 }
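/* For example, if PUSH_ROUNDING rounds the 2 bytes of an HImode push up to
   4 (as some targets do), the sizes differ and the code above builds

       (mem:HI (pre_modify (reg sp) (plus (reg sp) (const_int -4))))

   on a downward-growing stack, so the stack pointer still moves by the
   full rounded amount while only 2 bytes are actually written;
   stack_pointer_delta is likewise bumped by 4.  */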
3191
3192 /* Generate code to push X onto the stack, assuming it has mode MODE and
3193 type TYPE.
3194 MODE is redundant except when X is a CONST_INT (since they don't
3195 carry mode info).
3196 SIZE is an rtx for the size of data to be copied (in bytes),
3197 needed only if X is BLKmode.
3198
3199 ALIGN (in bits) is maximum alignment we can assume.
3200
3201 If PARTIAL and REG are both nonzero, then copy that many of the first
3202 words of X into registers starting with REG, and push the rest of X.
3203 The amount of space pushed is decreased by PARTIAL words,
3204 rounded *down* to a multiple of PARM_BOUNDARY.
3205 REG must be a hard register in this case.
3206 If REG is zero but PARTIAL is not, take all other actions for an
3207 argument partially in registers, but do not actually load any
3208 registers.
3209
3210 EXTRA is the amount in bytes of extra space to leave next to this arg.
3211 This is ignored if an argument block has already been allocated.
3212
3213 On a machine that lacks real push insns, ARGS_ADDR is the address of
3214 the bottom of the argument block for this call. We use indexing off there
3215 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3216 argument block has not been preallocated.
3217
3218 ARGS_SO_FAR is the size of args previously pushed for this call.
3219
3220 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3221 for arguments passed in registers. If nonzero, it will be the number
3222 of bytes required. */
3223
3224 void
3225 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3226 args_addr, args_so_far, reg_parm_stack_space,
3227 alignment_pad)
3228 register rtx x;
3229 enum machine_mode mode;
3230 tree type;
3231 rtx size;
3232 unsigned int align;
3233 int partial;
3234 rtx reg;
3235 int extra;
3236 rtx args_addr;
3237 rtx args_so_far;
3238 int reg_parm_stack_space;
3239 rtx alignment_pad;
3240 {
3241 rtx xinner;
3242 enum direction stack_direction
3243 #ifdef STACK_GROWS_DOWNWARD
3244 = downward;
3245 #else
3246 = upward;
3247 #endif
3248
3249 /* Decide where to pad the argument: `downward' for below,
3250 `upward' for above, or `none' for don't pad it.
3251 Default is below for small data on big-endian machines; else above. */
3252 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3253
3254 /* Invert direction if stack is post-update. */
3255 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3256 if (where_pad != none)
3257 where_pad = (where_pad == downward ? upward : downward);
3258
3259 xinner = x = protect_from_queue (x, 0);
3260
3261 if (mode == BLKmode)
3262 {
3263 /* Copy a block into the stack, entirely or partially. */
3264
3265 register rtx temp;
3266 int used = partial * UNITS_PER_WORD;
3267 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3268 int skip;
3269
3270 if (size == 0)
3271 abort ();
3272
3273 used -= offset;
3274
3275 /* USED is now the # of bytes we need not copy to the stack
3276 because registers will take care of them. */
3277
3278 if (partial != 0)
3279 xinner = adjust_address (xinner, BLKmode, used);
3280
3281 /* If the partial register-part of the arg counts in its stack size,
3282 skip the part of stack space corresponding to the registers.
3283 Otherwise, start copying to the beginning of the stack space,
3284 by setting SKIP to 0. */
3285 skip = (reg_parm_stack_space == 0) ? 0 : used;
3286
3287 #ifdef PUSH_ROUNDING
3288 /* Do it with several push insns if that doesn't take lots of insns
3289 and if there is no difficulty with push insns that skip bytes
3290 on the stack for alignment purposes. */
3291 if (args_addr == 0
3292 && PUSH_ARGS
3293 && GET_CODE (size) == CONST_INT
3294 && skip == 0
3295 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3296 /* Here we avoid the case of a structure whose weak alignment
3297 forces many pushes of a small amount of data,
3298 and such small pushes do rounding that causes trouble. */
3299 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3300 || align >= BIGGEST_ALIGNMENT
3301 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3302 == (align / BITS_PER_UNIT)))
3303 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3304 {
3305 /* Push padding now if padding above and stack grows down,
3306 or if padding below and stack grows up.
3307 But if space already allocated, this has already been done. */
3308 if (extra && args_addr == 0
3309 && where_pad != none && where_pad != stack_direction)
3310 anti_adjust_stack (GEN_INT (extra));
3311
3312 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3313
3314 if (current_function_check_memory_usage && ! in_check_memory_usage)
3315 {
3316 rtx temp;
3317
3318 in_check_memory_usage = 1;
3319 temp = get_push_address (INTVAL (size) - used);
3320 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3321 emit_library_call (chkr_copy_bitmap_libfunc,
3322 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3323 Pmode, XEXP (xinner, 0), Pmode,
3324 GEN_INT (INTVAL (size) - used),
3325 TYPE_MODE (sizetype));
3326 else
3327 emit_library_call (chkr_set_right_libfunc,
3328 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3329 Pmode, GEN_INT (INTVAL (size) - used),
3330 TYPE_MODE (sizetype),
3331 GEN_INT (MEMORY_USE_RW),
3332 TYPE_MODE (integer_type_node));
3333 in_check_memory_usage = 0;
3334 }
3335 }
3336 else
3337 #endif /* PUSH_ROUNDING */
3338 {
3339 rtx target;
3340
3341 /* Otherwise make space on the stack and copy the data
3342 to the address of that space. */
3343
3344 /* Deduct words put into registers from the size we must copy. */
3345 if (partial != 0)
3346 {
3347 if (GET_CODE (size) == CONST_INT)
3348 size = GEN_INT (INTVAL (size) - used);
3349 else
3350 size = expand_binop (GET_MODE (size), sub_optab, size,
3351 GEN_INT (used), NULL_RTX, 0,
3352 OPTAB_LIB_WIDEN);
3353 }
3354
3355 /* Get the address of the stack space.
3356 In this case, we do not deal with EXTRA separately.
3357 A single stack adjust will do. */
3358 if (! args_addr)
3359 {
3360 temp = push_block (size, extra, where_pad == downward);
3361 extra = 0;
3362 }
3363 else if (GET_CODE (args_so_far) == CONST_INT)
3364 temp = memory_address (BLKmode,
3365 plus_constant (args_addr,
3366 skip + INTVAL (args_so_far)));
3367 else
3368 temp = memory_address (BLKmode,
3369 plus_constant (gen_rtx_PLUS (Pmode,
3370 args_addr,
3371 args_so_far),
3372 skip));
3373 if (current_function_check_memory_usage && ! in_check_memory_usage)
3374 {
3375 in_check_memory_usage = 1;
3376 target = copy_to_reg (temp);
3377 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3378 emit_library_call (chkr_copy_bitmap_libfunc,
3379 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3380 target, Pmode,
3381 XEXP (xinner, 0), Pmode,
3382 size, TYPE_MODE (sizetype));
3383 else
3384 emit_library_call (chkr_set_right_libfunc,
3385 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3386 target, Pmode,
3387 size, TYPE_MODE (sizetype),
3388 GEN_INT (MEMORY_USE_RW),
3389 TYPE_MODE (integer_type_node));
3390 in_check_memory_usage = 0;
3391 }
3392
3393 target = gen_rtx_MEM (BLKmode, temp);
3394
3395 if (type != 0)
3396 {
3397 set_mem_attributes (target, type, 1);
3398 /* Function incoming arguments may overlap with sibling call
3399 outgoing arguments and we cannot allow reordering of reads
3400 from function arguments with stores to outgoing arguments
3401 of sibling calls. */
3402 set_mem_alias_set (target, 0);
3403 }
3404
3405 /* TEMP is the address of the block. Copy the data there. */
3406 if (GET_CODE (size) == CONST_INT
3407 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3408 {
3409 move_by_pieces (target, xinner, INTVAL (size), align);
3410 goto ret;
3411 }
3412 else
3413 {
3414 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3415 enum machine_mode mode;
3416
3417 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3418 mode != VOIDmode;
3419 mode = GET_MODE_WIDER_MODE (mode))
3420 {
3421 enum insn_code code = movstr_optab[(int) mode];
3422 insn_operand_predicate_fn pred;
3423
3424 if (code != CODE_FOR_nothing
3425 && ((GET_CODE (size) == CONST_INT
3426 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3427 <= (GET_MODE_MASK (mode) >> 1)))
3428 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3429 && (!(pred = insn_data[(int) code].operand[0].predicate)
3430 || ((*pred) (target, BLKmode)))
3431 && (!(pred = insn_data[(int) code].operand[1].predicate)
3432 || ((*pred) (xinner, BLKmode)))
3433 && (!(pred = insn_data[(int) code].operand[3].predicate)
3434 || ((*pred) (opalign, VOIDmode))))
3435 {
3436 rtx op2 = convert_to_mode (mode, size, 1);
3437 rtx last = get_last_insn ();
3438 rtx pat;
3439
3440 pred = insn_data[(int) code].operand[2].predicate;
3441 if (pred != 0 && ! (*pred) (op2, mode))
3442 op2 = copy_to_mode_reg (mode, op2);
3443
3444 pat = GEN_FCN ((int) code) (target, xinner,
3445 op2, opalign);
3446 if (pat)
3447 {
3448 emit_insn (pat);
3449 goto ret;
3450 }
3451 else
3452 delete_insns_since (last);
3453 }
3454 }
3455 }
3456
3457 if (!ACCUMULATE_OUTGOING_ARGS)
3458 {
3459 /* If the source is referenced relative to the stack pointer,
3460 copy it to another register to stabilize it. We do not need
3461 to do this if we know that we won't be changing sp. */
3462
3463 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3464 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3465 temp = copy_to_reg (temp);
3466 }
3467
3468 /* Make inhibit_defer_pop nonzero around the library call
3469 to force it to pop the bcopy-arguments right away. */
3470 NO_DEFER_POP;
3471 #ifdef TARGET_MEM_FUNCTIONS
3472 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3473 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3474 convert_to_mode (TYPE_MODE (sizetype),
3475 size, TREE_UNSIGNED (sizetype)),
3476 TYPE_MODE (sizetype));
3477 #else
3478 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3479 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3480 convert_to_mode (TYPE_MODE (integer_type_node),
3481 size,
3482 TREE_UNSIGNED (integer_type_node)),
3483 TYPE_MODE (integer_type_node));
3484 #endif
3485 OK_DEFER_POP;
3486 }
3487 }
3488 else if (partial > 0)
3489 {
3490 /* Scalar partly in registers. */
3491
3492 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3493 int i;
3494 int not_stack;
3495 /* # words of start of argument
3496 that we must make space for but need not store. */
3497 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3498 int args_offset = INTVAL (args_so_far);
3499 int skip;
3500
3501 /* Push padding now if padding above and stack grows down,
3502 or if padding below and stack grows up.
3503 But if space already allocated, this has already been done. */
3504 if (extra && args_addr == 0
3505 && where_pad != none && where_pad != stack_direction)
3506 anti_adjust_stack (GEN_INT (extra));
3507
3508 /* If we make space by pushing it, we might as well push
3509 the real data. Otherwise, we can leave OFFSET nonzero
3510 and leave the space uninitialized. */
3511 if (args_addr == 0)
3512 offset = 0;
3513
3514 /* Now NOT_STACK gets the number of words that we don't need to
3515 allocate on the stack. */
3516 not_stack = partial - offset;
3517
3518 /* If the partial register-part of the arg counts in its stack size,
3519 skip the part of stack space corresponding to the registers.
3520 Otherwise, start copying to the beginning of the stack space,
3521 by setting SKIP to 0. */
3522 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3523
3524 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3525 x = validize_mem (force_const_mem (mode, x));
3526
3527 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3528 SUBREGs of such registers are not allowed. */
3529 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3530 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3531 x = copy_to_reg (x);
3532
3533 /* Loop over all the words allocated on the stack for this arg. */
3534 /* We can do it by words, because any scalar bigger than a word
3535 has a size that is a multiple of a word. */
3536 #ifndef PUSH_ARGS_REVERSED
3537 for (i = not_stack; i < size; i++)
3538 #else
3539 for (i = size - 1; i >= not_stack; i--)
3540 #endif
3541 if (i >= not_stack + offset)
3542 emit_push_insn (operand_subword_force (x, i, mode),
3543 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3544 0, args_addr,
3545 GEN_INT (args_offset + ((i - not_stack + skip)
3546 * UNITS_PER_WORD)),
3547 reg_parm_stack_space, alignment_pad);
3548 }
3549 else
3550 {
3551 rtx addr;
3552 rtx target = NULL_RTX;
3553 rtx dest;
3554
3555 /* Push padding now if padding above and stack grows down,
3556 or if padding below and stack grows up.
3557 But if space already allocated, this has already been done. */
3558 if (extra && args_addr == 0
3559 && where_pad != none && where_pad != stack_direction)
3560 anti_adjust_stack (GEN_INT (extra));
3561
3562 #ifdef PUSH_ROUNDING
3563 if (args_addr == 0 && PUSH_ARGS)
3564 emit_single_push_insn (mode, x, type);
3565 else
3566 #endif
3567 {
3568 if (GET_CODE (args_so_far) == CONST_INT)
3569 addr
3570 = memory_address (mode,
3571 plus_constant (args_addr,
3572 INTVAL (args_so_far)));
3573 else
3574 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3575 args_so_far));
3576 target = addr;
3577 dest = gen_rtx_MEM (mode, addr);
3578 if (type != 0)
3579 {
3580 set_mem_attributes (dest, type, 1);
3581 /* Function incoming arguments may overlap with sibling call
3582 outgoing arguments and we cannot allow reordering of reads
3583 from function arguments with stores to outgoing arguments
3584 of sibling calls. */
3585 set_mem_alias_set (dest, 0);
3586 }
3587
3588 emit_move_insn (dest, x);
3589
3590 }
3591
3592 if (current_function_check_memory_usage && ! in_check_memory_usage)
3593 {
3594 in_check_memory_usage = 1;
3595 if (target == 0)
3596 target = get_push_address (GET_MODE_SIZE (mode));
3597
3598 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3599 emit_library_call (chkr_copy_bitmap_libfunc,
3600 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3601 Pmode, XEXP (x, 0), Pmode,
3602 GEN_INT (GET_MODE_SIZE (mode)),
3603 TYPE_MODE (sizetype));
3604 else
3605 emit_library_call (chkr_set_right_libfunc,
3606 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3607 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3608 TYPE_MODE (sizetype),
3609 GEN_INT (MEMORY_USE_RW),
3610 TYPE_MODE (integer_type_node));
3611 in_check_memory_usage = 0;
3612 }
3613 }
3614
3615 ret:
3616 /* If part should go in registers, copy that part
3617 into the appropriate registers. Do this now, at the end,
3618 since mem-to-mem copies above may do function calls. */
3619 if (partial > 0 && reg != 0)
3620 {
3621 /* Handle calls that pass values in multiple non-contiguous locations.
3622 The Irix 6 ABI has examples of this. */
3623 if (GET_CODE (reg) == PARALLEL)
3624 emit_group_load (reg, x, -1, align); /* ??? size? */
3625 else
3626 move_block_to_reg (REGNO (reg), x, partial, mode);
3627 }
3628
3629 if (extra && args_addr == 0 && where_pad == stack_direction)
3630 anti_adjust_stack (GEN_INT (extra));
3631
3632 if (alignment_pad && args_addr == 0)
3633 anti_adjust_stack (alignment_pad);
3634 }
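/* A worked example of the "scalar partly in registers" case above, with
   illustrative parameters UNITS_PER_WORD == 4, PARM_BOUNDARY == 32 and
   reg_parm_stack_space == 0: pushing a DImode argument with PARTIAL == 1
   gives size == 2 words and not_stack == 1, so the word loop pushes only
   word 1 of X; word 0 is then loaded into REG by the move_block_to_reg
   call at the end of the function.  */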
3635 \f
3636 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3637 operations. */
3638
3639 static rtx
3640 get_subtarget (x)
3641 rtx x;
3642 {
3643 return ((x == 0
3644 /* Only registers can be subtargets. */
3645 || GET_CODE (x) != REG
3646 /* If the register is readonly, it can't be set more than once. */
3647 || RTX_UNCHANGING_P (x)
3648 /* Don't use hard regs to avoid extending their life. */
3649 || REGNO (x) < FIRST_PSEUDO_REGISTER
3650 /* Avoid subtargets inside loops,
3651 since they hide some invariant expressions. */
3652 || preserve_subexpressions_p ())
3653 ? 0 : x);
3654 }
3655
3656 /* Expand an assignment that stores the value of FROM into TO.
3657 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3658 (This may contain a QUEUED rtx;
3659 if the value is constant, this rtx is a constant.)
3660 Otherwise, the returned value is NULL_RTX.
3661
3662 SUGGEST_REG is no longer actually used.
3663 It used to mean, copy the value through a register
3664 and return that register, if that is possible.
3665 We now use WANT_VALUE to decide whether to do this. */
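3666
3666 /* Illustrative note (the source fragments named here are hypothetical
3666 examples, not produced by this function): an lhs such as `s.f' or
3666 `a[3]' takes the COMPONENT_REF/ARRAY_REF branch below, which decomposes
3666 the reference with get_inner_reference and stores through store_field;
3666 a call on the rhs whose value comes back in a register, as in
3666 `v = f ()', is instead expanded before any lhs computation, so that no
3666 partially-computed address is live across the call. */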
3666
3667 rtx
3668 expand_assignment (to, from, want_value, suggest_reg)
3669 tree to, from;
3670 int want_value;
3671 int suggest_reg ATTRIBUTE_UNUSED;
3672 {
3673 register rtx to_rtx = 0;
3674 rtx result;
3675
3676 /* Don't crash if the lhs of the assignment was erroneous. */
3677
3678 if (TREE_CODE (to) == ERROR_MARK)
3679 {
3680 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3681 return want_value ? result : NULL_RTX;
3682 }
3683
3684 /* Assignment of a structure component needs special treatment
3685 if the structure component's rtx is not simply a MEM.
3686 Assignment of an array element at a constant index, and assignment of
3687 an array element in an unaligned packed structure field, has the same
3688 problem. */
3689
3690 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3691 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3692 {
3693 enum machine_mode mode1;
3694 HOST_WIDE_INT bitsize, bitpos;
3695 tree offset;
3696 int unsignedp;
3697 int volatilep = 0;
3698 tree tem;
3699 unsigned int alignment;
3700
3701 push_temp_slots ();
3702 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3703 &unsignedp, &volatilep, &alignment);
3704
3705 /* If we are going to use store_bit_field and extract_bit_field,
3706 make sure to_rtx will be safe for multiple use. */
3707
3708 if (mode1 == VOIDmode && want_value)
3709 tem = stabilize_reference (tem);
3710
3711 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3712 if (offset != 0)
3713 {
3714 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3715
3716 if (GET_CODE (to_rtx) != MEM)
3717 abort ();
3718
3719 if (GET_MODE (offset_rtx) != ptr_mode)
3720 {
3721 #ifdef POINTERS_EXTEND_UNSIGNED
3722 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3723 #else
3724 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3725 #endif
3726 }
3727
3728 /* A constant address in TO_RTX can have VOIDmode; we must not
3729 call force_reg in that case, so avoid it. */
3730 if (GET_CODE (to_rtx) == MEM
3731 && GET_MODE (to_rtx) == BLKmode
3732 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3733 && bitsize
3734 && (bitpos % bitsize) == 0
3735 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3736 && alignment == GET_MODE_ALIGNMENT (mode1))
3737 {
3738 rtx temp
3739 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3740
3741 if (GET_CODE (XEXP (temp, 0)) == REG)
3742 to_rtx = temp;
3743 else
3744 to_rtx = (replace_equiv_address
3745 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3746 XEXP (temp, 0))));
3747 bitpos = 0;
3748 }
3749
3750 to_rtx = change_address (to_rtx, VOIDmode,
3751 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3752 force_reg (ptr_mode,
3753 offset_rtx)));
3754 }
3755
3756 if (volatilep)
3757 {
3758 if (GET_CODE (to_rtx) == MEM)
3759 {
3760 /* When the offset is zero, to_rtx is the address of the
3761 structure we are storing into, and hence may be shared.
3762 We must make a new MEM before setting the volatile bit. */
3763 if (offset == 0)
3764 to_rtx = copy_rtx (to_rtx);
3765
3766 MEM_VOLATILE_P (to_rtx) = 1;
3767 }
3768 #if 0 /* This was turned off because, when a field is volatile
3769 in an object which is not volatile, the object may be in a register,
3770 and then we would abort here. */
3771 else
3772 abort ();
3773 #endif
3774 }
3775
3776 if (TREE_CODE (to) == COMPONENT_REF
3777 && TREE_READONLY (TREE_OPERAND (to, 1)))
3778 {
3779 if (offset == 0)
3780 to_rtx = copy_rtx (to_rtx);
3781
3782 RTX_UNCHANGING_P (to_rtx) = 1;
3783 }
3784
3785 /* Check the access. */
3786 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3787 {
3788 rtx to_addr;
3789 int size;
3790 int best_mode_size;
3791 enum machine_mode best_mode;
3792
3793 best_mode = get_best_mode (bitsize, bitpos,
3794 TYPE_ALIGN (TREE_TYPE (tem)),
3795 mode1, volatilep);
3796 if (best_mode == VOIDmode)
3797 best_mode = QImode;
3798
3799 best_mode_size = GET_MODE_BITSIZE (best_mode);
3800 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3801 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3802 size *= GET_MODE_SIZE (best_mode);
3803
3804 /* Check the access right of the pointer. */
3805 in_check_memory_usage = 1;
3806 if (size)
3807 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3808 VOIDmode, 3, to_addr, Pmode,
3809 GEN_INT (size), TYPE_MODE (sizetype),
3810 GEN_INT (MEMORY_USE_WO),
3811 TYPE_MODE (integer_type_node));
3812 in_check_memory_usage = 0;
3813 }
3814
3815 /* If this is a varying-length object, we must get the address of
3816 the source and do an explicit block move. */
3817 if (bitsize < 0)
3818 {
3819 unsigned int from_align;
3820 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3821 rtx inner_to_rtx
3822 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3823
3824 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3825 MIN (alignment, from_align));
3826 free_temp_slots ();
3827 pop_temp_slots ();
3828 return to_rtx;
3829 }
3830 else
3831 {
3832 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3833 (want_value
3834 /* Spurious cast for HPUX compiler. */
3835 ? ((enum machine_mode)
3836 TYPE_MODE (TREE_TYPE (to)))
3837 : VOIDmode),
3838 unsignedp,
3839 alignment,
3840 int_size_in_bytes (TREE_TYPE (tem)),
3841 get_alias_set (to));
3842
3843 preserve_temp_slots (result);
3844 free_temp_slots ();
3845 pop_temp_slots ();
3846
3847 /* If the value is meaningful, convert RESULT to the proper mode.
3848 Otherwise, return nothing. */
3849 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3850 TYPE_MODE (TREE_TYPE (from)),
3851 result,
3852 TREE_UNSIGNED (TREE_TYPE (to)))
3853 : NULL_RTX);
3854 }
3855 }
3856
3857 /* If the rhs is a function call and its value is not an aggregate,
3858 call the function before we start to compute the lhs.
3859 This is needed for correct code for cases such as
3860 val = setjmp (buf) on machines where reference to val
3861 requires loading up part of an address in a separate insn.
3862
3863 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3864 since it might be a promoted variable where the zero- or sign- extension
3865 needs to be done. Handling this in the normal way is safe because no
3866 computation is done before the call. */
3867 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3868 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3869 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3870 && GET_CODE (DECL_RTL (to)) == REG))
3871 {
3872 rtx value;
3873
3874 push_temp_slots ();
3875 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3876 if (to_rtx == 0)
3877 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3878
3879 /* Handle calls that return values in multiple non-contiguous locations.
3880 The Irix 6 ABI has examples of this. */
3881 if (GET_CODE (to_rtx) == PARALLEL)
3882 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3883 TYPE_ALIGN (TREE_TYPE (from)));
3884 else if (GET_MODE (to_rtx) == BLKmode)
3885 emit_block_move (to_rtx, value, expr_size (from),
3886 TYPE_ALIGN (TREE_TYPE (from)));
3887 else
3888 {
3889 #ifdef POINTERS_EXTEND_UNSIGNED
3890 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3891 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3892 value = convert_memory_address (GET_MODE (to_rtx), value);
3893 #endif
3894 emit_move_insn (to_rtx, value);
3895 }
3896 preserve_temp_slots (to_rtx);
3897 free_temp_slots ();
3898 pop_temp_slots ();
3899 return want_value ? to_rtx : NULL_RTX;
3900 }
3901
3902 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3903 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3904
3905 if (to_rtx == 0)
3906 {
3907 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3908 if (GET_CODE (to_rtx) == MEM)
3909 set_mem_alias_set (to_rtx, get_alias_set (to));
3910 }
3911
3912 /* Don't move directly into a return register. */
3913 if (TREE_CODE (to) == RESULT_DECL
3914 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3915 {
3916 rtx temp;
3917
3918 push_temp_slots ();
3919 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3920
3921 if (GET_CODE (to_rtx) == PARALLEL)
3922 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3923 TYPE_ALIGN (TREE_TYPE (from)));
3924 else
3925 emit_move_insn (to_rtx, temp);
3926
3927 preserve_temp_slots (to_rtx);
3928 free_temp_slots ();
3929 pop_temp_slots ();
3930 return want_value ? to_rtx : NULL_RTX;
3931 }
3932
3933 /* In case we are returning the contents of an object which overlaps
3934 the place the value is being stored, use a safe function when copying
3935 a value through a pointer into a structure value return block. */
3936 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3937 && current_function_returns_struct
3938 && !current_function_returns_pcc_struct)
3939 {
3940 rtx from_rtx, size;
3941
3942 push_temp_slots ();
3943 size = expr_size (from);
3944 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3945 EXPAND_MEMORY_USE_DONT);
3946
3947 /* Copy the rights of the bitmap. */
3948 if (current_function_check_memory_usage)
3949 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3950 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3951 XEXP (from_rtx, 0), Pmode,
3952 convert_to_mode (TYPE_MODE (sizetype),
3953 size, TREE_UNSIGNED (sizetype)),
3954 TYPE_MODE (sizetype));
3955
3956 #ifdef TARGET_MEM_FUNCTIONS
3957 emit_library_call (memmove_libfunc, LCT_NORMAL,
3958 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3959 XEXP (from_rtx, 0), Pmode,
3960 convert_to_mode (TYPE_MODE (sizetype),
3961 size, TREE_UNSIGNED (sizetype)),
3962 TYPE_MODE (sizetype));
3963 #else
3964 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3965 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3966 XEXP (to_rtx, 0), Pmode,
3967 convert_to_mode (TYPE_MODE (integer_type_node),
3968 size, TREE_UNSIGNED (integer_type_node)),
3969 TYPE_MODE (integer_type_node));
3970 #endif
3971
3972 preserve_temp_slots (to_rtx);
3973 free_temp_slots ();
3974 pop_temp_slots ();
3975 return want_value ? to_rtx : NULL_RTX;
3976 }
3977
3978 /* Compute FROM and store the value in the rtx we got. */
3979
3980 push_temp_slots ();
3981 result = store_expr (from, to_rtx, want_value);
3982 preserve_temp_slots (result);
3983 free_temp_slots ();
3984 pop_temp_slots ();
3985 return want_value ? result : NULL_RTX;
3986 }
3987
3988 /* Generate code for computing expression EXP,
3989 and storing the value into TARGET.
3990 TARGET may contain a QUEUED rtx.
3991
3992 If WANT_VALUE is nonzero, return a copy of the value
3993 not in TARGET, so that we can be sure to use the proper
3994 value in a containing expression even if TARGET has something
3995 else stored in it. If possible, we copy the value through a pseudo
3996 and return that pseudo. Or, if the value is constant, we try to
3997 return the constant. In some cases, we return a pseudo
3998 copied *from* TARGET.
3999
4000 If the mode is BLKmode then we may return TARGET itself.
4001 It turns out that in BLKmode it doesn't cause a problem,
4002 because C has no operators that could combine two different
4003 assignments into the same BLKmode object with different values
4004 with no sequence point. Will other languages need this to
4005 be more thorough?
4006
4007 If WANT_VALUE is 0, we return NULL, to make sure
4008 to catch quickly any cases where the caller uses the value
4009 and fails to set WANT_VALUE. */
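4010
4010 /* Illustrative note: most assignments reach this function through the
4010 plain call `store_expr (from, to_rtx, want_value)' at the end of
4010 expand_assignment above; WANT_VALUE is nonzero only when the
4010 assignment itself is used as a value, e.g. the inner store in
4010 `a = (b = c)', in which case an rtx for the stored value is needed. */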
4010
4011 rtx
4012 store_expr (exp, target, want_value)
4013 register tree exp;
4014 register rtx target;
4015 int want_value;
4016 {
4017 register rtx temp;
4018 int dont_return_target = 0;
4019 int dont_store_target = 0;
4020
4021 if (TREE_CODE (exp) == COMPOUND_EXPR)
4022 {
4023 /* Perform first part of compound expression, then assign from second
4024 part. */
4025 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4026 emit_queue ();
4027 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4028 }
4029 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4030 {
4031 /* For conditional expression, get safe form of the target. Then
4032 test the condition, doing the appropriate assignment on either
4033 side. This avoids the creation of unnecessary temporaries.
4034 For non-BLKmode, it is more efficient not to do this. */
4035
4036 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4037
4038 emit_queue ();
4039 target = protect_from_queue (target, 1);
4040
4041 do_pending_stack_adjust ();
4042 NO_DEFER_POP;
4043 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4044 start_cleanup_deferral ();
4045 store_expr (TREE_OPERAND (exp, 1), target, 0);
4046 end_cleanup_deferral ();
4047 emit_queue ();
4048 emit_jump_insn (gen_jump (lab2));
4049 emit_barrier ();
4050 emit_label (lab1);
4051 start_cleanup_deferral ();
4052 store_expr (TREE_OPERAND (exp, 2), target, 0);
4053 end_cleanup_deferral ();
4054 emit_queue ();
4055 emit_label (lab2);
4056 OK_DEFER_POP;
4057
4058 return want_value ? target : NULL_RTX;
4059 }
4060 else if (queued_subexp_p (target))
4061 /* If target contains a postincrement, let's not risk
4062 using it as the place to generate the rhs. */
4063 {
4064 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4065 {
4066 /* Expand EXP into a new pseudo. */
4067 temp = gen_reg_rtx (GET_MODE (target));
4068 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4069 }
4070 else
4071 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4072
4073 /* If target is volatile, ANSI requires accessing the value
4074 *from* the target, if it is accessed. So make that happen.
4075 In no case return the target itself. */
4076 if (! MEM_VOLATILE_P (target) && want_value)
4077 dont_return_target = 1;
4078 }
4079 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4080 && GET_MODE (target) != BLKmode)
4081 /* If target is in memory and caller wants value in a register instead,
4082 arrange that. Pass TARGET as target for expand_expr so that,
4083 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4084 We know expand_expr will not use the target in that case.
4085 Don't do this if TARGET is volatile because we are supposed
4086 to write it and then read it. */
4087 {
4088 temp = expand_expr (exp, target, GET_MODE (target), 0);
4089 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4090 {
4091 /* If TEMP is already in the desired TARGET, only copy it from
4092 memory and don't store it there again. */
4093 if (temp == target
4094 || (rtx_equal_p (temp, target)
4095 && ! side_effects_p (temp) && ! side_effects_p (target)))
4096 dont_store_target = 1;
4097 temp = copy_to_reg (temp);
4098 }
4099 dont_return_target = 1;
4100 }
4101 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4102 /* If this is a scalar in a register that is stored in a wider mode
4103 than the declared mode, compute the result into its declared mode
4104 and then convert to the wider mode. Our value is the computed
4105 expression. */
4106 {
4107 /* If we don't want a value, we can do the conversion inside EXP,
4108 which will often result in some optimizations. Do the conversion
4109 in two steps: first change the signedness, if needed, then
4110 the extend. But don't do this if the type of EXP is a subtype
4111 of something else since then the conversion might involve
4112 more than just converting modes. */
4113 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4114 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4115 {
4116 if (TREE_UNSIGNED (TREE_TYPE (exp))
4117 != SUBREG_PROMOTED_UNSIGNED_P (target))
4118 exp
4119 = convert
4120 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4121 TREE_TYPE (exp)),
4122 exp);
4123
4124 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4125 SUBREG_PROMOTED_UNSIGNED_P (target)),
4126 exp);
4127 }
4128
4129 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4130
4131 /* If TEMP is a volatile MEM and we want a result value, make
4132 the access now so it gets done only once. Likewise if
4133 it contains TARGET. */
4134 if (GET_CODE (temp) == MEM && want_value
4135 && (MEM_VOLATILE_P (temp)
4136 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4137 temp = copy_to_reg (temp);
4138
4139 /* If TEMP is a VOIDmode constant, use convert_modes to make
4140 sure that we properly convert it. */
4141 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4142 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4143 TYPE_MODE (TREE_TYPE (exp)), temp,
4144 SUBREG_PROMOTED_UNSIGNED_P (target));
4145
4146 convert_move (SUBREG_REG (target), temp,
4147 SUBREG_PROMOTED_UNSIGNED_P (target));
4148
4149 /* If we promoted a constant, change the mode back down to match
4150 target. Otherwise, the caller might get confused by a result whose
4151 mode is larger than expected. */
4152
4153 if (want_value && GET_MODE (temp) != GET_MODE (target)
4154 && GET_MODE (temp) != VOIDmode)
4155 {
4156 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4157 SUBREG_PROMOTED_VAR_P (temp) = 1;
4158 SUBREG_PROMOTED_UNSIGNED_P (temp)
4159 = SUBREG_PROMOTED_UNSIGNED_P (target);
4160 }
4161
4162 return want_value ? temp : NULL_RTX;
4163 }
4164 else
4165 {
4166 temp = expand_expr (exp, target, GET_MODE (target), 0);
4167 /* Return TARGET if it's a specified hardware register.
4168 If TARGET is a volatile mem ref, either return TARGET
4169 or return a reg copied *from* TARGET; ANSI requires this.
4170
4171 Otherwise, if TEMP is not TARGET, return TEMP
4172 if it is constant (for efficiency),
4173 or if we really want the correct value. */
4174 if (!(target && GET_CODE (target) == REG
4175 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4176 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4177 && ! rtx_equal_p (temp, target)
4178 && (CONSTANT_P (temp) || want_value))
4179 dont_return_target = 1;
4180 }
4181
4182 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4183 the same as that of TARGET, adjust the constant. This is needed, for
4184 example, in case it is a CONST_DOUBLE and we want only a word-sized
4185 value. */
4186 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4187 && TREE_CODE (exp) != ERROR_MARK
4188 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4189 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4190 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4191
4192 if (current_function_check_memory_usage
4193 && GET_CODE (target) == MEM
4194 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4195 {
4196 in_check_memory_usage = 1;
4197 if (GET_CODE (temp) == MEM)
4198 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4199 VOIDmode, 3, XEXP (target, 0), Pmode,
4200 XEXP (temp, 0), Pmode,
4201 expr_size (exp), TYPE_MODE (sizetype));
4202 else
4203 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4204 VOIDmode, 3, XEXP (target, 0), Pmode,
4205 expr_size (exp), TYPE_MODE (sizetype),
4206 GEN_INT (MEMORY_USE_WO),
4207 TYPE_MODE (integer_type_node));
4208 in_check_memory_usage = 0;
4209 }
4210
4211 /* If value was not generated in the target, store it there.
4212 Convert the value to TARGET's type first if necessary. */
4213 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4214 one or both of them are volatile memory refs, we have to distinguish
4215 two cases:
4216 - expand_expr has used TARGET. In this case, we must not generate
4217 another copy. This can be detected by TEMP and TARGET comparing
4218 equal according to ==.
4219 - expand_expr has not used TARGET - that means that the source just
4220 happens to have the same RTX form. Since temp will have been created
4221 by expand_expr, it will compare unequal according to == .
4222 We must generate a copy in this case, to reach the correct number
4223 of volatile memory references. */
4224
4225 if ((! rtx_equal_p (temp, target)
4226 || (temp != target && (side_effects_p (temp)
4227 || side_effects_p (target))))
4228 && TREE_CODE (exp) != ERROR_MARK
4229 && ! dont_store_target)
4230 {
4231 target = protect_from_queue (target, 1);
4232 if (GET_MODE (temp) != GET_MODE (target)
4233 && GET_MODE (temp) != VOIDmode)
4234 {
4235 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4236 if (dont_return_target)
4237 {
4238 /* In this case, we will return TEMP,
4239 so make sure it has the proper mode.
4240 But don't forget to store the value into TARGET. */
4241 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4242 emit_move_insn (target, temp);
4243 }
4244 else
4245 convert_move (target, temp, unsignedp);
4246 }
4247
4248 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4249 {
4250 /* Handle copying a string constant into an array.
4251 The string constant may be shorter than the array.
4252 So copy just the string's actual length, and clear the rest. */
4253 rtx size;
4254 rtx addr;
4255
4256 /* Get the size of the data type of the string,
4257 which is actually the size of the target. */
4258 size = expr_size (exp);
4259 if (GET_CODE (size) == CONST_INT
4260 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4261 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4262 else
4263 {
4264 /* Compute the size of the data to copy from the string. */
4265 tree copy_size
4266 = size_binop (MIN_EXPR,
4267 make_tree (sizetype, size),
4268 size_int (TREE_STRING_LENGTH (exp)));
4269 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4270 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4271 VOIDmode, 0);
4272 rtx label = 0;
4273
4274 /* Copy that much. */
4275 emit_block_move (target, temp, copy_size_rtx,
4276 TYPE_ALIGN (TREE_TYPE (exp)));
4277
4278 /* Figure out how much is left in TARGET that we have to clear.
4279 Do all calculations in ptr_mode. */
4280
4281 addr = XEXP (target, 0);
4282 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4283
4284 if (GET_CODE (copy_size_rtx) == CONST_INT)
4285 {
4286 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4287 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4288 align = MIN (align,
4289 (unsigned int) (BITS_PER_UNIT
4290 * (INTVAL (copy_size_rtx)
4291 & - INTVAL (copy_size_rtx))));
4292 }
4293 else
4294 {
4295 addr = force_reg (ptr_mode, addr);
4296 addr = expand_binop (ptr_mode, add_optab, addr,
4297 copy_size_rtx, NULL_RTX, 0,
4298 OPTAB_LIB_WIDEN);
4299
4300 size = expand_binop (ptr_mode, sub_optab, size,
4301 copy_size_rtx, NULL_RTX, 0,
4302 OPTAB_LIB_WIDEN);
4303
4304 align = BITS_PER_UNIT;
4305 label = gen_label_rtx ();
4306 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4307 GET_MODE (size), 0, 0, label);
4308 }
4309 align = MIN (align, expr_align (copy_size));
4310
4311 if (size != const0_rtx)
4312 {
4313 rtx dest = gen_rtx_MEM (BLKmode, addr);
4314
4315 MEM_COPY_ATTRIBUTES (dest, target);
4316
4317 /* Be sure we can write on ADDR. */
4318 in_check_memory_usage = 1;
4319 if (current_function_check_memory_usage)
4320 emit_library_call (chkr_check_addr_libfunc,
4321 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4322 addr, Pmode,
4323 size, TYPE_MODE (sizetype),
4324 GEN_INT (MEMORY_USE_WO),
4325 TYPE_MODE (integer_type_node));
4326 in_check_memory_usage = 0;
4327 clear_storage (dest, size, align);
4328 }
4329
4330 if (label)
4331 emit_label (label);
4332 }
4333 }
4334 /* Handle calls that return values in multiple non-contiguous locations.
4335 The Irix 6 ABI has examples of this. */
4336 else if (GET_CODE (target) == PARALLEL)
4337 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4338 TYPE_ALIGN (TREE_TYPE (exp)));
4339 else if (GET_MODE (temp) == BLKmode)
4340 emit_block_move (target, temp, expr_size (exp),
4341 TYPE_ALIGN (TREE_TYPE (exp)));
4342 else
4343 emit_move_insn (target, temp);
4344 }
4345
4346 /* If we don't want a value, return NULL_RTX. */
4347 if (! want_value)
4348 return NULL_RTX;
4349
4350 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4351 ??? The latter test doesn't seem to make sense. */
4352 else if (dont_return_target && GET_CODE (temp) != MEM)
4353 return temp;
4354
4355 /* Return TARGET itself if it is a hard register. */
4356 else if (want_value && GET_MODE (target) != BLKmode
4357 && ! (GET_CODE (target) == REG
4358 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4359 return copy_to_reg (target);
4360
4361 else
4362 return target;
4363 }
4364 \f
4365 /* Return 1 if EXP just contains zeros. */
4366
4367 static int
4368 is_zeros_p (exp)
4369 tree exp;
4370 {
4371 tree elt;
4372
4373 switch (TREE_CODE (exp))
4374 {
4375 case CONVERT_EXPR:
4376 case NOP_EXPR:
4377 case NON_LVALUE_EXPR:
4378 return is_zeros_p (TREE_OPERAND (exp, 0));
4379
4380 case INTEGER_CST:
4381 return integer_zerop (exp);
4382
4383 case COMPLEX_CST:
4384 return
4385 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4386
4387 case REAL_CST:
4388 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4389
4390 case CONSTRUCTOR:
4391 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4392 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4393 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4394 if (! is_zeros_p (TREE_VALUE (elt)))
4395 return 0;
4396
4397 return 1;
4398
4399 default:
4400 return 0;
4401 }
4402 }
4403
4404 /* Return 1 if EXP contains mostly (3/4) zeros. */
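4405
4405 /* Illustrative example (hypothetical initializer): for
4405 `int a[4] = { 5, 0, 0, 0 };' the CONSTRUCTOR has zeros == 3 and
4405 elts == 4, so the `4 * zeros >= 3 * elts' test below succeeds and the
4405 aggregate counts as mostly zero; store_constructor then prefers to
4405 clear the whole object first and store only the nonzero element. */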
4405
4406 static int
4407 mostly_zeros_p (exp)
4408 tree exp;
4409 {
4410 if (TREE_CODE (exp) == CONSTRUCTOR)
4411 {
4412 int elts = 0, zeros = 0;
4413 tree elt = CONSTRUCTOR_ELTS (exp);
4414 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4415 {
4416 /* If there are no ranges of true bits, it is all zero. */
4417 return elt == NULL_TREE;
4418 }
4419 for (; elt; elt = TREE_CHAIN (elt))
4420 {
4421 /* We do not handle the case where the index is a RANGE_EXPR,
4422 so the statistic will be somewhat inaccurate.
4423 We do make a more accurate count in store_constructor itself,
4424 so since this function is only used for nested array elements,
4425 this should be close enough. */
4426 if (mostly_zeros_p (TREE_VALUE (elt)))
4427 zeros++;
4428 elts++;
4429 }
4430
4431 return 4 * zeros >= 3 * elts;
4432 }
4433
4434 return is_zeros_p (exp);
4435 }
4436 \f
4437 /* Helper function for store_constructor.
4438 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4439 TYPE is the type of the CONSTRUCTOR, not the element type.
4440 ALIGN and CLEARED are as for store_constructor.
4441 ALIAS_SET is the alias set to use for any stores.
4442
4443 This provides a recursive shortcut back to store_constructor when it isn't
4444 necessary to go through store_field. This is so that we can pass through
4445 the cleared field to let store_constructor know that we may not have to
4446 clear a substructure if the outer structure has already been cleared. */
4447
4448 static void
4449 store_constructor_field (target, bitsize, bitpos,
4450 mode, exp, type, align, cleared, alias_set)
4451 rtx target;
4452 unsigned HOST_WIDE_INT bitsize;
4453 HOST_WIDE_INT bitpos;
4454 enum machine_mode mode;
4455 tree exp, type;
4456 unsigned int align;
4457 int cleared;
4458 int alias_set;
4459 {
4460 if (TREE_CODE (exp) == CONSTRUCTOR
4461 && bitpos % BITS_PER_UNIT == 0
4462 /* If we have a non-zero bitpos for a register target, then we just
4463 let store_field do the bitfield handling. This is unlikely to
4464 generate unnecessary clear instructions anyway. */
4465 && (bitpos == 0 || GET_CODE (target) == MEM))
4466 {
4467 if (bitpos != 0)
4468 target
4469 = adjust_address (target,
4470 GET_MODE (target) == BLKmode
4471 || 0 != (bitpos
4472 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4473 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4474
4476 /* Show the alignment may no longer be what it was and update the alias
4477 set, if required. */
4478 if (bitpos != 0)
4479 align = MIN (align, (unsigned int) bitpos & - bitpos);
4480 if (GET_CODE (target) == MEM)
4481 set_mem_alias_set (target, alias_set);
4482
4483 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4484 }
4485 else
4486 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4487 int_size_in_bytes (type), alias_set);
4488 }
4489
4490 /* Store the value of constructor EXP into the rtx TARGET.
4491 TARGET is either a REG or a MEM.
4492 ALIGN is the maximum known alignment for TARGET.
4493 CLEARED is true if TARGET is known to have been zero'd.
4494 SIZE is the number of bytes of TARGET we are allowed to modify: this
4495 may not be the same as the size of EXP if we are assigning to a field
4496 which has been packed to exclude padding bits. */
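4497
4497 /* Illustrative example (hypothetical source): for
4497 `struct { int a, b; } s = { 1 };' the RECORD_TYPE branch below sees a
4497 constructor with fewer elements than the type has fields, so TARGET is
4497 cleared in full first and only the initializer for `a' is stored. */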
4497
4498 static void
4499 store_constructor (exp, target, align, cleared, size)
4500 tree exp;
4501 rtx target;
4502 unsigned int align;
4503 int cleared;
4504 HOST_WIDE_INT size;
4505 {
4506 tree type = TREE_TYPE (exp);
4507 #ifdef WORD_REGISTER_OPERATIONS
4508 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4509 #endif
4510
4511 /* We know our target cannot conflict, since safe_from_p has been called. */
4512 #if 0
4513 /* Don't try copying piece by piece into a hard register
4514 since that is vulnerable to being clobbered by EXP.
4515 Instead, construct in a pseudo register and then copy it all. */
4516 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4517 {
4518 rtx temp = gen_reg_rtx (GET_MODE (target));
4519 store_constructor (exp, temp, align, cleared, size);
4520 emit_move_insn (target, temp);
4521 return;
4522 }
4523 #endif
4524
4525 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4526 || TREE_CODE (type) == QUAL_UNION_TYPE)
4527 {
4528 register tree elt;
4529
4530 /* Inform later passes that the whole union value is dead. */
4531 if ((TREE_CODE (type) == UNION_TYPE
4532 || TREE_CODE (type) == QUAL_UNION_TYPE)
4533 && ! cleared)
4534 {
4535 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4536
4537 /* If the constructor is empty, clear the union. */
4538 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4539 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4540 }
4541
4542 /* If we are building a static constructor into a register,
4543 set the initial value as zero so we can fold the value into
4544 a constant. But if more than one register is involved,
4545 this probably loses. */
4546 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4547 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4548 {
4549 if (! cleared)
4550 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4551
4552 cleared = 1;
4553 }
4554
4555 /* If the constructor has fewer fields than the structure
4556 or if we are initializing the structure to mostly zeros,
4557 clear the whole structure first. Don't do this if TARGET is a
4558 register whose mode size isn't equal to SIZE since clear_storage
4559 can't handle this case. */
4560 else if (size > 0
4561 && ((list_length (CONSTRUCTOR_ELTS (exp))
4562 != fields_length (type))
4563 || mostly_zeros_p (exp))
4564 && (GET_CODE (target) != REG
4565 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4566 {
4567 if (! cleared)
4568 clear_storage (target, GEN_INT (size), align);
4569
4570 cleared = 1;
4571 }
4572 else if (! cleared)
4573 /* Inform later passes that the old value is dead. */
4574 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4575
4576 /* Store each element of the constructor into
4577 the corresponding field of TARGET. */
4578
4579 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4580 {
4581 register tree field = TREE_PURPOSE (elt);
4582 #ifdef WORD_REGISTER_OPERATIONS
4583 tree value = TREE_VALUE (elt);
4584 #endif
4585 register enum machine_mode mode;
4586 HOST_WIDE_INT bitsize;
4587 HOST_WIDE_INT bitpos = 0;
4588 int unsignedp;
4589 tree offset;
4590 rtx to_rtx = target;
4591
4592 /* Just ignore missing fields.
4593 We cleared the whole structure, above,
4594 if any fields are missing. */
4595 if (field == 0)
4596 continue;
4597
4598 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4599 continue;
4600
4601 if (host_integerp (DECL_SIZE (field), 1))
4602 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4603 else
4604 bitsize = -1;
4605
4606 unsignedp = TREE_UNSIGNED (field);
4607 mode = DECL_MODE (field);
4608 if (DECL_BIT_FIELD (field))
4609 mode = VOIDmode;
4610
4611 offset = DECL_FIELD_OFFSET (field);
4612 if (host_integerp (offset, 0)
4613 && host_integerp (bit_position (field), 0))
4614 {
4615 bitpos = int_bit_position (field);
4616 offset = 0;
4617 }
4618 else
4619 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4620
4621 if (offset)
4622 {
4623 rtx offset_rtx;
4624
4625 if (contains_placeholder_p (offset))
4626 offset = build (WITH_RECORD_EXPR, sizetype,
4627 offset, make_tree (TREE_TYPE (exp), target));
4628
4629 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4630 if (GET_CODE (to_rtx) != MEM)
4631 abort ();
4632
4633 if (GET_MODE (offset_rtx) != ptr_mode)
4634 {
4635 #ifdef POINTERS_EXTEND_UNSIGNED
4636 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4637 #else
4638 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4639 #endif
4640 }
4641
4642 to_rtx
4643 = change_address (to_rtx, VOIDmode,
4644 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4645 force_reg (ptr_mode,
4646 offset_rtx)));
4647 align = DECL_OFFSET_ALIGN (field);
4648 }
4649
4650 if (TREE_READONLY (field))
4651 {
4652 if (GET_CODE (to_rtx) == MEM)
4653 to_rtx = copy_rtx (to_rtx);
4654
4655 RTX_UNCHANGING_P (to_rtx) = 1;
4656 }
4657
4658 #ifdef WORD_REGISTER_OPERATIONS
4659 /* If this initializes a field that is smaller than a word, at the
4660 start of a word, try to widen it to a full word.
4661 This special case allows us to output C++ member function
4662 initializations in a form that the optimizers can understand. */
4663 if (GET_CODE (target) == REG
4664 && bitsize < BITS_PER_WORD
4665 && bitpos % BITS_PER_WORD == 0
4666 && GET_MODE_CLASS (mode) == MODE_INT
4667 && TREE_CODE (value) == INTEGER_CST
4668 && exp_size >= 0
4669 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4670 {
4671 tree type = TREE_TYPE (value);
4672 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4673 {
4674 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4675 value = convert (type, value);
4676 }
4677 if (BYTES_BIG_ENDIAN)
4678 value
4679 = fold (build (LSHIFT_EXPR, type, value,
4680 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4681 bitsize = BITS_PER_WORD;
4682 mode = word_mode;
4683 }
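 /* Concrete illustration of the widening above (assuming
 BITS_PER_WORD == 32): a 16-bit field at bit 0 of a register target
 initialized with the constant 5 becomes a full-word word_mode store
 of 5 on a little-endian target, or of 5 << 16 on a big-endian one,
 which later passes handle as an ordinary word move. */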
4684 #endif
4685 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4686 TREE_VALUE (elt), type, align, cleared,
4687 (DECL_NONADDRESSABLE_P (field)
4688 && GET_CODE (to_rtx) == MEM)
4689 ? MEM_ALIAS_SET (to_rtx)
4690 : get_alias_set (TREE_TYPE (field)));
4691 }
4692 }
4693 else if (TREE_CODE (type) == ARRAY_TYPE)
4694 {
4695 register tree elt;
4696 register int i;
4697 int need_to_clear;
4698 tree domain = TYPE_DOMAIN (type);
4699 tree elttype = TREE_TYPE (type);
4700 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4701 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4702 HOST_WIDE_INT minelt = 0;
4703 HOST_WIDE_INT maxelt = 0;
4704
4705 /* If we have constant bounds for the range of the type, get them. */
4706 if (const_bounds_p)
4707 {
4708 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4709 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4710 }
4711
4712 /* If the constructor has fewer elements than the array,
4713 clear the whole array first. Similarly if this is
4714 a static constructor of a non-BLKmode object. */
4715 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4716 need_to_clear = 1;
4717 else
4718 {
4719 HOST_WIDE_INT count = 0, zero_count = 0;
4720 need_to_clear = ! const_bounds_p;
4721
4722 /* This loop is a more accurate version of the loop in
4723 mostly_zeros_p (it handles RANGE_EXPR in an index).
4724 It is also needed to check for missing elements. */
4725 for (elt = CONSTRUCTOR_ELTS (exp);
4726 elt != NULL_TREE && ! need_to_clear;
4727 elt = TREE_CHAIN (elt))
4728 {
4729 tree index = TREE_PURPOSE (elt);
4730 HOST_WIDE_INT this_node_count;
4731
4732 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4733 {
4734 tree lo_index = TREE_OPERAND (index, 0);
4735 tree hi_index = TREE_OPERAND (index, 1);
4736
4737 if (! host_integerp (lo_index, 1)
4738 || ! host_integerp (hi_index, 1))
4739 {
4740 need_to_clear = 1;
4741 break;
4742 }
4743
4744 this_node_count = (tree_low_cst (hi_index, 1)
4745 - tree_low_cst (lo_index, 1) + 1);
4746 }
4747 else
4748 this_node_count = 1;
4749
4750 count += this_node_count;
4751 if (mostly_zeros_p (TREE_VALUE (elt)))
4752 zero_count += this_node_count;
4753 }
4754
4755 /* Clear the entire array first if there are any missing elements,
4756 or if the incidence of zero elements is >= 75%. */
4757 if (! need_to_clear
4758 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4759 need_to_clear = 1;
4760 }
4761
4762 if (need_to_clear && size > 0)
4763 {
4764 if (! cleared)
4765 clear_storage (target, GEN_INT (size), align);
4766 cleared = 1;
4767 }
4768 else
4769 /* Inform later passes that the old value is dead. */
4770 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4771
4772 /* Store each element of the constructor into
4773 the corresponding element of TARGET, determined
4774 by counting the elements. */
4775 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4776 elt;
4777 elt = TREE_CHAIN (elt), i++)
4778 {
4779 register enum machine_mode mode;
4780 HOST_WIDE_INT bitsize;
4781 HOST_WIDE_INT bitpos;
4782 int unsignedp;
4783 tree value = TREE_VALUE (elt);
4784 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4785 tree index = TREE_PURPOSE (elt);
4786 rtx xtarget = target;
4787
4788 if (cleared && is_zeros_p (value))
4789 continue;
4790
4791 unsignedp = TREE_UNSIGNED (elttype);
4792 mode = TYPE_MODE (elttype);
4793 if (mode == BLKmode)
4794 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4795 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4796 : -1);
4797 else
4798 bitsize = GET_MODE_BITSIZE (mode);
4799
4800 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4801 {
4802 tree lo_index = TREE_OPERAND (index, 0);
4803 tree hi_index = TREE_OPERAND (index, 1);
4804 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4805 struct nesting *loop;
4806 HOST_WIDE_INT lo, hi, count;
4807 tree position;
4808
4809 /* If the range is constant and "small", unroll the loop. */
4810 if (const_bounds_p
4811 && host_integerp (lo_index, 0)
4812 && host_integerp (hi_index, 0)
4813 && (lo = tree_low_cst (lo_index, 0),
4814 hi = tree_low_cst (hi_index, 0),
4815 count = hi - lo + 1,
4816 (GET_CODE (target) != MEM
4817 || count <= 2
4818 || (host_integerp (TYPE_SIZE (elttype), 1)
4819 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4820 <= 40 * 8)))))
4821 {
4822 lo -= minelt; hi -= minelt;
4823 for (; lo <= hi; lo++)
4824 {
4825 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4826 store_constructor_field
4827 (target, bitsize, bitpos, mode, value, type, align,
4828 cleared,
4829 TYPE_NONALIASED_COMPONENT (type)
4830 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4831 }
4832 }
4833 else
4834 {
4835 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4836 loop_top = gen_label_rtx ();
4837 loop_end = gen_label_rtx ();
4838
4839 unsignedp = TREE_UNSIGNED (domain);
4840
4841 index = build_decl (VAR_DECL, NULL_TREE, domain);
4842
4843 index_r
4844 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4845 &unsignedp, 0));
4846 SET_DECL_RTL (index, index_r);
4847 if (TREE_CODE (value) == SAVE_EXPR
4848 && SAVE_EXPR_RTL (value) == 0)
4849 {
4850 /* Make sure value gets expanded once before the
4851 loop. */
4852 expand_expr (value, const0_rtx, VOIDmode, 0);
4853 emit_queue ();
4854 }
4855 store_expr (lo_index, index_r, 0);
4856 loop = expand_start_loop (0);
4857
4858 /* Assign value to element index. */
4859 position
4860 = convert (ssizetype,
4861 fold (build (MINUS_EXPR, TREE_TYPE (index),
4862 index, TYPE_MIN_VALUE (domain))));
4863 position = size_binop (MULT_EXPR, position,
4864 convert (ssizetype,
4865 TYPE_SIZE_UNIT (elttype)));
4866
4867 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4868 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4869 xtarget = change_address (target, mode, addr);
4870 if (TREE_CODE (value) == CONSTRUCTOR)
4871 store_constructor (value, xtarget, align, cleared,
4872 bitsize / BITS_PER_UNIT);
4873 else
4874 store_expr (value, xtarget, 0);
4875
4876 expand_exit_loop_if_false (loop,
4877 build (LT_EXPR, integer_type_node,
4878 index, hi_index));
4879
4880 expand_increment (build (PREINCREMENT_EXPR,
4881 TREE_TYPE (index),
4882 index, integer_one_node), 0, 0);
4883 expand_end_loop ();
4884 emit_label (loop_end);
4885 }
4886 }
4887 else if ((index != 0 && ! host_integerp (index, 0))
4888 || ! host_integerp (TYPE_SIZE (elttype), 1))
4889 {
4890 rtx pos_rtx, addr;
4891 tree position;
4892
4893 if (index == 0)
4894 index = ssize_int (1);
4895
4896 if (minelt)
4897 index = convert (ssizetype,
4898 fold (build (MINUS_EXPR, index,
4899 TYPE_MIN_VALUE (domain))));
4900
4901 position = size_binop (MULT_EXPR, index,
4902 convert (ssizetype,
4903 TYPE_SIZE_UNIT (elttype)));
4904 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4905 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4906 xtarget = change_address (target, mode, addr);
4907 store_expr (value, xtarget, 0);
4908 }
4909 else
4910 {
4911 if (index != 0)
4912 bitpos = ((tree_low_cst (index, 0) - minelt)
4913 * tree_low_cst (TYPE_SIZE (elttype), 1));
4914 else
4915 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4916
4917 store_constructor_field (target, bitsize, bitpos, mode, value,
4918 type, align, cleared,
4919 TYPE_NONALIASED_COMPONENT (type)
4920 && GET_CODE (target) == MEM
4921 ? MEM_ALIAS_SET (target) :
4922 get_alias_set (elttype));
4923
4924 }
4925 }
4926 }
4927
4928 /* Set constructor assignments. */
4929 else if (TREE_CODE (type) == SET_TYPE)
4930 {
4931 tree elt = CONSTRUCTOR_ELTS (exp);
4932 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4933 tree domain = TYPE_DOMAIN (type);
4934 tree domain_min, domain_max, bitlength;
4935
4936 /* The default implementation strategy is to extract the constant
4937 parts of the constructor, use that to initialize the target,
4938 and then "or" in whatever non-constant ranges we need in addition.
4939
4940 If a large set is all zero or all ones, it is
4941 probably better to set it using memset (if available) or bzero.
4942 Also, if a large set has just a single range, it may be
4943 better to first clear the whole set (using
4944 bzero/memset), and then set the bits we want. */
4945
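 /* Illustrative example (hypothetical Chill/Pascal-style source): in a
 set constructor such as `[3..5, lo..hi]' the constant range 3..5 is
 folded into the constant word image built below from
 get_set_constructor_bits, while the variable range lo..hi is set at
 run time through the __setbits library call (or memset, when its
 bounds turn out to be byte-aligned constants) in the loop further
 down. */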
4946 /* Check for all zeros. */
4947 if (elt == NULL_TREE && size > 0)
4948 {
4949 if (!cleared)
4950 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4951 return;
4952 }
4953
4954 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4955 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4956 bitlength = size_binop (PLUS_EXPR,
4957 size_diffop (domain_max, domain_min),
4958 ssize_int (1));
4959
4960 nbits = tree_low_cst (bitlength, 1);
4961
4962 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4963 are "complicated" (more than one range), initialize (the
4964 constant parts) by copying from a constant. */
4965 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4966 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4967 {
4968 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4969 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4970 char *bit_buffer = (char *) alloca (nbits);
4971 HOST_WIDE_INT word = 0;
4972 unsigned int bit_pos = 0;
4973 unsigned int ibit = 0;
4974 unsigned int offset = 0; /* In bytes from beginning of set. */
4975
4976 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4977 for (;;)
4978 {
4979 if (bit_buffer[ibit])
4980 {
4981 if (BYTES_BIG_ENDIAN)
4982 word |= (1 << (set_word_size - 1 - bit_pos));
4983 else
4984 word |= 1 << bit_pos;
4985 }
4986
4987 bit_pos++; ibit++;
4988 if (bit_pos >= set_word_size || ibit == nbits)
4989 {
4990 if (word != 0 || ! cleared)
4991 {
4992 rtx datum = GEN_INT (word);
4993 rtx to_rtx;
4994
4995 /* The assumption here is that it is safe to use
4996 XEXP if the set is multi-word, but not if
4997 it's single-word. */
4998 if (GET_CODE (target) == MEM)
4999 to_rtx = adjust_address (target, mode, offset);
5000 else if (offset == 0)
5001 to_rtx = target;
5002 else
5003 abort ();
5004 emit_move_insn (to_rtx, datum);
5005 }
5006
5007 if (ibit == nbits)
5008 break;
5009 word = 0;
5010 bit_pos = 0;
5011 offset += set_word_size / BITS_PER_UNIT;
5012 }
5013 }
5014 }
5015 else if (!cleared)
5016 /* Don't bother clearing storage if the set is all ones. */
5017 if (TREE_CHAIN (elt) != NULL_TREE
5018 || (TREE_PURPOSE (elt) == NULL_TREE
5019 ? nbits != 1
5020 : ( ! host_integerp (TREE_VALUE (elt), 0)
5021 || ! host_integerp (TREE_PURPOSE (elt), 0)
5022 || (tree_low_cst (TREE_VALUE (elt), 0)
5023 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5024 != (HOST_WIDE_INT) nbits))))
5025 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5026
5027 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5028 {
5029 /* Start of range of element or NULL. */
5030 tree startbit = TREE_PURPOSE (elt);
5031 /* End of range of element, or element value. */
5032 tree endbit = TREE_VALUE (elt);
5033 #ifdef TARGET_MEM_FUNCTIONS
5034 HOST_WIDE_INT startb, endb;
5035 #endif
5036 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5037
5038 bitlength_rtx = expand_expr (bitlength,
5039 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5040
5041 /* Handle non-range tuple element like [ expr ]. */
5042 if (startbit == NULL_TREE)
5043 {
5044 startbit = save_expr (endbit);
5045 endbit = startbit;
5046 }
5047
5048 startbit = convert (sizetype, startbit);
5049 endbit = convert (sizetype, endbit);
5050 if (! integer_zerop (domain_min))
5051 {
5052 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5053 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5054 }
5055 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5056 EXPAND_CONST_ADDRESS);
5057 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5058 EXPAND_CONST_ADDRESS);
5059
5060 if (REG_P (target))
5061 {
5062 targetx
5063 = assign_temp
5064 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5065 TYPE_QUAL_CONST)),
5066 0, 1, 1);
5067 emit_move_insn (targetx, target);
5068 }
5069
5070 else if (GET_CODE (target) == MEM)
5071 targetx = target;
5072 else
5073 abort ();
5074
5075 #ifdef TARGET_MEM_FUNCTIONS
5076 /* Optimization: If startbit and endbit are
5077 constants divisible by BITS_PER_UNIT,
5078 call memset instead. */
5079 if (TREE_CODE (startbit) == INTEGER_CST
5080 && TREE_CODE (endbit) == INTEGER_CST
5081 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5082 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5083 {
5084 emit_library_call (memset_libfunc, LCT_NORMAL,
5085 VOIDmode, 3,
5086 plus_constant (XEXP (targetx, 0),
5087 startb / BITS_PER_UNIT),
5088 Pmode,
5089 constm1_rtx, TYPE_MODE (integer_type_node),
5090 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5091 TYPE_MODE (sizetype));
5092 }
5093 else
5094 #endif
5095 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5096 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5097 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5098 startbit_rtx, TYPE_MODE (sizetype),
5099 endbit_rtx, TYPE_MODE (sizetype));
5100
5101 if (REG_P (target))
5102 emit_move_insn (target, targetx);
5103 }
5104 }
5105
5106 else
5107 abort ();
5108 }
5109
5110 /* Store the value of EXP (an expression tree)
5111 into a subfield of TARGET which has mode MODE and occupies
5112 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5113 If MODE is VOIDmode, it means that we are storing into a bit-field.
5114
5115 If VALUE_MODE is VOIDmode, return nothing in particular.
5116 UNSIGNEDP is not used in this case.
5117
5118 Otherwise, return an rtx for the value stored. This rtx
5119 has mode VALUE_MODE if that is convenient to do.
5120 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5121
5122 ALIGN is the alignment that TARGET is known to have.
5123 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5124
5125 ALIAS_SET is the alias set for the destination. This value will
5126 (in general) be different from that for TARGET, since TARGET is a
5127 reference to the containing structure. */
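5128
5128 /* Illustrative note: a store into a bit-field member, e.g.
5128 `struct { unsigned x : 3; } s; s.x = v;', arrives here with
5128 MODE == VOIDmode and BITSIZE == 3 and goes through the
5128 store_bit_field path below; an ordinary aligned word-sized member
5128 instead takes the final branch, which builds a MEM for the component
5128 with adjust_address and stores through store_expr. */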
5128
5129 static rtx
5130 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5131 unsignedp, align, total_size, alias_set)
5132 rtx target;
5133 HOST_WIDE_INT bitsize;
5134 HOST_WIDE_INT bitpos;
5135 enum machine_mode mode;
5136 tree exp;
5137 enum machine_mode value_mode;
5138 int unsignedp;
5139 unsigned int align;
5140 HOST_WIDE_INT total_size;
5141 int alias_set;
5142 {
5143 HOST_WIDE_INT width_mask = 0;
5144
5145 if (TREE_CODE (exp) == ERROR_MARK)
5146 return const0_rtx;
5147
5148 /* If we have nothing to store, do nothing unless the expression has
5149 side-effects. */
5150 if (bitsize == 0)
5151 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5152
5153 if (bitsize < HOST_BITS_PER_WIDE_INT)
5154 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5155
5156 /* If we are storing into an unaligned field of an aligned union that is
5157 in a register, we may have the mode of TARGET being an integer mode but
5158 MODE == BLKmode. In that case, get an aligned object whose size and
5159 alignment are the same as TARGET and store TARGET into it (we can avoid
5160 the store if the field being stored is the entire width of TARGET). Then
5161 call ourselves recursively to store the field into a BLKmode version of
5162 that object. Finally, load from the object into TARGET. This is not
5163 very efficient in general, but should only be slightly more expensive
5164 than the otherwise-required unaligned accesses. Perhaps this can be
5165 cleaned up later. */
5166
5167 if (mode == BLKmode
5168 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5169 {
5170 rtx object
5171 = assign_temp
5172 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5173 TYPE_QUAL_CONST),
5174 0, 1, 1);
5175 rtx blk_object = copy_rtx (object);
5176
5177 PUT_MODE (blk_object, BLKmode);
5178
5179 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5180 emit_move_insn (object, target);
5181
5182 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5183 align, total_size, alias_set);
5184
5185 /* Even though we aren't returning target, we need to
5186 give it the updated value. */
5187 emit_move_insn (target, object);
5188
5189 return blk_object;
5190 }
5191
5192 if (GET_CODE (target) == CONCAT)
5193 {
5194 /* We're storing into a struct containing a single __complex. */
5195
5196 if (bitpos != 0)
5197 abort ();
5198 return store_expr (exp, target, 0);
5199 }
5200
5201 /* If the structure is in a register or if the component
5202 is a bit field, we cannot use addressing to access it.
5203 Use bit-field techniques or SUBREG to store in it. */
5204
5205 if (mode == VOIDmode
5206 || (mode != BLKmode && ! direct_store[(int) mode]
5207 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5208 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5209 || GET_CODE (target) == REG
5210 || GET_CODE (target) == SUBREG
5211 /* If the field isn't aligned enough to store as an ordinary memref,
5212 store it as a bit field. */
5213 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5214 && (align < GET_MODE_ALIGNMENT (mode)
5215 || bitpos % GET_MODE_ALIGNMENT (mode)))
5216 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5217 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5218 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5219 /* If the RHS and field are a constant size and the size of the
5220 RHS isn't the same size as the bitfield, we must use bitfield
5221 operations. */
5222 || (bitsize >= 0
5223 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5224 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5225 {
5226 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5227
5228 /* If BITSIZE is narrower than the size of the type of EXP
5229 we will be narrowing TEMP. Normally, what's wanted are the
5230 low-order bits. However, if EXP's type is a record and this is
5231 big-endian machine, we want the upper BITSIZE bits. */
5232 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5233 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5234 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5235 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5236 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5237 - bitsize),
5238 temp, 1);
5239
5240 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5241 MODE. */
5242 if (mode != VOIDmode && mode != BLKmode
5243 && mode != TYPE_MODE (TREE_TYPE (exp)))
5244 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5245
5246 /* If the modes of TARGET and TEMP are both BLKmode, both
5247 must be in memory and BITPOS must be aligned on a byte
5248 boundary. If so, we simply do a block copy. */
5249 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5250 {
5251 unsigned int exp_align = expr_align (exp);
5252
5253 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5254 || bitpos % BITS_PER_UNIT != 0)
5255 abort ();
5256
5257 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5258
5259 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5260 align = MIN (exp_align, align);
5261
5262 /* Find an alignment that is consistent with the bit position. */
5263 while ((bitpos % align) != 0)
5264 align >>= 1;
5265
5266 emit_block_move (target, temp,
5267 bitsize == -1 ? expr_size (exp)
5268 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5269 / BITS_PER_UNIT),
5270 align);
5271
5272 return value_mode == VOIDmode ? const0_rtx : target;
5273 }
5274
5275 /* Store the value in the bitfield. */
5276 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5277 if (value_mode != VOIDmode)
5278 {
5279 /* The caller wants an rtx for the value. */
5280 /* If possible, avoid refetching from the bitfield itself. */
5281 if (width_mask != 0
5282 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5283 {
5284 tree count;
5285 enum machine_mode tmode;
5286
5287 if (unsignedp)
5288 return expand_and (temp,
5289 GEN_INT
5290 (trunc_int_for_mode
5291 (width_mask,
5292 GET_MODE (temp) == VOIDmode
5293 ? value_mode
5294 : GET_MODE (temp))), NULL_RTX);
5295 tmode = GET_MODE (temp);
5296 if (tmode == VOIDmode)
5297 tmode = value_mode;
5298 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5299 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5300 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5301 }
5302 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5303 NULL_RTX, value_mode, 0, align,
5304 total_size);
5305 }
5306 return const0_rtx;
5307 }
5308 else
5309 {
5310 rtx addr = XEXP (target, 0);
5311 rtx to_rtx;
5312
5313 /* If a value is wanted, it must be the lhs;
5314 so make the address stable for multiple use. */
5315
5316 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5317 && ! CONSTANT_ADDRESS_P (addr)
5318 /* A frame-pointer reference is already stable. */
5319 && ! (GET_CODE (addr) == PLUS
5320 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5321 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5322 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5323 target = replace_equiv_address (target, copy_to_reg (addr));
5324
5325 /* Now build a reference to just the desired component. */
5326
5327 to_rtx = copy_rtx (adjust_address (target, mode,
5328 bitpos / BITS_PER_UNIT));
5329
5330 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5331 /* If the address of the structure varies, then it might be on
5332 the stack. And, stack slots may be shared across scopes.
5333 So, two different structures, of different types, can end up
5334 at the same location. We will give the structures alias set
5335 zero; here we must be careful not to give non-zero alias sets
5336 to their fields. */
5337 set_mem_alias_set (to_rtx,
5338 rtx_varies_p (addr, /*for_alias=*/0)
5339 ? 0 : alias_set);
5340
5341 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5342 }
5343 }
5344 \f
5345 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5346 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5347 codes and find the ultimate containing object, which we return.
5348
5349 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5350 bit position, and *PUNSIGNEDP to the signedness of the field.
5351 If the position of the field is variable, we store a tree
5352 giving the variable offset (in units) in *POFFSET.
5353 This offset is in addition to the bit position.
5354 If the position is not variable, we store 0 in *POFFSET.
5355 We set *PALIGNMENT to the alignment of the address that will be
5356 computed. This is the alignment of the thing we return if *POFFSET
5357 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5358
5359 If any of the extraction expressions is volatile,
5360 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5361
5362 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5363 is a mode that can be used to access the field. In that case, *PBITSIZE
5364 is redundant.
5365
5366 If the field describes a variable-sized object, *PMODE is set to
5367 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5368 this case, but the address of the object can be found. */
5369
5370 tree
5371 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5372 punsignedp, pvolatilep, palignment)
5373 tree exp;
5374 HOST_WIDE_INT *pbitsize;
5375 HOST_WIDE_INT *pbitpos;
5376 tree *poffset;
5377 enum machine_mode *pmode;
5378 int *punsignedp;
5379 int *pvolatilep;
5380 unsigned int *palignment;
5381 {
5382 tree size_tree = 0;
5383 enum machine_mode mode = VOIDmode;
5384 tree offset = size_zero_node;
5385 tree bit_offset = bitsize_zero_node;
5386 unsigned int alignment = BIGGEST_ALIGNMENT;
5387 tree tem;
5388
5389 /* First get the mode, signedness, and size. We do this from just the
5390 outermost expression. */
5391 if (TREE_CODE (exp) == COMPONENT_REF)
5392 {
5393 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5394 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5395 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5396
5397 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5398 }
5399 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5400 {
5401 size_tree = TREE_OPERAND (exp, 1);
5402 *punsignedp = TREE_UNSIGNED (exp);
5403 }
5404 else
5405 {
5406 mode = TYPE_MODE (TREE_TYPE (exp));
5407 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5408
5409 if (mode == BLKmode)
5410 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5411 else
5412 *pbitsize = GET_MODE_BITSIZE (mode);
5413 }
5414
5415 if (size_tree != 0)
5416 {
5417 if (! host_integerp (size_tree, 1))
5418 mode = BLKmode, *pbitsize = -1;
5419 else
5420 *pbitsize = tree_low_cst (size_tree, 1);
5421 }
5422
5423 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5424 and find the ultimate containing object. */
5425 while (1)
5426 {
5427 if (TREE_CODE (exp) == BIT_FIELD_REF)
5428 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5429 else if (TREE_CODE (exp) == COMPONENT_REF)
5430 {
5431 tree field = TREE_OPERAND (exp, 1);
5432 tree this_offset = DECL_FIELD_OFFSET (field);
5433
5434 /* If this field hasn't been filled in yet, don't go
5435 past it. This should only happen when folding expressions
5436 made during type construction. */
5437 if (this_offset == 0)
5438 break;
5439 else if (! TREE_CONSTANT (this_offset)
5440 && contains_placeholder_p (this_offset))
5441 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5442
5443 offset = size_binop (PLUS_EXPR, offset, this_offset);
5444 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5445 DECL_FIELD_BIT_OFFSET (field));
5446
5447 if (! host_integerp (offset, 0))
5448 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5449 }
5450
5451 else if (TREE_CODE (exp) == ARRAY_REF
5452 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5453 {
5454 tree index = TREE_OPERAND (exp, 1);
5455 tree array = TREE_OPERAND (exp, 0);
5456 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5457 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5458 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5459
5460 /* We assume all arrays have sizes that are a multiple of a byte.
5461 First subtract the lower bound, if any, in the type of the
5462 index, then convert to sizetype and multiply by the size of the
5463 array element. */
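/* For instance, for an array of 4-byte elements with lower bound 1,
   the reference A[I] contributes (sizetype) (I - 1) * 4 to OFFSET.  */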
5464 if (low_bound != 0 && ! integer_zerop (low_bound))
5465 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5466 index, low_bound));
5467
5468 /* If the index has a self-referential type, pass it to a
5469 WITH_RECORD_EXPR; if the component size does, pass our
5470 component to one. */
5471 if (! TREE_CONSTANT (index)
5472 && contains_placeholder_p (index))
5473 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5474 if (! TREE_CONSTANT (unit_size)
5475 && contains_placeholder_p (unit_size))
5476 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5477
5478 offset = size_binop (PLUS_EXPR, offset,
5479 size_binop (MULT_EXPR,
5480 convert (sizetype, index),
5481 unit_size));
5482 }
5483
5484 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5485 && ! ((TREE_CODE (exp) == NOP_EXPR
5486 || TREE_CODE (exp) == CONVERT_EXPR)
5487 && (TYPE_MODE (TREE_TYPE (exp))
5488 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5489 break;
5490
5491 /* If any reference in the chain is volatile, the effect is volatile. */
5492 if (TREE_THIS_VOLATILE (exp))
5493 *pvolatilep = 1;
5494
5495 /* If the offset is non-constant already, then we can't assume any
5496 alignment more than the alignment here. */
5497 if (! TREE_CONSTANT (offset))
5498 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5499
5500 exp = TREE_OPERAND (exp, 0);
5501 }
5502
5503 if (DECL_P (exp))
5504 alignment = MIN (alignment, DECL_ALIGN (exp));
5505 else if (TREE_TYPE (exp) != 0)
5506 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5507
5508 /* If OFFSET is constant, see if we can return the whole thing as a
5509 constant bit position. Otherwise, split it up. */
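/* E.g. a constant byte offset of 4 combined with a bit offset of 3
   yields *PBITPOS = 4 * BITS_PER_UNIT + 3 and *POFFSET = 0.  */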
5510 if (host_integerp (offset, 0)
5511 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5512 bitsize_unit_node))
5513 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5514 && host_integerp (tem, 0))
5515 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5516 else
5517 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5518
5519 *pmode = mode;
5520 *palignment = alignment;
5521 return exp;
5522 }
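
/* An illustrative, compiled-out sketch (not part of the original file)
   of how the routine above is typically called; the function and
   variable names are hypothetical.  */
#if 0
static tree
example_decompose_reference (exp)
     tree exp;
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp;
  int volatilep = 0;
  unsigned int alignment;
  tree inner;

  inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			       &mode1, &unsignedp, &volatilep, &alignment);

  /* INNER is the ultimate containing object; the referenced bits start
     at OFFSET bytes (a tree, or 0 if the position is constant) plus
     BITPOS bits within it, and are BITSIZE bits wide.  */
  return inner;
}
#endif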
5523
5524 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5525
5526 static enum memory_use_mode
5527 get_memory_usage_from_modifier (modifier)
5528 enum expand_modifier modifier;
5529 {
5530 switch (modifier)
5531 {
5532 case EXPAND_NORMAL:
5533 case EXPAND_SUM:
5534 return MEMORY_USE_RO;
5535 break;
5536 case EXPAND_MEMORY_USE_WO:
5537 return MEMORY_USE_WO;
5538 break;
5539 case EXPAND_MEMORY_USE_RW:
5540 return MEMORY_USE_RW;
5541 break;
5542 case EXPAND_MEMORY_USE_DONT:
5543 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5544 MEMORY_USE_DONT, because they are modifiers to a call of
5545 expand_expr in the ADDR_EXPR case of expand_expr. */
5546 case EXPAND_CONST_ADDRESS:
5547 case EXPAND_INITIALIZER:
5548 return MEMORY_USE_DONT;
5549 case EXPAND_MEMORY_USE_BAD:
5550 default:
5551 abort ();
5552 }
5553 }
5554 \f
5555 /* Given an rtx VALUE that may contain additions and multiplications, return
5556 an equivalent value that just refers to a register, memory, or constant.
5557 This is done by generating instructions to perform the arithmetic and
5558 returning a pseudo-register containing the value.
5559
5560 The returned value may be a REG, SUBREG, MEM or constant. */
5561
5562 rtx
5563 force_operand (value, target)
5564 rtx value, target;
5565 {
5566 register optab binoptab = 0;
5567 /* Use a temporary to force order of execution of calls to
5568 `force_operand'. */
5569 rtx tmp;
5570 register rtx op2;
5571 /* Use subtarget as the target for operand 0 of a binary operation. */
5572 register rtx subtarget = get_subtarget (target);
5573
5574 /* Check for a PIC address load. */
5575 if (flag_pic
5576 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5577 && XEXP (value, 0) == pic_offset_table_rtx
5578 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5579 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5580 || GET_CODE (XEXP (value, 1)) == CONST))
5581 {
5582 if (!subtarget)
5583 subtarget = gen_reg_rtx (GET_MODE (value));
5584 emit_move_insn (subtarget, value);
5585 return subtarget;
5586 }
5587
5588 if (GET_CODE (value) == PLUS)
5589 binoptab = add_optab;
5590 else if (GET_CODE (value) == MINUS)
5591 binoptab = sub_optab;
5592 else if (GET_CODE (value) == MULT)
5593 {
5594 op2 = XEXP (value, 1);
5595 if (!CONSTANT_P (op2)
5596 && !(GET_CODE (op2) == REG && op2 != subtarget))
5597 subtarget = 0;
5598 tmp = force_operand (XEXP (value, 0), subtarget);
5599 return expand_mult (GET_MODE (value), tmp,
5600 force_operand (op2, NULL_RTX),
5601 target, 1);
5602 }
5603
5604 if (binoptab)
5605 {
5606 op2 = XEXP (value, 1);
5607 if (!CONSTANT_P (op2)
5608 && !(GET_CODE (op2) == REG && op2 != subtarget))
5609 subtarget = 0;
5610 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5611 {
5612 binoptab = add_optab;
5613 op2 = negate_rtx (GET_MODE (value), op2);
5614 }
5615
5616 /* Check for an addition with OP2 a constant integer and our first
5617 operand a PLUS of a virtual register and something else. In that
5618 case, we want to emit the sum of the virtual register and the
5619 constant first and then add the other value. This allows virtual
5620 register instantiation to simply modify the constant rather than
5621 creating another one around this addition. */
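/* For instance, (plus (plus (reg virtual-stack-vars) (reg R))
   (const_int 4)) is computed as ((virtual-stack-vars + 4) + R), so
   instantiation can later fold the constant into the offset that
   replaces the virtual register.  */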
5622 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5623 && GET_CODE (XEXP (value, 0)) == PLUS
5624 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5625 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5626 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5627 {
5628 rtx temp = expand_binop (GET_MODE (value), binoptab,
5629 XEXP (XEXP (value, 0), 0), op2,
5630 subtarget, 0, OPTAB_LIB_WIDEN);
5631 return expand_binop (GET_MODE (value), binoptab, temp,
5632 force_operand (XEXP (XEXP (value, 0), 1), 0),
5633 target, 0, OPTAB_LIB_WIDEN);
5634 }
5635
5636 tmp = force_operand (XEXP (value, 0), subtarget);
5637 return expand_binop (GET_MODE (value), binoptab, tmp,
5638 force_operand (op2, NULL_RTX),
5639 target, 0, OPTAB_LIB_WIDEN);
5640 /* We give UNSIGNEDP = 0 to expand_binop
5641 because the only operations we are expanding here are signed ones. */
5642 }
5643 return value;
5644 }
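
/* An illustrative, compiled-out sketch (not part of the original file):
   using the routine above to turn address arithmetic such as
   (plus (reg X) (const_int 4)) into a usable operand.  The function
   name is hypothetical.  */
#if 0
static rtx
example_force_address (addr)
     rtx addr;
{
  /* Emits any add/mult insns that are needed and returns a REG,
     SUBREG, MEM or constant holding the value of ADDR.  */
  return force_operand (addr, NULL_RTX);
}
#endif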
5645 \f
5646 /* Subroutine of expand_expr:
5647 save the non-copied parts (LIST) of an expr (LHS), and return a list
5648 which can restore these values to their previous values,
5649 should something modify their storage. */
5650
5651 static tree
5652 save_noncopied_parts (lhs, list)
5653 tree lhs;
5654 tree list;
5655 {
5656 tree tail;
5657 tree parts = 0;
5658
5659 for (tail = list; tail; tail = TREE_CHAIN (tail))
5660 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5661 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5662 else
5663 {
5664 tree part = TREE_VALUE (tail);
5665 tree part_type = TREE_TYPE (part);
5666 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5667 rtx target
5668 = assign_temp (build_qualified_type (part_type,
5669 (TYPE_QUALS (part_type)
5670 | TYPE_QUAL_CONST)),
5671 0, 1, 1);
5672
5673 parts = tree_cons (to_be_saved,
5674 build (RTL_EXPR, part_type, NULL_TREE,
5675 (tree) validize_mem (target)),
5676 parts);
5677 store_expr (TREE_PURPOSE (parts),
5678 RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5679 }
5680 return parts;
5681 }
5682
5683 /* Subroutine of expand_expr:
5684 record the non-copied parts (LIST) of an expr (LHS), and return a list
5685 which specifies the initial values of these parts. */
5686
5687 static tree
5688 init_noncopied_parts (lhs, list)
5689 tree lhs;
5690 tree list;
5691 {
5692 tree tail;
5693 tree parts = 0;
5694
5695 for (tail = list; tail; tail = TREE_CHAIN (tail))
5696 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5697 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5698 else if (TREE_PURPOSE (tail))
5699 {
5700 tree part = TREE_VALUE (tail);
5701 tree part_type = TREE_TYPE (part);
5702 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5703 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5704 }
5705 return parts;
5706 }
5707
5708 /* Subroutine of expand_expr: return nonzero iff there is no way that
5709 EXP can reference X, which is being modified. TOP_P is nonzero if this
5710 call is going to be used to determine whether we need a temporary
5711 for EXP, as opposed to a recursive call to this function.
5712
5713 It is always safe for this routine to return zero since it merely
5714 searches for optimization opportunities. */
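/* For example, later in this file the CONSTRUCTOR case of expand_expr
   does roughly

	if (target == 0 || ! safe_from_p (target, exp, 1))
	  target = assign_temp (...);

   i.e. TARGET is reused for expanding EXP only when no part of EXP can
   refer to it.  */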
5715
5716 int
5717 safe_from_p (x, exp, top_p)
5718 rtx x;
5719 tree exp;
5720 int top_p;
5721 {
5722 rtx exp_rtl = 0;
5723 int i, nops;
5724 static tree save_expr_list;
5725
5726 if (x == 0
5727 /* If EXP has varying size, we MUST use a target since we currently
5728 have no way of allocating temporaries of variable size
5729 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5730 So we assume here that something at a higher level has prevented a
5731 clash. This is somewhat bogus, but the best we can do. Only
5732 do this when X is BLKmode and when we are at the top level. */
5733 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5734 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5735 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5736 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5737 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5738 != INTEGER_CST)
5739 && GET_MODE (x) == BLKmode)
5740 /* If X is in the outgoing argument area, it is always safe. */
5741 || (GET_CODE (x) == MEM
5742 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5743 || (GET_CODE (XEXP (x, 0)) == PLUS
5744 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5745 return 1;
5746
5747 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5748 find the underlying pseudo. */
5749 if (GET_CODE (x) == SUBREG)
5750 {
5751 x = SUBREG_REG (x);
5752 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5753 return 0;
5754 }
5755
5756 /* A SAVE_EXPR might appear many times in the expression passed to the
5757 top-level safe_from_p call, and if it has a complex subexpression,
5758 examining it multiple times could result in a combinatorial explosion.
5759 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5760 with optimization took about 28 minutes to compile -- even though it was
5761 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5762 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5763 we have processed. Note that the only test of top_p was above. */
5764
5765 if (top_p)
5766 {
5767 int rtn;
5768 tree t;
5769
5770 save_expr_list = 0;
5771
5772 rtn = safe_from_p (x, exp, 0);
5773
5774 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5775 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5776
5777 return rtn;
5778 }
5779
5780 /* Now look at our tree code and possibly recurse. */
5781 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5782 {
5783 case 'd':
5784 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5785 break;
5786
5787 case 'c':
5788 return 1;
5789
5790 case 'x':
5791 if (TREE_CODE (exp) == TREE_LIST)
5792 return ((TREE_VALUE (exp) == 0
5793 || safe_from_p (x, TREE_VALUE (exp), 0))
5794 && (TREE_CHAIN (exp) == 0
5795 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5796 else if (TREE_CODE (exp) == ERROR_MARK)
5797 return 1; /* An already-visited SAVE_EXPR? */
5798 else
5799 return 0;
5800
5801 case '1':
5802 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5803
5804 case '2':
5805 case '<':
5806 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5807 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5808
5809 case 'e':
5810 case 'r':
5811 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5812 the expression. If it is set, we conflict iff we are that rtx or
5813 both are in memory. Otherwise, we check all operands of the
5814 expression recursively. */
5815
5816 switch (TREE_CODE (exp))
5817 {
5818 case ADDR_EXPR:
5819 return (staticp (TREE_OPERAND (exp, 0))
5820 || TREE_STATIC (exp)
5821 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5822
5823 case INDIRECT_REF:
5824 if (GET_CODE (x) == MEM
5825 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5826 get_alias_set (exp)))
5827 return 0;
5828 break;
5829
5830 case CALL_EXPR:
5831 /* Assume that the call will clobber all hard registers and
5832 all of memory. */
5833 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5834 || GET_CODE (x) == MEM)
5835 return 0;
5836 break;
5837
5838 case RTL_EXPR:
5839 /* If a sequence exists, we would have to scan every instruction
5840 in the sequence to see if it was safe. This is probably not
5841 worthwhile. */
5842 if (RTL_EXPR_SEQUENCE (exp))
5843 return 0;
5844
5845 exp_rtl = RTL_EXPR_RTL (exp);
5846 break;
5847
5848 case WITH_CLEANUP_EXPR:
5849 exp_rtl = RTL_EXPR_RTL (exp);
5850 break;
5851
5852 case CLEANUP_POINT_EXPR:
5853 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5854
5855 case SAVE_EXPR:
5856 exp_rtl = SAVE_EXPR_RTL (exp);
5857 if (exp_rtl)
5858 break;
5859
5860 /* If we've already scanned this, don't do it again. Otherwise,
5861 show we've scanned it and record for clearing the flag if we're
5862 going on. */
5863 if (TREE_PRIVATE (exp))
5864 return 1;
5865
5866 TREE_PRIVATE (exp) = 1;
5867 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5868 {
5869 TREE_PRIVATE (exp) = 0;
5870 return 0;
5871 }
5872
5873 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5874 return 1;
5875
5876 case BIND_EXPR:
5877 /* The only operand we look at is operand 1. The rest aren't
5878 part of the expression. */
5879 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5880
5881 case METHOD_CALL_EXPR:
5882 /* This takes an rtx argument, but shouldn't appear here. */
5883 abort ();
5884
5885 default:
5886 break;
5887 }
5888
5889 /* If we have an rtx, we do not need to scan our operands. */
5890 if (exp_rtl)
5891 break;
5892
5893 nops = first_rtl_op (TREE_CODE (exp));
5894 for (i = 0; i < nops; i++)
5895 if (TREE_OPERAND (exp, i) != 0
5896 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5897 return 0;
5898
5899 /* If this is a language-specific tree code, it may require
5900 special handling. */
5901 if ((unsigned int) TREE_CODE (exp)
5902 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5903 && lang_safe_from_p
5904 && !(*lang_safe_from_p) (x, exp))
5905 return 0;
5906 }
5907
5908 /* If we have an rtx, find any enclosed object. Then see if we conflict
5909 with it. */
5910 if (exp_rtl)
5911 {
5912 if (GET_CODE (exp_rtl) == SUBREG)
5913 {
5914 exp_rtl = SUBREG_REG (exp_rtl);
5915 if (GET_CODE (exp_rtl) == REG
5916 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5917 return 0;
5918 }
5919
5920 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5921 are memory and they conflict. */
5922 return ! (rtx_equal_p (x, exp_rtl)
5923 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5924 && true_dependence (exp_rtl, GET_MODE (x), x,
5925 rtx_addr_varies_p)));
5926 }
5927
5928 /* If we reach here, it is safe. */
5929 return 1;
5930 }
5931
5932 /* Subroutine of expand_expr: return nonzero iff EXP is an
5933 expression whose type is statically determinable. */
5934
5935 static int
5936 fixed_type_p (exp)
5937 tree exp;
5938 {
5939 if (TREE_CODE (exp) == PARM_DECL
5940 || TREE_CODE (exp) == VAR_DECL
5941 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5942 || TREE_CODE (exp) == COMPONENT_REF
5943 || TREE_CODE (exp) == ARRAY_REF)
5944 return 1;
5945 return 0;
5946 }
5947
5948 /* Subroutine of expand_expr: return rtx if EXP is a
5949 variable or parameter; else return 0. */
5950
5951 static rtx
5952 var_rtx (exp)
5953 tree exp;
5954 {
5955 STRIP_NOPS (exp);
5956 switch (TREE_CODE (exp))
5957 {
5958 case PARM_DECL:
5959 case VAR_DECL:
5960 return DECL_RTL (exp);
5961 default:
5962 return 0;
5963 }
5964 }
5965
5966 #ifdef MAX_INTEGER_COMPUTATION_MODE
5967
5968 void
5969 check_max_integer_computation_mode (exp)
5970 tree exp;
5971 {
5972 enum tree_code code;
5973 enum machine_mode mode;
5974
5975 /* Strip any NOPs that don't change the mode. */
5976 STRIP_NOPS (exp);
5977 code = TREE_CODE (exp);
5978
5979 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5980 if (code == NOP_EXPR
5981 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5982 return;
5983
5984 /* First check the type of the overall operation. We need only look at
5985 unary, binary and relational operations. */
5986 if (TREE_CODE_CLASS (code) == '1'
5987 || TREE_CODE_CLASS (code) == '2'
5988 || TREE_CODE_CLASS (code) == '<')
5989 {
5990 mode = TYPE_MODE (TREE_TYPE (exp));
5991 if (GET_MODE_CLASS (mode) == MODE_INT
5992 && mode > MAX_INTEGER_COMPUTATION_MODE)
5993 internal_error ("unsupported wide integer operation");
5994 }
5995
5996 /* Check operand of a unary op. */
5997 if (TREE_CODE_CLASS (code) == '1')
5998 {
5999 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6000 if (GET_MODE_CLASS (mode) == MODE_INT
6001 && mode > MAX_INTEGER_COMPUTATION_MODE)
6002 internal_error ("unsupported wide integer operation");
6003 }
6004
6005 /* Check operands of a binary/comparison op. */
6006 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6007 {
6008 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6009 if (GET_MODE_CLASS (mode) == MODE_INT
6010 && mode > MAX_INTEGER_COMPUTATION_MODE)
6011 internal_error ("unsupported wide integer operation");
6012
6013 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6014 if (GET_MODE_CLASS (mode) == MODE_INT
6015 && mode > MAX_INTEGER_COMPUTATION_MODE)
6016 internal_error ("unsupported wide integer operation");
6017 }
6018 }
6019 #endif
6020 \f
6021 /* expand_expr: generate code for computing expression EXP.
6022 An rtx for the computed value is returned. The value is never null.
6023 In the case of a void EXP, const0_rtx is returned.
6024
6025 The value may be stored in TARGET if TARGET is nonzero.
6026 TARGET is just a suggestion; callers must assume that
6027 the rtx returned may not be the same as TARGET.
6028
6029 If TARGET is CONST0_RTX, it means that the value will be ignored.
6030
6031 If TMODE is not VOIDmode, it suggests generating the
6032 result in mode TMODE. But this is done only when convenient.
6033 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6034 TMODE is just a suggestion; callers must assume that
6035 the rtx returned may not have mode TMODE.
6036
6037 Note that TARGET may have neither TMODE nor MODE. In that case, it
6038 probably will not be used.
6039
6040 If MODIFIER is EXPAND_SUM then when EXP is an addition
6041 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6042 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6043 products as above, or REG or MEM, or constant.
6044 Ordinarily in such cases we would output mul or add instructions
6045 and then return a pseudo reg containing the sum.
6046
6047 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6048 it also marks a label as absolutely required (it can't be dead).
6049 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6050 This is used for outputting expressions used in initializers.
6051
6052 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6053 with a constant address even if that address is not normally legitimate.
6054 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
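
/* An illustrative, compiled-out sketch (not part of the original file):
   the most common way this routine is invoked.  The function name is
   hypothetical.  */
#if 0
static rtx
example_expand (exp)
     tree exp;
{
  /* Let expand_expr choose both the target and the mode; the result is
     some REG, SUBREG, MEM or constant in EXP's natural mode and must
     not be assumed to be any particular rtx.  */
  return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
#endif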
6055
6056 rtx
6057 expand_expr (exp, target, tmode, modifier)
6058 register tree exp;
6059 rtx target;
6060 enum machine_mode tmode;
6061 enum expand_modifier modifier;
6062 {
6063 register rtx op0, op1, temp;
6064 tree type = TREE_TYPE (exp);
6065 int unsignedp = TREE_UNSIGNED (type);
6066 register enum machine_mode mode;
6067 register enum tree_code code = TREE_CODE (exp);
6068 optab this_optab;
6069 rtx subtarget, original_target;
6070 int ignore;
6071 tree context;
6072 /* Used by check-memory-usage to make modifier read only. */
6073 enum expand_modifier ro_modifier;
6074
6075 /* Handle ERROR_MARK before anybody tries to access its type. */
6076 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6077 {
6078 op0 = CONST0_RTX (tmode);
6079 if (op0 != 0)
6080 return op0;
6081 return const0_rtx;
6082 }
6083
6084 mode = TYPE_MODE (type);
6085 /* Use subtarget as the target for operand 0 of a binary operation. */
6086 subtarget = get_subtarget (target);
6087 original_target = target;
6088 ignore = (target == const0_rtx
6089 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6090 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6091 || code == COND_EXPR)
6092 && TREE_CODE (type) == VOID_TYPE));
6093
6094 /* Make a read-only version of the modifier. */
6095 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6096 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6097 ro_modifier = modifier;
6098 else
6099 ro_modifier = EXPAND_NORMAL;
6100
6101 /* If we are going to ignore this result, we need only do something
6102 if there is a side-effect somewhere in the expression. If there
6103 is, short-circuit the most common cases here. Note that we must
6104 not call expand_expr with anything but const0_rtx in case this
6105 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6106
6107 if (ignore)
6108 {
6109 if (! TREE_SIDE_EFFECTS (exp))
6110 return const0_rtx;
6111
6112 /* Ensure we reference a volatile object even if value is ignored, but
6113 don't do this if all we are doing is taking its address. */
6114 if (TREE_THIS_VOLATILE (exp)
6115 && TREE_CODE (exp) != FUNCTION_DECL
6116 && mode != VOIDmode && mode != BLKmode
6117 && modifier != EXPAND_CONST_ADDRESS)
6118 {
6119 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6120 if (GET_CODE (temp) == MEM)
6121 temp = copy_to_reg (temp);
6122 return const0_rtx;
6123 }
6124
6125 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6126 || code == INDIRECT_REF || code == BUFFER_REF)
6127 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6128 VOIDmode, ro_modifier);
6129 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6130 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6131 {
6132 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6133 ro_modifier);
6134 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6135 ro_modifier);
6136 return const0_rtx;
6137 }
6138 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6139 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6140 /* If the second operand has no side effects, just evaluate
6141 the first. */
6142 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6143 VOIDmode, ro_modifier);
6144 else if (code == BIT_FIELD_REF)
6145 {
6146 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6147 ro_modifier);
6148 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6149 ro_modifier);
6150 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6151 ro_modifier);
6152 return const0_rtx;
6153 }
6154 ;
6155 target = 0;
6156 }
6157
6158 #ifdef MAX_INTEGER_COMPUTATION_MODE
6159 /* Only check stuff here if the mode we want is different from the mode
6160 of the expression; if it's the same, check_max_integer_computation_mode
6161 will handle it. Do we really need to check this stuff at all? */
6162
6163 if (target
6164 && GET_MODE (target) != mode
6165 && TREE_CODE (exp) != INTEGER_CST
6166 && TREE_CODE (exp) != PARM_DECL
6167 && TREE_CODE (exp) != ARRAY_REF
6168 && TREE_CODE (exp) != ARRAY_RANGE_REF
6169 && TREE_CODE (exp) != COMPONENT_REF
6170 && TREE_CODE (exp) != BIT_FIELD_REF
6171 && TREE_CODE (exp) != INDIRECT_REF
6172 && TREE_CODE (exp) != CALL_EXPR
6173 && TREE_CODE (exp) != VAR_DECL
6174 && TREE_CODE (exp) != RTL_EXPR)
6175 {
6176 enum machine_mode mode = GET_MODE (target);
6177
6178 if (GET_MODE_CLASS (mode) == MODE_INT
6179 && mode > MAX_INTEGER_COMPUTATION_MODE)
6180 internal_error ("unsupported wide integer operation");
6181 }
6182
6183 if (tmode != mode
6184 && TREE_CODE (exp) != INTEGER_CST
6185 && TREE_CODE (exp) != PARM_DECL
6186 && TREE_CODE (exp) != ARRAY_REF
6187 && TREE_CODE (exp) != ARRAY_RANGE_REF
6188 && TREE_CODE (exp) != COMPONENT_REF
6189 && TREE_CODE (exp) != BIT_FIELD_REF
6190 && TREE_CODE (exp) != INDIRECT_REF
6191 && TREE_CODE (exp) != VAR_DECL
6192 && TREE_CODE (exp) != CALL_EXPR
6193 && TREE_CODE (exp) != RTL_EXPR
6194 && GET_MODE_CLASS (tmode) == MODE_INT
6195 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6196 internal_error ("unsupported wide integer operation");
6197
6198 check_max_integer_computation_mode (exp);
6199 #endif
6200
6201 /* If will do cse, generate all results into pseudo registers
6202 since 1) that allows cse to find more things
6203 and 2) otherwise cse could produce an insn the machine
6204 cannot support. */
6205
6206 if (! cse_not_expected && mode != BLKmode && target
6207 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6208 target = subtarget;
6209
6210 switch (code)
6211 {
6212 case LABEL_DECL:
6213 {
6214 tree function = decl_function_context (exp);
6215 /* Handle using a label in a containing function. */
6216 if (function != current_function_decl
6217 && function != inline_function_decl && function != 0)
6218 {
6219 struct function *p = find_function_data (function);
6220 p->expr->x_forced_labels
6221 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6222 p->expr->x_forced_labels);
6223 }
6224 else
6225 {
6226 if (modifier == EXPAND_INITIALIZER)
6227 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6228 label_rtx (exp),
6229 forced_labels);
6230 }
6231
6232 temp = gen_rtx_MEM (FUNCTION_MODE,
6233 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6234 if (function != current_function_decl
6235 && function != inline_function_decl && function != 0)
6236 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6237 return temp;
6238 }
6239
6240 case PARM_DECL:
6241 if (DECL_RTL (exp) == 0)
6242 {
6243 error_with_decl (exp, "prior parameter's size depends on `%s'");
6244 return CONST0_RTX (mode);
6245 }
6246
6247 /* ... fall through ... */
6248
6249 case VAR_DECL:
6250 /* If a static var's type was incomplete when the decl was written,
6251 but the type is complete now, lay out the decl now. */
6252 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6253 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6254 {
6255 layout_decl (exp, 0);
6256 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6257 }
6258
6259 /* Although static-storage variables start off initialized, according to
6260 ANSI C, a memcpy could overwrite them with uninitialized values. So
6261 we check them too. This also lets us check for read-only variables
6262 accessed via a non-const declaration, in case it won't be detected
6263 any other way (e.g., in an embedded system or OS kernel without
6264 memory protection).
6265
6266 Aggregates are not checked here; they're handled elsewhere. */
6267 if (cfun && current_function_check_memory_usage
6268 && code == VAR_DECL
6269 && GET_CODE (DECL_RTL (exp)) == MEM
6270 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6271 {
6272 enum memory_use_mode memory_usage;
6273 memory_usage = get_memory_usage_from_modifier (modifier);
6274
6275 in_check_memory_usage = 1;
6276 if (memory_usage != MEMORY_USE_DONT)
6277 emit_library_call (chkr_check_addr_libfunc,
6278 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6279 XEXP (DECL_RTL (exp), 0), Pmode,
6280 GEN_INT (int_size_in_bytes (type)),
6281 TYPE_MODE (sizetype),
6282 GEN_INT (memory_usage),
6283 TYPE_MODE (integer_type_node));
6284 in_check_memory_usage = 0;
6285 }
6286
6287 /* ... fall through ... */
6288
6289 case FUNCTION_DECL:
6290 case RESULT_DECL:
6291 if (DECL_RTL (exp) == 0)
6292 abort ();
6293
6294 /* Ensure the variable is marked as used even if it doesn't go through
6295 a parser. If it hasn't been used yet, write out an external
6296 definition. */
6297 if (! TREE_USED (exp))
6298 {
6299 assemble_external (exp);
6300 TREE_USED (exp) = 1;
6301 }
6302
6303 /* Show we haven't gotten RTL for this yet. */
6304 temp = 0;
6305
6306 /* Handle variables inherited from containing functions. */
6307 context = decl_function_context (exp);
6308
6309 /* We treat inline_function_decl as an alias for the current function
6310 because that is the inline function whose vars, types, etc.
6311 are being merged into the current function.
6312 See expand_inline_function. */
6313
6314 if (context != 0 && context != current_function_decl
6315 && context != inline_function_decl
6316 /* If var is static, we don't need a static chain to access it. */
6317 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6318 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6319 {
6320 rtx addr;
6321
6322 /* Mark as non-local and addressable. */
6323 DECL_NONLOCAL (exp) = 1;
6324 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6325 abort ();
6326 mark_addressable (exp);
6327 if (GET_CODE (DECL_RTL (exp)) != MEM)
6328 abort ();
6329 addr = XEXP (DECL_RTL (exp), 0);
6330 if (GET_CODE (addr) == MEM)
6331 addr
6332 = replace_equiv_address (addr,
6333 fix_lexical_addr (XEXP (addr, 0), exp));
6334 else
6335 addr = fix_lexical_addr (addr, exp);
6336
6337 temp = replace_equiv_address (DECL_RTL (exp), addr);
6338 }
6339
6340 /* This is the case of an array whose size is to be determined
6341 from its initializer, while the initializer is still being parsed.
6342 See expand_decl. */
6343
6344 else if (GET_CODE (DECL_RTL (exp)) == MEM
6345 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6346 temp = validize_mem (DECL_RTL (exp));
6347
6348 /* If DECL_RTL is memory, we are in the normal case and either
6349 the address is not valid or it is not a register and -fforce-addr
6350 is specified, get the address into a register. */
6351
6352 else if (GET_CODE (DECL_RTL (exp)) == MEM
6353 && modifier != EXPAND_CONST_ADDRESS
6354 && modifier != EXPAND_SUM
6355 && modifier != EXPAND_INITIALIZER
6356 && (! memory_address_p (DECL_MODE (exp),
6357 XEXP (DECL_RTL (exp), 0))
6358 || (flag_force_addr
6359 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6360 temp = replace_equiv_address (DECL_RTL (exp),
6361 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6362
6363 /* If we got something, return it. But first, set the alignment
6364 if the address is a register. */
6365 if (temp != 0)
6366 {
6367 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6368 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6369
6370 return temp;
6371 }
6372
6373 /* If the mode of DECL_RTL does not match that of the decl, it
6374 must be a promoted value. We return a SUBREG of the wanted mode,
6375 but mark it so that we know that it was already extended. */
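/* E.g. a QImode variable whose register was widened to SImode by
   promote_mode: DECL_RTL is the SImode register, and we hand back
   (subreg:QI (reg:SI ...)) with the promoted-variable flags set.  */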
6376
6377 if (GET_CODE (DECL_RTL (exp)) == REG
6378 && GET_MODE (DECL_RTL (exp)) != mode)
6379 {
6380 /* Get the signedness used for this variable. Ensure we get the
6381 same mode we got when the variable was declared. */
6382 if (GET_MODE (DECL_RTL (exp))
6383 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6384 abort ();
6385
6386 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6387 SUBREG_PROMOTED_VAR_P (temp) = 1;
6388 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6389 return temp;
6390 }
6391
6392 return DECL_RTL (exp);
6393
6394 case INTEGER_CST:
6395 return immed_double_const (TREE_INT_CST_LOW (exp),
6396 TREE_INT_CST_HIGH (exp), mode);
6397
6398 case CONST_DECL:
6399 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6400 EXPAND_MEMORY_USE_BAD);
6401
6402 case REAL_CST:
6403 /* If optimized, generate immediate CONST_DOUBLE
6404 which will be turned into memory by reload if necessary.
6405
6406 We used to force a register so that loop.c could see it. But
6407 this does not allow gen_* patterns to perform optimizations with
6408 the constants. It also produces two insns in cases like "x = 1.0;".
6409 On most machines, floating-point constants are not permitted in
6410 many insns, so we'd end up copying it to a register in any case.
6411
6412 Now, we do the copying in expand_binop, if appropriate. */
6413 return immed_real_const (exp);
6414
6415 case COMPLEX_CST:
6416 case STRING_CST:
6417 if (! TREE_CST_RTL (exp))
6418 output_constant_def (exp, 1);
6419
6420 /* TREE_CST_RTL probably contains a constant address.
6421 On RISC machines where a constant address isn't valid,
6422 make some insns to get that address into a register. */
6423 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6424 && modifier != EXPAND_CONST_ADDRESS
6425 && modifier != EXPAND_INITIALIZER
6426 && modifier != EXPAND_SUM
6427 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6428 || (flag_force_addr
6429 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6430 return replace_equiv_address (TREE_CST_RTL (exp),
6431 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6432 return TREE_CST_RTL (exp);
6433
6434 case EXPR_WITH_FILE_LOCATION:
6435 {
6436 rtx to_return;
6437 const char *saved_input_filename = input_filename;
6438 int saved_lineno = lineno;
6439 input_filename = EXPR_WFL_FILENAME (exp);
6440 lineno = EXPR_WFL_LINENO (exp);
6441 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6442 emit_line_note (input_filename, lineno);
6443 /* Possibly avoid switching back and forth here. */
6444 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6445 input_filename = saved_input_filename;
6446 lineno = saved_lineno;
6447 return to_return;
6448 }
6449
6450 case SAVE_EXPR:
6451 context = decl_function_context (exp);
6452
6453 /* If this SAVE_EXPR was at global context, assume we are an
6454 initialization function and move it into our context. */
6455 if (context == 0)
6456 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6457
6458 /* We treat inline_function_decl as an alias for the current function
6459 because that is the inline function whose vars, types, etc.
6460 are being merged into the current function.
6461 See expand_inline_function. */
6462 if (context == current_function_decl || context == inline_function_decl)
6463 context = 0;
6464
6465 /* If this is non-local, handle it. */
6466 if (context)
6467 {
6468 /* The following call just exists to abort if the context is
6469 not of a containing function. */
6470 find_function_data (context);
6471
6472 temp = SAVE_EXPR_RTL (exp);
6473 if (temp && GET_CODE (temp) == REG)
6474 {
6475 put_var_into_stack (exp);
6476 temp = SAVE_EXPR_RTL (exp);
6477 }
6478 if (temp == 0 || GET_CODE (temp) != MEM)
6479 abort ();
6480 return
6481 replace_equiv_address (temp,
6482 fix_lexical_addr (XEXP (temp, 0), exp));
6483 }
6484 if (SAVE_EXPR_RTL (exp) == 0)
6485 {
6486 if (mode == VOIDmode)
6487 temp = const0_rtx;
6488 else
6489 temp = assign_temp (build_qualified_type (type,
6490 (TYPE_QUALS (type)
6491 | TYPE_QUAL_CONST)),
6492 3, 0, 0);
6493
6494 SAVE_EXPR_RTL (exp) = temp;
6495 if (!optimize && GET_CODE (temp) == REG)
6496 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6497 save_expr_regs);
6498
6499 /* If the mode of TEMP does not match that of the expression, it
6500 must be a promoted value. We pass store_expr a SUBREG of the
6501 wanted mode but mark it so that we know that it was already
6502 extended. Note that `unsignedp' was modified above in
6503 this case. */
6504
6505 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6506 {
6507 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6508 SUBREG_PROMOTED_VAR_P (temp) = 1;
6509 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6510 }
6511
6512 if (temp == const0_rtx)
6513 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6514 EXPAND_MEMORY_USE_BAD);
6515 else
6516 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6517
6518 TREE_USED (exp) = 1;
6519 }
6520
6521 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6522 must be a promoted value. We return a SUBREG of the wanted mode,
6523 but mark it so that we know that it was already extended. */
6524
6525 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6526 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6527 {
6528 /* Compute the signedness and make the proper SUBREG. */
6529 promote_mode (type, mode, &unsignedp, 0);
6530 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6531 SUBREG_PROMOTED_VAR_P (temp) = 1;
6532 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6533 return temp;
6534 }
6535
6536 return SAVE_EXPR_RTL (exp);
6537
6538 case UNSAVE_EXPR:
6539 {
6540 rtx temp;
6541 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6542 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6543 return temp;
6544 }
6545
6546 case PLACEHOLDER_EXPR:
6547 {
6548 tree placeholder_expr;
6549
6550 /* If there is an object on the head of the placeholder list,
6551 see if some object in it is of type TYPE or a pointer to it. For
6552 further information, see tree.def. */
6553 for (placeholder_expr = placeholder_list;
6554 placeholder_expr != 0;
6555 placeholder_expr = TREE_CHAIN (placeholder_expr))
6556 {
6557 tree need_type = TYPE_MAIN_VARIANT (type);
6558 tree object = 0;
6559 tree old_list = placeholder_list;
6560 tree elt;
6561
6562 /* Find the outermost reference that is of the type we want.
6563 If none, see if any object has a type that is a pointer to
6564 the type we want. */
6565 for (elt = TREE_PURPOSE (placeholder_expr);
6566 elt != 0 && object == 0;
6567 elt
6568 = ((TREE_CODE (elt) == COMPOUND_EXPR
6569 || TREE_CODE (elt) == COND_EXPR)
6570 ? TREE_OPERAND (elt, 1)
6571 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6572 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6573 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6574 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6575 ? TREE_OPERAND (elt, 0) : 0))
6576 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6577 object = elt;
6578
6579 for (elt = TREE_PURPOSE (placeholder_expr);
6580 elt != 0 && object == 0;
6581 elt
6582 = ((TREE_CODE (elt) == COMPOUND_EXPR
6583 || TREE_CODE (elt) == COND_EXPR)
6584 ? TREE_OPERAND (elt, 1)
6585 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6586 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6587 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6588 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6589 ? TREE_OPERAND (elt, 0) : 0))
6590 if (POINTER_TYPE_P (TREE_TYPE (elt))
6591 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6592 == need_type))
6593 object = build1 (INDIRECT_REF, need_type, elt);
6594
6595 if (object != 0)
6596 {
6597 /* Expand this object skipping the list entries before
6598 it was found in case it is also a PLACEHOLDER_EXPR.
6599 In that case, we want to translate it using subsequent
6600 entries. */
6601 placeholder_list = TREE_CHAIN (placeholder_expr);
6602 temp = expand_expr (object, original_target, tmode,
6603 ro_modifier);
6604 placeholder_list = old_list;
6605 return temp;
6606 }
6607 }
6608 }
6609
6610 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6611 abort ();
6612
6613 case WITH_RECORD_EXPR:
6614 /* Put the object on the placeholder list, expand our first operand,
6615 and pop the list. */
6616 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6617 placeholder_list);
6618 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6619 tmode, ro_modifier);
6620 placeholder_list = TREE_CHAIN (placeholder_list);
6621 return target;
6622
6623 case GOTO_EXPR:
6624 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6625 expand_goto (TREE_OPERAND (exp, 0));
6626 else
6627 expand_computed_goto (TREE_OPERAND (exp, 0));
6628 return const0_rtx;
6629
6630 case EXIT_EXPR:
6631 expand_exit_loop_if_false (NULL,
6632 invert_truthvalue (TREE_OPERAND (exp, 0)));
6633 return const0_rtx;
6634
6635 case LABELED_BLOCK_EXPR:
6636 if (LABELED_BLOCK_BODY (exp))
6637 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6638 /* Should perhaps use expand_label, but this is simpler and safer. */
6639 do_pending_stack_adjust ();
6640 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6641 return const0_rtx;
6642
6643 case EXIT_BLOCK_EXPR:
6644 if (EXIT_BLOCK_RETURN (exp))
6645 sorry ("returned value in block_exit_expr");
6646 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6647 return const0_rtx;
6648
6649 case LOOP_EXPR:
6650 push_temp_slots ();
6651 expand_start_loop (1);
6652 expand_expr_stmt (TREE_OPERAND (exp, 0));
6653 expand_end_loop ();
6654 pop_temp_slots ();
6655
6656 return const0_rtx;
6657
6658 case BIND_EXPR:
6659 {
6660 tree vars = TREE_OPERAND (exp, 0);
6661 int vars_need_expansion = 0;
6662
6663 /* Need to open a binding contour here because
6664 if there are any cleanups they must be contained here. */
6665 expand_start_bindings (2);
6666
6667 /* Mark the corresponding BLOCK for output in its proper place. */
6668 if (TREE_OPERAND (exp, 2) != 0
6669 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6670 insert_block (TREE_OPERAND (exp, 2));
6671
6672 /* If VARS have not yet been expanded, expand them now. */
6673 while (vars)
6674 {
6675 if (!DECL_RTL_SET_P (vars))
6676 {
6677 vars_need_expansion = 1;
6678 expand_decl (vars);
6679 }
6680 expand_decl_init (vars);
6681 vars = TREE_CHAIN (vars);
6682 }
6683
6684 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6685
6686 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6687
6688 return temp;
6689 }
6690
6691 case RTL_EXPR:
6692 if (RTL_EXPR_SEQUENCE (exp))
6693 {
6694 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6695 abort ();
6696 emit_insns (RTL_EXPR_SEQUENCE (exp));
6697 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6698 }
6699 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6700 free_temps_for_rtl_expr (exp);
6701 return RTL_EXPR_RTL (exp);
6702
6703 case CONSTRUCTOR:
6704 /* If we don't need the result, just ensure we evaluate any
6705 subexpressions. */
6706 if (ignore)
6707 {
6708 tree elt;
6709 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6710 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6711 EXPAND_MEMORY_USE_BAD);
6712 return const0_rtx;
6713 }
6714
6715 /* All elts simple constants => refer to a constant in memory. But
6716 if this is a non-BLKmode mode, let it store a field at a time
6717 since that should make a CONST_INT or CONST_DOUBLE when we
6718 fold. Likewise, if we have a target we can use, it is best to
6719 store directly into the target unless the type is large enough
6720 that memcpy will be used. If we are making an initializer and
6721 all operands are constant, put it in memory as well. */
6722 else if ((TREE_STATIC (exp)
6723 && ((mode == BLKmode
6724 && ! (target != 0 && safe_from_p (target, exp, 1)))
6725 || TREE_ADDRESSABLE (exp)
6726 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6727 && (! MOVE_BY_PIECES_P
6728 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6729 TYPE_ALIGN (type)))
6730 && ! mostly_zeros_p (exp))))
6731 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6732 {
6733 rtx constructor = output_constant_def (exp, 1);
6734
6735 if (modifier != EXPAND_CONST_ADDRESS
6736 && modifier != EXPAND_INITIALIZER
6737 && modifier != EXPAND_SUM)
6738 constructor = validize_mem (constructor);
6739
6740 return constructor;
6741 }
6742 else
6743 {
6744 /* Handle calls that pass values in multiple non-contiguous
6745 locations. The Irix 6 ABI has examples of this. */
6746 if (target == 0 || ! safe_from_p (target, exp, 1)
6747 || GET_CODE (target) == PARALLEL)
6748 target
6749 = assign_temp (build_qualified_type (type,
6750 (TYPE_QUALS (type)
6751 | (TREE_READONLY (exp)
6752 * TYPE_QUAL_CONST))),
6753 TREE_ADDRESSABLE (exp), 1, 1);
6754
6755 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6756 int_size_in_bytes (TREE_TYPE (exp)));
6757 return target;
6758 }
6759
6760 case INDIRECT_REF:
6761 {
6762 tree exp1 = TREE_OPERAND (exp, 0);
6763 tree index;
6764 tree string = string_constant (exp1, &index);
6765
6766 /* Try to optimize reads from const strings. */
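/* E.g. a one-byte read of ("abc")[1] through its address becomes the
   constant 'b'.  */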
6767 if (string
6768 && TREE_CODE (string) == STRING_CST
6769 && TREE_CODE (index) == INTEGER_CST
6770 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6771 && GET_MODE_CLASS (mode) == MODE_INT
6772 && GET_MODE_SIZE (mode) == 1
6773 && modifier != EXPAND_MEMORY_USE_WO)
6774 return
6775 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6776
6777 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6778 op0 = memory_address (mode, op0);
6779
6780 if (cfun && current_function_check_memory_usage
6781 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6782 {
6783 enum memory_use_mode memory_usage;
6784 memory_usage = get_memory_usage_from_modifier (modifier);
6785
6786 if (memory_usage != MEMORY_USE_DONT)
6787 {
6788 in_check_memory_usage = 1;
6789 emit_library_call (chkr_check_addr_libfunc,
6790 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6791 Pmode, GEN_INT (int_size_in_bytes (type)),
6792 TYPE_MODE (sizetype),
6793 GEN_INT (memory_usage),
6794 TYPE_MODE (integer_type_node));
6795 in_check_memory_usage = 0;
6796 }
6797 }
6798
6799 temp = gen_rtx_MEM (mode, op0);
6800 set_mem_attributes (temp, exp, 0);
6801
6802 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6803 here, because, in C and C++, the fact that a location is accessed
6804 through a pointer to const does not mean that the value there can
6805 never change. Languages where it can never change should
6806 also set TREE_STATIC. */
6807 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6808
6809 /* If we are writing to this object and its type is a record with
6810 readonly fields, we must mark it as readonly so it will
6811 conflict with readonly references to those fields. */
6812 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6813 RTX_UNCHANGING_P (temp) = 1;
6814
6815 return temp;
6816 }
6817
6818 case ARRAY_REF:
6819 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6820 abort ();
6821
6822 {
6823 tree array = TREE_OPERAND (exp, 0);
6824 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6825 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6826 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6827 HOST_WIDE_INT i;
6828
6829 /* Optimize the special-case of a zero lower bound.
6830
6831 We convert the low_bound to sizetype to avoid some problems
6832 with constant folding. (E.g. suppose the lower bound is 1,
6833 and its mode is QI. Without the conversion, (ARRAY
6834 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6835 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6836
6837 if (! integer_zerop (low_bound))
6838 index = size_diffop (index, convert (sizetype, low_bound));
6839
6840 /* Fold an expression like: "foo"[2].
6841 This is not done in fold so it won't happen inside &.
6842 Don't fold if this is for wide characters since it's too
6843 difficult to do correctly and this is a very rare case. */
6844
6845 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6846 && TREE_CODE (array) == STRING_CST
6847 && TREE_CODE (index) == INTEGER_CST
6848 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6849 && GET_MODE_CLASS (mode) == MODE_INT
6850 && GET_MODE_SIZE (mode) == 1)
6851 return
6852 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6853
6854 /* If this is a constant index into a constant array,
6855 just get the value from the array. Handle both the cases when
6856 we have an explicit constructor and when our operand is a variable
6857 that was declared const. */
6858
6859 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6860 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6861 && TREE_CODE (index) == INTEGER_CST
6862 && 0 > compare_tree_int (index,
6863 list_length (CONSTRUCTOR_ELTS
6864 (TREE_OPERAND (exp, 0)))))
6865 {
6866 tree elem;
6867
6868 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6869 i = TREE_INT_CST_LOW (index);
6870 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6871 ;
6872
6873 if (elem)
6874 return expand_expr (fold (TREE_VALUE (elem)), target,
6875 tmode, ro_modifier);
6876 }
6877
6878 else if (optimize >= 1
6879 && modifier != EXPAND_CONST_ADDRESS
6880 && modifier != EXPAND_INITIALIZER
6881 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6882 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6883 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6884 {
6885 if (TREE_CODE (index) == INTEGER_CST)
6886 {
6887 tree init = DECL_INITIAL (array);
6888
6889 if (TREE_CODE (init) == CONSTRUCTOR)
6890 {
6891 tree elem;
6892
6893 for (elem = CONSTRUCTOR_ELTS (init);
6894 (elem
6895 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6896 elem = TREE_CHAIN (elem))
6897 ;
6898
6899 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6900 return expand_expr (fold (TREE_VALUE (elem)), target,
6901 tmode, ro_modifier);
6902 }
6903 else if (TREE_CODE (init) == STRING_CST
6904 && 0 > compare_tree_int (index,
6905 TREE_STRING_LENGTH (init)))
6906 {
6907 tree type = TREE_TYPE (TREE_TYPE (init));
6908 enum machine_mode mode = TYPE_MODE (type);
6909
6910 if (GET_MODE_CLASS (mode) == MODE_INT
6911 && GET_MODE_SIZE (mode) == 1)
6912 return (GEN_INT
6913 (TREE_STRING_POINTER
6914 (init)[TREE_INT_CST_LOW (index)]));
6915 }
6916 }
6917 }
6918 }
6919 /* Fall through. */
6920
6921 case COMPONENT_REF:
6922 case BIT_FIELD_REF:
6923 case ARRAY_RANGE_REF:
6924 /* If the operand is a CONSTRUCTOR, we can just extract the
6925 appropriate field if it is present. Don't do this if we have
6926 already written the data since we want to refer to that copy
6927 and varasm.c assumes that's what we'll do. */
6928 if (code == COMPONENT_REF
6929 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6930 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6931 {
6932 tree elt;
6933
6934 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6935 elt = TREE_CHAIN (elt))
6936 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6937 /* We can normally use the value of the field in the
6938 CONSTRUCTOR. However, if this is a bitfield in
6939 an integral mode that we can fit in a HOST_WIDE_INT,
6940 we must mask only the number of bits in the bitfield,
6941 since this is done implicitly by the constructor. If
6942 the bitfield does not meet either of those conditions,
6943 we can't do this optimization. */
6944 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6945 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6946 == MODE_INT)
6947 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6948 <= HOST_BITS_PER_WIDE_INT))))
6949 {
6950 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6951 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6952 {
6953 HOST_WIDE_INT bitsize
6954 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6955
6956 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6957 {
6958 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6959 op0 = expand_and (op0, op1, target);
6960 }
6961 else
6962 {
6963 enum machine_mode imode
6964 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6965 tree count
6966 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6967 0);
6968
6969 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6970 target, 0);
6971 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6972 target, 0);
6973 }
6974 }
6975
6976 return op0;
6977 }
6978 }
6979
6980 {
6981 enum machine_mode mode1;
6982 HOST_WIDE_INT bitsize, bitpos;
6983 tree offset;
6984 int volatilep = 0;
6985 unsigned int alignment;
6986 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6987 &mode1, &unsignedp, &volatilep,
6988 &alignment);
6989
6990 /* If we got back the original object, something is wrong. Perhaps
6991 we are evaluating an expression too early. In any event, don't
6992 infinitely recurse. */
6993 if (tem == exp)
6994 abort ();
6995
6996 /* If TEM's type is a union of variable size, pass TARGET to the inner
6997 computation, since it will need a temporary and TARGET is known
6998 to be adequate. This occurs in unchecked conversion in Ada. */
6999
7000 op0 = expand_expr (tem,
7001 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7002 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7003 != INTEGER_CST)
7004 ? target : NULL_RTX),
7005 VOIDmode,
7006 (modifier == EXPAND_INITIALIZER
7007 || modifier == EXPAND_CONST_ADDRESS)
7008 ? modifier : EXPAND_NORMAL);
7009
7010 /* If this is a constant, put it into a register if it is a
7011 legitimate constant and OFFSET is 0 and memory if it isn't. */
7012 if (CONSTANT_P (op0))
7013 {
7014 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7015 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7016 && offset == 0)
7017 op0 = force_reg (mode, op0);
7018 else
7019 op0 = validize_mem (force_const_mem (mode, op0));
7020 }
7021
7022 if (offset != 0)
7023 {
7024 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7025
7026 /* If this object is in a register, put it into memory.
7027 This case can't occur in C, but can in Ada if we have
7028 unchecked conversion of an expression from a scalar type to
7029 an array or record type. */
7030 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7031 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7032 {
7033 /* If the operand is a SAVE_EXPR, we can deal with this by
7034 forcing the SAVE_EXPR into memory. */
7035 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7036 {
7037 put_var_into_stack (TREE_OPERAND (exp, 0));
7038 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7039 }
7040 else
7041 {
7042 tree nt
7043 = build_qualified_type (TREE_TYPE (tem),
7044 (TYPE_QUALS (TREE_TYPE (tem))
7045 | TYPE_QUAL_CONST));
7046 rtx memloc = assign_temp (nt, 1, 1, 1);
7047
7048 mark_temp_addr_taken (memloc);
7049 emit_move_insn (memloc, op0);
7050 op0 = memloc;
7051 }
7052 }
7053
7054 if (GET_CODE (op0) != MEM)
7055 abort ();
7056
7057 if (GET_MODE (offset_rtx) != ptr_mode)
7058 {
7059 #ifdef POINTERS_EXTEND_UNSIGNED
7060 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7061 #else
7062 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7063 #endif
7064 }
7065
7066 /* A constant address in OP0 can have VOIDmode; we must not
7067 call force_reg in that case, so avoid it. */
7068 if (GET_CODE (op0) == MEM
7069 && GET_MODE (op0) == BLKmode
7070 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7071 && bitsize != 0
7072 && (bitpos % bitsize) == 0
7073 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7074 && alignment == GET_MODE_ALIGNMENT (mode1))
7075 {
7076 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7077
7078 if (GET_CODE (XEXP (temp, 0)) == REG)
7079 op0 = temp;
7080 else
7081 op0 = (replace_equiv_address
7082 (op0,
7083 force_reg (GET_MODE (XEXP (temp, 0)),
7084 XEXP (temp, 0))));
7085 bitpos = 0;
7086 }
7087
7088 op0 = change_address (op0, VOIDmode,
7089 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7090 force_reg (ptr_mode,
7091 offset_rtx)));
7092 }
7093
7094 /* Don't forget about volatility even if this is a bitfield. */
7095 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7096 {
7097 op0 = copy_rtx (op0);
7098 MEM_VOLATILE_P (op0) = 1;
7099 }
7100
7101 /* Check the access. */
7102 if (cfun != 0 && current_function_check_memory_usage
7103 && GET_CODE (op0) == MEM)
7104 {
7105 enum memory_use_mode memory_usage;
7106 memory_usage = get_memory_usage_from_modifier (modifier);
7107
7108 if (memory_usage != MEMORY_USE_DONT)
7109 {
7110 rtx to;
7111 int size;
7112
7113 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7114 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7115
7116 /* Check the access rights of the pointer. */
7117 in_check_memory_usage = 1;
7118 if (size > BITS_PER_UNIT)
7119 emit_library_call (chkr_check_addr_libfunc,
7120 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7121 Pmode, GEN_INT (size / BITS_PER_UNIT),
7122 TYPE_MODE (sizetype),
7123 GEN_INT (memory_usage),
7124 TYPE_MODE (integer_type_node));
7125 in_check_memory_usage = 0;
7126 }
7127 }
7128
7129 /* In cases where an aligned union has an unaligned object
7130 as a field, we might be extracting a BLKmode value from
7131 an integer-mode (e.g., SImode) object. Handle this case
7132 by doing the extract into an object as wide as the field
7133 (which we know to be the width of a basic mode), then
7134 storing into memory, and changing the mode to BLKmode. */
7135 if (mode1 == VOIDmode
7136 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7137 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7138 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7139 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7140 && modifier != EXPAND_CONST_ADDRESS
7141 && modifier != EXPAND_INITIALIZER)
7142 /* If the field isn't aligned enough to fetch as a memref,
7143 fetch it as a bit field. */
7144 || (mode1 != BLKmode
7145 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7146 && ((TYPE_ALIGN (TREE_TYPE (tem))
7147 < GET_MODE_ALIGNMENT (mode))
7148 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7149 /* If the type and the field are a constant size and the
7150 size of the type isn't the same size as the bitfield,
7151 we must use bitfield operations. */
7152 || (bitsize >= 0
7153 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7154 == INTEGER_CST)
7155 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7156 bitsize))
7157 || (mode == BLKmode
7158 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7159 && (TYPE_ALIGN (type) > alignment
7160 || bitpos % TYPE_ALIGN (type) != 0)))
7161 {
7162 enum machine_mode ext_mode = mode;
7163
7164 if (ext_mode == BLKmode
7165 && ! (target != 0 && GET_CODE (op0) == MEM
7166 && GET_CODE (target) == MEM
7167 && bitpos % BITS_PER_UNIT == 0))
7168 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7169
7170 if (ext_mode == BLKmode)
7171 {
7172 /* In this case, BITPOS must start at a byte boundary and
7173 TARGET, if specified, must be a MEM. */
7174 if (GET_CODE (op0) != MEM
7175 || (target != 0 && GET_CODE (target) != MEM)
7176 || bitpos % BITS_PER_UNIT != 0)
7177 abort ();
7178
7179 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7180 if (target == 0)
7181 target = assign_temp (type, 0, 1, 1);
7182
7183 emit_block_move (target, op0,
7184 bitsize == -1 ? expr_size (exp)
7185 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7186 / BITS_PER_UNIT),
7187 BITS_PER_UNIT);
7188
7189 return target;
7190 }
7191
7192 op0 = validize_mem (op0);
7193
7194 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7195 mark_reg_pointer (XEXP (op0, 0), alignment);
7196
7197 op0 = extract_bit_field (op0, bitsize, bitpos,
7198 unsignedp, target, ext_mode, ext_mode,
7199 alignment,
7200 int_size_in_bytes (TREE_TYPE (tem)));
7201
7202 /* If the result is a record type and BITSIZE is narrower than
7203 the mode of OP0, an integral mode, and this is a big endian
7204 machine, we must put the field into the high-order bits. */
7205 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7206 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7207 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7208 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7209 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7210 - bitsize),
7211 op0, 1);
7212
7213 if (mode == BLKmode)
7214 {
7215 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7216 TYPE_QUAL_CONST);
7217 rtx new = assign_temp (nt, 0, 1, 1);
7218
7219 emit_move_insn (new, op0);
7220 op0 = copy_rtx (new);
7221 PUT_MODE (op0, BLKmode);
7222 }
7223
7224 return op0;
7225 }
7226
7227 /* If the result is BLKmode, use that to access the object
7228 now as well. */
7229 if (mode == BLKmode)
7230 mode1 = BLKmode;
7231
7232 /* Get a reference to just this component. */
7233 if (modifier == EXPAND_CONST_ADDRESS
7234 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7235 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7236 else
7237 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7238
7239 set_mem_attributes (op0, exp, 0);
7240 if (GET_CODE (XEXP (op0, 0)) == REG)
7241 mark_reg_pointer (XEXP (op0, 0), alignment);
7242
7243 MEM_VOLATILE_P (op0) |= volatilep;
7244 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7245 || modifier == EXPAND_CONST_ADDRESS
7246 || modifier == EXPAND_INITIALIZER)
7247 return op0;
7248 else if (target == 0)
7249 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7250
7251 convert_move (target, op0, unsignedp);
7252 return target;
7253 }
7254
7255 /* Intended for a reference to a buffer of a file-object in Pascal.
7256 But it's not certain that a special tree code will really be
7257 necessary for these. INDIRECT_REF might work for them. */
7258 case BUFFER_REF:
7259 abort ();
7260
7261 case IN_EXPR:
7262 {
7263 /* Pascal set IN expression.
7264
7265 Algorithm:
7266 rlo = set_low - (set_low%bits_per_word);
7267 the_word = set [ (index - rlo)/bits_per_word ];
7268 bit_index = index % bits_per_word;
7269 bitmask = 1 << bit_index;
7270 return !!(the_word & bitmask); */
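/* For illustration, with bits_per_word == BITS_PER_UNIT == 8 and
   set_low == 3: rlo == 0, so `13 in SET' tests bit 13 % 8 == 5 of
   the byte set[(13 - 0) / 8] == set[1]. */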
7271
7272 tree set = TREE_OPERAND (exp, 0);
7273 tree index = TREE_OPERAND (exp, 1);
7274 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7275 tree set_type = TREE_TYPE (set);
7276 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7277 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7278 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7279 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7280 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7281 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7282 rtx setaddr = XEXP (setval, 0);
7283 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7284 rtx rlow;
7285 rtx diff, quo, rem, addr, bit, result;
7286
7287 /* If domain is empty, answer is no. Likewise if index is constant
7288 and out of bounds. */
7289 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7290 && TREE_CODE (set_low_bound) == INTEGER_CST
7291 && tree_int_cst_lt (set_high_bound, set_low_bound))
7292 || (TREE_CODE (index) == INTEGER_CST
7293 && TREE_CODE (set_low_bound) == INTEGER_CST
7294 && tree_int_cst_lt (index, set_low_bound))
7295 || (TREE_CODE (set_high_bound) == INTEGER_CST
7296 && TREE_CODE (index) == INTEGER_CST
7297 && tree_int_cst_lt (set_high_bound, index))))
7298 return const0_rtx;
7299
7300 if (target == 0)
7301 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7302
7303 /* If we get here, we have to generate the code for both cases
7304 (in range and out of range). */
7305
7306 op0 = gen_label_rtx ();
7307 op1 = gen_label_rtx ();
7308
7309 if (! (GET_CODE (index_val) == CONST_INT
7310 && GET_CODE (lo_r) == CONST_INT))
7311 {
7312 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7313 GET_MODE (index_val), iunsignedp, 0, op1);
7314 }
7315
7316 if (! (GET_CODE (index_val) == CONST_INT
7317 && GET_CODE (hi_r) == CONST_INT))
7318 {
7319 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7320 GET_MODE (index_val), iunsignedp, 0, op1);
7321 }
7322
7323 /* Calculate the element number of bit zero in the first word
7324 of the set. */
7325 if (GET_CODE (lo_r) == CONST_INT)
7326 rlow = GEN_INT (INTVAL (lo_r)
7327 & ~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1));
7328 else
7329 rlow = expand_binop (index_mode, and_optab, lo_r,
7330 GEN_INT (~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1)),
7331 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7332
7333 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7334 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7335
7336 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7337 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7338 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7339 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7340
7341 addr = memory_address (byte_mode,
7342 expand_binop (index_mode, add_optab, quo,
7343 setaddr, NULL_RTX, iunsignedp,
7344 OPTAB_LIB_WIDEN));
7345
7346 /* Extract the bit we want to examine. */
7347 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7348 gen_rtx_MEM (byte_mode, addr),
7349 make_tree (TREE_TYPE (index), rem),
7350 NULL_RTX, 1);
7351 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7352 GET_MODE (target) == byte_mode ? target : 0,
7353 1, OPTAB_LIB_WIDEN);
7354
7355 if (result != target)
7356 convert_move (target, result, 1);
7357
7358 /* Output the code to handle the out-of-range case. */
7359 emit_jump (op0);
7360 emit_label (op1);
7361 emit_move_insn (target, const0_rtx);
7362 emit_label (op0);
7363 return target;
7364 }
7365
7366 case WITH_CLEANUP_EXPR:
7367 if (RTL_EXPR_RTL (exp) == 0)
7368 {
7369 RTL_EXPR_RTL (exp)
7370 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7371 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7372
7373 /* That's it for this cleanup. */
7374 TREE_OPERAND (exp, 2) = 0;
7375 }
7376 return RTL_EXPR_RTL (exp);
7377
7378 case CLEANUP_POINT_EXPR:
7379 {
7380 /* Start a new binding layer that will keep track of all cleanup
7381 actions to be performed. */
7382 expand_start_bindings (2);
7383
7384 target_temp_slot_level = temp_slot_level;
7385
7386 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7387 /* If we're going to use this value, load it up now. */
7388 if (! ignore)
7389 op0 = force_not_mem (op0);
7390 preserve_temp_slots (op0);
7391 expand_end_bindings (NULL_TREE, 0, 0);
7392 }
7393 return op0;
7394
7395 case CALL_EXPR:
7396 /* Check for a built-in function. */
7397 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7398 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7399 == FUNCTION_DECL)
7400 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7401 {
7402 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7403 == BUILT_IN_FRONTEND)
7404 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7405 else
7406 return expand_builtin (exp, target, subtarget, tmode, ignore);
7407 }
7408
7409 return expand_call (exp, target, ignore);
7410
7411 case NON_LVALUE_EXPR:
7412 case NOP_EXPR:
7413 case CONVERT_EXPR:
7414 case REFERENCE_EXPR:
7415 if (TREE_OPERAND (exp, 0) == error_mark_node)
7416 return const0_rtx;
7417
7418 if (TREE_CODE (type) == UNION_TYPE)
7419 {
7420 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7421
7422 /* If both input and output are BLKmode, this conversion
7423 isn't actually doing anything unless we need to make the
7424 alignment stricter. */
7425 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7426 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7427 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7428 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7429 modifier);
7430
7431 if (target == 0)
7432 target = assign_temp (type, 0, 1, 1);
7433
7434 if (GET_CODE (target) == MEM)
7435 /* Store data into beginning of memory target. */
7436 store_expr (TREE_OPERAND (exp, 0),
7437 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7438
7439 else if (GET_CODE (target) == REG)
7440 /* Store this field into a union of the proper type. */
7441 store_field (target,
7442 MIN ((int_size_in_bytes (TREE_TYPE
7443 (TREE_OPERAND (exp, 0)))
7444 * BITS_PER_UNIT),
7445 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7446 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7447 VOIDmode, 0, BITS_PER_UNIT,
7448 int_size_in_bytes (type), 0);
7449 else
7450 abort ();
7451
7452 /* Return the entire union. */
7453 return target;
7454 }
7455
7456 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7457 {
7458 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7459 ro_modifier);
7460
7461 /* If the signedness of the conversion differs and OP0 is
7462 a promoted SUBREG, clear that indication since we now
7463 have to do the proper extension. */
7464 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7465 && GET_CODE (op0) == SUBREG)
7466 SUBREG_PROMOTED_VAR_P (op0) = 0;
7467
7468 return op0;
7469 }
7470
7471 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7472 if (GET_MODE (op0) == mode)
7473 return op0;
7474
7475 /* If OP0 is a constant, just convert it into the proper mode. */
7476 if (CONSTANT_P (op0))
7477 return
7478 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7479 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7480
7481 if (modifier == EXPAND_INITIALIZER)
7482 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7483
7484 if (target == 0)
7485 return
7486 convert_to_mode (mode, op0,
7487 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7488 else
7489 convert_move (target, op0,
7490 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7491 return target;
7492
7493 case PLUS_EXPR:
7494 /* We come here from MINUS_EXPR when the second operand is a
7495 constant. */
7496 plus_expr:
7497 this_optab = ! unsignedp && flag_trapv
7498 && (GET_MODE_CLASS(mode) == MODE_INT)
7499 ? addv_optab : add_optab;
7500
7501 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7502 something else, make sure we add the register to the constant and
7503 then to the other thing. This case can occur during strength
7504 reduction and doing it this way will produce better code if the
7505 frame pointer or argument pointer is eliminated.
7506
7507 fold-const.c will ensure that the constant is always in the inner
7508 PLUS_EXPR, so the only case we need to do anything about is if
7509 sp, ap, or fp is our second argument, in which case we must swap
7510 the innermost first argument and our second argument. */
7511
7512 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7513 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7514 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7515 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7516 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7517 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7518 {
7519 tree t = TREE_OPERAND (exp, 1);
7520
7521 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7522 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7523 }
7524
7525 /* If the result is to be ptr_mode and we are adding an integer to
7526 something, we might be forming a constant. So try to use
7527 plus_constant. If it produces a sum and we can't accept it,
7528 use force_operand. This allows P = &ARR[const] to generate
7529 efficient code on machines where a SYMBOL_REF is not a valid
7530 address.
7531
7532 If this is an EXPAND_SUM call, always return the sum. */
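/* As a rough illustration: for P = &ARR[2] with 4-byte elements,
   one operand expands to a SYMBOL_REF for ARR and plus_constant
   folds the offset, giving (plus (symbol_ref ARR) (const_int 8))
   instead of a separate add instruction. */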
7533 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7534 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7535 {
7536 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7537 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7538 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7539 {
7540 rtx constant_part;
7541
7542 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7543 EXPAND_SUM);
7544 /* Use immed_double_const to ensure that the constant is
7545 truncated according to the mode of OP1, then sign extended
7546 to a HOST_WIDE_INT. Using the constant directly can result
7547 in non-canonical RTL in a 64x32 cross compile. */
7548 constant_part
7549 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7550 (HOST_WIDE_INT) 0,
7551 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7552 op1 = plus_constant (op1, INTVAL (constant_part));
7553 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7554 op1 = force_operand (op1, target);
7555 return op1;
7556 }
7557
7558 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7559 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7560 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7561 {
7562 rtx constant_part;
7563
7564 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7565 EXPAND_SUM);
7566 if (! CONSTANT_P (op0))
7567 {
7568 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7569 VOIDmode, modifier);
7570 /* Don't go to both_summands if modifier
7571 says it's not right to return a PLUS. */
7572 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7573 goto binop2;
7574 goto both_summands;
7575 }
7576 /* Use immed_double_const to ensure that the constant is
7577 truncated according to the mode of OP0, then sign extended
7578 to a HOST_WIDE_INT. Using the constant directly can result
7579 in non-canonical RTL in a 64x32 cross compile. */
7580 constant_part
7581 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7582 (HOST_WIDE_INT) 0,
7583 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7584 op0 = plus_constant (op0, INTVAL (constant_part));
7585 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7586 op0 = force_operand (op0, target);
7587 return op0;
7588 }
7589 }
7590
7591 /* No sense saving up arithmetic to be done
7592 if it's all in the wrong mode to form part of an address.
7593 And force_operand won't know whether to sign-extend or
7594 zero-extend. */
7595 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7596 || mode != ptr_mode)
7597 goto binop;
7598
7599 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7600 subtarget = 0;
7601
7602 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7603 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7604
7605 both_summands:
7606 /* Make sure any term that's a sum with a constant comes last. */
7607 if (GET_CODE (op0) == PLUS
7608 && CONSTANT_P (XEXP (op0, 1)))
7609 {
7610 temp = op0;
7611 op0 = op1;
7612 op1 = temp;
7613 }
7614 /* If adding to a sum including a constant,
7615 associate it to put the constant outside. */
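/* E.g. OP0 + (X + 7) is reassociated so the non-constant parts
   are combined first and the constant 7 becomes the outer term. */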
7616 if (GET_CODE (op1) == PLUS
7617 && CONSTANT_P (XEXP (op1, 1)))
7618 {
7619 rtx constant_term = const0_rtx;
7620
7621 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7622 if (temp != 0)
7623 op0 = temp;
7624 /* Ensure that MULT comes first if there is one. */
7625 else if (GET_CODE (op0) == MULT)
7626 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7627 else
7628 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7629
7630 /* Let's also eliminate constants from op0 if possible. */
7631 op0 = eliminate_constant_term (op0, &constant_term);
7632
7633 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7634 their sum should be a constant. Form it into OP1, since the
7635 result we want will then be OP0 + OP1. */
7636
7637 temp = simplify_binary_operation (PLUS, mode, constant_term,
7638 XEXP (op1, 1));
7639 if (temp != 0)
7640 op1 = temp;
7641 else
7642 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7643 }
7644
7645 /* Put a constant term last and put a multiplication first. */
7646 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7647 temp = op1, op1 = op0, op0 = temp;
7648
7649 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7650 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7651
7652 case MINUS_EXPR:
7653 /* For initializers, we are allowed to return a MINUS of two
7654 symbolic constants. Here we handle all cases when both operands
7655 are constant. */
7656 /* Handle difference of two symbolic constants,
7657 for the sake of an initializer. */
7658 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7659 && really_constant_p (TREE_OPERAND (exp, 0))
7660 && really_constant_p (TREE_OPERAND (exp, 1)))
7661 {
7662 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7663 VOIDmode, ro_modifier);
7664 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7665 VOIDmode, ro_modifier);
7666
7667 /* If the last operand is a CONST_INT, use plus_constant of
7668 the negated constant. Else make the MINUS. */
7669 if (GET_CODE (op1) == CONST_INT)
7670 return plus_constant (op0, - INTVAL (op1));
7671 else
7672 return gen_rtx_MINUS (mode, op0, op1);
7673 }
7674 /* Convert A - const to A + (-const). */
7675 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7676 {
7677 tree negated = fold (build1 (NEGATE_EXPR, type,
7678 TREE_OPERAND (exp, 1)));
7679
7680 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7681 /* If we can't negate the constant in TYPE, leave it alone and
7682 expand_binop will negate it for us. We used to try to do it
7683 here in the signed version of TYPE, but that doesn't work
7684 on POINTER_TYPEs. */;
7685 else
7686 {
7687 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7688 goto plus_expr;
7689 }
7690 }
7691 this_optab = ! unsignedp && flag_trapv
7692 && (GET_MODE_CLASS(mode) == MODE_INT)
7693 ? subv_optab : sub_optab;
7694 goto binop;
7695
7696 case MULT_EXPR:
7697 /* If first operand is constant, swap them.
7698 Thus the following special case checks need only
7699 check the second operand. */
7700 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7701 {
7702 register tree t1 = TREE_OPERAND (exp, 0);
7703 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7704 TREE_OPERAND (exp, 1) = t1;
7705 }
7706
7707 /* Attempt to return something suitable for generating an
7708 indexed address, for machines that support that. */
7709
7710 if (modifier == EXPAND_SUM && mode == ptr_mode
7711 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7712 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7713 {
7714 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7715 EXPAND_SUM);
7716
7717 /* Apply distributive law if OP0 is x+c. */
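/* E.g. (x + 4) * 3 becomes (x * 3) + 12, keeping the result in a
   form usable as an indexed address. */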
7718 if (GET_CODE (op0) == PLUS
7719 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7720 return
7721 gen_rtx_PLUS
7722 (mode,
7723 gen_rtx_MULT
7724 (mode, XEXP (op0, 0),
7725 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7726 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7727 * INTVAL (XEXP (op0, 1))));
7728
7729 if (GET_CODE (op0) != REG)
7730 op0 = force_operand (op0, NULL_RTX);
7731 if (GET_CODE (op0) != REG)
7732 op0 = copy_to_mode_reg (mode, op0);
7733
7734 return
7735 gen_rtx_MULT (mode, op0,
7736 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7737 }
7738
7739 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7740 subtarget = 0;
7741
7742 /* Check for multiplying things that have been extended
7743 from a narrower type. If this machine supports multiplying
7744 in that narrower type with a result in the desired type,
7745 do it that way, and avoid the explicit type-conversion. */
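/* For instance, on a target with a 16x16->32 widening multiply,
   (int) (short) a * (int) (short) b can be done in the narrow
   mode directly instead of extending both operands to SImode. */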
7746 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7747 && TREE_CODE (type) == INTEGER_TYPE
7748 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7749 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7750 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7751 && int_fits_type_p (TREE_OPERAND (exp, 1),
7752 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7753 /* Don't use a widening multiply if a shift will do. */
7754 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7755 > HOST_BITS_PER_WIDE_INT)
7756 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7757 ||
7758 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7759 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7760 ==
7761 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7762 /* If both operands are extended, they must either both
7763 be zero-extended or both be sign-extended. */
7764 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7765 ==
7766 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7767 {
7768 enum machine_mode innermode
7769 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7770 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7771 ? smul_widen_optab : umul_widen_optab);
7772 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7773 ? umul_widen_optab : smul_widen_optab);
7774 if (mode == GET_MODE_WIDER_MODE (innermode))
7775 {
7776 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7777 {
7778 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7779 NULL_RTX, VOIDmode, 0);
7780 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7781 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7782 VOIDmode, 0);
7783 else
7784 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7785 NULL_RTX, VOIDmode, 0);
7786 goto binop2;
7787 }
7788 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7789 && innermode == word_mode)
7790 {
7791 rtx htem;
7792 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7793 NULL_RTX, VOIDmode, 0);
7794 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7795 op1 = convert_modes (innermode, mode,
7796 expand_expr (TREE_OPERAND (exp, 1),
7797 NULL_RTX, VOIDmode, 0),
7798 unsignedp);
7799 else
7800 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7801 NULL_RTX, VOIDmode, 0);
7802 temp = expand_binop (mode, other_optab, op0, op1, target,
7803 unsignedp, OPTAB_LIB_WIDEN);
7804 htem = expand_mult_highpart_adjust (innermode,
7805 gen_highpart (innermode, temp),
7806 op0, op1,
7807 gen_highpart (innermode, temp),
7808 unsignedp);
7809 emit_move_insn (gen_highpart (innermode, temp), htem);
7810 return temp;
7811 }
7812 }
7813 }
7814 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7815 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7816 return expand_mult (mode, op0, op1, target, unsignedp);
7817
7818 case TRUNC_DIV_EXPR:
7819 case FLOOR_DIV_EXPR:
7820 case CEIL_DIV_EXPR:
7821 case ROUND_DIV_EXPR:
7822 case EXACT_DIV_EXPR:
7823 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7824 subtarget = 0;
7825 /* Possible optimization: compute the dividend with EXPAND_SUM
7826 then if the divisor is constant can optimize the case
7827 where some terms of the dividend have coeffs divisible by it. */
7828 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7829 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7830 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7831
7832 case RDIV_EXPR:
7833 this_optab = flodiv_optab;
7834 goto binop;
7835
7836 case TRUNC_MOD_EXPR:
7837 case FLOOR_MOD_EXPR:
7838 case CEIL_MOD_EXPR:
7839 case ROUND_MOD_EXPR:
7840 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7841 subtarget = 0;
7842 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7843 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7844 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7845
7846 case FIX_ROUND_EXPR:
7847 case FIX_FLOOR_EXPR:
7848 case FIX_CEIL_EXPR:
7849 abort (); /* Not used for C. */
7850
7851 case FIX_TRUNC_EXPR:
7852 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7853 if (target == 0)
7854 target = gen_reg_rtx (mode);
7855 expand_fix (target, op0, unsignedp);
7856 return target;
7857
7858 case FLOAT_EXPR:
7859 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7860 if (target == 0)
7861 target = gen_reg_rtx (mode);
7862 /* expand_float can't figure out what to do if FROM has VOIDmode.
7863 So give it the correct mode. With -O, cse will optimize this. */
7864 if (GET_MODE (op0) == VOIDmode)
7865 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7866 op0);
7867 expand_float (target, op0,
7868 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7869 return target;
7870
7871 case NEGATE_EXPR:
7872 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7873 temp = expand_unop (mode,
7874 ! unsignedp && flag_trapv
7875 && (GET_MODE_CLASS(mode) == MODE_INT)
7876 ? negv_optab : neg_optab, op0, target, 0);
7877 if (temp == 0)
7878 abort ();
7879 return temp;
7880
7881 case ABS_EXPR:
7882 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7883
7884 /* Handle complex values specially. */
7885 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7886 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7887 return expand_complex_abs (mode, op0, target, unsignedp);
7888
7889 /* Unsigned abs is simply the operand. Testing here means we don't
7890 risk generating incorrect code below. */
7891 if (TREE_UNSIGNED (type))
7892 return op0;
7893
7894 return expand_abs (mode, op0, target, unsignedp,
7895 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7896
7897 case MAX_EXPR:
7898 case MIN_EXPR:
7899 target = original_target;
7900 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7901 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7902 || GET_MODE (target) != mode
7903 || (GET_CODE (target) == REG
7904 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7905 target = gen_reg_rtx (mode);
7906 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7907 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7908
7909 /* First try to do it with a special MIN or MAX instruction.
7910 If that does not win, use a conditional jump to select the proper
7911 value. */
7912 this_optab = (TREE_UNSIGNED (type)
7913 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7914 : (code == MIN_EXPR ? smin_optab : smax_optab));
7915
7916 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7917 OPTAB_WIDEN);
7918 if (temp != 0)
7919 return temp;
7920
7921 /* At this point, a MEM target is no longer useful; we will get better
7922 code without it. */
7923
7924 if (GET_CODE (target) == MEM)
7925 target = gen_reg_rtx (mode);
7926
7927 if (target != op0)
7928 emit_move_insn (target, op0);
7929
7930 op0 = gen_label_rtx ();
7931
7932 /* If this mode is an integer too wide to compare properly,
7933 compare word by word. Rely on cse to optimize constant cases. */
7934 if (GET_MODE_CLASS (mode) == MODE_INT
7935 && ! can_compare_p (GE, mode, ccp_jump))
7936 {
7937 if (code == MAX_EXPR)
7938 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7939 target, op1, NULL_RTX, op0);
7940 else
7941 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7942 op1, target, NULL_RTX, op0);
7943 }
7944 else
7945 {
7946 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7947 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7948 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7949 op0);
7950 }
7951 emit_move_insn (target, op1);
7952 emit_label (op0);
7953 return target;
7954
7955 case BIT_NOT_EXPR:
7956 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7957 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7958 if (temp == 0)
7959 abort ();
7960 return temp;
7961
7962 case FFS_EXPR:
7963 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7964 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7965 if (temp == 0)
7966 abort ();
7967 return temp;
7968
7969 /* ??? Can optimize bitwise operations with one arg constant.
7970 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7971 and (a bitwise1 b) bitwise2 b (etc)
7972 but that is probably not worth while. */
7973
7974 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7975 boolean values when we want in all cases to compute both of them. In
7976 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7977 as actual zero-or-1 values and then bitwise anding. In cases where
7978 there cannot be any side effects, better code would be made by
7979 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7980 how to recognize those cases. */
7981
7982 case TRUTH_AND_EXPR:
7983 case BIT_AND_EXPR:
7984 this_optab = and_optab;
7985 goto binop;
7986
7987 case TRUTH_OR_EXPR:
7988 case BIT_IOR_EXPR:
7989 this_optab = ior_optab;
7990 goto binop;
7991
7992 case TRUTH_XOR_EXPR:
7993 case BIT_XOR_EXPR:
7994 this_optab = xor_optab;
7995 goto binop;
7996
7997 case LSHIFT_EXPR:
7998 case RSHIFT_EXPR:
7999 case LROTATE_EXPR:
8000 case RROTATE_EXPR:
8001 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8002 subtarget = 0;
8003 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8004 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8005 unsignedp);
8006
8007 /* Could determine the answer when only additive constants differ. Also,
8008 the addition of one can be handled by changing the condition. */
8009 case LT_EXPR:
8010 case LE_EXPR:
8011 case GT_EXPR:
8012 case GE_EXPR:
8013 case EQ_EXPR:
8014 case NE_EXPR:
8015 case UNORDERED_EXPR:
8016 case ORDERED_EXPR:
8017 case UNLT_EXPR:
8018 case UNLE_EXPR:
8019 case UNGT_EXPR:
8020 case UNGE_EXPR:
8021 case UNEQ_EXPR:
8022 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8023 if (temp != 0)
8024 return temp;
8025
8026 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8027 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8028 && original_target
8029 && GET_CODE (original_target) == REG
8030 && (GET_MODE (original_target)
8031 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8032 {
8033 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8034 VOIDmode, 0);
8035
8036 if (temp != original_target)
8037 temp = copy_to_reg (temp);
8038
8039 op1 = gen_label_rtx ();
8040 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8041 GET_MODE (temp), unsignedp, 0, op1);
8042 emit_move_insn (temp, const1_rtx);
8043 emit_label (op1);
8044 return temp;
8045 }
8046
8047 /* If no set-flag instruction, must generate a conditional
8048 store into a temporary variable. Drop through
8049 and handle this like && and ||. */
8050
8051 case TRUTH_ANDIF_EXPR:
8052 case TRUTH_ORIF_EXPR:
8053 if (! ignore
8054 && (target == 0 || ! safe_from_p (target, exp, 1)
8055 /* Make sure we don't have a hard reg (such as function's return
8056 value) live across basic blocks, if not optimizing. */
8057 || (!optimize && GET_CODE (target) == REG
8058 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8059 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8060
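/* Clear TARGET, jump around the store below if EXP is false, and
   set TARGET to 1 on the fall-through (true) path. */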
8061 if (target)
8062 emit_clr_insn (target);
8063
8064 op1 = gen_label_rtx ();
8065 jumpifnot (exp, op1);
8066
8067 if (target)
8068 emit_0_to_1_insn (target);
8069
8070 emit_label (op1);
8071 return ignore ? const0_rtx : target;
8072
8073 case TRUTH_NOT_EXPR:
8074 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8075 /* The parser is careful to generate TRUTH_NOT_EXPR
8076 only with operands that are always zero or one. */
8077 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8078 target, 1, OPTAB_LIB_WIDEN);
8079 if (temp == 0)
8080 abort ();
8081 return temp;
8082
8083 case COMPOUND_EXPR:
8084 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8085 emit_queue ();
8086 return expand_expr (TREE_OPERAND (exp, 1),
8087 (ignore ? const0_rtx : target),
8088 VOIDmode, 0);
8089
8090 case COND_EXPR:
8091 /* If we would have a "singleton" (see below) were it not for a
8092 conversion in each arm, bring that conversion back out. */
8093 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8094 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8095 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8096 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8097 {
8098 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8099 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8100
8101 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8102 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8103 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8104 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8105 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8106 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8107 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8108 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8109 return expand_expr (build1 (NOP_EXPR, type,
8110 build (COND_EXPR, TREE_TYPE (iftrue),
8111 TREE_OPERAND (exp, 0),
8112 iftrue, iffalse)),
8113 target, tmode, modifier);
8114 }
8115
8116 {
8117 /* Note that COND_EXPRs whose type is a structure or union
8118 are required to be constructed to contain assignments of
8119 a temporary variable, so that we can evaluate them here
8120 for side effect only. If type is void, we must do likewise. */
8121
8122 /* If an arm of the branch requires a cleanup,
8123 only that cleanup is performed. */
8124
8125 tree singleton = 0;
8126 tree binary_op = 0, unary_op = 0;
8127
8128 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8129 convert it to our mode, if necessary. */
8130 if (integer_onep (TREE_OPERAND (exp, 1))
8131 && integer_zerop (TREE_OPERAND (exp, 2))
8132 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8133 {
8134 if (ignore)
8135 {
8136 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8137 ro_modifier);
8138 return const0_rtx;
8139 }
8140
8141 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8142 if (GET_MODE (op0) == mode)
8143 return op0;
8144
8145 if (target == 0)
8146 target = gen_reg_rtx (mode);
8147 convert_move (target, op0, unsignedp);
8148 return target;
8149 }
8150
8151 /* Check for X ? A + B : A. If we have this, we can copy A to the
8152 output and conditionally add B. Similarly for unary operations.
8153 Don't do this if X has side-effects because those side effects
8154 might affect A or B and the "?" operation is a sequence point in
8155 ANSI. (operand_equal_p tests for side effects.) */
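/* For example, X ? A + B : A sets SINGLETON to A and BINARY_OP to
   A + B below; A is stored unconditionally and B is added only on
   the path where X is true. */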
8156
8157 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8158 && operand_equal_p (TREE_OPERAND (exp, 2),
8159 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8160 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8161 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8162 && operand_equal_p (TREE_OPERAND (exp, 1),
8163 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8164 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8165 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8166 && operand_equal_p (TREE_OPERAND (exp, 2),
8167 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8168 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8169 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8170 && operand_equal_p (TREE_OPERAND (exp, 1),
8171 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8172 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8173
8174 /* If we are not to produce a result, we have no target. Otherwise,
8175 if a target was specified use it; it will not be used as an
8176 intermediate target unless it is safe. If no target, use a
8177 temporary. */
8178
8179 if (ignore)
8180 temp = 0;
8181 else if (original_target
8182 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8183 || (singleton && GET_CODE (original_target) == REG
8184 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8185 && original_target == var_rtx (singleton)))
8186 && GET_MODE (original_target) == mode
8187 #ifdef HAVE_conditional_move
8188 && (! can_conditionally_move_p (mode)
8189 || GET_CODE (original_target) == REG
8190 || TREE_ADDRESSABLE (type))
8191 #endif
8192 && ! (GET_CODE (original_target) == MEM
8193 && MEM_VOLATILE_P (original_target)))
8194 temp = original_target;
8195 else if (TREE_ADDRESSABLE (type))
8196 abort ();
8197 else
8198 temp = assign_temp (type, 0, 0, 1);
8199
8200 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8201 do the test of X as a store-flag operation, do this as
8202 A + ((X != 0) << log C). Similarly for other simple binary
8203 operators. Only do for C == 1 if BRANCH_COST is low. */
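/* E.g. X ? A + 4 : A becomes A + ((X != 0) << 2), avoiding a
   conditional branch when the target has a cheap store-flag insn. */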
8204 if (temp && singleton && binary_op
8205 && (TREE_CODE (binary_op) == PLUS_EXPR
8206 || TREE_CODE (binary_op) == MINUS_EXPR
8207 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8208 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8209 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8210 : integer_onep (TREE_OPERAND (binary_op, 1)))
8211 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8212 {
8213 rtx result;
8214 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8215 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8216 ? addv_optab : add_optab)
8217 : TREE_CODE (binary_op) == MINUS_EXPR
8218 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8219 ? subv_optab : sub_optab)
8220 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8221 : xor_optab);
8222
8223 /* If we had X ? A : A + 1, do this as A + (X == 0).
8224
8225 We have to invert the truth value here and then put it
8226 back later if do_store_flag fails. We cannot simply copy
8227 TREE_OPERAND (exp, 0) to another variable and modify that
8228 because invert_truthvalue can modify the tree pointed to
8229 by its argument. */
8230 if (singleton == TREE_OPERAND (exp, 1))
8231 TREE_OPERAND (exp, 0)
8232 = invert_truthvalue (TREE_OPERAND (exp, 0));
8233
8234 result = do_store_flag (TREE_OPERAND (exp, 0),
8235 (safe_from_p (temp, singleton, 1)
8236 ? temp : NULL_RTX),
8237 mode, BRANCH_COST <= 1);
8238
8239 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8240 result = expand_shift (LSHIFT_EXPR, mode, result,
8241 build_int_2 (tree_log2
8242 (TREE_OPERAND
8243 (binary_op, 1)),
8244 0),
8245 (safe_from_p (temp, singleton, 1)
8246 ? temp : NULL_RTX), 0);
8247
8248 if (result)
8249 {
8250 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8251 return expand_binop (mode, boptab, op1, result, temp,
8252 unsignedp, OPTAB_LIB_WIDEN);
8253 }
8254 else if (singleton == TREE_OPERAND (exp, 1))
8255 TREE_OPERAND (exp, 0)
8256 = invert_truthvalue (TREE_OPERAND (exp, 0));
8257 }
8258
8259 do_pending_stack_adjust ();
8260 NO_DEFER_POP;
8261 op0 = gen_label_rtx ();
8262
8263 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8264 {
8265 if (temp != 0)
8266 {
8267 /* If the target conflicts with the other operand of the
8268 binary op, we can't use it. Also, we can't use the target
8269 if it is a hard register, because evaluating the condition
8270 might clobber it. */
8271 if ((binary_op
8272 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8273 || (GET_CODE (temp) == REG
8274 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8275 temp = gen_reg_rtx (mode);
8276 store_expr (singleton, temp, 0);
8277 }
8278 else
8279 expand_expr (singleton,
8280 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8281 if (singleton == TREE_OPERAND (exp, 1))
8282 jumpif (TREE_OPERAND (exp, 0), op0);
8283 else
8284 jumpifnot (TREE_OPERAND (exp, 0), op0);
8285
8286 start_cleanup_deferral ();
8287 if (binary_op && temp == 0)
8288 /* Just touch the other operand. */
8289 expand_expr (TREE_OPERAND (binary_op, 1),
8290 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8291 else if (binary_op)
8292 store_expr (build (TREE_CODE (binary_op), type,
8293 make_tree (type, temp),
8294 TREE_OPERAND (binary_op, 1)),
8295 temp, 0);
8296 else
8297 store_expr (build1 (TREE_CODE (unary_op), type,
8298 make_tree (type, temp)),
8299 temp, 0);
8300 op1 = op0;
8301 }
8302 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8303 comparison operator. If we have one of these cases, set the
8304 output to A, branch on A (cse will merge these two references),
8305 then set the output to FOO. */
8306 else if (temp
8307 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8308 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8309 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8310 TREE_OPERAND (exp, 1), 0)
8311 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8312 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8313 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8314 {
8315 if (GET_CODE (temp) == REG
8316 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8317 temp = gen_reg_rtx (mode);
8318 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8319 jumpif (TREE_OPERAND (exp, 0), op0);
8320
8321 start_cleanup_deferral ();
8322 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8323 op1 = op0;
8324 }
8325 else if (temp
8326 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8327 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8328 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8329 TREE_OPERAND (exp, 2), 0)
8330 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8331 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8332 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8333 {
8334 if (GET_CODE (temp) == REG
8335 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8336 temp = gen_reg_rtx (mode);
8337 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8338 jumpifnot (TREE_OPERAND (exp, 0), op0);
8339
8340 start_cleanup_deferral ();
8341 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8342 op1 = op0;
8343 }
8344 else
8345 {
8346 op1 = gen_label_rtx ();
8347 jumpifnot (TREE_OPERAND (exp, 0), op0);
8348
8349 start_cleanup_deferral ();
8350
8351 /* One branch of the cond can be void, if it never returns. For
8352 example A ? throw : E. */
8353 if (temp != 0
8354 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8355 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8356 else
8357 expand_expr (TREE_OPERAND (exp, 1),
8358 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8359 end_cleanup_deferral ();
8360 emit_queue ();
8361 emit_jump_insn (gen_jump (op1));
8362 emit_barrier ();
8363 emit_label (op0);
8364 start_cleanup_deferral ();
8365 if (temp != 0
8366 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8367 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8368 else
8369 expand_expr (TREE_OPERAND (exp, 2),
8370 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8371 }
8372
8373 end_cleanup_deferral ();
8374
8375 emit_queue ();
8376 emit_label (op1);
8377 OK_DEFER_POP;
8378
8379 return temp;
8380 }
8381
8382 case TARGET_EXPR:
8383 {
8384 /* Something needs to be initialized, but we didn't know
8385 where that thing was when building the tree. For example,
8386 it could be the return value of a function, or a parameter
8387 to a function which is laid out on the stack, or a temporary
8388 variable which must be passed by reference.
8389
8390 We guarantee that the expression will either be constructed
8391 or copied into our original target. */
8392
8393 tree slot = TREE_OPERAND (exp, 0);
8394 tree cleanups = NULL_TREE;
8395 tree exp1;
8396
8397 if (TREE_CODE (slot) != VAR_DECL)
8398 abort ();
8399
8400 if (! ignore)
8401 target = original_target;
8402
8403 /* Set this here so that if we get a target that refers to a
8404 register variable that's already been used, put_reg_into_stack
8405 knows that it should fix up those uses. */
8406 TREE_USED (slot) = 1;
8407
8408 if (target == 0)
8409 {
8410 if (DECL_RTL_SET_P (slot))
8411 {
8412 target = DECL_RTL (slot);
8413 /* We have already expanded the slot, so don't do
8414 it again. (mrs) */
8415 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8416 return target;
8417 }
8418 else
8419 {
8420 target = assign_temp (type, 2, 0, 1);
8421 /* All temp slots at this level must not conflict. */
8422 preserve_temp_slots (target);
8423 SET_DECL_RTL (slot, target);
8424 if (TREE_ADDRESSABLE (slot))
8425 put_var_into_stack (slot);
8426
8427 /* Since SLOT is not known to the called function
8428 to belong to its stack frame, we must build an explicit
8429 cleanup. This case occurs when we must build up a reference
8430 to pass as an argument. In this case,
8431 it is very likely that such a reference need not be
8432 built here. */
8433
8434 if (TREE_OPERAND (exp, 2) == 0)
8435 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8436 cleanups = TREE_OPERAND (exp, 2);
8437 }
8438 }
8439 else
8440 {
8441 /* This case does occur when expanding a parameter which
8442 needs to be constructed on the stack. The target
8443 is the actual stack address that we want to initialize.
8444 The function we call will perform the cleanup in this case. */
8445
8446 /* If we have already assigned it space, use that space,
8447 not the target that we were passed in, as our target
8448 parameter is only a hint. */
8449 if (DECL_RTL_SET_P (slot))
8450 {
8451 target = DECL_RTL (slot);
8452 /* We have already expanded the slot, so don't do
8453 it again. (mrs) */
8454 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8455 return target;
8456 }
8457 else
8458 {
8459 SET_DECL_RTL (slot, target);
8460 /* If we must have an addressable slot, then make sure that
8461 the RTL that we just stored in slot is OK. */
8462 if (TREE_ADDRESSABLE (slot))
8463 put_var_into_stack (slot);
8464 }
8465 }
8466
8467 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8468 /* Mark it as expanded. */
8469 TREE_OPERAND (exp, 1) = NULL_TREE;
8470
8471 store_expr (exp1, target, 0);
8472
8473 expand_decl_cleanup (NULL_TREE, cleanups);
8474
8475 return target;
8476 }
8477
8478 case INIT_EXPR:
8479 {
8480 tree lhs = TREE_OPERAND (exp, 0);
8481 tree rhs = TREE_OPERAND (exp, 1);
8482 tree noncopied_parts = 0;
8483 tree lhs_type = TREE_TYPE (lhs);
8484
8485 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8486 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8487 noncopied_parts
8488 = init_noncopied_parts (stabilize_reference (lhs),
8489 TYPE_NONCOPIED_PARTS (lhs_type));
8490
8491 while (noncopied_parts != 0)
8492 {
8493 expand_assignment (TREE_VALUE (noncopied_parts),
8494 TREE_PURPOSE (noncopied_parts), 0, 0);
8495 noncopied_parts = TREE_CHAIN (noncopied_parts);
8496 }
8497 return temp;
8498 }
8499
8500 case MODIFY_EXPR:
8501 {
8502 /* If lhs is complex, expand calls in rhs before computing it.
8503 That's so we don't compute a pointer and save it over a call.
8504 If lhs is simple, compute it first so we can give it as a
8505 target if the rhs is just a call. This avoids an extra temp and copy,
8506 and prevents a partial subsumption which makes bad code.
8507 Actually we could treat component_refs of vars like vars. */
8508
8509 tree lhs = TREE_OPERAND (exp, 0);
8510 tree rhs = TREE_OPERAND (exp, 1);
8511 tree noncopied_parts = 0;
8512 tree lhs_type = TREE_TYPE (lhs);
8513
8514 temp = 0;
8515
8516 /* Check for |= or &= of a bitfield of size one into another bitfield
8517 of size one. In this case (unless we need the result of the
8518 assignment) we can do this more efficiently with a
8519 test followed by an assignment, if necessary.
8520
8521 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8522 things change so we do, this code should be enhanced to
8523 support it. */
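/* For example, `a.x |= b.y' (both one-bit fields) becomes
   "if (b.y) a.x = 1;" and `a.x &= b.y' becomes "if (! b.y) a.x = 0;",
   replacing the read-modify-write with a test and a simple store. */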
8524 if (ignore
8525 && TREE_CODE (lhs) == COMPONENT_REF
8526 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8527 || TREE_CODE (rhs) == BIT_AND_EXPR)
8528 && TREE_OPERAND (rhs, 0) == lhs
8529 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8530 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8531 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8532 {
8533 rtx label = gen_label_rtx ();
8534
8535 do_jump (TREE_OPERAND (rhs, 1),
8536 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8537 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8538 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8539 (TREE_CODE (rhs) == BIT_IOR_EXPR
8540 ? integer_one_node
8541 : integer_zero_node)),
8542 0, 0);
8543 do_pending_stack_adjust ();
8544 emit_label (label);
8545 return const0_rtx;
8546 }
8547
8548 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8549 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8550 noncopied_parts
8551 = save_noncopied_parts (stabilize_reference (lhs),
8552 TYPE_NONCOPIED_PARTS (lhs_type));
8553
8554 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8555 while (noncopied_parts != 0)
8556 {
8557 expand_assignment (TREE_PURPOSE (noncopied_parts),
8558 TREE_VALUE (noncopied_parts), 0, 0);
8559 noncopied_parts = TREE_CHAIN (noncopied_parts);
8560 }
8561 return temp;
8562 }
8563
8564 case RETURN_EXPR:
8565 if (!TREE_OPERAND (exp, 0))
8566 expand_null_return ();
8567 else
8568 expand_return (TREE_OPERAND (exp, 0));
8569 return const0_rtx;
8570
8571 case PREINCREMENT_EXPR:
8572 case PREDECREMENT_EXPR:
8573 return expand_increment (exp, 0, ignore);
8574
8575 case POSTINCREMENT_EXPR:
8576 case POSTDECREMENT_EXPR:
8577 /* Faster to treat as pre-increment if result is not used. */
8578 return expand_increment (exp, ! ignore, ignore);
8579
8580 case ADDR_EXPR:
8581 /* If nonzero, TEMP will be set to the address of something that might
8582 be a MEM corresponding to a stack slot. */
8583 temp = 0;
8584
8585 /* Are we taking the address of a nested function? */
8586 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8587 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8588 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8589 && ! TREE_STATIC (exp))
8590 {
8591 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8592 op0 = force_operand (op0, target);
8593 }
8594 /* If we are taking the address of something erroneous, just
8595 return a zero. */
8596 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8597 return const0_rtx;
8598 else
8599 {
8600 /* We make sure to pass const0_rtx down if we came in with
8601 ignore set, to avoid running the cleanups twice. */
8602 op0 = expand_expr (TREE_OPERAND (exp, 0),
8603 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8604 (modifier == EXPAND_INITIALIZER
8605 ? modifier : EXPAND_CONST_ADDRESS));
8606
8607 /* If we are going to ignore the result, OP0 will have been set
8608 to const0_rtx, so just return it. Don't get confused and
8609 think we are taking the address of the constant. */
8610 if (ignore)
8611 return op0;
8612
8613 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8614 clever and returns a REG when given a MEM. */
8615 op0 = protect_from_queue (op0, 1);
8616
8617 /* We would like the object in memory. If it is a constant, we can
8618 have it be statically allocated into memory. For a non-constant,
8619 we need to allocate some memory and store the value into it. */
8620
8621 if (CONSTANT_P (op0))
8622 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8623 op0);
8624 else if (GET_CODE (op0) == MEM)
8625 {
8626 mark_temp_addr_taken (op0);
8627 temp = XEXP (op0, 0);
8628 }
8629
8630 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8631 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8632 || GET_CODE (op0) == PARALLEL)
8633 {
8634 	      /* If this object is in a register, it must not
8635 		 be BLKmode. */
8636 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8637 tree nt = build_qualified_type (inner_type,
8638 (TYPE_QUALS (inner_type)
8639 | TYPE_QUAL_CONST));
8640 rtx memloc = assign_temp (nt, 1, 1, 1);
8641
8642 mark_temp_addr_taken (memloc);
8643 if (GET_CODE (op0) == PARALLEL)
8644 /* Handle calls that pass values in multiple non-contiguous
8645 locations. The Irix 6 ABI has examples of this. */
8646 emit_group_store (memloc, op0,
8647 int_size_in_bytes (inner_type),
8648 TYPE_ALIGN (inner_type));
8649 else
8650 emit_move_insn (memloc, op0);
8651 op0 = memloc;
8652 }
8653
8654 if (GET_CODE (op0) != MEM)
8655 abort ();
8656
8657 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8658 {
8659 temp = XEXP (op0, 0);
8660 #ifdef POINTERS_EXTEND_UNSIGNED
8661 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8662 && mode == ptr_mode)
8663 temp = convert_memory_address (ptr_mode, temp);
8664 #endif
8665 return temp;
8666 }
8667
8668 op0 = force_operand (XEXP (op0, 0), target);
8669 }
8670
8671 if (flag_force_addr && GET_CODE (op0) != REG)
8672 op0 = force_reg (Pmode, op0);
8673
8674 if (GET_CODE (op0) == REG
8675 && ! REG_USERVAR_P (op0))
8676 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8677
8678 /* If we might have had a temp slot, add an equivalent address
8679 for it. */
8680 if (temp != 0)
8681 update_temp_slot_address (temp, op0);
8682
8683 #ifdef POINTERS_EXTEND_UNSIGNED
8684 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8685 && mode == ptr_mode)
8686 op0 = convert_memory_address (ptr_mode, op0);
8687 #endif
8688
8689 return op0;
8690
8691 case ENTRY_VALUE_EXPR:
8692 abort ();
8693
8694 /* COMPLEX type for Extended Pascal & Fortran */
8695 case COMPLEX_EXPR:
8696 {
8697 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8698 rtx insns;
8699
8700 	/* Get the rtx for each of the operands.  */
8701 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8702 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8703
8704 if (! target)
8705 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8706
8707 start_sequence ();
8708
8709 	/* Move the real (op0) and imaginary (op1) parts to their locations.  */
8710 emit_move_insn (gen_realpart (mode, target), op0);
8711 emit_move_insn (gen_imagpart (mode, target), op1);
8712
8713 insns = get_insns ();
8714 end_sequence ();
8715
8716 /* Complex construction should appear as a single unit. */
8717 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8718 each with a separate pseudo as destination.
8719 It's not correct for flow to treat them as a unit. */
8720 if (GET_CODE (target) != CONCAT)
8721 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8722 else
8723 emit_insns (insns);
8724
8725 return target;
8726 }
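      /* A sketch of what the sequence built above does (operand names are
	 illustrative): for COMPLEX_EXPR <re, im> it emits

	     (realpart of TARGET) = re;
	     (imagpart of TARGET) = im;

	 wrapped in a single no-conflict block when TARGET is not a CONCAT,
	 so later passes see the construction as one unit.  */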
8727
8728 case REALPART_EXPR:
8729 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8730 return gen_realpart (mode, op0);
8731
8732 case IMAGPART_EXPR:
8733 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8734 return gen_imagpart (mode, op0);
8735
8736 case CONJ_EXPR:
8737 {
8738 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8739 rtx imag_t;
8740 rtx insns;
8741
8742 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8743
8744 if (! target)
8745 target = gen_reg_rtx (mode);
8746
8747 start_sequence ();
8748
8749 /* Store the realpart and the negated imagpart to target. */
8750 emit_move_insn (gen_realpart (partmode, target),
8751 gen_realpart (partmode, op0));
8752
8753 imag_t = gen_imagpart (partmode, target);
8754 temp = expand_unop (partmode,
8755 ! unsignedp && flag_trapv
8756 && (GET_MODE_CLASS(partmode) == MODE_INT)
8757 ? negv_optab : neg_optab,
8758 gen_imagpart (partmode, op0), imag_t, 0);
8759 if (temp != imag_t)
8760 emit_move_insn (imag_t, temp);
8761
8762 insns = get_insns ();
8763 end_sequence ();
8764
8765 	/* Conjugate should appear as a single unit.
8766 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8767 each with a separate pseudo as destination.
8768 It's not correct for flow to treat them as a unit. */
8769 if (GET_CODE (target) != CONCAT)
8770 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8771 else
8772 emit_insns (insns);
8773
8774 return target;
8775 }
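      /* Put another way (a sketch; A and B are illustrative): for a complex
	 value a + b*i the code above copies A into the real part of TARGET
	 and stores -B into the imaginary part, using the trapping negate
	 (negv_optab) only when the part mode is a signed integral mode and
	 -ftrapv is in effect.  */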
8776
8777 case TRY_CATCH_EXPR:
8778 {
8779 tree handler = TREE_OPERAND (exp, 1);
8780
8781 expand_eh_region_start ();
8782
8783 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8784
8785 expand_eh_region_end_cleanup (handler);
8786
8787 return op0;
8788 }
8789
8790 case TRY_FINALLY_EXPR:
8791 {
8792 tree try_block = TREE_OPERAND (exp, 0);
8793 tree finally_block = TREE_OPERAND (exp, 1);
8794 rtx finally_label = gen_label_rtx ();
8795 rtx done_label = gen_label_rtx ();
8796 rtx return_link = gen_reg_rtx (Pmode);
8797 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8798 (tree) finally_label, (tree) return_link);
8799 TREE_SIDE_EFFECTS (cleanup) = 1;
8800
8801 /* Start a new binding layer that will keep track of all cleanup
8802 actions to be performed. */
8803 expand_start_bindings (2);
8804
8805 target_temp_slot_level = temp_slot_level;
8806
8807 expand_decl_cleanup (NULL_TREE, cleanup);
8808 op0 = expand_expr (try_block, target, tmode, modifier);
8809
8810 preserve_temp_slots (op0);
8811 expand_end_bindings (NULL_TREE, 0, 0);
8812 emit_jump (done_label);
8813 emit_label (finally_label);
8814 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8815 emit_indirect_jump (return_link);
8816 emit_label (done_label);
8817 return op0;
8818 }
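      /* A sketch of the layout produced above:

	     <try_block>
	     goto done_label;
	  finally_label:
	     <finally_block>
	     goto *return_link;
	  done_label:

	 Each exit from TRY_BLOCK runs the registered cleanup, which loads
	 RETURN_LINK with the address of its resume point and jumps to
	 FINALLY_LABEL, so FINALLY_BLOCK always runs and control then comes
	 back through the indirect jump.  */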
8819
8820 case GOTO_SUBROUTINE_EXPR:
8821 {
8822 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8823 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8824 rtx return_address = gen_label_rtx ();
8825 emit_move_insn (return_link,
8826 gen_rtx_LABEL_REF (Pmode, return_address));
8827 emit_jump (subr);
8828 emit_label (return_address);
8829 return const0_rtx;
8830 }
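      /* In effect (a sketch; the RESUME name is illustrative) this emits

	     return_link = &&resume;
	     goto <the label in operand 0>;
	  resume: ;

	 which is the jump-to-subroutine half of the TRY_FINALLY_EXPR scheme
	 sketched above.  */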
8831
8832 case VA_ARG_EXPR:
8833 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8834
8835 case EXC_PTR_EXPR:
8836 return get_exception_pointer (cfun);
8837
8838 default:
8839 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8840 }
8841
8842 /* Here to do an ordinary binary operator, generating an instruction
8843 from the optab already placed in `this_optab'. */
8844 binop:
8845 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8846 subtarget = 0;
8847 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8848 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8849 binop2:
8850 temp = expand_binop (mode, this_optab, op0, op1, target,
8851 unsignedp, OPTAB_LIB_WIDEN);
8852 if (temp == 0)
8853 abort ();
8854 return temp;
8855 }
8856 \f
8857 /* Similar to expand_expr, except that we don't specify a target, target
8858 mode, or modifier and we return the alignment of the inner type. This is
8859 used in cases where it is not necessary to align the result to the
8860 alignment of its type as long as we know the alignment of the result, for
8861 example for comparisons of BLKmode values. */
8862
8863 static rtx
8864 expand_expr_unaligned (exp, palign)
8865 register tree exp;
8866 unsigned int *palign;
8867 {
8868 register rtx op0;
8869 tree type = TREE_TYPE (exp);
8870 register enum machine_mode mode = TYPE_MODE (type);
8871
8872 /* Default the alignment we return to that of the type. */
8873 *palign = TYPE_ALIGN (type);
8874
8875   /* The only case in which we do anything special is if the resulting mode
8876      is BLKmode.  */
8877 if (mode != BLKmode)
8878 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8879
8880 switch (TREE_CODE (exp))
8881 {
8882 case CONVERT_EXPR:
8883 case NOP_EXPR:
8884 case NON_LVALUE_EXPR:
8885 /* Conversions between BLKmode values don't change the underlying
8886 alignment or value. */
8887 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8888 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8889 break;
8890
8891 case ARRAY_REF:
8892 /* Much of the code for this case is copied directly from expand_expr.
8893 We need to duplicate it here because we will do something different
8894 in the fall-through case, so we need to handle the same exceptions
8895 it does. */
8896 {
8897 tree array = TREE_OPERAND (exp, 0);
8898 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8899 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8900 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8901 HOST_WIDE_INT i;
8902
8903 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8904 abort ();
8905
8906 /* Optimize the special-case of a zero lower bound.
8907
8908 We convert the low_bound to sizetype to avoid some problems
8909 with constant folding. (E.g. suppose the lower bound is 1,
8910 and its mode is QI. Without the conversion, (ARRAY
8911 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8912 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8913
8914 if (! integer_zerop (low_bound))
8915 index = size_diffop (index, convert (sizetype, low_bound));
8916
8917 /* If this is a constant index into a constant array,
8918 	   just get the value from the array.  Handle both the case where
8919 	   we have an explicit constructor and the case where our operand is
8920 	   a variable that was declared const.  */
8921
8922 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8923 && host_integerp (index, 0)
8924 && 0 > compare_tree_int (index,
8925 list_length (CONSTRUCTOR_ELTS
8926 (TREE_OPERAND (exp, 0)))))
8927 {
8928 tree elem;
8929
8930 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8931 i = tree_low_cst (index, 0);
8932 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8933 ;
8934
8935 if (elem)
8936 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8937 }
8938
8939 else if (optimize >= 1
8940 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8941 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8942 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8943 {
8944 if (TREE_CODE (index) == INTEGER_CST)
8945 {
8946 tree init = DECL_INITIAL (array);
8947
8948 if (TREE_CODE (init) == CONSTRUCTOR)
8949 {
8950 tree elem;
8951
8952 for (elem = CONSTRUCTOR_ELTS (init);
8953 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8954 elem = TREE_CHAIN (elem))
8955 ;
8956
8957 if (elem)
8958 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8959 palign);
8960 }
8961 }
8962 }
8963 }
8964 /* Fall through. */
8965
8966 case COMPONENT_REF:
8967 case BIT_FIELD_REF:
8968 case ARRAY_RANGE_REF:
8969 /* If the operand is a CONSTRUCTOR, we can just extract the
8970 appropriate field if it is present. Don't do this if we have
8971 already written the data since we want to refer to that copy
8972 and varasm.c assumes that's what we'll do. */
8973 if (TREE_CODE (exp) == COMPONENT_REF
8974 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8975 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8976 {
8977 tree elt;
8978
8979 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8980 elt = TREE_CHAIN (elt))
8981 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8982 /* Note that unlike the case in expand_expr, we know this is
8983 BLKmode and hence not an integer. */
8984 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8985 }
8986
8987 {
8988 enum machine_mode mode1;
8989 HOST_WIDE_INT bitsize, bitpos;
8990 tree offset;
8991 int volatilep = 0;
8992 unsigned int alignment;
8993 int unsignedp;
8994 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8995 &mode1, &unsignedp, &volatilep,
8996 &alignment);
8997
8998 /* If we got back the original object, something is wrong. Perhaps
8999 we are evaluating an expression too early. In any event, don't
9000 infinitely recurse. */
9001 if (tem == exp)
9002 abort ();
9003
9004 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9005
9006 	/* If this is a constant, put it into a register if it is a
9007 	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
9008 if (CONSTANT_P (op0))
9009 {
9010 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9011
9012 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9013 && offset == 0)
9014 op0 = force_reg (inner_mode, op0);
9015 else
9016 op0 = validize_mem (force_const_mem (inner_mode, op0));
9017 }
9018
9019 if (offset != 0)
9020 {
9021 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9022
9023 /* If this object is in a register, put it into memory.
9024 This case can't occur in C, but can in Ada if we have
9025 unchecked conversion of an expression from a scalar type to
9026 an array or record type. */
9027 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9028 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9029 {
9030 tree nt = build_qualified_type (TREE_TYPE (tem),
9031 (TYPE_QUALS (TREE_TYPE (tem))
9032 | TYPE_QUAL_CONST));
9033 rtx memloc = assign_temp (nt, 1, 1, 1);
9034
9035 mark_temp_addr_taken (memloc);
9036 emit_move_insn (memloc, op0);
9037 op0 = memloc;
9038 }
9039
9040 if (GET_CODE (op0) != MEM)
9041 abort ();
9042
9043 if (GET_MODE (offset_rtx) != ptr_mode)
9044 {
9045 #ifdef POINTERS_EXTEND_UNSIGNED
9046 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9047 #else
9048 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9049 #endif
9050 }
9051
9052 op0 = change_address (op0, VOIDmode,
9053 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9054 force_reg (ptr_mode,
9055 offset_rtx)));
9056 }
9057
9058 /* Don't forget about volatility even if this is a bitfield. */
9059 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9060 {
9061 op0 = copy_rtx (op0);
9062 MEM_VOLATILE_P (op0) = 1;
9063 }
9064
9065 /* Check the access. */
9066 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9067 {
9068 rtx to;
9069 int size;
9070
9071 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9072 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9073
9074 	    /* Check the access rights of the pointer.  */
9075 in_check_memory_usage = 1;
9076 if (size > BITS_PER_UNIT)
9077 emit_library_call (chkr_check_addr_libfunc,
9078 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9079 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9080 TYPE_MODE (sizetype),
9081 GEN_INT (MEMORY_USE_RO),
9082 TYPE_MODE (integer_type_node));
9083 in_check_memory_usage = 0;
9084 }
9085
9086 /* In cases where an aligned union has an unaligned object
9087 as a field, we might be extracting a BLKmode value from
9088 an integer-mode (e.g., SImode) object. Handle this case
9089 by doing the extract into an object as wide as the field
9090 (which we know to be the width of a basic mode), then
9091 storing into memory, and changing the mode to BLKmode.
9092 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9093 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9094 if (mode1 == VOIDmode
9095 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9096 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9097 && (TYPE_ALIGN (type) > alignment
9098 || bitpos % TYPE_ALIGN (type) != 0)))
9099 {
9100 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9101
9102 if (ext_mode == BLKmode)
9103 {
9104 /* In this case, BITPOS must start at a byte boundary. */
9105 if (GET_CODE (op0) != MEM
9106 || bitpos % BITS_PER_UNIT != 0)
9107 abort ();
9108
9109 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9110 }
9111 else
9112 {
9113 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9114 TYPE_QUAL_CONST);
9115 rtx new = assign_temp (nt, 0, 1, 1);
9116
9117 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9118 unsignedp, NULL_RTX, ext_mode,
9119 ext_mode, alignment,
9120 int_size_in_bytes (TREE_TYPE (tem)));
9121
9122 /* If the result is a record type and BITSIZE is narrower than
9123 the mode of OP0, an integral mode, and this is a big endian
9124 machine, we must put the field into the high-order bits. */
9125 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9126 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9127 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9128 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9129 size_int (GET_MODE_BITSIZE
9130 (GET_MODE (op0))
9131 - bitsize),
9132 op0, 1);
9133
9134 emit_move_insn (new, op0);
9135 op0 = copy_rtx (new);
9136 PUT_MODE (op0, BLKmode);
9137 }
9138 }
9139 else
9140 /* Get a reference to just this component. */
9141 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9142
9143 set_mem_alias_set (op0, get_alias_set (exp));
9144
9145 /* Adjust the alignment in case the bit position is not
9146 a multiple of the alignment of the inner object. */
9147 while (bitpos % alignment != 0)
9148 alignment >>= 1;
9149
9150 if (GET_CODE (XEXP (op0, 0)) == REG)
9151 mark_reg_pointer (XEXP (op0, 0), alignment);
9152
9153 MEM_IN_STRUCT_P (op0) = 1;
9154 MEM_VOLATILE_P (op0) |= volatilep;
9155
9156 *palign = alignment;
9157 return op0;
9158 }
9159
9160 default:
9161 break;
9162
9163 }
9164
9165 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9166 }
9167 \f
9168 /* Return the tree node if ARG corresponds to a string constant, or zero
9169    if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
9170 in bytes within the string that ARG is accessing. The type of the
9171 offset will be `sizetype'. */
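/* For example (purely illustrative): for the tree corresponding to
   "hello" + 2 this returns the STRING_CST "hello" and sets *PTR_OFFSET
   to the sizetype constant 2, while for a plain &"hello" the offset
   returned is size_zero_node.  */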
9172
9173 tree
9174 string_constant (arg, ptr_offset)
9175 tree arg;
9176 tree *ptr_offset;
9177 {
9178 STRIP_NOPS (arg);
9179
9180 if (TREE_CODE (arg) == ADDR_EXPR
9181 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9182 {
9183 *ptr_offset = size_zero_node;
9184 return TREE_OPERAND (arg, 0);
9185 }
9186 else if (TREE_CODE (arg) == PLUS_EXPR)
9187 {
9188 tree arg0 = TREE_OPERAND (arg, 0);
9189 tree arg1 = TREE_OPERAND (arg, 1);
9190
9191 STRIP_NOPS (arg0);
9192 STRIP_NOPS (arg1);
9193
9194 if (TREE_CODE (arg0) == ADDR_EXPR
9195 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9196 {
9197 *ptr_offset = convert (sizetype, arg1);
9198 return TREE_OPERAND (arg0, 0);
9199 }
9200 else if (TREE_CODE (arg1) == ADDR_EXPR
9201 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9202 {
9203 *ptr_offset = convert (sizetype, arg0);
9204 return TREE_OPERAND (arg1, 0);
9205 }
9206 }
9207
9208 return 0;
9209 }
9210 \f
9211 /* Expand code for a post- or pre- increment or decrement
9212 and return the RTX for the result.
9213 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
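/* A rough sketch of the common cases (names illustrative): for "i++"
   whose value is used, either the add is queued on I or the old value is
   first copied to a temporary, the increment is performed, and the copy
   is returned; for "++i" the incremented value itself is returned.  */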
9214
9215 static rtx
9216 expand_increment (exp, post, ignore)
9217 register tree exp;
9218 int post, ignore;
9219 {
9220 register rtx op0, op1;
9221 register rtx temp, value;
9222 register tree incremented = TREE_OPERAND (exp, 0);
9223 optab this_optab = add_optab;
9224 int icode;
9225 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9226 int op0_is_copy = 0;
9227 int single_insn = 0;
9228 /* 1 means we can't store into OP0 directly,
9229 because it is a subreg narrower than a word,
9230 and we don't dare clobber the rest of the word. */
9231 int bad_subreg = 0;
9232
9233 /* Stabilize any component ref that might need to be
9234 evaluated more than once below. */
9235 if (!post
9236 || TREE_CODE (incremented) == BIT_FIELD_REF
9237 || (TREE_CODE (incremented) == COMPONENT_REF
9238 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9239 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9240 incremented = stabilize_reference (incremented);
9241 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9242 ones into save exprs so that they don't accidentally get evaluated
9243 more than once by the code below. */
9244 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9245 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9246 incremented = save_expr (incremented);
9247
9248 /* Compute the operands as RTX.
9249 Note whether OP0 is the actual lvalue or a copy of it:
9250 I believe it is a copy iff it is a register or subreg
9251 and insns were generated in computing it. */
9252
9253 temp = get_last_insn ();
9254 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9255
9256 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9257 in place but instead must do sign- or zero-extension during assignment,
9258 so we copy it into a new register and let the code below use it as
9259 a copy.
9260
9261      Note that we can safely modify this SUBREG since it is known not to be
9262 shared (it was made by the expand_expr call above). */
9263
9264 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9265 {
9266 if (post)
9267 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9268 else
9269 bad_subreg = 1;
9270 }
9271 else if (GET_CODE (op0) == SUBREG
9272 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9273 {
9274 /* We cannot increment this SUBREG in place. If we are
9275 post-incrementing, get a copy of the old value. Otherwise,
9276 just mark that we cannot increment in place. */
9277 if (post)
9278 op0 = copy_to_reg (op0);
9279 else
9280 bad_subreg = 1;
9281 }
9282
9283 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9284 && temp != get_last_insn ());
9285 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9286 EXPAND_MEMORY_USE_BAD);
9287
9288 /* Decide whether incrementing or decrementing. */
9289 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9290 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9291 this_optab = sub_optab;
9292
9293 /* Convert decrement by a constant into a negative increment. */
9294 if (this_optab == sub_optab
9295 && GET_CODE (op1) == CONST_INT)
9296 {
9297 op1 = GEN_INT (-INTVAL (op1));
9298 this_optab = add_optab;
9299 }
9300
9301 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9302 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9303
9304 /* For a preincrement, see if we can do this with a single instruction. */
9305 if (!post)
9306 {
9307 icode = (int) this_optab->handlers[(int) mode].insn_code;
9308 if (icode != (int) CODE_FOR_nothing
9309 /* Make sure that OP0 is valid for operands 0 and 1
9310 of the insn we want to queue. */
9311 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9312 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9313 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9314 single_insn = 1;
9315 }
9316
9317 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9318 then we cannot just increment OP0. We must therefore contrive to
9319 increment the original value. Then, for postincrement, we can return
9320 OP0 since it is a copy of the old value. For preincrement, expand here
9321 unless we can do it with a single insn.
9322
9323 Likewise if storing directly into OP0 would clobber high bits
9324 we need to preserve (bad_subreg). */
9325 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9326 {
9327 /* This is the easiest way to increment the value wherever it is.
9328 Problems with multiple evaluation of INCREMENTED are prevented
9329 because either (1) it is a component_ref or preincrement,
9330 in which case it was stabilized above, or (2) it is an array_ref
9331 with constant index in an array in a register, which is
9332 safe to reevaluate. */
9333 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9334 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9335 ? MINUS_EXPR : PLUS_EXPR),
9336 TREE_TYPE (exp),
9337 incremented,
9338 TREE_OPERAND (exp, 1));
9339
9340 while (TREE_CODE (incremented) == NOP_EXPR
9341 || TREE_CODE (incremented) == CONVERT_EXPR)
9342 {
9343 newexp = convert (TREE_TYPE (incremented), newexp);
9344 incremented = TREE_OPERAND (incremented, 0);
9345 }
9346
9347       temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9348 return post ? op0 : temp;
9349 }
9350
9351 if (post)
9352 {
9353 /* We have a true reference to the value in OP0.
9354 If there is an insn to add or subtract in this mode, queue it.
9355 Queueing the increment insn avoids the register shuffling
9356 that often results if we must increment now and first save
9357 the old value for subsequent use. */
9358
9359 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9360 op0 = stabilize (op0);
9361 #endif
9362
9363 icode = (int) this_optab->handlers[(int) mode].insn_code;
9364 if (icode != (int) CODE_FOR_nothing
9365 /* Make sure that OP0 is valid for operands 0 and 1
9366 of the insn we want to queue. */
9367 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9368 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9369 {
9370 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9371 op1 = force_reg (mode, op1);
9372
9373 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9374 }
9375 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9376 {
9377 rtx addr = (general_operand (XEXP (op0, 0), mode)
9378 ? force_reg (Pmode, XEXP (op0, 0))
9379 : copy_to_reg (XEXP (op0, 0)));
9380 rtx temp, result;
9381
9382 op0 = replace_equiv_address (op0, addr);
9383 temp = force_reg (GET_MODE (op0), op0);
9384 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9385 op1 = force_reg (mode, op1);
9386
9387 	  /* The increment queue is LIFO, so we have to `queue'
9388 the instructions in reverse order. */
9389 enqueue_insn (op0, gen_move_insn (op0, temp));
9390 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9391 return result;
9392 }
9393 }
9394
9395 /* Preincrement, or we can't increment with one simple insn. */
9396 if (post)
9397 /* Save a copy of the value before inc or dec, to return it later. */
9398 temp = value = copy_to_reg (op0);
9399 else
9400 /* Arrange to return the incremented value. */
9401 /* Copy the rtx because expand_binop will protect from the queue,
9402 and the results of that would be invalid for us to return
9403 if our caller does emit_queue before using our result. */
9404 temp = copy_rtx (value = op0);
9405
9406 /* Increment however we can. */
9407 op1 = expand_binop (mode, this_optab, value, op1,
9408 current_function_check_memory_usage ? NULL_RTX : op0,
9409 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9410 /* Make sure the value is stored into OP0. */
9411 if (op1 != op0)
9412 emit_move_insn (op0, op1);
9413
9414 return temp;
9415 }
9416 \f
9417 /* At the start of a function, record that we have no previously-pushed
9418 arguments waiting to be popped. */
9419
9420 void
9421 init_pending_stack_adjust ()
9422 {
9423 pending_stack_adjust = 0;
9424 }
9425
9426 /* When exiting from function, if safe, clear out any pending stack adjust
9427 so the adjustment won't get done.
9428
9429 Note, if the current function calls alloca, then it must have a
9430 frame pointer regardless of the value of flag_omit_frame_pointer. */
9431
9432 void
9433 clear_pending_stack_adjust ()
9434 {
9435 #ifdef EXIT_IGNORE_STACK
9436 if (optimize > 0
9437 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9438 && EXIT_IGNORE_STACK
9439 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9440 && ! flag_inline_functions)
9441 {
9442       stack_pointer_delta -= pending_stack_adjust;
9443       pending_stack_adjust = 0;
9444 }
9445 #endif
9446 }
9447
9448 /* Pop any previously-pushed arguments that have not been popped yet. */
9449
9450 void
9451 do_pending_stack_adjust ()
9452 {
9453 if (inhibit_defer_pop == 0)
9454 {
9455 if (pending_stack_adjust != 0)
9456 adjust_stack (GEN_INT (pending_stack_adjust));
9457 pending_stack_adjust = 0;
9458 }
9459 }
9460 \f
9461 /* Expand conditional expressions. */
9462
9463 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9464 LABEL is an rtx of code CODE_LABEL, in this function and all the
9465 functions here. */
9466
9467 void
9468 jumpifnot (exp, label)
9469 tree exp;
9470 rtx label;
9471 {
9472 do_jump (exp, label, NULL_RTX);
9473 }
9474
9475 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9476
9477 void
9478 jumpif (exp, label)
9479 tree exp;
9480 rtx label;
9481 {
9482 do_jump (exp, NULL_RTX, label);
9483 }
9484
9485 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9486 the result is zero, or IF_TRUE_LABEL if the result is one.
9487 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9488 meaning fall through in that case.
9489
9490 do_jump always does any pending stack adjust except when it does not
9491 actually perform a jump. An example where there is no jump
9492 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9493
9494 This function is responsible for optimizing cases such as
9495 &&, || and comparison operators in EXP. */
9496
9497 void
9498 do_jump (exp, if_false_label, if_true_label)
9499 tree exp;
9500 rtx if_false_label, if_true_label;
9501 {
9502 register enum tree_code code = TREE_CODE (exp);
9503 /* Some cases need to create a label to jump to
9504 in order to properly fall through.
9505 These cases set DROP_THROUGH_LABEL nonzero. */
9506 rtx drop_through_label = 0;
9507 rtx temp;
9508 int i;
9509 tree type;
9510 enum machine_mode mode;
9511
9512 #ifdef MAX_INTEGER_COMPUTATION_MODE
9513 check_max_integer_computation_mode (exp);
9514 #endif
9515
9516 emit_queue ();
9517
9518 switch (code)
9519 {
9520 case ERROR_MARK:
9521 break;
9522
9523 case INTEGER_CST:
9524 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9525 if (temp)
9526 emit_jump (temp);
9527 break;
9528
9529 #if 0
9530 /* This is not true with #pragma weak */
9531 case ADDR_EXPR:
9532 /* The address of something can never be zero. */
9533 if (if_true_label)
9534 emit_jump (if_true_label);
9535 break;
9536 #endif
9537
9538 case NOP_EXPR:
9539 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9540 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9541 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9542 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9543 goto normal;
9544 case CONVERT_EXPR:
9545 /* If we are narrowing the operand, we have to do the compare in the
9546 narrower mode. */
9547 if ((TYPE_PRECISION (TREE_TYPE (exp))
9548 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9549 goto normal;
9550 case NON_LVALUE_EXPR:
9551 case REFERENCE_EXPR:
9552 case ABS_EXPR:
9553 case NEGATE_EXPR:
9554 case LROTATE_EXPR:
9555 case RROTATE_EXPR:
9556 /* These cannot change zero->non-zero or vice versa. */
9557 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9558 break;
9559
9560 case WITH_RECORD_EXPR:
9561 /* Put the object on the placeholder list, recurse through our first
9562 operand, and pop the list. */
9563 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9564 placeholder_list);
9565 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9566 placeholder_list = TREE_CHAIN (placeholder_list);
9567 break;
9568
9569 #if 0
9570   /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9571      a test, and can be longer if the test is eliminated.  */
9572 case PLUS_EXPR:
9573 /* Reduce to minus. */
9574 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9575 TREE_OPERAND (exp, 0),
9576 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9577 TREE_OPERAND (exp, 1))));
9578 /* Process as MINUS. */
9579 #endif
9580
9581 case MINUS_EXPR:
9582 /* Non-zero iff operands of minus differ. */
9583 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9584 TREE_OPERAND (exp, 0),
9585 TREE_OPERAND (exp, 1)),
9586 NE, NE, if_false_label, if_true_label);
9587 break;
9588
9589 case BIT_AND_EXPR:
9590 /* If we are AND'ing with a small constant, do this comparison in the
9591 smallest type that fits. If the machine doesn't have comparisons
9592 that small, it will be converted back to the wider comparison.
9593 This helps if we are testing the sign bit of a narrower object.
9594 combine can't do this for us because it can't know whether a
9595 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
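      /* Illustrative example (the variable is hypothetical): for a test
	 like "if (x & 0x40)" on an int X, I is 6, so the whole expression
	 is converted to the 8-bit type for QImode and the jump is done on
	 that narrower value, provided the target has a QImode compare.  */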
9596
9597 if (! SLOW_BYTE_ACCESS
9598 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9599 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9600 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9601 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9602 && (type = type_for_mode (mode, 1)) != 0
9603 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9604 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9605 != CODE_FOR_nothing))
9606 {
9607 do_jump (convert (type, exp), if_false_label, if_true_label);
9608 break;
9609 }
9610 goto normal;
9611
9612 case TRUTH_NOT_EXPR:
9613 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9614 break;
9615
9616 case TRUTH_ANDIF_EXPR:
9617 if (if_false_label == 0)
9618 if_false_label = drop_through_label = gen_label_rtx ();
9619 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9620 start_cleanup_deferral ();
9621 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9622 end_cleanup_deferral ();
9623 break;
9624
9625 case TRUTH_ORIF_EXPR:
9626 if (if_true_label == 0)
9627 if_true_label = drop_through_label = gen_label_rtx ();
9628 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9629 start_cleanup_deferral ();
9630 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9631 end_cleanup_deferral ();
9632 break;
9633
9634 case COMPOUND_EXPR:
9635 push_temp_slots ();
9636 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9637 preserve_temp_slots (NULL_RTX);
9638 free_temp_slots ();
9639 pop_temp_slots ();
9640 emit_queue ();
9641 do_pending_stack_adjust ();
9642 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9643 break;
9644
9645 case COMPONENT_REF:
9646 case BIT_FIELD_REF:
9647 case ARRAY_REF:
9648 case ARRAY_RANGE_REF:
9649 {
9650 HOST_WIDE_INT bitsize, bitpos;
9651 int unsignedp;
9652 enum machine_mode mode;
9653 tree type;
9654 tree offset;
9655 int volatilep = 0;
9656 unsigned int alignment;
9657
9658 /* Get description of this reference. We don't actually care
9659 about the underlying object here. */
9660 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9661 &unsignedp, &volatilep, &alignment);
9662
9663 type = type_for_size (bitsize, unsignedp);
9664 if (! SLOW_BYTE_ACCESS
9665 && type != 0 && bitsize >= 0
9666 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9667 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9668 != CODE_FOR_nothing))
9669 {
9670 do_jump (convert (type, exp), if_false_label, if_true_label);
9671 break;
9672 }
9673 goto normal;
9674 }
9675
9676 case COND_EXPR:
9677 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9678 if (integer_onep (TREE_OPERAND (exp, 1))
9679 && integer_zerop (TREE_OPERAND (exp, 2)))
9680 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9681
9682 else if (integer_zerop (TREE_OPERAND (exp, 1))
9683 && integer_onep (TREE_OPERAND (exp, 2)))
9684 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9685
9686 else
9687 {
9688 register rtx label1 = gen_label_rtx ();
9689 drop_through_label = gen_label_rtx ();
9690
9691 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9692
9693 start_cleanup_deferral ();
9694 /* Now the THEN-expression. */
9695 do_jump (TREE_OPERAND (exp, 1),
9696 if_false_label ? if_false_label : drop_through_label,
9697 if_true_label ? if_true_label : drop_through_label);
9698 /* In case the do_jump just above never jumps. */
9699 do_pending_stack_adjust ();
9700 emit_label (label1);
9701
9702 /* Now the ELSE-expression. */
9703 do_jump (TREE_OPERAND (exp, 2),
9704 if_false_label ? if_false_label : drop_through_label,
9705 if_true_label ? if_true_label : drop_through_label);
9706 end_cleanup_deferral ();
9707 }
9708 break;
9709
9710 case EQ_EXPR:
9711 {
9712 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9713
9714 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9715 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9716 {
9717 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9718 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9719 do_jump
9720 (fold
9721 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9722 fold (build (EQ_EXPR, TREE_TYPE (exp),
9723 fold (build1 (REALPART_EXPR,
9724 TREE_TYPE (inner_type),
9725 exp0)),
9726 fold (build1 (REALPART_EXPR,
9727 TREE_TYPE (inner_type),
9728 exp1)))),
9729 fold (build (EQ_EXPR, TREE_TYPE (exp),
9730 fold (build1 (IMAGPART_EXPR,
9731 TREE_TYPE (inner_type),
9732 exp0)),
9733 fold (build1 (IMAGPART_EXPR,
9734 TREE_TYPE (inner_type),
9735 exp1)))))),
9736 if_false_label, if_true_label);
9737 }
9738
9739 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9740 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9741
9742 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9743 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9744 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9745 else
9746 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9747 break;
9748 }
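      /* In other words, a complex equality is decomposed above into
	 REALPART (a) == REALPART (b) && IMAGPART (a) == IMAGPART (b),
	 and the NE_EXPR case below uses the dual form with ||.  */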
9749
9750 case NE_EXPR:
9751 {
9752 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9753
9754 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9755 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9756 {
9757 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9758 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9759 do_jump
9760 (fold
9761 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9762 fold (build (NE_EXPR, TREE_TYPE (exp),
9763 fold (build1 (REALPART_EXPR,
9764 TREE_TYPE (inner_type),
9765 exp0)),
9766 fold (build1 (REALPART_EXPR,
9767 TREE_TYPE (inner_type),
9768 exp1)))),
9769 fold (build (NE_EXPR, TREE_TYPE (exp),
9770 fold (build1 (IMAGPART_EXPR,
9771 TREE_TYPE (inner_type),
9772 exp0)),
9773 fold (build1 (IMAGPART_EXPR,
9774 TREE_TYPE (inner_type),
9775 exp1)))))),
9776 if_false_label, if_true_label);
9777 }
9778
9779 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9780 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9781
9782 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9783 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9784 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9785 else
9786 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9787 break;
9788 }
9789
9790 case LT_EXPR:
9791 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9792 if (GET_MODE_CLASS (mode) == MODE_INT
9793 && ! can_compare_p (LT, mode, ccp_jump))
9794 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9795 else
9796 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9797 break;
9798
9799 case LE_EXPR:
9800 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9801 if (GET_MODE_CLASS (mode) == MODE_INT
9802 && ! can_compare_p (LE, mode, ccp_jump))
9803 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9804 else
9805 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9806 break;
9807
9808 case GT_EXPR:
9809 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9810 if (GET_MODE_CLASS (mode) == MODE_INT
9811 && ! can_compare_p (GT, mode, ccp_jump))
9812 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9813 else
9814 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9815 break;
9816
9817 case GE_EXPR:
9818 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9819 if (GET_MODE_CLASS (mode) == MODE_INT
9820 && ! can_compare_p (GE, mode, ccp_jump))
9821 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9822 else
9823 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9824 break;
9825
9826 case UNORDERED_EXPR:
9827 case ORDERED_EXPR:
9828 {
9829 enum rtx_code cmp, rcmp;
9830 int do_rev;
9831
9832 if (code == UNORDERED_EXPR)
9833 cmp = UNORDERED, rcmp = ORDERED;
9834 else
9835 cmp = ORDERED, rcmp = UNORDERED;
9836 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9837
9838 do_rev = 0;
9839 if (! can_compare_p (cmp, mode, ccp_jump)
9840 && (can_compare_p (rcmp, mode, ccp_jump)
9841 /* If the target doesn't provide either UNORDERED or ORDERED
9842 comparisons, canonicalize on UNORDERED for the library. */
9843 || rcmp == UNORDERED))
9844 do_rev = 1;
9845
9846 if (! do_rev)
9847 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9848 else
9849 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9850 }
9851 break;
9852
9853 {
9854 enum rtx_code rcode1;
9855 enum tree_code tcode2;
9856
9857 case UNLT_EXPR:
9858 rcode1 = UNLT;
9859 tcode2 = LT_EXPR;
9860 goto unordered_bcc;
9861 case UNLE_EXPR:
9862 rcode1 = UNLE;
9863 tcode2 = LE_EXPR;
9864 goto unordered_bcc;
9865 case UNGT_EXPR:
9866 rcode1 = UNGT;
9867 tcode2 = GT_EXPR;
9868 goto unordered_bcc;
9869 case UNGE_EXPR:
9870 rcode1 = UNGE;
9871 tcode2 = GE_EXPR;
9872 goto unordered_bcc;
9873 case UNEQ_EXPR:
9874 rcode1 = UNEQ;
9875 tcode2 = EQ_EXPR;
9876 goto unordered_bcc;
9877
9878 unordered_bcc:
9879 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9880 if (can_compare_p (rcode1, mode, ccp_jump))
9881 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9882 if_true_label);
9883 else
9884 {
9885 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9886 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9887 tree cmp0, cmp1;
9888
9889 /* If the target doesn't support combined unordered
9890 compares, decompose into UNORDERED + comparison. */
9891 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9892 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9893 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9894 do_jump (exp, if_false_label, if_true_label);
9895 }
9896 }
9897 break;
9898
9899 /* Special case:
9900 __builtin_expect (<test>, 0) and
9901 __builtin_expect (<test>, 1)
9902
9903 We need to do this here, so that <test> is not converted to a SCC
9904 operation on machines that use condition code registers and COMPARE
9905 like the PowerPC, and then the jump is done based on whether the SCC
9906 operation produced a 1 or 0. */
9907 case CALL_EXPR:
9908 /* Check for a built-in function. */
9909 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9910 {
9911 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9912 tree arglist = TREE_OPERAND (exp, 1);
9913
9914 if (TREE_CODE (fndecl) == FUNCTION_DECL
9915 && DECL_BUILT_IN (fndecl)
9916 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9917 && arglist != NULL_TREE
9918 && TREE_CHAIN (arglist) != NULL_TREE)
9919 {
9920 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9921 if_true_label);
9922
9923 if (seq != NULL_RTX)
9924 {
9925 emit_insn (seq);
9926 return;
9927 }
9928 }
9929 }
9930 /* fall through and generate the normal code. */
9931
9932 default:
9933 normal:
9934 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9935 #if 0
9936 /* This is not needed any more and causes poor code since it causes
9937 comparisons and tests from non-SI objects to have different code
9938 sequences. */
9939 /* Copy to register to avoid generating bad insns by cse
9940 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9941 if (!cse_not_expected && GET_CODE (temp) == MEM)
9942 temp = copy_to_reg (temp);
9943 #endif
9944 do_pending_stack_adjust ();
9945 /* Do any postincrements in the expression that was tested. */
9946 emit_queue ();
9947
9948 if (GET_CODE (temp) == CONST_INT
9949 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9950 || GET_CODE (temp) == LABEL_REF)
9951 {
9952 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9953 if (target)
9954 emit_jump (target);
9955 }
9956 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9957 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9958 /* Note swapping the labels gives us not-equal. */
9959 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9960 else if (GET_MODE (temp) != VOIDmode)
9961 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9962 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9963 GET_MODE (temp), NULL_RTX, 0,
9964 if_false_label, if_true_label);
9965 else
9966 abort ();
9967 }
9968
9969 if (drop_through_label)
9970 {
9971 /* If do_jump produces code that might be jumped around,
9972 do any stack adjusts from that code, before the place
9973 where control merges in. */
9974 do_pending_stack_adjust ();
9975 emit_label (drop_through_label);
9976 }
9977 }
9978 \f
9979 /* Given a comparison expression EXP for values too wide to be compared
9980 with one insn, test the comparison and jump to the appropriate label.
9981 The code of EXP is ignored; we always test GT if SWAP is 0,
9982 and LT if SWAP is 1. */
9983
9984 static void
9985 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9986 tree exp;
9987 int swap;
9988 rtx if_false_label, if_true_label;
9989 {
9990 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9991 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9992 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9993 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9994
9995 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9996 }
9997
9998 /* Compare OP0 with OP1, a word at a time, in mode MODE.
9999 UNSIGNEDP says to do unsigned comparison.
10000 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
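/* For example (a sketch only): for a two-word comparison A > B this
   emits, high-order word first,

	if (A.hi > B.hi) goto if_true_label;
	if (A.hi != B.hi) goto if_false_label;
	if ((unsigned) A.lo > (unsigned) B.lo) goto if_true_label;
	goto if_false_label;

   where only the high-order test honors UNSIGNEDP; the remaining words
   are always compared unsigned.  */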
10001
10002 void
10003 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10004 enum machine_mode mode;
10005 int unsignedp;
10006 rtx op0, op1;
10007 rtx if_false_label, if_true_label;
10008 {
10009 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10010 rtx drop_through_label = 0;
10011 int i;
10012
10013 if (! if_true_label || ! if_false_label)
10014 drop_through_label = gen_label_rtx ();
10015 if (! if_true_label)
10016 if_true_label = drop_through_label;
10017 if (! if_false_label)
10018 if_false_label = drop_through_label;
10019
10020 /* Compare a word at a time, high order first. */
10021 for (i = 0; i < nwords; i++)
10022 {
10023 rtx op0_word, op1_word;
10024
10025 if (WORDS_BIG_ENDIAN)
10026 {
10027 op0_word = operand_subword_force (op0, i, mode);
10028 op1_word = operand_subword_force (op1, i, mode);
10029 }
10030 else
10031 {
10032 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10033 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10034 }
10035
10036       /* All but the high-order word must be compared as unsigned.  */
10037 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10038 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10039 NULL_RTX, if_true_label);
10040
10041 /* Consider lower words only if these are equal. */
10042 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10043 NULL_RTX, 0, NULL_RTX, if_false_label);
10044 }
10045
10046 if (if_false_label)
10047 emit_jump (if_false_label);
10048 if (drop_through_label)
10049 emit_label (drop_through_label);
10050 }
10051
10052 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10053 with one insn, test the comparison and jump to the appropriate label. */
10054
10055 static void
10056 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10057 tree exp;
10058 rtx if_false_label, if_true_label;
10059 {
10060 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10061 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10062 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10063 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10064 int i;
10065 rtx drop_through_label = 0;
10066
10067 if (! if_false_label)
10068 drop_through_label = if_false_label = gen_label_rtx ();
10069
10070 for (i = 0; i < nwords; i++)
10071 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10072 operand_subword_force (op1, i, mode),
10073 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10074 word_mode, NULL_RTX, 0, if_false_label,
10075 NULL_RTX);
10076
10077 if (if_true_label)
10078 emit_jump (if_true_label);
10079 if (drop_through_label)
10080 emit_label (drop_through_label);
10081 }
10082 \f
10083 /* Jump according to whether OP0 is 0.
10084 We assume that OP0 has an integer mode that is too wide
10085 for the available compare insns. */
10086
10087 void
10088 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10089 rtx op0;
10090 rtx if_false_label, if_true_label;
10091 {
10092 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10093 rtx part;
10094 int i;
10095 rtx drop_through_label = 0;
10096
10097 /* The fastest way of doing this comparison on almost any machine is to
10098 "or" all the words and compare the result. If all have to be loaded
10099 from memory and this is a very wide item, it's possible this may
10100 be slower, but that's highly unlikely. */
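  /* E.g. (sketch): for a double-word OP0 this computes

	part = OP0.word0 | OP0.word1;

     and then jumps according to whether PART is zero.  */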
10101
10102 part = gen_reg_rtx (word_mode);
10103 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10104 for (i = 1; i < nwords && part != 0; i++)
10105 part = expand_binop (word_mode, ior_optab, part,
10106 operand_subword_force (op0, i, GET_MODE (op0)),
10107 part, 1, OPTAB_WIDEN);
10108
10109 if (part != 0)
10110 {
10111 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10112 NULL_RTX, 0, if_false_label, if_true_label);
10113
10114 return;
10115 }
10116
10117 /* If we couldn't do the "or" simply, do this with a series of compares. */
10118 if (! if_false_label)
10119 drop_through_label = if_false_label = gen_label_rtx ();
10120
10121 for (i = 0; i < nwords; i++)
10122 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10123 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10124 if_false_label, NULL_RTX);
10125
10126 if (if_true_label)
10127 emit_jump (if_true_label);
10128
10129 if (drop_through_label)
10130 emit_label (drop_through_label);
10131 }
10132 \f
10133 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10134    (including code to compute the values to be compared),
10135    and set (CC0) according to the result.
10136 The decision as to signed or unsigned comparison must be made by the caller.
10137
10138 We force a stack adjustment unless there are currently
10139 things pushed on the stack that aren't yet used.
10140
10141 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10142 compared.
10143
10144 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10145 size of MODE should be used. */
10146
10147 rtx
10148 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10149 register rtx op0, op1;
10150 enum rtx_code code;
10151 int unsignedp;
10152 enum machine_mode mode;
10153 rtx size;
10154 unsigned int align;
10155 {
10156 rtx tem;
10157
10158 /* If one operand is constant, make it the second one. Only do this
10159 if the other operand is not constant as well. */
10160
10161 if (swap_commutative_operands_p (op0, op1))
10162 {
10163 tem = op0;
10164 op0 = op1;
10165 op1 = tem;
10166 code = swap_condition (code);
10167 }
10168
10169 if (flag_force_mem)
10170 {
10171 op0 = force_not_mem (op0);
10172 op1 = force_not_mem (op1);
10173 }
10174
10175 do_pending_stack_adjust ();
10176
10177 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10178 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10179 return tem;
10180
10181 #if 0
10182 /* There's no need to do this now that combine.c can eliminate lots of
10183 sign extensions. This can be less efficient in certain cases on other
10184 machines. */
10185
10186 /* If this is a signed equality comparison, we can do it as an
10187 unsigned comparison since zero-extension is cheaper than sign
10188 extension and comparisons with zero are done as unsigned. This is
10189 the case even on machines that can do fast sign extension, since
10190 zero-extension is easier to combine with other operations than
10191 sign-extension is. If we are comparing against a constant, we must
10192 convert it to what it would look like unsigned. */
10193 if ((code == EQ || code == NE) && ! unsignedp
10194 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10195 {
10196 if (GET_CODE (op1) == CONST_INT
10197 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10198 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10199 unsignedp = 1;
10200 }
10201 #endif
10202
10203 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10204
10205 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10206 }
10207
10208 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10209 The decision as to signed or unsigned comparison must be made by the caller.
10210
10211 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10212 compared.
10213
10214 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10215 size of MODE should be used. */
10216
10217 void
10218 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10219 if_false_label, if_true_label)
10220 register rtx op0, op1;
10221 enum rtx_code code;
10222 int unsignedp;
10223 enum machine_mode mode;
10224 rtx size;
10225 unsigned int align;
10226 rtx if_false_label, if_true_label;
10227 {
10228 rtx tem;
10229 int dummy_true_label = 0;
10230
10231 /* Reverse the comparison if that is safe and we want to jump if it is
10232 false. */
10233 if (! if_true_label && ! FLOAT_MODE_P (mode))
10234 {
10235 if_true_label = if_false_label;
10236 if_false_label = 0;
10237 code = reverse_condition (code);
10238 }
10239
10240 /* If one operand is constant, make it the second one. Only do this
10241 if the other operand is not constant as well. */
10242
10243 if (swap_commutative_operands_p (op0, op1))
10244 {
10245 tem = op0;
10246 op0 = op1;
10247 op1 = tem;
10248 code = swap_condition (code);
10249 }
10250
10251 if (flag_force_mem)
10252 {
10253 op0 = force_not_mem (op0);
10254 op1 = force_not_mem (op1);
10255 }
10256
10257 do_pending_stack_adjust ();
10258
10259 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10260 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10261 {
10262 if (tem == const_true_rtx)
10263 {
10264 if (if_true_label)
10265 emit_jump (if_true_label);
10266 }
10267 else
10268 {
10269 if (if_false_label)
10270 emit_jump (if_false_label);
10271 }
10272 return;
10273 }
10274
10275 #if 0
10276 /* There's no need to do this now that combine.c can eliminate lots of
10277 sign extensions. This can be less efficient in certain cases on other
10278 machines. */
10279
10280 /* If this is a signed equality comparison, we can do it as an
10281 unsigned comparison since zero-extension is cheaper than sign
10282 extension and comparisons with zero are done as unsigned. This is
10283 the case even on machines that can do fast sign extension, since
10284 zero-extension is easier to combine with other operations than
10285 sign-extension is. If we are comparing against a constant, we must
10286 convert it to what it would look like unsigned. */
10287 if ((code == EQ || code == NE) && ! unsignedp
10288 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10289 {
10290 if (GET_CODE (op1) == CONST_INT
10291 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10292 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10293 unsignedp = 1;
10294 }
10295 #endif
10296
10297 if (! if_true_label)
10298 {
10299 dummy_true_label = 1;
10300 if_true_label = gen_label_rtx ();
10301 }
10302
10303 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10304 if_true_label);
10305
10306 if (if_false_label)
10307 emit_jump (if_false_label);
10308 if (dummy_true_label)
10309 emit_label (if_true_label);
10310 }
10311
10312 /* Generate code for a comparison expression EXP (including code to compute
10313 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10314 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10315 generated code will drop through.
10316 SIGNED_CODE should be the rtx operation for this comparison for
10317 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10318
10319 We force a stack adjustment unless there are currently
10320 things pushed on the stack that aren't yet used. */
10321
10322 static void
10323 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10324 if_true_label)
10325 register tree exp;
10326 enum rtx_code signed_code, unsigned_code;
10327 rtx if_false_label, if_true_label;
10328 {
10329 unsigned int align0, align1;
10330 register rtx op0, op1;
10331 register tree type;
10332 register enum machine_mode mode;
10333 int unsignedp;
10334 enum rtx_code code;
10335
10336 /* Don't crash if the comparison was erroneous. */
10337 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10338 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10339 return;
10340
10341 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10342 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10343 return;
10344
10345 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10346 mode = TYPE_MODE (type);
10347 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10348 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10349 || (GET_MODE_BITSIZE (mode)
10350 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10351 1)))))))
10352 {
10353 /* op0 might have been replaced by a promoted constant, in which
10354 case the type of the second argument should be used. */
10355 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10356 mode = TYPE_MODE (type);
10357 }
10358 unsignedp = TREE_UNSIGNED (type);
10359 code = unsignedp ? unsigned_code : signed_code;
10360
10361 #ifdef HAVE_canonicalize_funcptr_for_compare
10362 /* If function pointers need to be "canonicalized" before they can
10363 be reliably compared, then canonicalize them. */
10364 if (HAVE_canonicalize_funcptr_for_compare
10365 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10366 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10367 == FUNCTION_TYPE))
10368 {
10369 rtx new_op0 = gen_reg_rtx (mode);
10370
10371 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10372 op0 = new_op0;
10373 }
10374
10375 if (HAVE_canonicalize_funcptr_for_compare
10376 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10377 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10378 == FUNCTION_TYPE))
10379 {
10380 rtx new_op1 = gen_reg_rtx (mode);
10381
10382 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10383 op1 = new_op1;
10384 }
10385 #endif
10386
10387 /* Do any postincrements in the expression that was tested. */
10388 emit_queue ();
10389
10390 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10391 ((mode == BLKmode)
10392 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10393 MIN (align0, align1),
10394 if_false_label, if_true_label);
10395 }
10396 \f
10397 /* Generate code to calculate EXP using a store-flag instruction
10398 and return an rtx for the result. EXP is either a comparison
10399 or a TRUTH_NOT_EXPR whose operand is a comparison.
10400
10401 If TARGET is nonzero, store the result there if convenient.
10402
10403 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10404 cheap.
10405
10406 Return zero if there is no suitable set-flag instruction
10407 available on this machine.
10408
10409 Once expand_expr has been called on the arguments of the comparison,
10410 we are committed to doing the store flag, since it is not safe to
10411 re-evaluate the expression. We emit the store-flag insn by calling
10412 emit_store_flag, but only expand the arguments if we have a reason
10413 to believe that emit_store_flag will be successful. If we think that
10414 it will, but it isn't, we have to simulate the store-flag with a
10415 set/jump/set sequence. */
10416
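/* A sketch of the intended effect (illustrative only): for an expression
such as "a > b" on a machine with a suitable scc instruction, this
returns a register holding 1 when the comparison is true and 0 when it
is false; for a TRUTH_NOT_EXPR the value is inverted by XORing the
result with 1 just before returning. */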
10417 static rtx
10418 do_store_flag (exp, target, mode, only_cheap)
10419 tree exp;
10420 rtx target;
10421 enum machine_mode mode;
10422 int only_cheap;
10423 {
10424 enum rtx_code code;
10425 tree arg0, arg1, type;
10426 tree tem;
10427 enum machine_mode operand_mode;
10428 int invert = 0;
10429 int unsignedp;
10430 rtx op0, op1;
10431 enum insn_code icode;
10432 rtx subtarget = target;
10433 rtx result, label;
10434
10435 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10436 result at the end. We can't simply invert the test since it would
10437 have already been inverted if it were valid. This case occurs for
10438 some floating-point comparisons. */
10439
10440 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10441 invert = 1, exp = TREE_OPERAND (exp, 0);
10442
10443 arg0 = TREE_OPERAND (exp, 0);
10444 arg1 = TREE_OPERAND (exp, 1);
10445
10446 /* Don't crash if the comparison was erroneous. */
10447 if (arg0 == error_mark_node || arg1 == error_mark_node)
10448 return const0_rtx;
10449
10450 type = TREE_TYPE (arg0);
10451 operand_mode = TYPE_MODE (type);
10452 unsignedp = TREE_UNSIGNED (type);
10453
10454 /* We won't bother with BLKmode store-flag operations because it would mean
10455 passing a lot of information to emit_store_flag. */
10456 if (operand_mode == BLKmode)
10457 return 0;
10458
10459 /* We won't bother with store-flag operations involving function pointers
10460 when function pointers must be canonicalized before comparisons. */
10461 #ifdef HAVE_canonicalize_funcptr_for_compare
10462 if (HAVE_canonicalize_funcptr_for_compare
10463 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10464 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10465 == FUNCTION_TYPE))
10466 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10467 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10468 == FUNCTION_TYPE))))
10469 return 0;
10470 #endif
10471
10472 STRIP_NOPS (arg0);
10473 STRIP_NOPS (arg1);
10474
10475 /* Get the rtx comparison code to use. We know that EXP is a comparison
10476 operation of some type. Some comparisons against 1 and -1 can be
10477 converted to comparisons with zero. Do so here so that the tests
10478 below will be aware that we have a comparison with zero. These
10479 tests will not catch constants in the first operand, but constants
10480 are rarely passed as the first operand. */
10481
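/* For example, with signed operands: "x < 1" becomes "x <= 0",
"x <= -1" becomes "x < 0", "x > -1" becomes "x >= 0", and
"x >= 1" becomes "x > 0", as implemented by the cases below. */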
10482 switch (TREE_CODE (exp))
10483 {
10484 case EQ_EXPR:
10485 code = EQ;
10486 break;
10487 case NE_EXPR:
10488 code = NE;
10489 break;
10490 case LT_EXPR:
10491 if (integer_onep (arg1))
10492 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10493 else
10494 code = unsignedp ? LTU : LT;
10495 break;
10496 case LE_EXPR:
10497 if (! unsignedp && integer_all_onesp (arg1))
10498 arg1 = integer_zero_node, code = LT;
10499 else
10500 code = unsignedp ? LEU : LE;
10501 break;
10502 case GT_EXPR:
10503 if (! unsignedp && integer_all_onesp (arg1))
10504 arg1 = integer_zero_node, code = GE;
10505 else
10506 code = unsignedp ? GTU : GT;
10507 break;
10508 case GE_EXPR:
10509 if (integer_onep (arg1))
10510 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10511 else
10512 code = unsignedp ? GEU : GE;
10513 break;
10514
10515 case UNORDERED_EXPR:
10516 code = UNORDERED;
10517 break;
10518 case ORDERED_EXPR:
10519 code = ORDERED;
10520 break;
10521 case UNLT_EXPR:
10522 code = UNLT;
10523 break;
10524 case UNLE_EXPR:
10525 code = UNLE;
10526 break;
10527 case UNGT_EXPR:
10528 code = UNGT;
10529 break;
10530 case UNGE_EXPR:
10531 code = UNGE;
10532 break;
10533 case UNEQ_EXPR:
10534 code = UNEQ;
10535 break;
10536
10537 default:
10538 abort ();
10539 }
10540
10541 /* Put a constant second. */
10542 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10543 {
10544 tem = arg0; arg0 = arg1; arg1 = tem;
10545 code = swap_condition (code);
10546 }
10547
10548 /* If this is an equality or inequality test of a single bit, we can
10549 do this by shifting the bit being tested to the low-order bit and
10550 masking the result with the constant 1. If the condition was EQ,
10551 we xor it with 1. This does not require an scc insn and is faster
10552 than an scc insn even if we have it. */
10553
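/* Illustrative examples of the transformation below: "(x & 4) != 0" is
computed as "(x >> 2) & 1", while "(x & 4) == 0" is computed as
"((x >> 2) ^ 1) & 1"; no store-flag instruction is required. */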
10554 if ((code == NE || code == EQ)
10555 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10556 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10557 {
10558 tree inner = TREE_OPERAND (arg0, 0);
10559 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10560 int ops_unsignedp;
10561
10562 /* If INNER is a right shift by a constant and that shift count plus
10563 BITNUM does not overflow, adjust BITNUM and INNER. */
10564
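/* For example, testing bit 2 of "x >> 3" is the same as testing bit 5
of "x", so BITNUM becomes 5 and INNER becomes "x". */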
10565 if (TREE_CODE (inner) == RSHIFT_EXPR
10566 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10567 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10568 && bitnum < TYPE_PRECISION (type)
10569 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10570 bitnum - TYPE_PRECISION (type)))
10571 {
10572 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10573 inner = TREE_OPERAND (inner, 0);
10574 }
10575
10576 /* If we are going to be able to omit the AND below, we must do our
10577 operations as unsigned. If we must use the AND, we have a choice.
10578 Normally unsigned is faster, but for some machines signed is. */
10579 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10580 #ifdef LOAD_EXTEND_OP
10581 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10582 #else
10583 : 1
10584 #endif
10585 );
10586
10587 if (! get_subtarget (subtarget)
10588 || GET_MODE (subtarget) != operand_mode
10589 || ! safe_from_p (subtarget, inner, 1))
10590 subtarget = 0;
10591
10592 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10593
10594 if (bitnum != 0)
10595 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10596 size_int (bitnum), subtarget, ops_unsignedp);
10597
10598 if (GET_MODE (op0) != mode)
10599 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10600
10601 if ((code == EQ && ! invert) || (code == NE && invert))
10602 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10603 ops_unsignedp, OPTAB_LIB_WIDEN);
10604
10605 /* Put the AND last so it can combine with more things. */
10606 if (bitnum != TYPE_PRECISION (type) - 1)
10607 op0 = expand_and (op0, const1_rtx, subtarget);
10608
10609 return op0;
10610 }
10611
10612 /* Now see if we are likely to be able to do this. Return if not. */
10613 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10614 return 0;
10615
10616 icode = setcc_gen_code[(int) code];
10617 if (icode == CODE_FOR_nothing
10618 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10619 {
10620 /* We can only do this if it is one of the special cases that
10621 can be handled without an scc insn. */
10622 if ((code == LT && integer_zerop (arg1))
10623 || (! only_cheap && code == GE && integer_zerop (arg1)))
10624 ;
10625 else if (BRANCH_COST >= 0
10626 && ! only_cheap && (code == NE || code == EQ)
10627 && TREE_CODE (type) != REAL_TYPE
10628 && ((abs_optab->handlers[(int) operand_mode].insn_code
10629 != CODE_FOR_nothing)
10630 || (ffs_optab->handlers[(int) operand_mode].insn_code
10631 != CODE_FOR_nothing)))
10632 ;
10633 else
10634 return 0;
10635 }
10636
10637 if (! get_subtarget (target)
10638 || GET_MODE (subtarget) != operand_mode
10639 || ! safe_from_p (subtarget, arg1, 1))
10640 subtarget = 0;
10641
10642 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10643 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10644
10645 if (target == 0)
10646 target = gen_reg_rtx (mode);
10647
10648 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10649 because, if emit_store_flag does anything, it will succeed and
10650 OP0 and OP1 will not be used subsequently. */
10651
10652 result = emit_store_flag (target, code,
10653 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10654 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10655 operand_mode, unsignedp, 1);
10656
10657 if (result)
10658 {
10659 if (invert)
10660 result = expand_binop (mode, xor_optab, result, const1_rtx,
10661 result, 0, OPTAB_LIB_WIDEN);
10662 return result;
10663 }
10664
10665 /* If this failed, we have to do this with set/compare/jump/set code. */
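/* In the non-inverted case the emitted sequence is equivalent to:
target = 1; if (op0 <cond> op1) goto label; target = 0; label:;
with the two constants exchanged when INVERT is set. */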
10666 if (GET_CODE (target) != REG
10667 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10668 target = gen_reg_rtx (GET_MODE (target));
10669
10670 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10671 result = compare_from_rtx (op0, op1, code, unsignedp,
10672 operand_mode, NULL_RTX, 0);
10673 if (GET_CODE (result) == CONST_INT)
10674 return (((result == const0_rtx && ! invert)
10675 || (result != const0_rtx && invert))
10676 ? const0_rtx : const1_rtx);
10677
10678 label = gen_label_rtx ();
10679 if (bcc_gen_fctn[(int) code] == 0)
10680 abort ();
10681
10682 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10683 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10684 emit_label (label);
10685
10686 return target;
10687 }
10688 \f
10689 /* Generate a tablejump instruction (used for switch statements). */
10690
10691 #ifdef HAVE_tablejump
10692
10693 /* INDEX is the value being switched on, with the lowest value
10694 in the table already subtracted.
10695 MODE is its expected mode (needed if INDEX is constant).
10696 RANGE is the length of the jump table.
10697 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10698
10699 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10700 index value is out of range. */
10701
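/* As a rough example of how this is called: for a switch whose case
values run from 10 to 13, the caller first subtracts 10 from the
switched value, so INDEX holds 0..3 for in-range cases; it then passes
RANGE = 3, and any original value outside 10..13 yields an unsigned
INDEX greater than 3 and branches to DEFAULT_LABEL. */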
10702 void
10703 do_tablejump (index, mode, range, table_label, default_label)
10704 rtx index, range, table_label, default_label;
10705 enum machine_mode mode;
10706 {
10707 register rtx temp, vector;
10708
10709 /* Do an unsigned comparison (in the proper mode) between the index
10710 expression and the value which represents the length of the range.
10711 Since we just finished subtracting the lower bound of the range
10712 from the index expression, this comparison allows us to simultaneously
10713 check that the original index expression value is both greater than
10714 or equal to the minimum value of the range and less than or equal to
10715 the maximum value of the range. */
10716
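/* Concretely, with bounds 10..13 an original value of 7 produces an
INDEX of (unsigned) -3, which is far greater than RANGE = 3, so the
single GTU test below rejects values below the lower bound as well as
those above the upper bound. */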
10717 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10718 0, default_label);
10719
10720 /* If index is in range, it must fit in Pmode.
10721 Convert to Pmode so we can index with it. */
10722 if (mode != Pmode)
10723 index = convert_to_mode (Pmode, index, 1);
10724
10725 /* Don't let a MEM slip through, because then the INDEX that comes
10726 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10727 and break_out_memory_refs will go to work on it and mess it up. */
10728 #ifdef PIC_CASE_VECTOR_ADDRESS
10729 if (flag_pic && GET_CODE (index) != REG)
10730 index = copy_to_mode_reg (Pmode, index);
10731 #endif
10732
10733 /* If flag_force_addr were to affect this address
10734 it could interfere with the tricky assumptions made
10735 about addresses that contain label-refs,
10736 which may be valid only very near the tablejump itself. */
10737 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10738 GET_MODE_SIZE, because this indicates how large insns are. The other
10739 uses should all be Pmode, because they are addresses. This code
10740 could fail if addresses and insns are not the same size. */
10741 index = gen_rtx_PLUS (Pmode,
10742 gen_rtx_MULT (Pmode, index,
10743 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10744 gen_rtx_LABEL_REF (Pmode, table_label));
10745 #ifdef PIC_CASE_VECTOR_ADDRESS
10746 if (flag_pic)
10747 index = PIC_CASE_VECTOR_ADDRESS (index);
10748 else
10749 #endif
10750 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10751 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10752 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10753 RTX_UNCHANGING_P (vector) = 1;
10754 convert_move (temp, vector, 0);
10755
10756 emit_jump_insn (gen_tablejump (temp, table_label));
10757
10758 /* If we are generating PIC code or if the table is PC-relative, the
10759 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10760 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10761 emit_barrier ();
10762 }
10763
10764 #endif /* HAVE_tablejump */