expr.c (store_constructor): Allow variable bounds of array type.
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
47
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
51
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
60
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
63
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
66
67 #ifdef PUSH_ROUNDING
68
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
72
73 #endif
74
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
82
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
87
88 /* If this is nonzero, we do not bother generating VOLATILE
89 around volatile memory references, and we are willing to
90 output indirect addresses. If cse is to follow, we reject
91 indirect addresses so a useful potential cse is generated;
92 if it is used only once, instruction combination will produce
93 the same indirect address eventually. */
94 int cse_not_expected;
95
96 /* Nonzero to generate code for all the subroutines within an
97 expression before generating the upper levels of the expression.
98 Nowadays this is never zero. */
99 int do_preexpand_calls = 1;
100
101 /* Don't check memory usage, since code is being emitted to check memory
102 usage. Used when current_function_check_memory_usage is true, to avoid
103 infinite recursion. */
104 static int in_check_memory_usage;
105
106 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
107 static tree placeholder_list = 0;
108
109 /* This structure is used by move_by_pieces to describe the move to
110 be performed. */
111 struct move_by_pieces
112 {
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 rtx from;
118 rtx from_addr;
119 int autinc_from;
120 int explicit_inc_from;
121 unsigned HOST_WIDE_INT len;
122 HOST_WIDE_INT offset;
123 int reverse;
124 };
125
126 /* This structure is used by clear_by_pieces to describe the clear to
127 be performed. */
128
129 struct clear_by_pieces
130 {
131 rtx to;
132 rtx to_addr;
133 int autinc_to;
134 int explicit_inc_to;
135 unsigned HOST_WIDE_INT len;
136 HOST_WIDE_INT offset;
137 int reverse;
138 };
139
140 extern struct obstack permanent_obstack;
141
142 static rtx get_push_address PARAMS ((int));
143
144 static rtx enqueue_insn PARAMS ((rtx, rtx));
145 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
146 PARAMS ((unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
149 struct move_by_pieces *));
150 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
151 unsigned int));
152 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
153 enum machine_mode,
154 struct clear_by_pieces *));
155 static rtx get_subtarget PARAMS ((rtx));
156 static int is_zeros_p PARAMS ((tree));
157 static int mostly_zeros_p PARAMS ((tree));
158 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, tree, unsigned int, int));
161 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
162 HOST_WIDE_INT));
163 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
164 HOST_WIDE_INT, enum machine_mode,
165 tree, enum machine_mode, int,
166 unsigned int, HOST_WIDE_INT, int));
167 static enum memory_use_mode
168 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
169 static tree save_noncopied_parts PARAMS ((tree, tree));
170 static tree init_noncopied_parts PARAMS ((tree, tree));
171 static int safe_from_p PARAMS ((rtx, tree, int));
172 static int fixed_type_p PARAMS ((tree));
173 static rtx var_rtx PARAMS ((tree));
174 static int readonly_fields_p PARAMS ((tree));
175 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
176 static rtx expand_increment PARAMS ((tree, int, int));
177 static void preexpand_calls PARAMS ((tree));
178 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
179 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
180 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
181 rtx, rtx));
182 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
183
184 /* Record for each mode whether we can move a register directly to or
185 from an object of that mode in memory. If we can't, we won't try
186 to use that mode directly when accessing a field of that mode. */
187
188 static char direct_load[NUM_MACHINE_MODES];
189 static char direct_store[NUM_MACHINE_MODES];
190
191 /* If a memory-to-memory move would take MOVE_RATIO or more simple
192 move-instruction sequences, we will do a movstr or libcall instead. */
193
194 #ifndef MOVE_RATIO
195 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
196 #define MOVE_RATIO 2
197 #else
198 /* If we are optimizing for space (-Os), cut down the default move ratio. */
199 #define MOVE_RATIO (optimize_size ? 3 : 15)
200 #endif
201 #endif
202
203 /* This macro is used to determine whether move_by_pieces should be called
204 to perform a structure copy. */
205 #ifndef MOVE_BY_PIECES_P
206 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
207 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
208 #endif
209
210 /* This array records the insn_code of insns to perform block moves. */
211 enum insn_code movstr_optab[NUM_MACHINE_MODES];
212
213 /* This array records the insn_code of insns to perform block clears. */
214 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
215
216 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
217
218 #ifndef SLOW_UNALIGNED_ACCESS
219 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
220 #endif
221 \f
222 /* This is run once per compilation to set up which modes can be used
223 directly in memory and to initialize the block move optab. */
224
225 void
226 init_expr_once ()
227 {
228 rtx insn, pat;
229 enum machine_mode mode;
230 int num_clobbers;
231 rtx mem, mem1;
232 char *free_point;
233
234 start_sequence ();
235
236 /* Since we are on the permanent obstack, we must be sure we save this
237 spot AFTER we call start_sequence, since it will reuse the rtl it
238 makes. */
239 free_point = (char *) oballoc (0);
240
241 /* Try indexing by frame ptr and try by stack ptr.
242 It is known that on the Convex the stack ptr isn't a valid index.
243 With luck, one or the other is valid on any machine. */
244 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
245 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
246
247 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
248 pat = PATTERN (insn);
249
250 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
251 mode = (enum machine_mode) ((int) mode + 1))
252 {
253 int regno;
254 rtx reg;
255
256 direct_load[(int) mode] = direct_store[(int) mode] = 0;
257 PUT_MODE (mem, mode);
258 PUT_MODE (mem1, mode);
259
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
262
263 if (mode != VOIDmode && mode != BLKmode)
264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
265 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
266 regno++)
267 {
268 if (! HARD_REGNO_MODE_OK (regno, mode))
269 continue;
270
271 reg = gen_rtx_REG (mode, regno);
272
273 SET_SRC (pat) = mem;
274 SET_DEST (pat) = reg;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_load[(int) mode] = 1;
277
278 SET_SRC (pat) = mem1;
279 SET_DEST (pat) = reg;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_load[(int) mode] = 1;
282
283 SET_SRC (pat) = reg;
284 SET_DEST (pat) = mem;
285 if (recog (pat, insn, &num_clobbers) >= 0)
286 direct_store[(int) mode] = 1;
287
288 SET_SRC (pat) = reg;
289 SET_DEST (pat) = mem1;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_store[(int) mode] = 1;
292 }
293 }
294
295 end_sequence ();
296 obfree (free_point);
297 }
298
299 /* This is run at the start of compiling a function. */
300
301 void
302 init_expr ()
303 {
304 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
305
306 pending_chain = 0;
307 pending_stack_adjust = 0;
308 stack_pointer_delta = 0;
309 inhibit_defer_pop = 0;
310 saveregs_value = 0;
311 apply_args_value = 0;
312 forced_labels = 0;
313 }
314
315 void
316 mark_expr_status (p)
317 struct expr_status *p;
318 {
319 if (p == NULL)
320 return;
321
322 ggc_mark_rtx (p->x_saveregs_value);
323 ggc_mark_rtx (p->x_apply_args_value);
324 ggc_mark_rtx (p->x_forced_labels);
325 }
326
327 void
328 free_expr_status (f)
329 struct function *f;
330 {
331 free (f->expr);
332 f->expr = NULL;
333 }
334
335 /* Small sanity check that the queue is empty at the end of a function. */
336
337 void
338 finish_expr_for_function ()
339 {
340 if (pending_chain)
341 abort ();
342 }
343 \f
344 /* Manage the queue of increment instructions to be output
345 for POSTINCREMENT_EXPR expressions, etc. */
346
347 /* Queue up to increment (or change) VAR later. BODY says how:
348 BODY should be the same thing you would pass to emit_insn
349 to increment right away. It will go to emit_insn later on.
350
351 The value is a QUEUED expression to be used in place of VAR
352 where you want to guarantee the pre-incrementation value of VAR. */
353
354 static rtx
355 enqueue_insn (var, body)
356 rtx var, body;
357 {
358 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
359 body, pending_chain);
360 return pending_chain;
361 }
362
363 /* Use protect_from_queue to convert a QUEUED expression
364 into something that you can put immediately into an instruction.
365 If the queued incrementation has not happened yet,
366 protect_from_queue returns the variable itself.
367 If the incrementation has happened, protect_from_queue returns a temp
368 that contains a copy of the old value of the variable.
369
370 Any time an rtx which might possibly be a QUEUED is to be put
371 into an instruction, it must be passed through protect_from_queue first.
372 QUEUED expressions are not meaningful in instructions.
373
374 Do not pass a value through protect_from_queue and then hold
375 on to it for a while before putting it in an instruction!
376 If the queue is flushed in between, incorrect code will result. */
377
378 rtx
379 protect_from_queue (x, modify)
380 register rtx x;
381 int modify;
382 {
383 register RTX_CODE code = GET_CODE (x);
384
385 #if 0 /* A QUEUED can hang around after the queue is forced out. */
386 /* Shortcut for most common case. */
387 if (pending_chain == 0)
388 return x;
389 #endif
390
391 if (code != QUEUED)
392 {
393 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
394 use of autoincrement. Make a copy of the contents of the memory
395 location rather than a copy of the address, but not if the value is
396 of mode BLKmode. Don't modify X in place since it might be
397 shared. */
398 if (code == MEM && GET_MODE (x) != BLKmode
399 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
400 {
401 register rtx y = XEXP (x, 0);
402 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
403
404 MEM_COPY_ATTRIBUTES (new, x);
405
406 if (QUEUED_INSN (y))
407 {
408 register rtx temp = gen_reg_rtx (GET_MODE (new));
409 emit_insn_before (gen_move_insn (temp, new),
410 QUEUED_INSN (y));
411 return temp;
412 }
413 return new;
414 }
415 /* Otherwise, recursively protect the subexpressions of all
416 the kinds of rtx's that can contain a QUEUED. */
417 if (code == MEM)
418 {
419 rtx tem = protect_from_queue (XEXP (x, 0), 0);
420 if (tem != XEXP (x, 0))
421 {
422 x = copy_rtx (x);
423 XEXP (x, 0) = tem;
424 }
425 }
426 else if (code == PLUS || code == MULT)
427 {
428 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
429 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
430 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
431 {
432 x = copy_rtx (x);
433 XEXP (x, 0) = new0;
434 XEXP (x, 1) = new1;
435 }
436 }
437 return x;
438 }
439 /* If the increment has not happened, use the variable itself. */
440 if (QUEUED_INSN (x) == 0)
441 return QUEUED_VAR (x);
442 /* If the increment has happened and a pre-increment copy exists,
443 use that copy. */
444 if (QUEUED_COPY (x) != 0)
445 return QUEUED_COPY (x);
446 /* The increment has happened but we haven't set up a pre-increment copy.
447 Set one up now, and use it. */
448 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
449 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
450 QUEUED_INSN (x));
451 return QUEUED_COPY (x);
452 }
453
454 /* Return nonzero if X contains a QUEUED expression:
455 if it contains anything that will be altered by a queued increment.
456 We handle only combinations of MEM, PLUS, MINUS and MULT operators
457 since memory addresses generally contain only those. */
458
459 int
460 queued_subexp_p (x)
461 rtx x;
462 {
463 register enum rtx_code code = GET_CODE (x);
464 switch (code)
465 {
466 case QUEUED:
467 return 1;
468 case MEM:
469 return queued_subexp_p (XEXP (x, 0));
470 case MULT:
471 case PLUS:
472 case MINUS:
473 return (queued_subexp_p (XEXP (x, 0))
474 || queued_subexp_p (XEXP (x, 1)));
475 default:
476 return 0;
477 }
478 }
479
480 /* Perform all the pending incrementations. */
481
482 void
483 emit_queue ()
484 {
485 register rtx p;
486 while ((p = pending_chain))
487 {
488 rtx body = QUEUED_BODY (p);
489
490 if (GET_CODE (body) == SEQUENCE)
491 {
492 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
493 emit_insn (QUEUED_BODY (p));
494 }
495 else
496 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
497 pending_chain = QUEUED_NEXT (p);
498 }
499 }
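/* Editorial sketch, not part of the original file: the calling protocol
   the three routines above expect from expansion code.  The local names
   DEST_RTX and SRC_RTX are hypothetical; protect_from_queue, emit_queue
   and emit_move_insn are the real interfaces.  */
#if 0
  {
    /* Either operand may be (or contain) a QUEUED rtx left behind by a
       post-increment expansion, so filter both before building an insn.  */
    rtx dest = protect_from_queue (dest_rtx, 1);  /* will be written */
    rtx src = protect_from_queue (src_rtx, 0);    /* only read */

    emit_move_insn (dest, src);

    /* At the end of the statement, flush the queued increments so each
       one takes effect exactly once.  */
    emit_queue ();
  }
#endif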
500 \f
501 /* Copy data from FROM to TO, where the machine modes are not the same.
502 Both modes may be integer, or both may be floating.
503 UNSIGNEDP should be nonzero if FROM is an unsigned type.
504 This causes zero-extension instead of sign-extension. */
505
506 void
507 convert_move (to, from, unsignedp)
508 register rtx to, from;
509 int unsignedp;
510 {
511 enum machine_mode to_mode = GET_MODE (to);
512 enum machine_mode from_mode = GET_MODE (from);
513 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
514 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
515 enum insn_code code;
516 rtx libcall;
517
518 /* rtx code for making an equivalent value. */
519 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
520
521 to = protect_from_queue (to, 1);
522 from = protect_from_queue (from, 0);
523
524 if (to_real != from_real)
525 abort ();
526
527 /* If FROM is a SUBREG that indicates that we have already done at least
528 the required extension, strip it. We don't handle such SUBREGs as
529 TO here. */
530
531 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
532 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
533 >= GET_MODE_SIZE (to_mode))
534 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
535 from = gen_lowpart (to_mode, from), from_mode = to_mode;
536
537 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
538 abort ();
539
540 if (to_mode == from_mode
541 || (from_mode == VOIDmode && CONSTANT_P (from)))
542 {
543 emit_move_insn (to, from);
544 return;
545 }
546
547 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
548 {
549 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
550 abort ();
551
552 if (VECTOR_MODE_P (to_mode))
553 from = gen_rtx_SUBREG (to_mode, from, 0);
554 else
555 to = gen_rtx_SUBREG (from_mode, to, 0);
556
557 emit_move_insn (to, from);
558 return;
559 }
560
561 if (to_real != from_real)
562 abort ();
563
564 if (to_real)
565 {
566 rtx value;
567
568 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
569 {
570 /* Try converting directly if the insn is supported. */
571 if ((code = can_extend_p (to_mode, from_mode, 0))
572 != CODE_FOR_nothing)
573 {
574 emit_unop_insn (code, to, from, UNKNOWN);
575 return;
576 }
577 }
578
579 #ifdef HAVE_trunchfqf2
580 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_trunctqfqf2
587 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593 #ifdef HAVE_truncsfqf2
594 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
595 {
596 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
597 return;
598 }
599 #endif
600 #ifdef HAVE_truncdfqf2
601 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
602 {
603 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
604 return;
605 }
606 #endif
607 #ifdef HAVE_truncxfqf2
608 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
609 {
610 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
611 return;
612 }
613 #endif
614 #ifdef HAVE_trunctfqf2
615 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
616 {
617 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
618 return;
619 }
620 #endif
621
622 #ifdef HAVE_trunctqfhf2
623 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629 #ifdef HAVE_truncsfhf2
630 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
631 {
632 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
633 return;
634 }
635 #endif
636 #ifdef HAVE_truncdfhf2
637 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
638 {
639 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
640 return;
641 }
642 #endif
643 #ifdef HAVE_truncxfhf2
644 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
645 {
646 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
647 return;
648 }
649 #endif
650 #ifdef HAVE_trunctfhf2
651 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
652 {
653 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
654 return;
655 }
656 #endif
657
658 #ifdef HAVE_truncsftqf2
659 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
660 {
661 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665 #ifdef HAVE_truncdftqf2
666 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
667 {
668 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
669 return;
670 }
671 #endif
672 #ifdef HAVE_truncxftqf2
673 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
674 {
675 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
676 return;
677 }
678 #endif
679 #ifdef HAVE_trunctftqf2
680 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
681 {
682 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
683 return;
684 }
685 #endif
686
687 #ifdef HAVE_truncdfsf2
688 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
689 {
690 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_truncxfsf2
695 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
696 {
697 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701 #ifdef HAVE_trunctfsf2
702 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
703 {
704 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
705 return;
706 }
707 #endif
708 #ifdef HAVE_truncxfdf2
709 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
710 {
711 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
712 return;
713 }
714 #endif
715 #ifdef HAVE_trunctfdf2
716 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
717 {
718 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
719 return;
720 }
721 #endif
722
723 libcall = (rtx) 0;
724 switch (from_mode)
725 {
726 case SFmode:
727 switch (to_mode)
728 {
729 case DFmode:
730 libcall = extendsfdf2_libfunc;
731 break;
732
733 case XFmode:
734 libcall = extendsfxf2_libfunc;
735 break;
736
737 case TFmode:
738 libcall = extendsftf2_libfunc;
739 break;
740
741 default:
742 break;
743 }
744 break;
745
746 case DFmode:
747 switch (to_mode)
748 {
749 case SFmode:
750 libcall = truncdfsf2_libfunc;
751 break;
752
753 case XFmode:
754 libcall = extenddfxf2_libfunc;
755 break;
756
757 case TFmode:
758 libcall = extenddftf2_libfunc;
759 break;
760
761 default:
762 break;
763 }
764 break;
765
766 case XFmode:
767 switch (to_mode)
768 {
769 case SFmode:
770 libcall = truncxfsf2_libfunc;
771 break;
772
773 case DFmode:
774 libcall = truncxfdf2_libfunc;
775 break;
776
777 default:
778 break;
779 }
780 break;
781
782 case TFmode:
783 switch (to_mode)
784 {
785 case SFmode:
786 libcall = trunctfsf2_libfunc;
787 break;
788
789 case DFmode:
790 libcall = trunctfdf2_libfunc;
791 break;
792
793 default:
794 break;
795 }
796 break;
797
798 default:
799 break;
800 }
801
802 if (libcall == (rtx) 0)
803 /* This conversion is not implemented yet. */
804 abort ();
805
806 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
807 1, from, from_mode);
808 emit_move_insn (to, value);
809 return;
810 }
811
812 /* Now both modes are integers. */
813
814 /* Handle expanding beyond a word. */
815 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
816 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
817 {
818 rtx insns;
819 rtx lowpart;
820 rtx fill_value;
821 rtx lowfrom;
822 int i;
823 enum machine_mode lowpart_mode;
824 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
825
826 /* Try converting directly if the insn is supported. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
829 {
830 /* If FROM is a SUBREG, put it into a register. Do this
831 so that we always generate the same set of insns for
832 better cse'ing; if an intermediate assignment occurred,
833 we won't be doing the operation directly on the SUBREG. */
834 if (optimize > 0 && GET_CODE (from) == SUBREG)
835 from = force_reg (from_mode, from);
836 emit_unop_insn (code, to, from, equiv_code);
837 return;
838 }
839 /* Next, try converting via full word. */
840 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
841 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
842 != CODE_FOR_nothing))
843 {
844 if (GET_CODE (to) == REG)
845 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
846 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
847 emit_unop_insn (code, to,
848 gen_lowpart (word_mode, to), equiv_code);
849 return;
850 }
851
852 /* No special multiword conversion insn; do it by hand. */
853 start_sequence ();
854
855 /* Since we will turn this into a no conflict block, we must ensure
856 that the source does not overlap the target. */
857
858 if (reg_overlap_mentioned_p (to, from))
859 from = force_reg (from_mode, from);
860
861 /* Get a copy of FROM widened to a word, if necessary. */
862 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
863 lowpart_mode = word_mode;
864 else
865 lowpart_mode = from_mode;
866
867 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
868
869 lowpart = gen_lowpart (lowpart_mode, to);
870 emit_move_insn (lowpart, lowfrom);
871
872 /* Compute the value to put in each remaining word. */
873 if (unsignedp)
874 fill_value = const0_rtx;
875 else
876 {
877 #ifdef HAVE_slt
878 if (HAVE_slt
879 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
880 && STORE_FLAG_VALUE == -1)
881 {
882 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
883 lowpart_mode, 0, 0);
884 fill_value = gen_reg_rtx (word_mode);
885 emit_insn (gen_slt (fill_value));
886 }
887 else
888 #endif
889 {
890 fill_value
891 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
892 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
893 NULL_RTX, 0);
894 fill_value = convert_to_mode (word_mode, fill_value, 1);
895 }
896 }
897
898 /* Fill the remaining words. */
899 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
900 {
901 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
902 rtx subword = operand_subword (to, index, 1, to_mode);
903
904 if (subword == 0)
905 abort ();
906
907 if (fill_value != subword)
908 emit_move_insn (subword, fill_value);
909 }
910
911 insns = get_insns ();
912 end_sequence ();
913
914 emit_no_conflict_block (insns, to, from, NULL_RTX,
915 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
916 return;
917 }
918
919 /* Truncating multi-word to a word or less. */
920 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
921 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
922 {
923 if (!((GET_CODE (from) == MEM
924 && ! MEM_VOLATILE_P (from)
925 && direct_load[(int) to_mode]
926 && ! mode_dependent_address_p (XEXP (from, 0)))
927 || GET_CODE (from) == REG
928 || GET_CODE (from) == SUBREG))
929 from = force_reg (from_mode, from);
930 convert_move (to, gen_lowpart (word_mode, from), 0);
931 return;
932 }
933
934 /* Handle pointer conversion. */ /* SPEE 900220. */
935 if (to_mode == PQImode)
936 {
937 if (from_mode != QImode)
938 from = convert_to_mode (QImode, from, unsignedp);
939
940 #ifdef HAVE_truncqipqi2
941 if (HAVE_truncqipqi2)
942 {
943 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
944 return;
945 }
946 #endif /* HAVE_truncqipqi2 */
947 abort ();
948 }
949
950 if (from_mode == PQImode)
951 {
952 if (to_mode != QImode)
953 {
954 from = convert_to_mode (QImode, from, unsignedp);
955 from_mode = QImode;
956 }
957 else
958 {
959 #ifdef HAVE_extendpqiqi2
960 if (HAVE_extendpqiqi2)
961 {
962 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
963 return;
964 }
965 #endif /* HAVE_extendpqiqi2 */
966 abort ();
967 }
968 }
969
970 if (to_mode == PSImode)
971 {
972 if (from_mode != SImode)
973 from = convert_to_mode (SImode, from, unsignedp);
974
975 #ifdef HAVE_truncsipsi2
976 if (HAVE_truncsipsi2)
977 {
978 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
979 return;
980 }
981 #endif /* HAVE_truncsipsi2 */
982 abort ();
983 }
984
985 if (from_mode == PSImode)
986 {
987 if (to_mode != SImode)
988 {
989 from = convert_to_mode (SImode, from, unsignedp);
990 from_mode = SImode;
991 }
992 else
993 {
994 #ifdef HAVE_extendpsisi2
995 if (HAVE_extendpsisi2)
996 {
997 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
998 return;
999 }
1000 #endif /* HAVE_extendpsisi2 */
1001 abort ();
1002 }
1003 }
1004
1005 if (to_mode == PDImode)
1006 {
1007 if (from_mode != DImode)
1008 from = convert_to_mode (DImode, from, unsignedp);
1009
1010 #ifdef HAVE_truncdipdi2
1011 if (HAVE_truncdipdi2)
1012 {
1013 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1014 return;
1015 }
1016 #endif /* HAVE_truncdipdi2 */
1017 abort ();
1018 }
1019
1020 if (from_mode == PDImode)
1021 {
1022 if (to_mode != DImode)
1023 {
1024 from = convert_to_mode (DImode, from, unsignedp);
1025 from_mode = DImode;
1026 }
1027 else
1028 {
1029 #ifdef HAVE_extendpdidi2
1030 if (HAVE_extendpdidi2)
1031 {
1032 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1033 return;
1034 }
1035 #endif /* HAVE_extendpdidi2 */
1036 abort ();
1037 }
1038 }
1039
1040 /* Now follow all the conversions between integers
1041 no more than a word long. */
1042
1043 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1044 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1045 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1046 GET_MODE_BITSIZE (from_mode)))
1047 {
1048 if (!((GET_CODE (from) == MEM
1049 && ! MEM_VOLATILE_P (from)
1050 && direct_load[(int) to_mode]
1051 && ! mode_dependent_address_p (XEXP (from, 0)))
1052 || GET_CODE (from) == REG
1053 || GET_CODE (from) == SUBREG))
1054 from = force_reg (from_mode, from);
1055 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1056 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1057 from = copy_to_reg (from);
1058 emit_move_insn (to, gen_lowpart (to_mode, from));
1059 return;
1060 }
1061
1062 /* Handle extension. */
1063 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1064 {
1065 /* Convert directly if that works. */
1066 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1067 != CODE_FOR_nothing)
1068 {
1069 emit_unop_insn (code, to, from, equiv_code);
1070 return;
1071 }
1072 else
1073 {
1074 enum machine_mode intermediate;
1075 rtx tmp;
1076 tree shift_amount;
1077
1078 /* Search for a mode to convert via. */
1079 for (intermediate = from_mode; intermediate != VOIDmode;
1080 intermediate = GET_MODE_WIDER_MODE (intermediate))
1081 if (((can_extend_p (to_mode, intermediate, unsignedp)
1082 != CODE_FOR_nothing)
1083 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1084 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1085 GET_MODE_BITSIZE (intermediate))))
1086 && (can_extend_p (intermediate, from_mode, unsignedp)
1087 != CODE_FOR_nothing))
1088 {
1089 convert_move (to, convert_to_mode (intermediate, from,
1090 unsignedp), unsignedp);
1091 return;
1092 }
1093
1094 /* No suitable intermediate mode.
1095 Generate what we need with shifts. */
1096 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1097 - GET_MODE_BITSIZE (from_mode), 0);
1098 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1099 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1100 to, unsignedp);
1101 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1102 to, unsignedp);
1103 if (tmp != to)
1104 emit_move_insn (to, tmp);
1105 return;
1106 }
1107 }
1108
1109 /* Support special truncate insns for certain modes. */
1110
1111 if (from_mode == DImode && to_mode == SImode)
1112 {
1113 #ifdef HAVE_truncdisi2
1114 if (HAVE_truncdisi2)
1115 {
1116 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1117 return;
1118 }
1119 #endif
1120 convert_move (to, force_reg (from_mode, from), unsignedp);
1121 return;
1122 }
1123
1124 if (from_mode == DImode && to_mode == HImode)
1125 {
1126 #ifdef HAVE_truncdihi2
1127 if (HAVE_truncdihi2)
1128 {
1129 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1130 return;
1131 }
1132 #endif
1133 convert_move (to, force_reg (from_mode, from), unsignedp);
1134 return;
1135 }
1136
1137 if (from_mode == DImode && to_mode == QImode)
1138 {
1139 #ifdef HAVE_truncdiqi2
1140 if (HAVE_truncdiqi2)
1141 {
1142 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1143 return;
1144 }
1145 #endif
1146 convert_move (to, force_reg (from_mode, from), unsignedp);
1147 return;
1148 }
1149
1150 if (from_mode == SImode && to_mode == HImode)
1151 {
1152 #ifdef HAVE_truncsihi2
1153 if (HAVE_truncsihi2)
1154 {
1155 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1156 return;
1157 }
1158 #endif
1159 convert_move (to, force_reg (from_mode, from), unsignedp);
1160 return;
1161 }
1162
1163 if (from_mode == SImode && to_mode == QImode)
1164 {
1165 #ifdef HAVE_truncsiqi2
1166 if (HAVE_truncsiqi2)
1167 {
1168 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1169 return;
1170 }
1171 #endif
1172 convert_move (to, force_reg (from_mode, from), unsignedp);
1173 return;
1174 }
1175
1176 if (from_mode == HImode && to_mode == QImode)
1177 {
1178 #ifdef HAVE_trunchiqi2
1179 if (HAVE_trunchiqi2)
1180 {
1181 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1182 return;
1183 }
1184 #endif
1185 convert_move (to, force_reg (from_mode, from), unsignedp);
1186 return;
1187 }
1188
1189 if (from_mode == TImode && to_mode == DImode)
1190 {
1191 #ifdef HAVE_trunctidi2
1192 if (HAVE_trunctidi2)
1193 {
1194 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1195 return;
1196 }
1197 #endif
1198 convert_move (to, force_reg (from_mode, from), unsignedp);
1199 return;
1200 }
1201
1202 if (from_mode == TImode && to_mode == SImode)
1203 {
1204 #ifdef HAVE_trunctisi2
1205 if (HAVE_trunctisi2)
1206 {
1207 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1208 return;
1209 }
1210 #endif
1211 convert_move (to, force_reg (from_mode, from), unsignedp);
1212 return;
1213 }
1214
1215 if (from_mode == TImode && to_mode == HImode)
1216 {
1217 #ifdef HAVE_trunctihi2
1218 if (HAVE_trunctihi2)
1219 {
1220 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1221 return;
1222 }
1223 #endif
1224 convert_move (to, force_reg (from_mode, from), unsignedp);
1225 return;
1226 }
1227
1228 if (from_mode == TImode && to_mode == QImode)
1229 {
1230 #ifdef HAVE_trunctiqi2
1231 if (HAVE_trunctiqi2)
1232 {
1233 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1234 return;
1235 }
1236 #endif
1237 convert_move (to, force_reg (from_mode, from), unsignedp);
1238 return;
1239 }
1240
1241 /* Handle truncation of volatile memrefs, and so on;
1242 the things that couldn't be truncated directly,
1243 and for which there was no special instruction. */
1244 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1245 {
1246 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1247 emit_move_insn (to, temp);
1248 return;
1249 }
1250
1251 /* Mode combination is not recognized. */
1252 abort ();
1253 }
1254
1255 /* Return an rtx for a value that would result
1256 from converting X to mode MODE.
1257 Both X and MODE may be floating, or both integer.
1258 UNSIGNEDP is nonzero if X is an unsigned value.
1259 This can be done by referring to a part of X in place
1260 or by copying to a new temporary with conversion.
1261
1262 This function *must not* call protect_from_queue
1263 except when putting X into an insn (in which case convert_move does it). */
1264
1265 rtx
1266 convert_to_mode (mode, x, unsignedp)
1267 enum machine_mode mode;
1268 rtx x;
1269 int unsignedp;
1270 {
1271 return convert_modes (mode, VOIDmode, x, unsignedp);
1272 }
1273
1274 /* Return an rtx for a value that would result
1275 from converting X from mode OLDMODE to mode MODE.
1276 Both modes may be floating, or both integer.
1277 UNSIGNEDP is nonzero if X is an unsigned value.
1278
1279 This can be done by referring to a part of X in place
1280 or by copying to a new temporary with conversion.
1281
1282 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1283
1284 This function *must not* call protect_from_queue
1285 except when putting X into an insn (in which case convert_move does it). */
1286
1287 rtx
1288 convert_modes (mode, oldmode, x, unsignedp)
1289 enum machine_mode mode, oldmode;
1290 rtx x;
1291 int unsignedp;
1292 {
1293 register rtx temp;
1294
1295 /* If FROM is a SUBREG that indicates that we have already done at least
1296 the required extension, strip it. */
1297
1298 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1299 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1300 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1301 x = gen_lowpart (mode, x);
1302
1303 if (GET_MODE (x) != VOIDmode)
1304 oldmode = GET_MODE (x);
1305
1306 if (mode == oldmode)
1307 return x;
1308
1309 /* There is one case that we must handle specially: If we are converting
1310 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1311 we are to interpret the constant as unsigned, gen_lowpart will do
1312 the wrong thing if the constant appears negative. What we want to do is
1313 make the high-order word of the constant zero, not all ones. */
1314
1315 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1316 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1317 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1318 {
1319 HOST_WIDE_INT val = INTVAL (x);
1320
1321 if (oldmode != VOIDmode
1322 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1323 {
1324 int width = GET_MODE_BITSIZE (oldmode);
1325
1326 /* We need to zero extend VAL. */
1327 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1328 }
1329
1330 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1331 }
1332
1333 /* We can do this with a gen_lowpart if both desired and current modes
1334 are integer, and this is either a constant integer, a register, or a
1335 non-volatile MEM. Except for the constant case where MODE is no
1336 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1337
1338 if ((GET_CODE (x) == CONST_INT
1339 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1340 || (GET_MODE_CLASS (mode) == MODE_INT
1341 && GET_MODE_CLASS (oldmode) == MODE_INT
1342 && (GET_CODE (x) == CONST_DOUBLE
1343 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1344 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1345 && direct_load[(int) mode])
1346 || (GET_CODE (x) == REG
1347 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1348 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1349 {
1350 /* ?? If we don't know OLDMODE, we have to assume here that
1351 X does not need sign- or zero-extension. This may not be
1352 the case, but it's the best we can do. */
1353 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1354 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1355 {
1356 HOST_WIDE_INT val = INTVAL (x);
1357 int width = GET_MODE_BITSIZE (oldmode);
1358
1359 /* We must sign or zero-extend in this case. Start by
1360 zero-extending, then sign extend if we need to. */
1361 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1362 if (! unsignedp
1363 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1364 val |= (HOST_WIDE_INT) (-1) << width;
1365
1366 return GEN_INT (val);
1367 }
1368
1369 return gen_lowpart (mode, x);
1370 }
1371
1372 temp = gen_reg_rtx (mode);
1373 convert_move (temp, x, unsignedp);
1374 return temp;
1375 }
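/* Editorial sketch, not part of the original file: typical use of the
   two entry points above.  The pseudo NARROW is hypothetical.  */
#if 0
  {
    rtx narrow = gen_reg_rtx (QImode);
    rtx wide;

    /* Zero-extend the QImode value to SImode; the result may be NARROW
       reinterpreted in place or a fresh pseudo holding the conversion.  */
    wide = convert_to_mode (SImode, narrow, 1);

    /* For a VOIDmode constant, convert_modes lets the caller state the
       mode the constant should be interpreted in.  */
    wide = convert_modes (SImode, QImode, GEN_INT (255), 1);
  }
#endif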
1376 \f
1377 /* This macro is used to determine what the largest unit size that
1378 move_by_pieces can use is. */
1379
1380 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1381 move efficiently, as opposed to MOVE_MAX which is the maximum
1382 number of bytes we can move with a single instruction. */
1383
1384 #ifndef MOVE_MAX_PIECES
1385 #define MOVE_MAX_PIECES MOVE_MAX
1386 #endif
1387
1388 /* Generate several move instructions to copy LEN bytes
1389 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1390 The caller must pass FROM and TO
1391 through protect_from_queue before calling.
1392 ALIGN (in bits) is the maximum alignment we can assume. */
1393
1394 void
1395 move_by_pieces (to, from, len, align)
1396 rtx to, from;
1397 unsigned HOST_WIDE_INT len;
1398 unsigned int align;
1399 {
1400 struct move_by_pieces data;
1401 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1402 unsigned int max_size = MOVE_MAX_PIECES + 1;
1403 enum machine_mode mode = VOIDmode, tmode;
1404 enum insn_code icode;
1405
1406 data.offset = 0;
1407 data.to_addr = to_addr;
1408 data.from_addr = from_addr;
1409 data.to = to;
1410 data.from = from;
1411 data.autinc_to
1412 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1413 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1414 data.autinc_from
1415 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1416 || GET_CODE (from_addr) == POST_INC
1417 || GET_CODE (from_addr) == POST_DEC);
1418
1419 data.explicit_inc_from = 0;
1420 data.explicit_inc_to = 0;
1421 data.reverse
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1423 if (data.reverse) data.offset = len;
1424 data.len = len;
1425
1426 /* If copying requires more than two move insns,
1427 copy addresses to registers (to make displacements shorter)
1428 and use post-increment if available. */
1429 if (!(data.autinc_from && data.autinc_to)
1430 && move_by_pieces_ninsns (len, align) > 2)
1431 {
1432 /* Find the mode of the largest move... */
1433 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1434 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1435 if (GET_MODE_SIZE (tmode) < max_size)
1436 mode = tmode;
1437
1438 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1439 {
1440 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1441 data.autinc_from = 1;
1442 data.explicit_inc_from = -1;
1443 }
1444 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1445 {
1446 data.from_addr = copy_addr_to_reg (from_addr);
1447 data.autinc_from = 1;
1448 data.explicit_inc_from = 1;
1449 }
1450 if (!data.autinc_from && CONSTANT_P (from_addr))
1451 data.from_addr = copy_addr_to_reg (from_addr);
1452 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1453 {
1454 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1455 data.autinc_to = 1;
1456 data.explicit_inc_to = -1;
1457 }
1458 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1459 {
1460 data.to_addr = copy_addr_to_reg (to_addr);
1461 data.autinc_to = 1;
1462 data.explicit_inc_to = 1;
1463 }
1464 if (!data.autinc_to && CONSTANT_P (to_addr))
1465 data.to_addr = copy_addr_to_reg (to_addr);
1466 }
1467
1468 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1469 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1470 align = MOVE_MAX * BITS_PER_UNIT;
1471
1472 /* First move what we can in the largest integer mode, then go to
1473 successively smaller modes. */
1474
1475 while (max_size > 1)
1476 {
1477 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1478 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1479 if (GET_MODE_SIZE (tmode) < max_size)
1480 mode = tmode;
1481
1482 if (mode == VOIDmode)
1483 break;
1484
1485 icode = mov_optab->handlers[(int) mode].insn_code;
1486 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1487 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1488
1489 max_size = GET_MODE_SIZE (mode);
1490 }
1491
1492 /* The code above should have handled everything. */
1493 if (data.len > 0)
1494 abort ();
1495 }
1496
1497 /* Return number of insns required to move L bytes by pieces.
1498 ALIGN (in bits) is the maximum alignment we can assume. */
1499
1500 static unsigned HOST_WIDE_INT
1501 move_by_pieces_ninsns (l, align)
1502 unsigned HOST_WIDE_INT l;
1503 unsigned int align;
1504 {
1505 unsigned HOST_WIDE_INT n_insns = 0;
1506 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1507
1508 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1509 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1510 align = MOVE_MAX * BITS_PER_UNIT;
1511
1512 while (max_size > 1)
1513 {
1514 enum machine_mode mode = VOIDmode, tmode;
1515 enum insn_code icode;
1516
1517 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1518 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1519 if (GET_MODE_SIZE (tmode) < max_size)
1520 mode = tmode;
1521
1522 if (mode == VOIDmode)
1523 break;
1524
1525 icode = mov_optab->handlers[(int) mode].insn_code;
1526 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1527 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1528
1529 max_size = GET_MODE_SIZE (mode);
1530 }
1531
1532 return n_insns;
1533 }
1534
1535 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1536 with move instructions for mode MODE. GENFUN is the gen_... function
1537 to make a move insn for that mode. DATA has all the other info. */
1538
1539 static void
1540 move_by_pieces_1 (genfun, mode, data)
1541 rtx (*genfun) PARAMS ((rtx, ...));
1542 enum machine_mode mode;
1543 struct move_by_pieces *data;
1544 {
1545 unsigned int size = GET_MODE_SIZE (mode);
1546 rtx to1, from1;
1547
1548 while (data->len >= size)
1549 {
1550 if (data->reverse)
1551 data->offset -= size;
1552
1553 if (data->autinc_to)
1554 {
1555 to1 = gen_rtx_MEM (mode, data->to_addr);
1556 MEM_COPY_ATTRIBUTES (to1, data->to);
1557 }
1558 else
1559 to1 = change_address (data->to, mode,
1560 plus_constant (data->to_addr, data->offset));
1561
1562 if (data->autinc_from)
1563 {
1564 from1 = gen_rtx_MEM (mode, data->from_addr);
1565 MEM_COPY_ATTRIBUTES (from1, data->from);
1566 }
1567 else
1568 from1 = change_address (data->from, mode,
1569 plus_constant (data->from_addr, data->offset));
1570
1571 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1572 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1573 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1574 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1575
1576 emit_insn ((*genfun) (to1, from1));
1577
1578 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1579 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1580 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1581 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1582
1583 if (! data->reverse)
1584 data->offset += size;
1585
1586 data->len -= size;
1587 }
1588 }
1589 \f
1590 /* Emit code to move a block Y to a block X.
1591 This may be done with string-move instructions,
1592 with multiple scalar move instructions, or with a library call.
1593
1594 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1595 with mode BLKmode.
1596 SIZE is an rtx that says how long they are.
1597 ALIGN is the maximum alignment we can assume they have.
1598
1599 Return the address of the new block, if memcpy is called and returns it,
1600 0 otherwise. */
1601
1602 rtx
1603 emit_block_move (x, y, size, align)
1604 rtx x, y;
1605 rtx size;
1606 unsigned int align;
1607 {
1608 rtx retval = 0;
1609 #ifdef TARGET_MEM_FUNCTIONS
1610 static tree fn;
1611 tree call_expr, arg_list;
1612 #endif
1613
1614 if (GET_MODE (x) != BLKmode)
1615 abort ();
1616
1617 if (GET_MODE (y) != BLKmode)
1618 abort ();
1619
1620 x = protect_from_queue (x, 1);
1621 y = protect_from_queue (y, 0);
1622 size = protect_from_queue (size, 0);
1623
1624 if (GET_CODE (x) != MEM)
1625 abort ();
1626 if (GET_CODE (y) != MEM)
1627 abort ();
1628 if (size == 0)
1629 abort ();
1630
1631 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1632 move_by_pieces (x, y, INTVAL (size), align);
1633 else
1634 {
1635 /* Try the most limited insn first, because there's no point
1636 including more than one in the machine description unless
1637 the more limited one has some advantage. */
1638
1639 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1640 enum machine_mode mode;
1641
1642 /* Since this is a move insn, we don't care about volatility. */
1643 volatile_ok = 1;
1644
1645 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1646 mode = GET_MODE_WIDER_MODE (mode))
1647 {
1648 enum insn_code code = movstr_optab[(int) mode];
1649 insn_operand_predicate_fn pred;
1650
1651 if (code != CODE_FOR_nothing
1652 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1653 here because if SIZE is less than the mode mask, as it is
1654 returned by the macro, it will definitely be less than the
1655 actual mode mask. */
1656 && ((GET_CODE (size) == CONST_INT
1657 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1658 <= (GET_MODE_MASK (mode) >> 1)))
1659 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1660 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1661 || (*pred) (x, BLKmode))
1662 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1663 || (*pred) (y, BLKmode))
1664 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1665 || (*pred) (opalign, VOIDmode)))
1666 {
1667 rtx op2;
1668 rtx last = get_last_insn ();
1669 rtx pat;
1670
1671 op2 = convert_to_mode (mode, size, 1);
1672 pred = insn_data[(int) code].operand[2].predicate;
1673 if (pred != 0 && ! (*pred) (op2, mode))
1674 op2 = copy_to_mode_reg (mode, op2);
1675
1676 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1677 if (pat)
1678 {
1679 emit_insn (pat);
1680 volatile_ok = 0;
1681 return 0;
1682 }
1683 else
1684 delete_insns_since (last);
1685 }
1686 }
1687
1688 volatile_ok = 0;
1689
1690 /* X, Y, or SIZE may have been passed through protect_from_queue.
1691
1692 It is unsafe to save the value generated by protect_from_queue
1693 and reuse it later. Consider what happens if emit_queue is
1694 called before the return value from protect_from_queue is used.
1695
1696 Expansion of the CALL_EXPR below will call emit_queue before
1697 we are finished emitting RTL for argument setup. So if we are
1698 not careful we could get the wrong value for an argument.
1699
1700 To avoid this problem we go ahead and emit code to copy X, Y &
1701 SIZE into new pseudos. We can then place those new pseudos
1702 into an RTL_EXPR and use them later, even after a call to
1703 emit_queue.
1704
1705 Note this is not strictly needed for library calls since they
1706 do not call emit_queue before loading their arguments. However,
1707 we may need to have library calls call emit_queue in the future
1708 since failing to do so could cause problems for targets which
1709 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1710 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1711 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1712
1713 #ifdef TARGET_MEM_FUNCTIONS
1714 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1715 #else
1716 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1717 TREE_UNSIGNED (integer_type_node));
1718 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1719 #endif
1720
1721 #ifdef TARGET_MEM_FUNCTIONS
1722 /* It is incorrect to use the libcall calling conventions to call
1723 memcpy in this context.
1724
1725 This could be a user call to memcpy and the user may wish to
1726 examine the return value from memcpy.
1727
1728 For targets where libcalls and normal calls have different conventions
1729 for returning pointers, we could end up generating incorrect code.
1730
1731 So instead of using a libcall sequence we build up a suitable
1732 CALL_EXPR and expand the call in the normal fashion. */
1733 if (fn == NULL_TREE)
1734 {
1735 tree fntype;
1736
1737 /* This was copied from except.c, I don't know if all this is
1738 necessary in this context or not. */
1739 fn = get_identifier ("memcpy");
1740 push_obstacks_nochange ();
1741 end_temporary_allocation ();
1742 fntype = build_pointer_type (void_type_node);
1743 fntype = build_function_type (fntype, NULL_TREE);
1744 fn = build_decl (FUNCTION_DECL, fn, fntype);
1745 ggc_add_tree_root (&fn, 1);
1746 DECL_EXTERNAL (fn) = 1;
1747 TREE_PUBLIC (fn) = 1;
1748 DECL_ARTIFICIAL (fn) = 1;
1749 make_decl_rtl (fn, NULL_PTR, 1);
1750 assemble_external (fn);
1751 pop_obstacks ();
1752 }
1753
1754 /* We need to make an argument list for the function call.
1755
1756 memcpy has three arguments, the first two are void * addresses and
1757 the last is a size_t byte count for the copy. */
1758 arg_list
1759 = build_tree_list (NULL_TREE,
1760 make_tree (build_pointer_type (void_type_node), x));
1761 TREE_CHAIN (arg_list)
1762 = build_tree_list (NULL_TREE,
1763 make_tree (build_pointer_type (void_type_node), y));
1764 TREE_CHAIN (TREE_CHAIN (arg_list))
1765 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1766 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1767
1768 /* Now we have to build up the CALL_EXPR itself. */
1769 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1770 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1771 call_expr, arg_list, NULL_TREE);
1772 TREE_SIDE_EFFECTS (call_expr) = 1;
1773
1774 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1775 #else
1776 emit_library_call (bcopy_libfunc, 0,
1777 VOIDmode, 3, y, Pmode, x, Pmode,
1778 convert_to_mode (TYPE_MODE (integer_type_node), size,
1779 TREE_UNSIGNED (integer_type_node)),
1780 TYPE_MODE (integer_type_node));
1781 #endif
1782 }
1783
1784 return retval;
1785 }
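/* Editorial sketch, not part of the original file: how a caller hands two
   BLKmode MEMs to emit_block_move.  DST_ADDR and SRC_ADDR are hypothetical
   address rtx's, and the size and alignment are made up for illustration.  */
#if 0
  {
    rtx dst_mem = gen_rtx_MEM (BLKmode, dst_addr);
    rtx src_mem = gen_rtx_MEM (BLKmode, src_addr);

    /* Copy 24 bytes, promising only byte alignment; emit_block_move picks
       move_by_pieces, a movstr pattern, or a memcpy/bcopy call.  */
    emit_block_move (dst_mem, src_mem, GEN_INT (24), BITS_PER_UNIT);
  }
#endif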
1786 \f
1787 /* Copy all or part of a value X into registers starting at REGNO.
1788 The number of registers to be filled is NREGS. */
1789
1790 void
1791 move_block_to_reg (regno, x, nregs, mode)
1792 int regno;
1793 rtx x;
1794 int nregs;
1795 enum machine_mode mode;
1796 {
1797 int i;
1798 #ifdef HAVE_load_multiple
1799 rtx pat;
1800 rtx last;
1801 #endif
1802
1803 if (nregs == 0)
1804 return;
1805
1806 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1807 x = validize_mem (force_const_mem (mode, x));
1808
1809 /* See if the machine can do this with a load multiple insn. */
1810 #ifdef HAVE_load_multiple
1811 if (HAVE_load_multiple)
1812 {
1813 last = get_last_insn ();
1814 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1815 GEN_INT (nregs));
1816 if (pat)
1817 {
1818 emit_insn (pat);
1819 return;
1820 }
1821 else
1822 delete_insns_since (last);
1823 }
1824 #endif
1825
1826 for (i = 0; i < nregs; i++)
1827 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1828 operand_subword_force (x, i, mode));
1829 }
1830
1831 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1832 The number of registers to be filled is NREGS. SIZE indicates the number
1833 of bytes in the object X. */
1834
1835 void
1836 move_block_from_reg (regno, x, nregs, size)
1837 int regno;
1838 rtx x;
1839 int nregs;
1840 int size;
1841 {
1842 int i;
1843 #ifdef HAVE_store_multiple
1844 rtx pat;
1845 rtx last;
1846 #endif
1847 enum machine_mode mode;
1848
1849 /* If SIZE is that of a mode no bigger than a word, just use that
1850 mode's store operation. */
1851 if (size <= UNITS_PER_WORD
1852 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1853 {
1854 emit_move_insn (change_address (x, mode, NULL),
1855 gen_rtx_REG (mode, regno));
1856 return;
1857 }
1858
1859 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1860 to the left before storing to memory. Note that the previous test
1861 doesn't handle all cases (e.g. SIZE == 3). */
1862 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1863 {
1864 rtx tem = operand_subword (x, 0, 1, BLKmode);
1865 rtx shift;
1866
1867 if (tem == 0)
1868 abort ();
1869
1870 shift = expand_shift (LSHIFT_EXPR, word_mode,
1871 gen_rtx_REG (word_mode, regno),
1872 build_int_2 ((UNITS_PER_WORD - size)
1873 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1874 emit_move_insn (tem, shift);
1875 return;
1876 }
1877
1878 /* See if the machine can do this with a store multiple insn. */
1879 #ifdef HAVE_store_multiple
1880 if (HAVE_store_multiple)
1881 {
1882 last = get_last_insn ();
1883 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1884 GEN_INT (nregs));
1885 if (pat)
1886 {
1887 emit_insn (pat);
1888 return;
1889 }
1890 else
1891 delete_insns_since (last);
1892 }
1893 #endif
1894
1895 for (i = 0; i < nregs; i++)
1896 {
1897 rtx tem = operand_subword (x, i, 1, BLKmode);
1898
1899 if (tem == 0)
1900 abort ();
1901
1902 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1903 }
1904 }
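/* Editorial note, not part of the original file: a concrete instance of
   the shift above.  With SIZE == 3, UNITS_PER_WORD == 4 and
   BYTES_BIG_ENDIAN, the three meaningful bytes occupy the low-order end
   of the register.  Shifting left by (4 - 3) * BITS_PER_UNIT = 8 bits
   moves them to the high-order end, so that storing the full word places
   them in the first three bytes of X, as memory layout requires.  */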
1905
1906 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1907 registers represented by a PARALLEL. SSIZE represents the total size of
1908 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1909 SRC in bits. */
1910 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1911 the balance will be in what would be the low-order memory addresses, i.e.
1912 left justified for big endian, right justified for little endian. This
1913 happens to be true for the targets currently using this support. If this
1914 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1915 would be needed. */
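/* For illustration (hypothetical register numbers): a two-register DST
   describing a 16-byte block on a 64-bit target might look like

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   i.e. each EXPR_LIST pairs a destination register with its byte offset
   within SRC.  */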
1916
1917 void
1918 emit_group_load (dst, orig_src, ssize, align)
1919 rtx dst, orig_src;
1920 unsigned int align;
1921 int ssize;
1922 {
1923 rtx *tmps, src;
1924 int start, i;
1925
1926 if (GET_CODE (dst) != PARALLEL)
1927 abort ();
1928
1929 /* Check for a NULL entry, used to indicate that the parameter goes
1930 both on the stack and in registers. */
1931 if (XEXP (XVECEXP (dst, 0, 0), 0))
1932 start = 0;
1933 else
1934 start = 1;
1935
1936 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1937
1938 /* If we won't be loading directly from memory, protect the real source
1939 from strange tricks we might play. */
1940 src = orig_src;
1941 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1942 {
1943 if (GET_MODE (src) == VOIDmode)
1944 src = gen_reg_rtx (GET_MODE (dst));
1945 else
1946 src = gen_reg_rtx (GET_MODE (orig_src));
1947 emit_move_insn (src, orig_src);
1948 }
1949
1950 /* Process the pieces. */
1951 for (i = start; i < XVECLEN (dst, 0); i++)
1952 {
1953 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1954 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1955 unsigned int bytelen = GET_MODE_SIZE (mode);
1956 int shift = 0;
1957
1958 /* Handle trailing fragments that run over the size of the struct. */
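      /* For instance (illustrative numbers only): an SImode piece at BYTEPOS 4
	 with SSIZE 6 has only 2 bytes of real data, so BYTELEN becomes 2 and,
	 on a big-endian target, SHIFT becomes (4 - 2) * 8 == 16 bits.  */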
1959 if (ssize >= 0 && bytepos + bytelen > ssize)
1960 {
1961 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1962 bytelen = ssize - bytepos;
1963 if (bytelen <= 0)
1964 abort ();
1965 }
1966
1967 /* Optimize the access just a bit. */
1968 if (GET_CODE (src) == MEM
1969 && align >= GET_MODE_ALIGNMENT (mode)
1970 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1971 && bytelen == GET_MODE_SIZE (mode))
1972 {
1973 tmps[i] = gen_reg_rtx (mode);
1974 emit_move_insn (tmps[i],
1975 change_address (src, mode,
1976 plus_constant (XEXP (src, 0),
1977 bytepos)));
1978 }
1979 else if (GET_CODE (src) == CONCAT)
1980 {
1981 if (bytepos == 0
1982 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1983 tmps[i] = XEXP (src, 0);
1984 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1985 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1986 tmps[i] = XEXP (src, 1);
1987 else
1988 abort ();
1989 }
1990 else if ((CONSTANT_P (src)
1991 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
1992 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1993 tmps[i] = src;
1994 else
1995 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1996 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1997 mode, mode, align, ssize);
1998
1999 if (BYTES_BIG_ENDIAN && shift)
2000 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2001 tmps[i], 0, OPTAB_WIDEN);
2002 }
2003
2004 emit_queue ();
2005
2006 /* Copy the extracted pieces into the proper (probable) hard regs. */
2007 for (i = start; i < XVECLEN (dst, 0); i++)
2008 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2009 }
2010
2011 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2012 registers represented by a PARALLEL. SSIZE represents the total size of
2013 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2014
2015 void
2016 emit_group_store (orig_dst, src, ssize, align)
2017 rtx orig_dst, src;
2018 int ssize;
2019 unsigned int align;
2020 {
2021 rtx *tmps, dst;
2022 int start, i;
2023
2024 if (GET_CODE (src) != PARALLEL)
2025 abort ();
2026
2027 /* Check for a NULL entry, used to indicate that the parameter goes
2028 both on the stack and in registers. */
2029 if (XEXP (XVECEXP (src, 0, 0), 0))
2030 start = 0;
2031 else
2032 start = 1;
2033
2034 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2035
2036 /* Copy the (probable) hard regs into pseudos. */
2037 for (i = start; i < XVECLEN (src, 0); i++)
2038 {
2039 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2040 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2041 emit_move_insn (tmps[i], reg);
2042 }
2043 emit_queue ();
2044
2045 /* If we won't be storing directly into memory, protect the real destination
2046 from strange tricks we might play. */
2047 dst = orig_dst;
2048 if (GET_CODE (dst) == PARALLEL)
2049 {
2050 rtx temp;
2051
2052 /* We can get a PARALLEL dst if there is a conditional expression in
2053 a return statement. In that case, the dst and src are the same,
2054 so no action is necessary. */
2055 if (rtx_equal_p (dst, src))
2056 return;
2057
2058 /* It is unclear if we can ever reach here, but we may as well handle
2059 it. Allocate a temporary, and split this into a store/load to/from
2060 the temporary. */
2061
2062 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2063 emit_group_store (temp, src, ssize, align);
2064 emit_group_load (dst, temp, ssize, align);
2065 return;
2066 }
2067 else if (GET_CODE (dst) != MEM)
2068 {
2069 dst = gen_reg_rtx (GET_MODE (orig_dst));
2070 /* Make life a bit easier for combine. */
2071 emit_move_insn (dst, const0_rtx);
2072 }
2073
2074 /* Process the pieces. */
2075 for (i = start; i < XVECLEN (src, 0); i++)
2076 {
2077 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2078 enum machine_mode mode = GET_MODE (tmps[i]);
2079 unsigned int bytelen = GET_MODE_SIZE (mode);
2080
2081 /* Handle trailing fragments that run over the size of the struct. */
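      /* For instance (illustrative numbers only): an SImode piece at BYTEPOS 4
	 with SSIZE 6 keeps only 2 meaningful bytes; on a big-endian target they
	 are first shifted down from the high end of the register by
	 (4 - 2) * 8 == 16 bits.  */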
2082 if (ssize >= 0 && bytepos + bytelen > ssize)
2083 {
2084 if (BYTES_BIG_ENDIAN)
2085 {
2086 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2087 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2088 tmps[i], 0, OPTAB_WIDEN);
2089 }
2090 bytelen = ssize - bytepos;
2091 }
2092
2093 /* Optimize the access just a bit. */
2094 if (GET_CODE (dst) == MEM
2095 && align >= GET_MODE_ALIGNMENT (mode)
2096 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2097 && bytelen == GET_MODE_SIZE (mode))
2098 emit_move_insn (change_address (dst, mode,
2099 plus_constant (XEXP (dst, 0),
2100 bytepos)),
2101 tmps[i]);
2102 else
2103 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2104 mode, tmps[i], align, ssize);
2105 }
2106
2107 emit_queue ();
2108
2109 /* Copy from the pseudo into the (probable) hard reg. */
2110 if (GET_CODE (dst) == REG)
2111 emit_move_insn (orig_dst, dst);
2112 }
2113
2114 /* Generate code to copy a BLKmode object of TYPE out of a
2115 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2116 is null, a stack temporary is created. TGTBLK is returned.
2117
2118 The primary purpose of this routine is to handle functions
2119 that return BLKmode structures in registers. Some machines
2120 (the PA for example) want to return all small structures
2121 in registers regardless of the structure's alignment. */
2122
2123 rtx
2124 copy_blkmode_from_reg (tgtblk, srcreg, type)
2125 rtx tgtblk;
2126 rtx srcreg;
2127 tree type;
2128 {
2129 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2130 rtx src = NULL, dst = NULL;
2131 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2132 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2133
2134 if (tgtblk == 0)
2135 {
2136 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2137 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2138 preserve_temp_slots (tgtblk);
2139 }
2140
2141 /* This code assumes srcreg is at least a full word. If it isn't,
2142 copy it into a new pseudo which is a full word. */
2143 if (GET_MODE (srcreg) != BLKmode
2144 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2145 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2146
2147 /* Structures whose size is not a multiple of a word are aligned
2148 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2149 machine, this means we must skip the empty high order bytes when
2150 calculating the bit offset. */
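  /* For example (illustrative sizes only): with 32-bit words and a 6-byte
     structure, bytes % UNITS_PER_WORD == 2, so the correction below is
     32 - 2 * 8 == 16 bits.  */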
2151 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2152 big_endian_correction
2153 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2154
2155 /* Copy the structure BITSIZE bits at a time.
2156
2157 We could probably emit more efficient code for machines which do not use
2158 strict alignment, but it doesn't seem worth the effort at the current
2159 time. */
2160 for (bitpos = 0, xbitpos = big_endian_correction;
2161 bitpos < bytes * BITS_PER_UNIT;
2162 bitpos += bitsize, xbitpos += bitsize)
2163 {
2164 /* We need a new source operand each time xbitpos is on a
2165 word boundary and when xbitpos == big_endian_correction
2166 (the first time through). */
2167 if (xbitpos % BITS_PER_WORD == 0
2168 || xbitpos == big_endian_correction)
2169 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2170
2171 /* We need a new destination operand each time bitpos is on
2172 a word boundary. */
2173 if (bitpos % BITS_PER_WORD == 0)
2174 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2175
2176 /* Use xbitpos for the source extraction (right justified) and
2177 bitpos for the destination store (left justified). */
2178 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2179 extract_bit_field (src, bitsize,
2180 xbitpos % BITS_PER_WORD, 1,
2181 NULL_RTX, word_mode, word_mode,
2182 bitsize, BITS_PER_WORD),
2183 bitsize, BITS_PER_WORD);
2184 }
2185
2186 return tgtblk;
2187 }
2188
2189 /* Add a USE expression for REG to the (possibly empty) list pointed
2190 to by CALL_FUSAGE. REG must denote a hard register. */
2191
2192 void
2193 use_reg (call_fusage, reg)
2194 rtx *call_fusage, reg;
2195 {
2196 if (GET_CODE (reg) != REG
2197 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2198 abort ();
2199
2200 *call_fusage
2201 = gen_rtx_EXPR_LIST (VOIDmode,
2202 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2203 }
2204
2205 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2206 starting at REGNO. All of these registers must be hard registers. */
2207
2208 void
2209 use_regs (call_fusage, regno, nregs)
2210 rtx *call_fusage;
2211 int regno;
2212 int nregs;
2213 {
2214 int i;
2215
2216 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2217 abort ();
2218
2219 for (i = 0; i < nregs; i++)
2220 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2221 }
2222
2223 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2224 PARALLEL REGS. This is for calls that pass values in multiple
2225 non-contiguous locations. The Irix 6 ABI has examples of this. */
2226
2227 void
2228 use_group_regs (call_fusage, regs)
2229 rtx *call_fusage;
2230 rtx regs;
2231 {
2232 int i;
2233
2234 for (i = 0; i < XVECLEN (regs, 0); i++)
2235 {
2236 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2237
2238 /* A NULL entry means the parameter goes both on the stack and in
2239 registers. This can also be a MEM for targets that pass values
2240 partially on the stack and partially in registers. */
2241 if (reg != 0 && GET_CODE (reg) == REG)
2242 use_reg (call_fusage, reg);
2243 }
2244 }
2245 \f
2246 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2247 rtx with BLKmode). The caller must pass TO through protect_from_queue
2248 before calling. ALIGN is maximum alignment we can assume. */
2249
2250 static void
2251 clear_by_pieces (to, len, align)
2252 rtx to;
2253 unsigned HOST_WIDE_INT len;
2254 unsigned int align;
2255 {
2256 struct clear_by_pieces data;
2257 rtx to_addr = XEXP (to, 0);
2258 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2259 enum machine_mode mode = VOIDmode, tmode;
2260 enum insn_code icode;
2261
2262 data.offset = 0;
2263 data.to_addr = to_addr;
2264 data.to = to;
2265 data.autinc_to
2266 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2267 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2268
2269 data.explicit_inc_to = 0;
2270 data.reverse
2271 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2272 if (data.reverse)
2273 data.offset = len;
2274 data.len = len;
2275
2276 /* If clearing requires more than two move insns,
2277 copy addresses to registers (to make displacements shorter)
2278 and use post-increment if available. */
2279 if (!data.autinc_to
2280 && move_by_pieces_ninsns (len, align) > 2)
2281 {
2282 /* Determine the main mode we'll be using. */
2283 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2284 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2285 if (GET_MODE_SIZE (tmode) < max_size)
2286 mode = tmode;
2287
2288 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2289 {
2290 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2291 data.autinc_to = 1;
2292 data.explicit_inc_to = -1;
2293 }
2294
2295 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2296 && ! data.autinc_to)
2297 {
2298 data.to_addr = copy_addr_to_reg (to_addr);
2299 data.autinc_to = 1;
2300 data.explicit_inc_to = 1;
2301 }
2302
2303 if ( !data.autinc_to && CONSTANT_P (to_addr))
2304 data.to_addr = copy_addr_to_reg (to_addr);
2305 }
2306
2307 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2308 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2309 align = MOVE_MAX * BITS_PER_UNIT;
2310
2311 /* First move what we can in the largest integer mode, then go to
2312 successively smaller modes. */
2313
2314 while (max_size > 1)
2315 {
2316 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2317 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2318 if (GET_MODE_SIZE (tmode) < max_size)
2319 mode = tmode;
2320
2321 if (mode == VOIDmode)
2322 break;
2323
2324 icode = mov_optab->handlers[(int) mode].insn_code;
2325 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2326 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2327
2328 max_size = GET_MODE_SIZE (mode);
2329 }
2330
2331 /* The code above should have handled everything. */
2332 if (data.len != 0)
2333 abort ();
2334 }
2335
2336 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2337 with move instructions for mode MODE. GENFUN is the gen_... function
2338 to make a move insn for that mode. DATA has all the other info. */
2339
2340 static void
2341 clear_by_pieces_1 (genfun, mode, data)
2342 rtx (*genfun) PARAMS ((rtx, ...));
2343 enum machine_mode mode;
2344 struct clear_by_pieces *data;
2345 {
2346 unsigned int size = GET_MODE_SIZE (mode);
2347 rtx to1;
2348
2349 while (data->len >= size)
2350 {
2351 if (data->reverse)
2352 data->offset -= size;
2353
2354 if (data->autinc_to)
2355 {
2356 to1 = gen_rtx_MEM (mode, data->to_addr);
2357 MEM_COPY_ATTRIBUTES (to1, data->to);
2358 }
2359 else
2360 to1 = change_address (data->to, mode,
2361 plus_constant (data->to_addr, data->offset));
2362
2363 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2364 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2365
2366 emit_insn ((*genfun) (to1, const0_rtx));
2367
2368 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2369 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2370
2371 if (! data->reverse)
2372 data->offset += size;
2373
2374 data->len -= size;
2375 }
2376 }
2377 \f
2378 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2379 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2380
2381 If we call a function that returns the length of the block, return it. */
2382
2383 rtx
2384 clear_storage (object, size, align)
2385 rtx object;
2386 rtx size;
2387 unsigned int align;
2388 {
2389 #ifdef TARGET_MEM_FUNCTIONS
2390 static tree fn;
2391 tree call_expr, arg_list;
2392 #endif
2393 rtx retval = 0;
2394
2395 if (GET_MODE (object) == BLKmode)
2396 {
2397 object = protect_from_queue (object, 1);
2398 size = protect_from_queue (size, 0);
2399
2400 if (GET_CODE (size) == CONST_INT
2401 && MOVE_BY_PIECES_P (INTVAL (size), align))
2402 clear_by_pieces (object, INTVAL (size), align);
2403 else
2404 {
2405 /* Try the most limited insn first, because there's no point
2406 including more than one in the machine description unless
2407 the more limited one has some advantage. */
2408
2409 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2410 enum machine_mode mode;
2411
2412 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2413 mode = GET_MODE_WIDER_MODE (mode))
2414 {
2415 enum insn_code code = clrstr_optab[(int) mode];
2416 insn_operand_predicate_fn pred;
2417
2418 if (code != CODE_FOR_nothing
2419 /* We don't need MODE to be narrower than
2420 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2421 the mode mask, as it is returned by the macro, it will
2422 definitely be less than the actual mode mask. */
2423 && ((GET_CODE (size) == CONST_INT
2424 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2425 <= (GET_MODE_MASK (mode) >> 1)))
2426 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2427 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2428 || (*pred) (object, BLKmode))
2429 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2430 || (*pred) (opalign, VOIDmode)))
2431 {
2432 rtx op1;
2433 rtx last = get_last_insn ();
2434 rtx pat;
2435
2436 op1 = convert_to_mode (mode, size, 1);
2437 pred = insn_data[(int) code].operand[1].predicate;
2438 if (pred != 0 && ! (*pred) (op1, mode))
2439 op1 = copy_to_mode_reg (mode, op1);
2440
2441 pat = GEN_FCN ((int) code) (object, op1, opalign);
2442 if (pat)
2443 {
2444 emit_insn (pat);
2445 return 0;
2446 }
2447 else
2448 delete_insns_since (last);
2449 }
2450 }
2451
2452 /* OBJECT or SIZE may have been passed through protect_from_queue.
2453
2454 It is unsafe to save the value generated by protect_from_queue
2455 and reuse it later. Consider what happens if emit_queue is
2456 called before the return value from protect_from_queue is used.
2457
2458 Expansion of the CALL_EXPR below will call emit_queue before
2459 we are finished emitting RTL for argument setup. So if we are
2460 not careful we could get the wrong value for an argument.
2461
2462 To avoid this problem we go ahead and emit code to copy OBJECT
2463 and SIZE into new pseudos. We can then place those new pseudos
2464 into an RTL_EXPR and use them later, even after a call to
2465 emit_queue.
2466
2467 Note this is not strictly needed for library calls since they
2468 do not call emit_queue before loading their arguments. However,
2469 we may need to have library calls call emit_queue in the future
2470 since failing to do so could cause problems for targets which
2471 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2472 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2473
2474 #ifdef TARGET_MEM_FUNCTIONS
2475 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2476 #else
2477 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2478 TREE_UNSIGNED (integer_type_node));
2479 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2480 #endif
2481
2482 #ifdef TARGET_MEM_FUNCTIONS
2483 /* It is incorrect to use the libcall calling conventions to call
2484 memset in this context.
2485
2486 This could be a user call to memset and the user may wish to
2487 examine the return value from memset.
2488
2489 For targets where libcalls and normal calls have different
2490 conventions for returning pointers, we could end up generating
2491 incorrect code.
2492
2493 So instead of using a libcall sequence we build up a suitable
2494 CALL_EXPR and expand the call in the normal fashion. */
2495 if (fn == NULL_TREE)
2496 {
2497 tree fntype;
2498
2499 /* This was copied from except.c; I don't know if all of this is
2500 necessary in this context or not. */
2501 fn = get_identifier ("memset");
2502 push_obstacks_nochange ();
2503 end_temporary_allocation ();
2504 fntype = build_pointer_type (void_type_node);
2505 fntype = build_function_type (fntype, NULL_TREE);
2506 fn = build_decl (FUNCTION_DECL, fn, fntype);
2507 ggc_add_tree_root (&fn, 1);
2508 DECL_EXTERNAL (fn) = 1;
2509 TREE_PUBLIC (fn) = 1;
2510 DECL_ARTIFICIAL (fn) = 1;
2511 make_decl_rtl (fn, NULL_PTR, 1);
2512 assemble_external (fn);
2513 pop_obstacks ();
2514 }
2515
2516 /* We need to make an argument list for the function call.
2517
2518 memset has three arguments, the first is a void * address, the
2519 second an integer with the initialization value, the last is a
2520 size_t byte count. */
2521 arg_list
2522 = build_tree_list (NULL_TREE,
2523 make_tree (build_pointer_type (void_type_node),
2524 object));
2525 TREE_CHAIN (arg_list)
2526 = build_tree_list (NULL_TREE,
2527 make_tree (integer_type_node, const0_rtx));
2528 TREE_CHAIN (TREE_CHAIN (arg_list))
2529 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2530 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2531
2532 /* Now we have to build up the CALL_EXPR itself. */
2533 call_expr = build1 (ADDR_EXPR,
2534 build_pointer_type (TREE_TYPE (fn)), fn);
2535 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2536 call_expr, arg_list, NULL_TREE);
2537 TREE_SIDE_EFFECTS (call_expr) = 1;
2538
2539 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2540 #else
2541 emit_library_call (bzero_libfunc, 0,
2542 VOIDmode, 2, object, Pmode, size,
2543 TYPE_MODE (integer_type_node));
2544 #endif
2545 }
2546 }
2547 else
2548 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2549
2550 return retval;
2551 }
2552
2553 /* Generate code to copy Y into X.
2554 Both Y and X must have the same mode, except that
2555 Y can be a constant with VOIDmode.
2556 This mode cannot be BLKmode; use emit_block_move for that.
2557
2558 Return the last instruction emitted. */
2559
2560 rtx
2561 emit_move_insn (x, y)
2562 rtx x, y;
2563 {
2564 enum machine_mode mode = GET_MODE (x);
2565
2566 x = protect_from_queue (x, 1);
2567 y = protect_from_queue (y, 0);
2568
2569 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2570 abort ();
2571
2572 /* Never force constant_p_rtx to memory. */
2573 if (GET_CODE (y) == CONSTANT_P_RTX)
2574 ;
2575 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2576 y = force_const_mem (mode, y);
2577
2578 /* If X or Y are memory references, verify that their addresses are valid
2579 for the machine. */
2580 if (GET_CODE (x) == MEM
2581 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2582 && ! push_operand (x, GET_MODE (x)))
2583 || (flag_force_addr
2584 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2585 x = change_address (x, VOIDmode, XEXP (x, 0));
2586
2587 if (GET_CODE (y) == MEM
2588 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2589 || (flag_force_addr
2590 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2591 y = change_address (y, VOIDmode, XEXP (y, 0));
2592
2593 if (mode == BLKmode)
2594 abort ();
2595
2596 return emit_move_insn_1 (x, y);
2597 }
2598
2599 /* Low level part of emit_move_insn.
2600 Called just like emit_move_insn, but assumes X and Y
2601 are basically valid. */
2602
2603 rtx
2604 emit_move_insn_1 (x, y)
2605 rtx x, y;
2606 {
2607 enum machine_mode mode = GET_MODE (x);
2608 enum machine_mode submode;
2609 enum mode_class class = GET_MODE_CLASS (mode);
2610 unsigned int i;
2611
2612 if (mode >= MAX_MACHINE_MODE)
2613 abort ();
2614
2615 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2616 return
2617 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2618
2619 /* Expand complex moves by moving real part and imag part, if possible. */
2620 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2621 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2622 * BITS_PER_UNIT),
2623 (class == MODE_COMPLEX_INT
2624 ? MODE_INT : MODE_FLOAT),
2625 0))
2626 && (mov_optab->handlers[(int) submode].insn_code
2627 != CODE_FOR_nothing))
2628 {
2629 /* Don't split destination if it is a stack push. */
2630 int stack = push_operand (x, GET_MODE (x));
2631
2632 /* If this is a stack push, push the highpart first, so it
2633 will be in the argument order.
2634
2635 In that case, change_address is used only to convert
2636 the mode, not to change the address. */
2637 if (stack)
2638 {
2639 /* Note that the real part always precedes the imag part in memory
2640 regardless of machine's endianness. */
2641 #ifdef STACK_GROWS_DOWNWARD
2642 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2643 (gen_rtx_MEM (submode, XEXP (x, 0)),
2644 gen_imagpart (submode, y)));
2645 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2646 (gen_rtx_MEM (submode, XEXP (x, 0)),
2647 gen_realpart (submode, y)));
2648 #else
2649 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2650 (gen_rtx_MEM (submode, XEXP (x, 0)),
2651 gen_realpart (submode, y)));
2652 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2653 (gen_rtx_MEM (submode, XEXP (x, 0)),
2654 gen_imagpart (submode, y)));
2655 #endif
2656 }
2657 else
2658 {
2659 rtx realpart_x, realpart_y;
2660 rtx imagpart_x, imagpart_y;
2661
2662 /* If this is a complex value with each part being smaller than a
2663 word, the usual calling sequence will likely pack the pieces into
2664 a single register. Unfortunately, SUBREG of hard registers only
2665 deals in terms of words, so we have a problem converting input
2666 arguments to the CONCAT of two registers that is used elsewhere
2667 for complex values. If this is before reload, we can copy it into
2668 memory and reload. FIXME, we should see about using extract and
2669 insert on integer registers, but complex short and complex char
2670 variables should be rarely used. */
2671 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2672 && (reload_in_progress | reload_completed) == 0)
2673 {
2674 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2675 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2676
2677 if (packed_dest_p || packed_src_p)
2678 {
2679 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2680 ? MODE_FLOAT : MODE_INT);
2681
2682 enum machine_mode reg_mode =
2683 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2684
2685 if (reg_mode != BLKmode)
2686 {
2687 rtx mem = assign_stack_temp (reg_mode,
2688 GET_MODE_SIZE (mode), 0);
2689
2690 rtx cmem = change_address (mem, mode, NULL_RTX);
2691
2692 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2693
2694 if (packed_dest_p)
2695 {
2696 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2697 emit_move_insn_1 (cmem, y);
2698 return emit_move_insn_1 (sreg, mem);
2699 }
2700 else
2701 {
2702 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2703 emit_move_insn_1 (mem, sreg);
2704 return emit_move_insn_1 (x, cmem);
2705 }
2706 }
2707 }
2708 }
2709
2710 realpart_x = gen_realpart (submode, x);
2711 realpart_y = gen_realpart (submode, y);
2712 imagpart_x = gen_imagpart (submode, x);
2713 imagpart_y = gen_imagpart (submode, y);
2714
2715 /* Show the output dies here. This is necessary for SUBREGs
2716 of pseudos since we cannot track their lifetimes correctly;
2717 hard regs shouldn't appear here except as return values.
2718 We never want to emit such a clobber after reload. */
2719 if (x != y
2720 && ! (reload_in_progress || reload_completed)
2721 && (GET_CODE (realpart_x) == SUBREG
2722 || GET_CODE (imagpart_x) == SUBREG))
2723 {
2724 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2725 }
2726
2727 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2728 (realpart_x, realpart_y));
2729 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2730 (imagpart_x, imagpart_y));
2731 }
2732
2733 return get_last_insn ();
2734 }
2735
2736 /* This will handle any multi-word mode that lacks a move_insn pattern.
2737 However, you will get better code if you define such patterns,
2738 even if they must turn into multiple assembler instructions. */
2739 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2740 {
2741 rtx last_insn = 0;
2742 rtx seq, inner;
2743 int need_clobber;
2744
2745 #ifdef PUSH_ROUNDING
2746
2747 /* If X is a push on the stack, do the push now and replace
2748 X with a reference to the stack pointer. */
2749 if (push_operand (x, GET_MODE (x)))
2750 {
2751 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2752 x = change_address (x, VOIDmode, stack_pointer_rtx);
2753 }
2754 #endif
2755
2756 /* If we are in reload, see if either operand is a MEM whose address
2757 is scheduled for replacement. */
2758 if (reload_in_progress && GET_CODE (x) == MEM
2759 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2760 {
2761 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2762
2763 MEM_COPY_ATTRIBUTES (new, x);
2764 x = new;
2765 }
2766 if (reload_in_progress && GET_CODE (y) == MEM
2767 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2768 {
2769 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2770
2771 MEM_COPY_ATTRIBUTES (new, y);
2772 y = new;
2773 }
2774
2775 start_sequence ();
2776
2777 need_clobber = 0;
2778 for (i = 0;
2779 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2780 i++)
2781 {
2782 rtx xpart = operand_subword (x, i, 1, mode);
2783 rtx ypart = operand_subword (y, i, 1, mode);
2784
2785 /* If we can't get a part of Y, put Y into memory if it is a
2786 constant. Otherwise, force it into a register. If we still
2787 can't get a part of Y, abort. */
2788 if (ypart == 0 && CONSTANT_P (y))
2789 {
2790 y = force_const_mem (mode, y);
2791 ypart = operand_subword (y, i, 1, mode);
2792 }
2793 else if (ypart == 0)
2794 ypart = operand_subword_force (y, i, mode);
2795
2796 if (xpart == 0 || ypart == 0)
2797 abort ();
2798
2799 need_clobber |= (GET_CODE (xpart) == SUBREG);
2800
2801 last_insn = emit_move_insn (xpart, ypart);
2802 }
2803
2804 seq = gen_sequence ();
2805 end_sequence ();
2806
2807 /* Show the output dies here. This is necessary for SUBREGs
2808 of pseudos since we cannot track their lifetimes correctly;
2809 hard regs shouldn't appear here except as return values.
2810 We never want to emit such a clobber after reload. */
2811 if (x != y
2812 && ! (reload_in_progress || reload_completed)
2813 && need_clobber != 0)
2814 {
2815 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2816 }
2817
2818 emit_insn (seq);
2819
2820 return last_insn;
2821 }
2822 else
2823 abort ();
2824 }
2825 \f
2826 /* Pushing data onto the stack. */
2827
2828 /* Push a block of length SIZE (perhaps variable)
2829 and return an rtx to address the beginning of the block.
2830 Note that it is not possible for the value returned to be a QUEUED.
2831 The value may be virtual_outgoing_args_rtx.
2832
2833 EXTRA is the number of bytes of padding to push in addition to SIZE.
2834 BELOW nonzero means this padding comes at low addresses;
2835 otherwise, the padding comes at high addresses. */
2836
2837 rtx
2838 push_block (size, extra, below)
2839 rtx size;
2840 int extra, below;
2841 {
2842 register rtx temp;
2843
2844 size = convert_modes (Pmode, ptr_mode, size, 1);
2845 if (CONSTANT_P (size))
2846 anti_adjust_stack (plus_constant (size, extra));
2847 else if (GET_CODE (size) == REG && extra == 0)
2848 anti_adjust_stack (size);
2849 else
2850 {
2851 temp = copy_to_mode_reg (Pmode, size);
2852 if (extra != 0)
2853 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2854 temp, 0, OPTAB_LIB_WIDEN);
2855 anti_adjust_stack (temp);
2856 }
2857
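  /* The condition built by the #ifdef maze below is true when the stack
     grows downward, or when the arguments grow downward and outgoing
     arguments are not accumulated.  */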
2858 #ifndef STACK_GROWS_DOWNWARD
2859 #ifdef ARGS_GROW_DOWNWARD
2860 if (!ACCUMULATE_OUTGOING_ARGS)
2861 #else
2862 if (0)
2863 #endif
2864 #else
2865 if (1)
2866 #endif
2867 {
2868 /* Return the lowest stack address when STACK or ARGS grow downward and
2869 we are not accumulating outgoing arguments (the c4x port uses such
2870 conventions). */
2871 temp = virtual_outgoing_args_rtx;
2872 if (extra != 0 && below)
2873 temp = plus_constant (temp, extra);
2874 }
2875 else
2876 {
2877 if (GET_CODE (size) == CONST_INT)
2878 temp = plus_constant (virtual_outgoing_args_rtx,
2879 -INTVAL (size) - (below ? 0 : extra));
2880 else if (extra != 0 && !below)
2881 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2882 negate_rtx (Pmode, plus_constant (size, extra)));
2883 else
2884 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2885 negate_rtx (Pmode, size));
2886 }
2887
2888 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2889 }
2890
2891 rtx
2892 gen_push_operand ()
2893 {
2894 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2895 }
2896
2897 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2898 block of SIZE bytes. */
2899
2900 static rtx
2901 get_push_address (size)
2902 int size;
2903 {
2904 register rtx temp;
2905
2906 if (STACK_PUSH_CODE == POST_DEC)
2907 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2908 else if (STACK_PUSH_CODE == POST_INC)
2909 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2910 else
2911 temp = stack_pointer_rtx;
2912
2913 return copy_to_reg (temp);
2914 }
2915
2916 /* Generate code to push X onto the stack, assuming it has mode MODE and
2917 type TYPE.
2918 MODE is redundant except when X is a CONST_INT (since they don't
2919 carry mode info).
2920 SIZE is an rtx for the size of data to be copied (in bytes),
2921 needed only if X is BLKmode.
2922
2923 ALIGN is maximum alignment we can assume.
2924
2925 If PARTIAL and REG are both nonzero, then copy that many of the first
2926 words of X into registers starting with REG, and push the rest of X.
2927 The amount of space pushed is decreased by PARTIAL words,
2928 rounded *down* to a multiple of PARM_BOUNDARY.
2929 REG must be a hard register in this case.
2930 If REG is zero but PARTIAL is not, take all other actions for an
2931 argument partially in registers, but do not actually load any
2932 registers.
2933
2934 EXTRA is the amount in bytes of extra space to leave next to this arg.
2935 This is ignored if an argument block has already been allocated.
2936
2937 On a machine that lacks real push insns, ARGS_ADDR is the address of
2938 the bottom of the argument block for this call. We use indexing off there
2939 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2940 argument block has not been preallocated.
2941
2942 ARGS_SO_FAR is the size of args previously pushed for this call.
2943
2944 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2945 for arguments passed in registers. If nonzero, it will be the number
2946 of bytes required. */
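/* (Overview: the body below handles three cases in turn -- a BLKmode block
   pushed or copied to the stack, a scalar passed partly in registers, and
   the ordinary push/store case -- then loads any register part and applies
   the remaining padding.)  */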
2947
2948 void
2949 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2950 args_addr, args_so_far, reg_parm_stack_space,
2951 alignment_pad)
2952 register rtx x;
2953 enum machine_mode mode;
2954 tree type;
2955 rtx size;
2956 unsigned int align;
2957 int partial;
2958 rtx reg;
2959 int extra;
2960 rtx args_addr;
2961 rtx args_so_far;
2962 int reg_parm_stack_space;
2963 rtx alignment_pad;
2964 {
2965 rtx xinner;
2966 enum direction stack_direction
2967 #ifdef STACK_GROWS_DOWNWARD
2968 = downward;
2969 #else
2970 = upward;
2971 #endif
2972
2973 /* Decide where to pad the argument: `downward' for below,
2974 `upward' for above, or `none' for don't pad it.
2975 Default is below for small data on big-endian machines; else above. */
2976 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2977
2978 /* Invert direction if stack is post-update. */
2979 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2980 if (where_pad != none)
2981 where_pad = (where_pad == downward ? upward : downward);
2982
2983 xinner = x = protect_from_queue (x, 0);
2984
2985 if (mode == BLKmode)
2986 {
2987 /* Copy a block into the stack, entirely or partially. */
2988
2989 register rtx temp;
2990 int used = partial * UNITS_PER_WORD;
2991 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2992 int skip;
2993
2994 if (size == 0)
2995 abort ();
2996
2997 used -= offset;
2998
2999 /* USED is now the # of bytes we need not copy to the stack
3000 because registers will take care of them. */
3001
3002 if (partial != 0)
3003 xinner = change_address (xinner, BLKmode,
3004 plus_constant (XEXP (xinner, 0), used));
3005
3006 /* If the partial register-part of the arg counts in its stack size,
3007 skip the part of stack space corresponding to the registers.
3008 Otherwise, start copying to the beginning of the stack space,
3009 by setting SKIP to 0. */
3010 skip = (reg_parm_stack_space == 0) ? 0 : used;
3011
3012 #ifdef PUSH_ROUNDING
3013 /* Do it with several push insns if that doesn't take lots of insns
3014 and if there is no difficulty with push insns that skip bytes
3015 on the stack for alignment purposes. */
3016 if (args_addr == 0
3017 && PUSH_ARGS
3018 && GET_CODE (size) == CONST_INT
3019 && skip == 0
3020 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3021 /* Here we avoid the case of a structure whose weak alignment
3022 forces many pushes of a small amount of data,
3023 and such small pushes do rounding that causes trouble. */
3024 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3025 || align >= BIGGEST_ALIGNMENT
3026 || PUSH_ROUNDING (align) == align)
3027 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3028 {
3029 /* Push padding now if padding above and stack grows down,
3030 or if padding below and stack grows up.
3031 But if space already allocated, this has already been done. */
3032 if (extra && args_addr == 0
3033 && where_pad != none && where_pad != stack_direction)
3034 anti_adjust_stack (GEN_INT (extra));
3035
3036 stack_pointer_delta += INTVAL (size) - used;
3037 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3038 INTVAL (size) - used, align);
3039
3040 if (current_function_check_memory_usage && ! in_check_memory_usage)
3041 {
3042 rtx temp;
3043
3044 in_check_memory_usage = 1;
3045 temp = get_push_address (INTVAL (size) - used);
3046 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3047 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3048 temp, Pmode,
3049 XEXP (xinner, 0), Pmode,
3050 GEN_INT (INTVAL (size) - used),
3051 TYPE_MODE (sizetype));
3052 else
3053 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3054 temp, Pmode,
3055 GEN_INT (INTVAL (size) - used),
3056 TYPE_MODE (sizetype),
3057 GEN_INT (MEMORY_USE_RW),
3058 TYPE_MODE (integer_type_node));
3059 in_check_memory_usage = 0;
3060 }
3061 }
3062 else
3063 #endif /* PUSH_ROUNDING */
3064 {
3065 rtx target;
3066
3067 /* Otherwise make space on the stack and copy the data
3068 to the address of that space. */
3069
3070 /* Deduct words put into registers from the size we must copy. */
3071 if (partial != 0)
3072 {
3073 if (GET_CODE (size) == CONST_INT)
3074 size = GEN_INT (INTVAL (size) - used);
3075 else
3076 size = expand_binop (GET_MODE (size), sub_optab, size,
3077 GEN_INT (used), NULL_RTX, 0,
3078 OPTAB_LIB_WIDEN);
3079 }
3080
3081 /* Get the address of the stack space.
3082 In this case, we do not deal with EXTRA separately.
3083 A single stack adjust will do. */
3084 if (! args_addr)
3085 {
3086 temp = push_block (size, extra, where_pad == downward);
3087 extra = 0;
3088 }
3089 else if (GET_CODE (args_so_far) == CONST_INT)
3090 temp = memory_address (BLKmode,
3091 plus_constant (args_addr,
3092 skip + INTVAL (args_so_far)));
3093 else
3094 temp = memory_address (BLKmode,
3095 plus_constant (gen_rtx_PLUS (Pmode,
3096 args_addr,
3097 args_so_far),
3098 skip));
3099 if (current_function_check_memory_usage && ! in_check_memory_usage)
3100 {
3101 in_check_memory_usage = 1;
3102 target = copy_to_reg (temp);
3103 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3104 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3105 target, Pmode,
3106 XEXP (xinner, 0), Pmode,
3107 size, TYPE_MODE (sizetype));
3108 else
3109 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3110 target, Pmode,
3111 size, TYPE_MODE (sizetype),
3112 GEN_INT (MEMORY_USE_RW),
3113 TYPE_MODE (integer_type_node));
3114 in_check_memory_usage = 0;
3115 }
3116
3117 target = gen_rtx_MEM (BLKmode, temp);
3118
3119 if (type != 0)
3120 {
3121 set_mem_attributes (target, type, 1);
3122 /* Function incoming arguments may overlap with sibling call
3123 outgoing arguments and we cannot allow reordering of reads
3124 from function arguments with stores to outgoing arguments
3125 of sibling calls. */
3126 MEM_ALIAS_SET (target) = 0;
3127 }
3128
3129 /* TEMP is the address of the block. Copy the data there. */
3130 if (GET_CODE (size) == CONST_INT
3131 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3132 {
3133 move_by_pieces (target, xinner, INTVAL (size), align);
3134 goto ret;
3135 }
3136 else
3137 {
3138 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3139 enum machine_mode mode;
3140
3141 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3142 mode != VOIDmode;
3143 mode = GET_MODE_WIDER_MODE (mode))
3144 {
3145 enum insn_code code = movstr_optab[(int) mode];
3146 insn_operand_predicate_fn pred;
3147
3148 if (code != CODE_FOR_nothing
3149 && ((GET_CODE (size) == CONST_INT
3150 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3151 <= (GET_MODE_MASK (mode) >> 1)))
3152 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3153 && (!(pred = insn_data[(int) code].operand[0].predicate)
3154 || ((*pred) (target, BLKmode)))
3155 && (!(pred = insn_data[(int) code].operand[1].predicate)
3156 || ((*pred) (xinner, BLKmode)))
3157 && (!(pred = insn_data[(int) code].operand[3].predicate)
3158 || ((*pred) (opalign, VOIDmode))))
3159 {
3160 rtx op2 = convert_to_mode (mode, size, 1);
3161 rtx last = get_last_insn ();
3162 rtx pat;
3163
3164 pred = insn_data[(int) code].operand[2].predicate;
3165 if (pred != 0 && ! (*pred) (op2, mode))
3166 op2 = copy_to_mode_reg (mode, op2);
3167
3168 pat = GEN_FCN ((int) code) (target, xinner,
3169 op2, opalign);
3170 if (pat)
3171 {
3172 emit_insn (pat);
3173 goto ret;
3174 }
3175 else
3176 delete_insns_since (last);
3177 }
3178 }
3179 }
3180
3181 if (!ACCUMULATE_OUTGOING_ARGS)
3182 {
3183 /* If the source is referenced relative to the stack pointer,
3184 copy it to another register to stabilize it. We do not need
3185 to do this if we know that we won't be changing sp. */
3186
3187 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3188 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3189 temp = copy_to_reg (temp);
3190 }
3191
3192 /* Make inhibit_defer_pop nonzero around the library call
3193 to force it to pop the bcopy-arguments right away. */
3194 NO_DEFER_POP;
3195 #ifdef TARGET_MEM_FUNCTIONS
3196 emit_library_call (memcpy_libfunc, 0,
3197 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3198 convert_to_mode (TYPE_MODE (sizetype),
3199 size, TREE_UNSIGNED (sizetype)),
3200 TYPE_MODE (sizetype));
3201 #else
3202 emit_library_call (bcopy_libfunc, 0,
3203 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3204 convert_to_mode (TYPE_MODE (integer_type_node),
3205 size,
3206 TREE_UNSIGNED (integer_type_node)),
3207 TYPE_MODE (integer_type_node));
3208 #endif
3209 OK_DEFER_POP;
3210 }
3211 }
3212 else if (partial > 0)
3213 {
3214 /* Scalar partly in registers. */
3215
3216 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3217 int i;
3218 int not_stack;
3219 /* # words of start of argument
3220 that we must make space for but need not store. */
3221 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3222 int args_offset = INTVAL (args_so_far);
3223 int skip;
3224
3225 /* Push padding now if padding above and stack grows down,
3226 or if padding below and stack grows up.
3227 But if space already allocated, this has already been done. */
3228 if (extra && args_addr == 0
3229 && where_pad != none && where_pad != stack_direction)
3230 anti_adjust_stack (GEN_INT (extra));
3231
3232 /* If we make space by pushing it, we might as well push
3233 the real data. Otherwise, we can leave OFFSET nonzero
3234 and leave the space uninitialized. */
3235 if (args_addr == 0)
3236 offset = 0;
3237
3238 /* Now NOT_STACK gets the number of words that we don't need to
3239 allocate on the stack. */
3240 not_stack = partial - offset;
3241
3242 /* If the partial register-part of the arg counts in its stack size,
3243 skip the part of stack space corresponding to the registers.
3244 Otherwise, start copying to the beginning of the stack space,
3245 by setting SKIP to 0. */
3246 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3247
3248 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3249 x = validize_mem (force_const_mem (mode, x));
3250
3251 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3252 SUBREGs of such registers are not allowed. */
3253 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3254 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3255 x = copy_to_reg (x);
3256
3257 /* Loop over all the words allocated on the stack for this arg. */
3258 /* We can do it by words, because any scalar bigger than a word
3259 has a size a multiple of a word. */
3260 #ifndef PUSH_ARGS_REVERSED
3261 for (i = not_stack; i < size; i++)
3262 #else
3263 for (i = size - 1; i >= not_stack; i--)
3264 #endif
3265 if (i >= not_stack + offset)
3266 emit_push_insn (operand_subword_force (x, i, mode),
3267 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3268 0, args_addr,
3269 GEN_INT (args_offset + ((i - not_stack + skip)
3270 * UNITS_PER_WORD)),
3271 reg_parm_stack_space, alignment_pad);
3272 }
3273 else
3274 {
3275 rtx addr;
3276 rtx target = NULL_RTX;
3277 rtx dest;
3278
3279 /* Push padding now if padding above and stack grows down,
3280 or if padding below and stack grows up.
3281 But if space already allocated, this has already been done. */
3282 if (extra && args_addr == 0
3283 && where_pad != none && where_pad != stack_direction)
3284 anti_adjust_stack (GEN_INT (extra));
3285
3286 #ifdef PUSH_ROUNDING
3287 if (args_addr == 0 && PUSH_ARGS)
3288 {
3289 addr = gen_push_operand ();
3290 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3291 }
3292 else
3293 #endif
3294 {
3295 if (GET_CODE (args_so_far) == CONST_INT)
3296 addr
3297 = memory_address (mode,
3298 plus_constant (args_addr,
3299 INTVAL (args_so_far)));
3300 else
3301 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3302 args_so_far));
3303 target = addr;
3304 }
3305
3306 dest = gen_rtx_MEM (mode, addr);
3307 if (type != 0)
3308 {
3309 set_mem_attributes (dest, type, 1);
3310 /* Function incoming arguments may overlap with sibling call
3311 outgoing arguments and we cannot allow reordering of reads
3312 from function arguments with stores to outgoing arguments
3313 of sibling calls. */
3314 MEM_ALIAS_SET (dest) = 0;
3315 }
3316
3317 emit_move_insn (dest, x);
3318
3319 if (current_function_check_memory_usage && ! in_check_memory_usage)
3320 {
3321 in_check_memory_usage = 1;
3322 if (target == 0)
3323 target = get_push_address (GET_MODE_SIZE (mode));
3324
3325 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3326 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3327 target, Pmode,
3328 XEXP (x, 0), Pmode,
3329 GEN_INT (GET_MODE_SIZE (mode)),
3330 TYPE_MODE (sizetype));
3331 else
3332 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3333 target, Pmode,
3334 GEN_INT (GET_MODE_SIZE (mode)),
3335 TYPE_MODE (sizetype),
3336 GEN_INT (MEMORY_USE_RW),
3337 TYPE_MODE (integer_type_node));
3338 in_check_memory_usage = 0;
3339 }
3340 }
3341
3342 ret:
3343 /* If part should go in registers, copy that part
3344 into the appropriate registers. Do this now, at the end,
3345 since mem-to-mem copies above may do function calls. */
3346 if (partial > 0 && reg != 0)
3347 {
3348 /* Handle calls that pass values in multiple non-contiguous locations.
3349 The Irix 6 ABI has examples of this. */
3350 if (GET_CODE (reg) == PARALLEL)
3351 emit_group_load (reg, x, -1, align); /* ??? size? */
3352 else
3353 move_block_to_reg (REGNO (reg), x, partial, mode);
3354 }
3355
3356 if (extra && args_addr == 0 && where_pad == stack_direction)
3357 anti_adjust_stack (GEN_INT (extra));
3358
3359 if (alignment_pad && args_addr == 0)
3360 anti_adjust_stack (alignment_pad);
3361 }
3362 \f
3363 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3364 operations. */
3365
3366 static rtx
3367 get_subtarget (x)
3368 rtx x;
3369 {
3370 return ((x == 0
3371 /* Only registers can be subtargets. */
3372 || GET_CODE (x) != REG
3373 /* If the register is readonly, it can't be set more than once. */
3374 || RTX_UNCHANGING_P (x)
3375 /* Don't use hard regs to avoid extending their life. */
3376 || REGNO (x) < FIRST_PSEUDO_REGISTER
3377 /* Avoid subtargets inside loops,
3378 since they hide some invariant expressions. */
3379 || preserve_subexpressions_p ())
3380 ? 0 : x);
3381 }
3382
3383 /* Expand an assignment that stores the value of FROM into TO.
3384 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3385 (This may contain a QUEUED rtx;
3386 if the value is constant, this rtx is a constant.)
3387 Otherwise, the returned value is NULL_RTX.
3388
3389 SUGGEST_REG is no longer actually used.
3390 It used to mean, copy the value through a register
3391 and return that register, if that is possible.
3392 We now use WANT_VALUE to decide whether to do this. */
3393
3394 rtx
3395 expand_assignment (to, from, want_value, suggest_reg)
3396 tree to, from;
3397 int want_value;
3398 int suggest_reg ATTRIBUTE_UNUSED;
3399 {
3400 register rtx to_rtx = 0;
3401 rtx result;
3402
3403 /* Don't crash if the lhs of the assignment was erroneous. */
3404
3405 if (TREE_CODE (to) == ERROR_MARK)
3406 {
3407 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3408 return want_value ? result : NULL_RTX;
3409 }
3410
3411 /* Assignment of a structure component needs special treatment
3412 if the structure component's rtx is not simply a MEM.
3413 Assignment of an array element at a constant index, and assignment of
3414 an array element in an unaligned packed structure field, have the same
3415 problem. */
3416
3417 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3418 || TREE_CODE (to) == ARRAY_REF)
3419 {
3420 enum machine_mode mode1;
3421 HOST_WIDE_INT bitsize, bitpos;
3422 tree offset;
3423 int unsignedp;
3424 int volatilep = 0;
3425 tree tem;
3426 unsigned int alignment;
3427
3428 push_temp_slots ();
3429 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3430 &unsignedp, &volatilep, &alignment);
3431
3432 /* If we are going to use store_bit_field and extract_bit_field,
3433 make sure to_rtx will be safe for multiple use. */
3434
3435 if (mode1 == VOIDmode && want_value)
3436 tem = stabilize_reference (tem);
3437
3438 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3439 if (offset != 0)
3440 {
3441 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3442
3443 if (GET_CODE (to_rtx) != MEM)
3444 abort ();
3445
3446 if (GET_MODE (offset_rtx) != ptr_mode)
3447 {
3448 #ifdef POINTERS_EXTEND_UNSIGNED
3449 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3450 #else
3451 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3452 #endif
3453 }
3454
3455 /* A constant address in TO_RTX can have VOIDmode; we must not try
3456 to call force_reg for that case. Avoid that case. */
3457 if (GET_CODE (to_rtx) == MEM
3458 && GET_MODE (to_rtx) == BLKmode
3459 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3460 && bitsize
3461 && (bitpos % bitsize) == 0
3462 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3463 && alignment == GET_MODE_ALIGNMENT (mode1))
3464 {
3465 rtx temp = change_address (to_rtx, mode1,
3466 plus_constant (XEXP (to_rtx, 0),
3467 (bitpos /
3468 BITS_PER_UNIT)));
3469 if (GET_CODE (XEXP (temp, 0)) == REG)
3470 to_rtx = temp;
3471 else
3472 to_rtx = change_address (to_rtx, mode1,
3473 force_reg (GET_MODE (XEXP (temp, 0)),
3474 XEXP (temp, 0)));
3475 bitpos = 0;
3476 }
3477
3478 to_rtx = change_address (to_rtx, VOIDmode,
3479 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3480 force_reg (ptr_mode,
3481 offset_rtx)));
3482 }
3483
3484 if (volatilep)
3485 {
3486 if (GET_CODE (to_rtx) == MEM)
3487 {
3488 /* When the offset is zero, to_rtx is the address of the
3489 structure we are storing into, and hence may be shared.
3490 We must make a new MEM before setting the volatile bit. */
3491 if (offset == 0)
3492 to_rtx = copy_rtx (to_rtx);
3493
3494 MEM_VOLATILE_P (to_rtx) = 1;
3495 }
3496 #if 0 /* This was turned off because, when a field is volatile
3497 in an object which is not volatile, the object may be in a register,
3498 and then we would abort over here. */
3499 else
3500 abort ();
3501 #endif
3502 }
3503
3504 if (TREE_CODE (to) == COMPONENT_REF
3505 && TREE_READONLY (TREE_OPERAND (to, 1)))
3506 {
3507 if (offset == 0)
3508 to_rtx = copy_rtx (to_rtx);
3509
3510 RTX_UNCHANGING_P (to_rtx) = 1;
3511 }
3512
3513 /* Check the access. */
3514 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3515 {
3516 rtx to_addr;
3517 int size;
3518 int best_mode_size;
3519 enum machine_mode best_mode;
3520
3521 best_mode = get_best_mode (bitsize, bitpos,
3522 TYPE_ALIGN (TREE_TYPE (tem)),
3523 mode1, volatilep);
3524 if (best_mode == VOIDmode)
3525 best_mode = QImode;
3526
3527 best_mode_size = GET_MODE_BITSIZE (best_mode);
3528 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3529 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3530 size *= GET_MODE_SIZE (best_mode);
3531
3532 /* Check the access right of the pointer. */
3533 in_check_memory_usage = 1;
3534 if (size)
3535 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3536 to_addr, Pmode,
3537 GEN_INT (size), TYPE_MODE (sizetype),
3538 GEN_INT (MEMORY_USE_WO),
3539 TYPE_MODE (integer_type_node));
3540 in_check_memory_usage = 0;
3541 }
3542
3543 /* If this is a varying-length object, we must get the address of
3544 the source and do an explicit block move. */
3545 if (bitsize < 0)
3546 {
3547 unsigned int from_align;
3548 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3549 rtx inner_to_rtx
3550 = change_address (to_rtx, VOIDmode,
3551 plus_constant (XEXP (to_rtx, 0),
3552 bitpos / BITS_PER_UNIT));
3553
3554 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3555 MIN (alignment, from_align));
3556 free_temp_slots ();
3557 pop_temp_slots ();
3558 return to_rtx;
3559 }
3560 else
3561 {
3562 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3563 (want_value
3564 /* Spurious cast for HPUX compiler. */
3565 ? ((enum machine_mode)
3566 TYPE_MODE (TREE_TYPE (to)))
3567 : VOIDmode),
3568 unsignedp,
3569 alignment,
3570 int_size_in_bytes (TREE_TYPE (tem)),
3571 get_alias_set (to));
3572
3573 preserve_temp_slots (result);
3574 free_temp_slots ();
3575 pop_temp_slots ();
3576
3577 /* If the value is meaningful, convert RESULT to the proper mode.
3578 Otherwise, return nothing. */
3579 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3580 TYPE_MODE (TREE_TYPE (from)),
3581 result,
3582 TREE_UNSIGNED (TREE_TYPE (to)))
3583 : NULL_RTX);
3584 }
3585 }
3586
3587 /* If the rhs is a function call and its value is not an aggregate,
3588 call the function before we start to compute the lhs.
3589 This is needed for correct code for cases such as
3590 val = setjmp (buf) on machines where reference to val
3591 requires loading up part of an address in a separate insn.
3592
3593 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3594 since it might be a promoted variable where the zero- or sign-extension
3595 needs to be done. Handling this in the normal way is safe because no
3596 computation is done before the call. */
3597 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3598 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3599 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3600 && GET_CODE (DECL_RTL (to)) == REG))
3601 {
3602 rtx value;
3603
3604 push_temp_slots ();
3605 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3606 if (to_rtx == 0)
3607 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3608
3609 /* Handle calls that return values in multiple non-contiguous locations.
3610 The Irix 6 ABI has examples of this. */
3611 if (GET_CODE (to_rtx) == PARALLEL)
3612 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3613 TYPE_ALIGN (TREE_TYPE (from)));
3614 else if (GET_MODE (to_rtx) == BLKmode)
3615 emit_block_move (to_rtx, value, expr_size (from),
3616 TYPE_ALIGN (TREE_TYPE (from)));
3617 else
3618 {
3619 #ifdef POINTERS_EXTEND_UNSIGNED
3620 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3621 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3622 value = convert_memory_address (GET_MODE (to_rtx), value);
3623 #endif
3624 emit_move_insn (to_rtx, value);
3625 }
3626 preserve_temp_slots (to_rtx);
3627 free_temp_slots ();
3628 pop_temp_slots ();
3629 return want_value ? to_rtx : NULL_RTX;
3630 }
3631
3632 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3633 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3634
3635 if (to_rtx == 0)
3636 {
3637 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3638 if (GET_CODE (to_rtx) == MEM)
3639 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3640 }
3641
3642 /* Don't move directly into a return register. */
3643 if (TREE_CODE (to) == RESULT_DECL
3644 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3645 {
3646 rtx temp;
3647
3648 push_temp_slots ();
3649 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3650
3651 if (GET_CODE (to_rtx) == PARALLEL)
3652 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3653 TYPE_ALIGN (TREE_TYPE (from)));
3654 else
3655 emit_move_insn (to_rtx, temp);
3656
3657 preserve_temp_slots (to_rtx);
3658 free_temp_slots ();
3659 pop_temp_slots ();
3660 return want_value ? to_rtx : NULL_RTX;
3661 }
3662
3663 /* In case we are returning the contents of an object which overlaps
3664 the place the value is being stored, use a safe function when copying
3665 a value through a pointer into a structure value return block. */
3666 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3667 && current_function_returns_struct
3668 && !current_function_returns_pcc_struct)
3669 {
3670 rtx from_rtx, size;
3671
3672 push_temp_slots ();
3673 size = expr_size (from);
3674 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3675 EXPAND_MEMORY_USE_DONT);
3676
3677 /* Copy the rights of the bitmap. */
3678 if (current_function_check_memory_usage)
3679 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3680 XEXP (to_rtx, 0), Pmode,
3681 XEXP (from_rtx, 0), Pmode,
3682 convert_to_mode (TYPE_MODE (sizetype),
3683 size, TREE_UNSIGNED (sizetype)),
3684 TYPE_MODE (sizetype));
3685
3686 #ifdef TARGET_MEM_FUNCTIONS
3687 emit_library_call (memcpy_libfunc, 0,
3688 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3689 XEXP (from_rtx, 0), Pmode,
3690 convert_to_mode (TYPE_MODE (sizetype),
3691 size, TREE_UNSIGNED (sizetype)),
3692 TYPE_MODE (sizetype));
3693 #else
3694 emit_library_call (bcopy_libfunc, 0,
3695 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3696 XEXP (to_rtx, 0), Pmode,
3697 convert_to_mode (TYPE_MODE (integer_type_node),
3698 size, TREE_UNSIGNED (integer_type_node)),
3699 TYPE_MODE (integer_type_node));
3700 #endif
3701
3702 preserve_temp_slots (to_rtx);
3703 free_temp_slots ();
3704 pop_temp_slots ();
3705 return want_value ? to_rtx : NULL_RTX;
3706 }
3707
3708 /* Compute FROM and store the value in the rtx we got. */
3709
3710 push_temp_slots ();
3711 result = store_expr (from, to_rtx, want_value);
3712 preserve_temp_slots (result);
3713 free_temp_slots ();
3714 pop_temp_slots ();
3715 return want_value ? result : NULL_RTX;
3716 }
3717
3718 /* Generate code for computing expression EXP,
3719 and storing the value into TARGET.
3720 TARGET may contain a QUEUED rtx.
3721
3722 If WANT_VALUE is nonzero, return a copy of the value
3723 not in TARGET, so that we can be sure to use the proper
3724 value in a containing expression even if TARGET has something
3725 else stored in it. If possible, we copy the value through a pseudo
3726 and return that pseudo. Or, if the value is constant, we try to
3727 return the constant. In some cases, we return a pseudo
3728 copied *from* TARGET.
3729
3730 If the mode is BLKmode then we may return TARGET itself.
3731 It turns out that in BLKmode it doesn't cause a problem,
3732 because C has no operators that could combine two different
3733 assignments into the same BLKmode object with different values
3734 with no sequence point. Will other languages need this to
3735 be more thorough?
3736
3737 If WANT_VALUE is 0, we return NULL, to make sure
3738 to catch quickly any cases where the caller uses the value
3739 and fails to set WANT_VALUE. */
3740
3741 rtx
3742 store_expr (exp, target, want_value)
3743 register tree exp;
3744 register rtx target;
3745 int want_value;
3746 {
3747 register rtx temp;
3748 int dont_return_target = 0;
3749
3750 if (TREE_CODE (exp) == COMPOUND_EXPR)
3751 {
3752 /* Perform first part of compound expression, then assign from second
3753 part. */
3754 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3755 emit_queue ();
3756 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3757 }
3758 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3759 {
3760 /* For conditional expression, get safe form of the target. Then
3761 test the condition, doing the appropriate assignment on either
3762 side. This avoids the creation of unnecessary temporaries.
3763 For non-BLKmode, it is more efficient not to do this. */
3764
3765 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3766
3767 emit_queue ();
3768 target = protect_from_queue (target, 1);
3769
3770 do_pending_stack_adjust ();
3771 NO_DEFER_POP;
3772 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3773 start_cleanup_deferral ();
3774 store_expr (TREE_OPERAND (exp, 1), target, 0);
3775 end_cleanup_deferral ();
3776 emit_queue ();
3777 emit_jump_insn (gen_jump (lab2));
3778 emit_barrier ();
3779 emit_label (lab1);
3780 start_cleanup_deferral ();
3781 store_expr (TREE_OPERAND (exp, 2), target, 0);
3782 end_cleanup_deferral ();
3783 emit_queue ();
3784 emit_label (lab2);
3785 OK_DEFER_POP;
3786
3787 return want_value ? target : NULL_RTX;
3788 }
3789 else if (queued_subexp_p (target))
3790 /* If target contains a postincrement, let's not risk
3791 using it as the place to generate the rhs. */
3792 {
3793 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3794 {
3795 /* Expand EXP into a new pseudo. */
3796 temp = gen_reg_rtx (GET_MODE (target));
3797 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3798 }
3799 else
3800 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3801
3802 /* If target is volatile, ANSI requires accessing the value
3803 *from* the target, if it is accessed. So make that happen.
3804 In no case return the target itself. */
3805 if (! MEM_VOLATILE_P (target) && want_value)
3806 dont_return_target = 1;
3807 }
3808 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3809 && GET_MODE (target) != BLKmode)
3810 /* If target is in memory and caller wants value in a register instead,
3811 arrange that. Pass TARGET as target for expand_expr so that,
3812 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3813 We know expand_expr will not use the target in that case.
3814 Don't do this if TARGET is volatile because we are supposed
3815 to write it and then read it. */
3816 {
3817 temp = expand_expr (exp, target, GET_MODE (target), 0);
3818 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3819 temp = copy_to_reg (temp);
3820 dont_return_target = 1;
3821 }
3822 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3823 /* If this is a scalar in a register that is stored in a wider mode
3824 than the declared mode, compute the result into its declared mode
3825 and then convert to the wider mode. Our value is the computed
3826 expression. */
3827 {
3828 /* If we don't want a value, we can do the conversion inside EXP,
3829 which will often result in some optimizations. Do the conversion
3830 in two steps: first change the signedness, if needed, then
3831 the extend. But don't do this if the type of EXP is a subtype
3832 of something else since then the conversion might involve
3833 more than just converting modes. */
3834 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3835 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3836 {
3837 if (TREE_UNSIGNED (TREE_TYPE (exp))
3838 != SUBREG_PROMOTED_UNSIGNED_P (target))
3839 exp
3840 = convert
3841 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3842 TREE_TYPE (exp)),
3843 exp);
3844
3845 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3846 SUBREG_PROMOTED_UNSIGNED_P (target)),
3847 exp);
3848 }
3849
3850 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3851
3852 /* If TEMP is a volatile MEM and we want a result value, make
3853 the access now so it gets done only once. Likewise if
3854 it contains TARGET. */
3855 if (GET_CODE (temp) == MEM && want_value
3856 && (MEM_VOLATILE_P (temp)
3857 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3858 temp = copy_to_reg (temp);
3859
3860 /* If TEMP is a VOIDmode constant, use convert_modes to make
3861 sure that we properly convert it. */
3862 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3863 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3864 TYPE_MODE (TREE_TYPE (exp)), temp,
3865 SUBREG_PROMOTED_UNSIGNED_P (target));
3866
3867 convert_move (SUBREG_REG (target), temp,
3868 SUBREG_PROMOTED_UNSIGNED_P (target));
3869
3870 /* If we promoted a constant, change the mode back down to match
3871 target. Otherwise, the caller might get confused by a result whose
3872 mode is larger than expected. */
3873
3874 if (want_value && GET_MODE (temp) != GET_MODE (target)
3875 && GET_MODE (temp) != VOIDmode)
3876 {
3877 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3878 SUBREG_PROMOTED_VAR_P (temp) = 1;
3879 SUBREG_PROMOTED_UNSIGNED_P (temp)
3880 = SUBREG_PROMOTED_UNSIGNED_P (target);
3881 }
3882
3883 return want_value ? temp : NULL_RTX;
3884 }
3885 else
3886 {
3887 temp = expand_expr (exp, target, GET_MODE (target), 0);
3888 /* Return TARGET if it's a specified hardware register.
3889 If TARGET is a volatile mem ref, either return TARGET
3890 or return a reg copied *from* TARGET; ANSI requires this.
3891
3892 Otherwise, if TEMP is not TARGET, return TEMP
3893 if it is constant (for efficiency),
3894 or if we really want the correct value. */
3895 if (!(target && GET_CODE (target) == REG
3896 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3897 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3898 && ! rtx_equal_p (temp, target)
3899 && (CONSTANT_P (temp) || want_value))
3900 dont_return_target = 1;
3901 }
3902
3903 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3904 the same as that of TARGET, adjust the constant. This is needed, for
3905 example, in case it is a CONST_DOUBLE and we want only a word-sized
3906 value. */
3907 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3908 && TREE_CODE (exp) != ERROR_MARK
3909 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3910 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3911 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3912
3913 if (current_function_check_memory_usage
3914 && GET_CODE (target) == MEM
3915 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3916 {
3917 in_check_memory_usage = 1;
3918 if (GET_CODE (temp) == MEM)
3919 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3920 XEXP (target, 0), Pmode,
3921 XEXP (temp, 0), Pmode,
3922 expr_size (exp), TYPE_MODE (sizetype));
3923 else
3924 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3925 XEXP (target, 0), Pmode,
3926 expr_size (exp), TYPE_MODE (sizetype),
3927 GEN_INT (MEMORY_USE_WO),
3928 TYPE_MODE (integer_type_node));
3929 in_check_memory_usage = 0;
3930 }
3931
3932 /* If value was not generated in the target, store it there.
3933 Convert the value to TARGET's type first if necessary. */
3934 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3935 one or both of them are volatile memory refs, we have to distinguish
3936 two cases:
3937 - expand_expr has used TARGET. In this case, we must not generate
3938 another copy. This can be detected by TARGET being equal according
3939 to == .
3940 - expand_expr has not used TARGET - that means that the source just
3941 happens to have the same RTX form. Since temp will have been created
3942 by expand_expr, it will compare unequal according to == .
3943 We must generate a copy in this case, to reach the correct number
3944 of volatile memory references. */
3945
3946 if ((! rtx_equal_p (temp, target)
3947 || (temp != target && (side_effects_p (temp)
3948 || side_effects_p (target))))
3949 && TREE_CODE (exp) != ERROR_MARK)
3950 {
3951 target = protect_from_queue (target, 1);
3952 if (GET_MODE (temp) != GET_MODE (target)
3953 && GET_MODE (temp) != VOIDmode)
3954 {
3955 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3956 if (dont_return_target)
3957 {
3958 /* In this case, we will return TEMP,
3959 so make sure it has the proper mode.
3960 But don't forget to store the value into TARGET. */
3961 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3962 emit_move_insn (target, temp);
3963 }
3964 else
3965 convert_move (target, temp, unsignedp);
3966 }
3967
3968 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3969 {
3970 /* Handle copying a string constant into an array.
3971 The string constant may be shorter than the array.
3972 So copy just the string's actual length, and clear the rest. */
3973 rtx size;
3974 rtx addr;
3975
3976 /* Get the size of the data type of the string,
3977 which is actually the size of the target. */
3978 size = expr_size (exp);
3979 if (GET_CODE (size) == CONST_INT
3980 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3981 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3982 else
3983 {
3984 /* Compute the size of the data to copy from the string. */
3985 tree copy_size
3986 = size_binop (MIN_EXPR,
3987 make_tree (sizetype, size),
3988 size_int (TREE_STRING_LENGTH (exp)));
3989 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3990 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3991 VOIDmode, 0);
3992 rtx label = 0;
3993
3994 /* Copy that much. */
3995 emit_block_move (target, temp, copy_size_rtx,
3996 TYPE_ALIGN (TREE_TYPE (exp)));
3997
3998 /* Figure out how much is left in TARGET that we have to clear.
3999 Do all calculations in ptr_mode. */
4000
4001 addr = XEXP (target, 0);
4002 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4003
4004 if (GET_CODE (copy_size_rtx) == CONST_INT)
4005 {
4006 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4007 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4008 align = MIN (align, (BITS_PER_UNIT
4009 * (INTVAL (copy_size_rtx)
4010 & - INTVAL (copy_size_rtx))));
4011 }
4012 else
4013 {
4014 addr = force_reg (ptr_mode, addr);
4015 addr = expand_binop (ptr_mode, add_optab, addr,
4016 copy_size_rtx, NULL_RTX, 0,
4017 OPTAB_LIB_WIDEN);
4018
4019 size = expand_binop (ptr_mode, sub_optab, size,
4020 copy_size_rtx, NULL_RTX, 0,
4021 OPTAB_LIB_WIDEN);
4022
4023 align = BITS_PER_UNIT;
4024 label = gen_label_rtx ();
4025 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4026 GET_MODE (size), 0, 0, label);
4027 }
4028 align = MIN (align, expr_align (copy_size));
4029
4030 if (size != const0_rtx)
4031 {
4032 rtx dest = gen_rtx_MEM (BLKmode, addr);
4033
4034 MEM_COPY_ATTRIBUTES (dest, target);
4035
4036 /* Be sure we can write on ADDR. */
4037 in_check_memory_usage = 1;
4038 if (current_function_check_memory_usage)
4039 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4040 addr, Pmode,
4041 size, TYPE_MODE (sizetype),
4042 GEN_INT (MEMORY_USE_WO),
4043 TYPE_MODE (integer_type_node));
4044 in_check_memory_usage = 0;
4045 clear_storage (dest, size, align);
4046 }
4047
4048 if (label)
4049 emit_label (label);
4050 }
4051 }
4052 /* Handle calls that return values in multiple non-contiguous locations.
4053 The Irix 6 ABI has examples of this. */
4054 else if (GET_CODE (target) == PARALLEL)
4055 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4056 TYPE_ALIGN (TREE_TYPE (exp)));
4057 else if (GET_MODE (temp) == BLKmode)
4058 emit_block_move (target, temp, expr_size (exp),
4059 TYPE_ALIGN (TREE_TYPE (exp)));
4060 else
4061 emit_move_insn (target, temp);
4062 }
4063
4064 /* If we don't want a value, return NULL_RTX. */
4065 if (! want_value)
4066 return NULL_RTX;
4067
4068 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4069 ??? The latter test doesn't seem to make sense. */
4070 else if (dont_return_target && GET_CODE (temp) != MEM)
4071 return temp;
4072
4073 /* Return TARGET itself if it is a hard register. */
4074 else if (want_value && GET_MODE (target) != BLKmode
4075 && ! (GET_CODE (target) == REG
4076 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4077 return copy_to_reg (target);
4078
4079 else
4080 return target;
4081 }
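
/* Illustrative sketch (not compiled, not part of the compiler): the
   STRING_CST case above copies only the string's actual length into the
   target and then clears whatever bytes remain, so that a short string
   initializer still defines every byte of the array.  The same idea in
   plain standard C, with the hypothetical names `dest', `dest_size',
   `str' and `str_len':  */
#if 0
#include <string.h>

static void
copy_string_then_clear (char *dest, size_t dest_size,
			const char *str, size_t str_len)
{
  /* Copy no more than the target can hold (the MIN_EXPR above).  */
  size_t copy = str_len < dest_size ? str_len : dest_size;

  memcpy (dest, str, copy);

  /* Clear the tail of the target, if any (the clear_storage call).  */
  if (copy < dest_size)
    memset (dest + copy, 0, dest_size - copy);
}
#endif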
4082 \f
4083 /* Return 1 if EXP just contains zeros. */
4084
4085 static int
4086 is_zeros_p (exp)
4087 tree exp;
4088 {
4089 tree elt;
4090
4091 switch (TREE_CODE (exp))
4092 {
4093 case CONVERT_EXPR:
4094 case NOP_EXPR:
4095 case NON_LVALUE_EXPR:
4096 return is_zeros_p (TREE_OPERAND (exp, 0));
4097
4098 case INTEGER_CST:
4099 return integer_zerop (exp);
4100
4101 case COMPLEX_CST:
4102 return
4103 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4104
4105 case REAL_CST:
4106 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4107
4108 case CONSTRUCTOR:
4109 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4110 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4111 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4112 if (! is_zeros_p (TREE_VALUE (elt)))
4113 return 0;
4114
4115 return 1;
4116
4117 default:
4118 return 0;
4119 }
4120 }
4121
4122 /* Return 1 if EXP contains mostly (3/4) zeros. */
4123
4124 static int
4125 mostly_zeros_p (exp)
4126 tree exp;
4127 {
4128 if (TREE_CODE (exp) == CONSTRUCTOR)
4129 {
4130 int elts = 0, zeros = 0;
4131 tree elt = CONSTRUCTOR_ELTS (exp);
4132 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4133 {
4134 /* If there are no ranges of true bits, it is all zero. */
4135 return elt == NULL_TREE;
4136 }
4137 for (; elt; elt = TREE_CHAIN (elt))
4138 {
4139 /* We do not handle the case where the index is a RANGE_EXPR,
4140 so the statistic will be somewhat inaccurate.
4141 We do make a more accurate count in store_constructor itself,
4142 so since this function is only used for nested array elements,
4143 this should be close enough. */
4144 if (mostly_zeros_p (TREE_VALUE (elt)))
4145 zeros++;
4146 elts++;
4147 }
4148
4149 return 4 * zeros >= 3 * elts;
4150 }
4151
4152 return is_zeros_p (exp);
4153 }
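
/* Illustrative sketch (not compiled): the test above is an integer
   rearrangement of zeros / elts >= 3/4 that avoids division and
   rounding.  Standalone, with hypothetical parameter names:  */
#if 0
static int
at_least_three_quarters_zero (int zeros, int elts)
{
  /* zeros / elts >= 3/4  <=>  4 * zeros >= 3 * elts,
     assuming ELTS is nonnegative.  */
  return 4 * zeros >= 3 * elts;
}
#endif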
4154 \f
4155 /* Helper function for store_constructor.
4156 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4157 TYPE is the type of the CONSTRUCTOR, not the element type.
4158 ALIGN and CLEARED are as for store_constructor.
4159
4160 This provides a recursive shortcut back to store_constructor when it isn't
4161 necessary to go through store_field. This is so that we can pass through
4162 the cleared field to let store_constructor know that we may not have to
4163 clear a substructure if the outer structure has already been cleared. */
4164
4165 static void
4166 store_constructor_field (target, bitsize, bitpos,
4167 mode, exp, type, align, cleared)
4168 rtx target;
4169 unsigned HOST_WIDE_INT bitsize;
4170 HOST_WIDE_INT bitpos;
4171 enum machine_mode mode;
4172 tree exp, type;
4173 unsigned int align;
4174 int cleared;
4175 {
4176 if (TREE_CODE (exp) == CONSTRUCTOR
4177 && bitpos % BITS_PER_UNIT == 0
4178 /* If we have a non-zero bitpos for a register target, then we just
4179 let store_field do the bitfield handling. This is unlikely to
4180 generate unnecessary clear instructions anyways. */
4181 && (bitpos == 0 || GET_CODE (target) == MEM))
4182 {
4183 if (bitpos != 0)
4184 target
4185 = change_address (target,
4186 GET_MODE (target) == BLKmode
4187 || 0 != (bitpos
4188 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4189 ? BLKmode : VOIDmode,
4190 plus_constant (XEXP (target, 0),
4191 bitpos / BITS_PER_UNIT));
4192 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4193 }
4194 else
4195 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4196 int_size_in_bytes (type), 0);
4197 }
4198
4199 /* Store the value of constructor EXP into the rtx TARGET.
4200 TARGET is either a REG or a MEM.
4201 ALIGN is the maximum known alignment for TARGET.
4202 CLEARED is true if TARGET is known to have been zero'd.
4203 SIZE is the number of bytes of TARGET we are allowed to modify: this
4204 may not be the same as the size of EXP if we are assigning to a field
4205 which has been packed to exclude padding bits. */
4206
4207 static void
4208 store_constructor (exp, target, align, cleared, size)
4209 tree exp;
4210 rtx target;
4211 unsigned int align;
4212 int cleared;
4213 HOST_WIDE_INT size;
4214 {
4215 tree type = TREE_TYPE (exp);
4216 #ifdef WORD_REGISTER_OPERATIONS
4217 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4218 #endif
4219
4220 /* We know our target cannot conflict, since safe_from_p has been called. */
4221 #if 0
4222 /* Don't try copying piece by piece into a hard register
4223 since that is vulnerable to being clobbered by EXP.
4224 Instead, construct in a pseudo register and then copy it all. */
4225 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4226 {
4227 rtx temp = gen_reg_rtx (GET_MODE (target));
4228 store_constructor (exp, temp, align, cleared, size);
4229 emit_move_insn (target, temp);
4230 return;
4231 }
4232 #endif
4233
4234 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4235 || TREE_CODE (type) == QUAL_UNION_TYPE)
4236 {
4237 register tree elt;
4238
4239 /* Inform later passes that the whole union value is dead. */
4240 if ((TREE_CODE (type) == UNION_TYPE
4241 || TREE_CODE (type) == QUAL_UNION_TYPE)
4242 && ! cleared)
4243 {
4244 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4245
4246 /* If the constructor is empty, clear the union. */
4247 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4248 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4249 }
4250
4251 /* If we are building a static constructor into a register,
4252 set the initial value as zero so we can fold the value into
4253 a constant. But if more than one register is involved,
4254 this probably loses. */
4255 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4256 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4257 {
4258 if (! cleared)
4259 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4260
4261 cleared = 1;
4262 }
4263
4264 /* If the constructor has fewer fields than the structure
4265 or if we are initializing the structure to mostly zeros,
4266 clear the whole structure first. */
4267 else if (size > 0
4268 && ((list_length (CONSTRUCTOR_ELTS (exp))
4269 != fields_length (type))
4270 || mostly_zeros_p (exp)))
4271 {
4272 if (! cleared)
4273 clear_storage (target, GEN_INT (size), align);
4274
4275 cleared = 1;
4276 }
4277 else if (! cleared)
4278 /* Inform later passes that the old value is dead. */
4279 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4280
4281 /* Store each element of the constructor into
4282 the corresponding field of TARGET. */
4283
4284 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4285 {
4286 register tree field = TREE_PURPOSE (elt);
4287 #ifdef WORD_REGISTER_OPERATIONS
4288 tree value = TREE_VALUE (elt);
4289 #endif
4290 register enum machine_mode mode;
4291 HOST_WIDE_INT bitsize;
4292 HOST_WIDE_INT bitpos = 0;
4293 int unsignedp;
4294 tree offset;
4295 rtx to_rtx = target;
4296
4297 /* Just ignore missing fields.
4298 We cleared the whole structure, above,
4299 if any fields are missing. */
4300 if (field == 0)
4301 continue;
4302
4303 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4304 continue;
4305
4306 if (host_integerp (DECL_SIZE (field), 1))
4307 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4308 else
4309 bitsize = -1;
4310
4311 unsignedp = TREE_UNSIGNED (field);
4312 mode = DECL_MODE (field);
4313 if (DECL_BIT_FIELD (field))
4314 mode = VOIDmode;
4315
4316 offset = DECL_FIELD_OFFSET (field);
4317 if (host_integerp (offset, 0)
4318 && host_integerp (bit_position (field), 0))
4319 {
4320 bitpos = int_bit_position (field);
4321 offset = 0;
4322 }
4323 else
4324 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4325
4326 if (offset)
4327 {
4328 rtx offset_rtx;
4329
4330 if (contains_placeholder_p (offset))
4331 offset = build (WITH_RECORD_EXPR, sizetype,
4332 offset, make_tree (TREE_TYPE (exp), target));
4333
4334 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4335 if (GET_CODE (to_rtx) != MEM)
4336 abort ();
4337
4338 if (GET_MODE (offset_rtx) != ptr_mode)
4339 {
4340 #ifdef POINTERS_EXTEND_UNSIGNED
4341 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4342 #else
4343 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4344 #endif
4345 }
4346
4347 to_rtx
4348 = change_address (to_rtx, VOIDmode,
4349 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4350 force_reg (ptr_mode,
4351 offset_rtx)));
4352 align = DECL_OFFSET_ALIGN (field);
4353 }
4354
4355 if (TREE_READONLY (field))
4356 {
4357 if (GET_CODE (to_rtx) == MEM)
4358 to_rtx = copy_rtx (to_rtx);
4359
4360 RTX_UNCHANGING_P (to_rtx) = 1;
4361 }
4362
4363 #ifdef WORD_REGISTER_OPERATIONS
4364 /* If this initializes a field that is smaller than a word, at the
4365 start of a word, try to widen it to a full word.
4366 This special case allows us to output C++ member function
4367 initializations in a form that the optimizers can understand. */
4368 if (GET_CODE (target) == REG
4369 && bitsize < BITS_PER_WORD
4370 && bitpos % BITS_PER_WORD == 0
4371 && GET_MODE_CLASS (mode) == MODE_INT
4372 && TREE_CODE (value) == INTEGER_CST
4373 && exp_size >= 0
4374 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4375 {
4376 tree type = TREE_TYPE (value);
4377 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4378 {
4379 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4380 value = convert (type, value);
4381 }
4382 if (BYTES_BIG_ENDIAN)
4383 value
4384 = fold (build (LSHIFT_EXPR, type, value,
4385 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4386 bitsize = BITS_PER_WORD;
4387 mode = word_mode;
4388 }
4389 #endif
4390 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4391 TREE_VALUE (elt), type, align, cleared);
4392 }
4393 }
4394 else if (TREE_CODE (type) == ARRAY_TYPE)
4395 {
4396 register tree elt;
4397 register int i;
4398 int need_to_clear;
4399 tree domain = TYPE_DOMAIN (type);
4400 tree elttype = TREE_TYPE (type);
4401 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4402 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4403 HOST_WIDE_INT minelt;
4404 HOST_WIDE_INT maxelt;
4405
4406 /* If we have constant bounds for the range of the type, get them. */
4407 if (const_bounds_p)
4408 {
4409 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4410 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4411 }
4412
4413 /* If the constructor has fewer elements than the array,
4414 clear the whole array first. Similarly if this is a
4415 static constructor of a non-BLKmode object. */
4416 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4417 need_to_clear = 1;
4418 else
4419 {
4420 HOST_WIDE_INT count = 0, zero_count = 0;
4421 need_to_clear = ! const_bounds_p;
4422
4423 /* This loop is a more accurate version of the loop in
4424 mostly_zeros_p (it handles RANGE_EXPR in an index).
4425 It is also needed to check for missing elements. */
4426 for (elt = CONSTRUCTOR_ELTS (exp);
4427 elt != NULL_TREE && ! need_to_clear;
4428 elt = TREE_CHAIN (elt))
4429 {
4430 tree index = TREE_PURPOSE (elt);
4431 HOST_WIDE_INT this_node_count;
4432
4433 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4434 {
4435 tree lo_index = TREE_OPERAND (index, 0);
4436 tree hi_index = TREE_OPERAND (index, 1);
4437
4438 if (! host_integerp (lo_index, 1)
4439 || ! host_integerp (hi_index, 1))
4440 {
4441 need_to_clear = 1;
4442 break;
4443 }
4444
4445 this_node_count = (tree_low_cst (hi_index, 1)
4446 - tree_low_cst (lo_index, 1) + 1);
4447 }
4448 else
4449 this_node_count = 1;
4450
4451 count += this_node_count;
4452 if (mostly_zeros_p (TREE_VALUE (elt)))
4453 zero_count += this_node_count;
4454 }
4455
4456 /* Clear the entire array first if there are any missing elements,
4457 or if the incidence of zero elements is >= 75%. */
4458 if (! need_to_clear
4459 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4460 need_to_clear = 1;
4461 }
4462
4463 if (need_to_clear && size > 0)
4464 {
4465 if (! cleared)
4466 clear_storage (target, GEN_INT (size), align);
4467 cleared = 1;
4468 }
4469 else
4470 /* Inform later passes that the old value is dead. */
4471 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4472
4473 /* Store each element of the constructor into
4474 the corresponding element of TARGET, determined
4475 by counting the elements. */
4476 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4477 elt;
4478 elt = TREE_CHAIN (elt), i++)
4479 {
4480 register enum machine_mode mode;
4481 HOST_WIDE_INT bitsize;
4482 HOST_WIDE_INT bitpos;
4483 int unsignedp;
4484 tree value = TREE_VALUE (elt);
4485 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4486 tree index = TREE_PURPOSE (elt);
4487 rtx xtarget = target;
4488
4489 if (cleared && is_zeros_p (value))
4490 continue;
4491
4492 unsignedp = TREE_UNSIGNED (elttype);
4493 mode = TYPE_MODE (elttype);
4494 if (mode == BLKmode)
4495 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4496 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4497 : -1);
4498 else
4499 bitsize = GET_MODE_BITSIZE (mode);
4500
4501 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4502 {
4503 tree lo_index = TREE_OPERAND (index, 0);
4504 tree hi_index = TREE_OPERAND (index, 1);
4505 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4506 struct nesting *loop;
4507 HOST_WIDE_INT lo, hi, count;
4508 tree position;
4509
4510 /* If the range is constant and "small", unroll the loop. */
4511 if (const_bounds_p
4512 && host_integerp (lo_index, 0)
4513 && host_integerp (hi_index, 0)
4514 && (lo = tree_low_cst (lo_index, 0),
4515 hi = tree_low_cst (hi_index, 0),
4516 count = hi - lo + 1,
4517 (GET_CODE (target) != MEM
4518 || count <= 2
4519 || (host_integerp (TYPE_SIZE (elttype), 1)
4520 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4521 <= 40 * 8)))))
4522 {
4523 lo -= minelt; hi -= minelt;
4524 for (; lo <= hi; lo++)
4525 {
4526 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4527 store_constructor_field (target, bitsize, bitpos, mode,
4528 value, type, align, cleared);
4529 }
4530 }
4531 else
4532 {
4533 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4534 loop_top = gen_label_rtx ();
4535 loop_end = gen_label_rtx ();
4536
4537 unsignedp = TREE_UNSIGNED (domain);
4538
4539 index = build_decl (VAR_DECL, NULL_TREE, domain);
4540
4541 DECL_RTL (index) = index_r
4542 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4543 &unsignedp, 0));
4544
4545 if (TREE_CODE (value) == SAVE_EXPR
4546 && SAVE_EXPR_RTL (value) == 0)
4547 {
4548 /* Make sure value gets expanded once before the
4549 loop. */
4550 expand_expr (value, const0_rtx, VOIDmode, 0);
4551 emit_queue ();
4552 }
4553 store_expr (lo_index, index_r, 0);
4554 loop = expand_start_loop (0);
4555
4556 /* Assign value to element index. */
4557 position
4558 = convert (ssizetype,
4559 fold (build (MINUS_EXPR, TREE_TYPE (index),
4560 index, TYPE_MIN_VALUE (domain))));
4561 position = size_binop (MULT_EXPR, position,
4562 convert (ssizetype,
4563 TYPE_SIZE_UNIT (elttype)));
4564
4565 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4566 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4567 xtarget = change_address (target, mode, addr);
4568 if (TREE_CODE (value) == CONSTRUCTOR)
4569 store_constructor (value, xtarget, align, cleared,
4570 bitsize / BITS_PER_UNIT);
4571 else
4572 store_expr (value, xtarget, 0);
4573
4574 expand_exit_loop_if_false (loop,
4575 build (LT_EXPR, integer_type_node,
4576 index, hi_index));
4577
4578 expand_increment (build (PREINCREMENT_EXPR,
4579 TREE_TYPE (index),
4580 index, integer_one_node), 0, 0);
4581 expand_end_loop ();
4582 emit_label (loop_end);
4583 }
4584 }
4585 else if ((index != 0 && ! host_integerp (index, 0))
4586 || ! host_integerp (TYPE_SIZE (elttype), 1))
4587 {
4588 rtx pos_rtx, addr;
4589 tree position;
4590
4591 if (index == 0)
4592 index = ssize_int (i);
4593
4594 if (minelt)
4595 index = convert (ssizetype,
4596 fold (build (MINUS_EXPR, TREE_TYPE (index),
4597 index, TYPE_MIN_VALUE (domain))));
4598
4599 position = size_binop (MULT_EXPR, index,
4600 convert (ssizetype,
4601 TYPE_SIZE_UNIT (elttype)));
4602 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4603 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4604 xtarget = change_address (target, mode, addr);
4605 store_expr (value, xtarget, 0);
4606 }
4607 else
4608 {
4609 if (index != 0)
4610 bitpos = ((tree_low_cst (index, 0) - minelt)
4611 * tree_low_cst (TYPE_SIZE (elttype), 1));
4612 else
4613 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4614
4615 store_constructor_field (target, bitsize, bitpos, mode, value,
4616 type, align, cleared);
4617 }
4618 }
4619 }
4620
4621 /* Set constructor assignments. */
4622 else if (TREE_CODE (type) == SET_TYPE)
4623 {
4624 tree elt = CONSTRUCTOR_ELTS (exp);
4625 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4626 tree domain = TYPE_DOMAIN (type);
4627 tree domain_min, domain_max, bitlength;
4628
4629 /* The default implementation strategy is to extract the constant
4630 parts of the constructor, use that to initialize the target,
4631 and then "or" in whatever non-constant ranges we need in addition.
4632
4633 If a large set is all zero or all ones, it is
4634 probably better to set it using memset (if available) or bzero.
4635 Also, if a large set has just a single range, it may also be
4636 better to first clear the set (using bzero/memset),
4637 and then set the bits we want. */
4638
4639 /* Check for all zeros. */
4640 if (elt == NULL_TREE && size > 0)
4641 {
4642 if (!cleared)
4643 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4644 return;
4645 }
4646
4647 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4648 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4649 bitlength = size_binop (PLUS_EXPR,
4650 size_diffop (domain_max, domain_min),
4651 ssize_int (1));
4652
4653 nbits = tree_low_cst (bitlength, 1);
4654
4655 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4656 are "complicated" (more than one range), initialize (the
4657 constant parts) by copying from a constant. */
4658 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4659 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4660 {
4661 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4662 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4663 char *bit_buffer = (char *) alloca (nbits);
4664 HOST_WIDE_INT word = 0;
4665 unsigned int bit_pos = 0;
4666 unsigned int ibit = 0;
4667 unsigned int offset = 0; /* In bytes from beginning of set. */
4668
4669 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4670 for (;;)
4671 {
4672 if (bit_buffer[ibit])
4673 {
4674 if (BYTES_BIG_ENDIAN)
4675 word |= (1 << (set_word_size - 1 - bit_pos));
4676 else
4677 word |= 1 << bit_pos;
4678 }
4679
4680 bit_pos++; ibit++;
4681 if (bit_pos >= set_word_size || ibit == nbits)
4682 {
4683 if (word != 0 || ! cleared)
4684 {
4685 rtx datum = GEN_INT (word);
4686 rtx to_rtx;
4687
4688 /* The assumption here is that it is safe to use
4689 XEXP if the set is multi-word, but not if
4690 it's single-word. */
4691 if (GET_CODE (target) == MEM)
4692 {
4693 to_rtx = plus_constant (XEXP (target, 0), offset);
4694 to_rtx = change_address (target, mode, to_rtx);
4695 }
4696 else if (offset == 0)
4697 to_rtx = target;
4698 else
4699 abort ();
4700 emit_move_insn (to_rtx, datum);
4701 }
4702
4703 if (ibit == nbits)
4704 break;
4705 word = 0;
4706 bit_pos = 0;
4707 offset += set_word_size / BITS_PER_UNIT;
4708 }
4709 }
4710 }
4711 else if (!cleared)
4712 /* Don't bother clearing storage if the set is all ones. */
4713 if (TREE_CHAIN (elt) != NULL_TREE
4714 || (TREE_PURPOSE (elt) == NULL_TREE
4715 ? nbits != 1
4716 : ( ! host_integerp (TREE_VALUE (elt), 0)
4717 || ! host_integerp (TREE_PURPOSE (elt), 0)
4718 || (tree_low_cst (TREE_VALUE (elt), 0)
4719 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4720 != (HOST_WIDE_INT) nbits))))
4721 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4722
4723 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4724 {
4725 /* Start of range of element or NULL. */
4726 tree startbit = TREE_PURPOSE (elt);
4727 /* End of range of element, or element value. */
4728 tree endbit = TREE_VALUE (elt);
4729 #ifdef TARGET_MEM_FUNCTIONS
4730 HOST_WIDE_INT startb, endb;
4731 #endif
4732 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4733
4734 bitlength_rtx = expand_expr (bitlength,
4735 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4736
4737 /* Handle non-range tuple element like [ expr ]. */
4738 if (startbit == NULL_TREE)
4739 {
4740 startbit = save_expr (endbit);
4741 endbit = startbit;
4742 }
4743
4744 startbit = convert (sizetype, startbit);
4745 endbit = convert (sizetype, endbit);
4746 if (! integer_zerop (domain_min))
4747 {
4748 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4749 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4750 }
4751 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4752 EXPAND_CONST_ADDRESS);
4753 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4754 EXPAND_CONST_ADDRESS);
4755
4756 if (REG_P (target))
4757 {
4758 targetx = assign_stack_temp (GET_MODE (target),
4759 GET_MODE_SIZE (GET_MODE (target)),
4760 0);
4761 emit_move_insn (targetx, target);
4762 }
4763
4764 else if (GET_CODE (target) == MEM)
4765 targetx = target;
4766 else
4767 abort ();
4768
4769 #ifdef TARGET_MEM_FUNCTIONS
4770 /* Optimization: If startbit and endbit are
4771 constants divisible by BITS_PER_UNIT,
4772 call memset instead. */
4773 if (TREE_CODE (startbit) == INTEGER_CST
4774 && TREE_CODE (endbit) == INTEGER_CST
4775 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4776 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4777 {
4778 emit_library_call (memset_libfunc, 0,
4779 VOIDmode, 3,
4780 plus_constant (XEXP (targetx, 0),
4781 startb / BITS_PER_UNIT),
4782 Pmode,
4783 constm1_rtx, TYPE_MODE (integer_type_node),
4784 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4785 TYPE_MODE (sizetype));
4786 }
4787 else
4788 #endif
4789 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4790 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4791 bitlength_rtx, TYPE_MODE (sizetype),
4792 startbit_rtx, TYPE_MODE (sizetype),
4793 endbit_rtx, TYPE_MODE (sizetype));
4794
4795 if (REG_P (target))
4796 emit_move_insn (target, targetx);
4797 }
4798 }
4799
4800 else
4801 abort ();
4802 }
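
/* Illustrative sketch (not compiled): the SET_TYPE case above packs a
   buffer of 0/1 flags into set words and stores one word at a time,
   numbering bits from the most significant end on big-endian targets.
   A plain C version of that packing, with hypothetical names, a word
   size fixed at 32 bits, and the word always flushed (the real loop
   skips the store when the word is zero and the target was cleared):  */
#if 0
static void
pack_bits_into_words (const char *bit_buffer, unsigned int nbits,
		      unsigned long *words, int bytes_big_endian)
{
  unsigned int set_word_size = 32;
  unsigned long word = 0;
  unsigned int bit_pos = 0, ibit = 0, iword = 0;

  if (nbits == 0)
    return;

  for (;;)
    {
      if (bit_buffer[ibit])
	word |= (bytes_big_endian
		 ? 1UL << (set_word_size - 1 - bit_pos)
		 : 1UL << bit_pos);

      bit_pos++, ibit++;
      if (bit_pos >= set_word_size || ibit == nbits)
	{
	  /* A word is full, or we ran out of bits: flush it.  */
	  words[iword++] = word;
	  if (ibit == nbits)
	    break;
	  word = 0;
	  bit_pos = 0;
	}
    }
}
#endif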
4803
4804 /* Store the value of EXP (an expression tree)
4805 into a subfield of TARGET which has mode MODE and occupies
4806 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4807 If MODE is VOIDmode, it means that we are storing into a bit-field.
4808
4809 If VALUE_MODE is VOIDmode, return nothing in particular.
4810 UNSIGNEDP is not used in this case.
4811
4812 Otherwise, return an rtx for the value stored. This rtx
4813 has mode VALUE_MODE if that is convenient to do.
4814 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4815
4816 ALIGN is the alignment that TARGET is known to have.
4817 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4818
4819 ALIAS_SET is the alias set for the destination. This value will
4820 (in general) be different from that for TARGET, since TARGET is a
4821 reference to the containing structure. */
4822
4823 static rtx
4824 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4825 unsignedp, align, total_size, alias_set)
4826 rtx target;
4827 HOST_WIDE_INT bitsize;
4828 HOST_WIDE_INT bitpos;
4829 enum machine_mode mode;
4830 tree exp;
4831 enum machine_mode value_mode;
4832 int unsignedp;
4833 unsigned int align;
4834 HOST_WIDE_INT total_size;
4835 int alias_set;
4836 {
4837 HOST_WIDE_INT width_mask = 0;
4838
4839 if (TREE_CODE (exp) == ERROR_MARK)
4840 return const0_rtx;
4841
4842 if (bitsize < HOST_BITS_PER_WIDE_INT)
4843 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4844
4845 /* If we are storing into an unaligned field of an aligned union that is
4846 in a register, we may have the mode of TARGET being an integer mode but
4847 MODE == BLKmode. In that case, get an aligned object whose size and
4848 alignment are the same as TARGET and store TARGET into it (we can avoid
4849 the store if the field being stored is the entire width of TARGET). Then
4850 call ourselves recursively to store the field into a BLKmode version of
4851 that object. Finally, load from the object into TARGET. This is not
4852 very efficient in general, but should only be slightly more expensive
4853 than the otherwise-required unaligned accesses. Perhaps this can be
4854 cleaned up later. */
4855
4856 if (mode == BLKmode
4857 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4858 {
4859 rtx object = assign_stack_temp (GET_MODE (target),
4860 GET_MODE_SIZE (GET_MODE (target)), 0);
4861 rtx blk_object = copy_rtx (object);
4862
4863 MEM_SET_IN_STRUCT_P (object, 1);
4864 MEM_SET_IN_STRUCT_P (blk_object, 1);
4865 PUT_MODE (blk_object, BLKmode);
4866
4867 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4868 emit_move_insn (object, target);
4869
4870 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4871 align, total_size, alias_set);
4872
4873 /* Even though we aren't returning target, we need to
4874 give it the updated value. */
4875 emit_move_insn (target, object);
4876
4877 return blk_object;
4878 }
4879
4880 if (GET_CODE (target) == CONCAT)
4881 {
4882 /* We're storing into a struct containing a single __complex. */
4883
4884 if (bitpos != 0)
4885 abort ();
4886 return store_expr (exp, target, 0);
4887 }
4888
4889 /* If the structure is in a register or if the component
4890 is a bit field, we cannot use addressing to access it.
4891 Use bit-field techniques or SUBREG to store in it. */
4892
4893 if (mode == VOIDmode
4894 || (mode != BLKmode && ! direct_store[(int) mode]
4895 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4896 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4897 || GET_CODE (target) == REG
4898 || GET_CODE (target) == SUBREG
4899 /* If the field isn't aligned enough to store as an ordinary memref,
4900 store it as a bit field. */
4901 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4902 && (align < GET_MODE_ALIGNMENT (mode)
4903 || bitpos % GET_MODE_ALIGNMENT (mode)))
4904 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4905 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4906 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4907 /* If the RHS and field are a constant size and the size of the
4908 RHS isn't the same size as the bitfield, we must use bitfield
4909 operations. */
4910 || (bitsize >= 0
4911 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4912 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4913 {
4914 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4915
4916 /* If BITSIZE is narrower than the size of the type of EXP
4917 we will be narrowing TEMP. Normally, what's wanted are the
4918 low-order bits. However, if EXP's type is a record and this is
4919 big-endian machine, we want the upper BITSIZE bits. */
4920 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4921 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4922 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4923 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4924 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4925 - bitsize),
4926 temp, 1);
4927
4928 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4929 MODE. */
4930 if (mode != VOIDmode && mode != BLKmode
4931 && mode != TYPE_MODE (TREE_TYPE (exp)))
4932 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4933
4934 /* If the modes of TARGET and TEMP are both BLKmode, both
4935 must be in memory and BITPOS must be aligned on a byte
4936 boundary. If so, we simply do a block copy. */
4937 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4938 {
4939 unsigned int exp_align = expr_align (exp);
4940
4941 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4942 || bitpos % BITS_PER_UNIT != 0)
4943 abort ();
4944
4945 target = change_address (target, VOIDmode,
4946 plus_constant (XEXP (target, 0),
4947 bitpos / BITS_PER_UNIT));
4948
4949 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4950 align = MIN (exp_align, align);
4951
4952 /* Find an alignment that is consistent with the bit position. */
4953 while ((bitpos % align) != 0)
4954 align >>= 1;
4955
4956 emit_block_move (target, temp,
4957 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4958 / BITS_PER_UNIT),
4959 align);
4960
4961 return value_mode == VOIDmode ? const0_rtx : target;
4962 }
4963
4964 /* Store the value in the bitfield. */
4965 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4966 if (value_mode != VOIDmode)
4967 {
4968 /* The caller wants an rtx for the value. */
4969 /* If possible, avoid refetching from the bitfield itself. */
4970 if (width_mask != 0
4971 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4972 {
4973 tree count;
4974 enum machine_mode tmode;
4975
4976 if (unsignedp)
4977 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4978 tmode = GET_MODE (temp);
4979 if (tmode == VOIDmode)
4980 tmode = value_mode;
4981 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4982 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4983 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4984 }
4985 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4986 NULL_RTX, value_mode, 0, align,
4987 total_size);
4988 }
4989 return const0_rtx;
4990 }
4991 else
4992 {
4993 rtx addr = XEXP (target, 0);
4994 rtx to_rtx;
4995
4996 /* If a value is wanted, it must be the lhs;
4997 so make the address stable for multiple use. */
4998
4999 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5000 && ! CONSTANT_ADDRESS_P (addr)
5001 /* A frame-pointer reference is already stable. */
5002 && ! (GET_CODE (addr) == PLUS
5003 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5004 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5005 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5006 addr = copy_to_reg (addr);
5007
5008 /* Now build a reference to just the desired component. */
5009
5010 to_rtx = copy_rtx (change_address (target, mode,
5011 plus_constant (addr,
5012 (bitpos
5013 / BITS_PER_UNIT))));
5014 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5015 MEM_ALIAS_SET (to_rtx) = alias_set;
5016
5017 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5018 }
5019 }
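
/* Illustrative sketch (not compiled): when store_field above must hand
   back the value it just stored into a bit-field, it avoids re-reading
   the field by masking (unsigned case) or by a shift-left/shift-right
   pair (signed case).  The same trick in plain C, with hypothetical
   names, assuming 0 < BITSIZE < 32 (mirroring the width_mask guard)
   and the usual arithmetic right shift of signed values:  */
#if 0
static int
stored_bitfield_value (unsigned int value, unsigned int bitsize,
		       int is_unsigned)
{
  if (is_unsigned)
    {
      /* Keep only the low BITSIZE bits, like the width_mask AND.  */
      unsigned int width_mask = (1u << bitsize) - 1;
      return (int) (value & width_mask);
    }
  else
    {
      /* Shift up and back down to sign-extend from BITSIZE bits.  */
      int count = 32 - (int) bitsize;
      return ((int) (value << count)) >> count;
    }
}
#endif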
5020 \f
5021 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5022 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5023 ARRAY_REFs and find the ultimate containing object, which we return.
5024
5025 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5026 bit position, and *PUNSIGNEDP to the signedness of the field.
5027 If the position of the field is variable, we store a tree
5028 giving the variable offset (in units) in *POFFSET.
5029 This offset is in addition to the bit position.
5030 If the position is not variable, we store 0 in *POFFSET.
5031 We set *PALIGNMENT to the alignment of the address that will be
5032 computed. This is the alignment of the thing we return if *POFFSET
5033 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5034
5035 If any of the extraction expressions is volatile,
5036 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5037
5038 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5039 is a mode that can be used to access the field. In that case, *PBITSIZE
5040 is redundant.
5041
5042 If the field describes a variable-sized object, *PMODE is set to
5043 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5044 this case, but the address of the object can be found. */
5045
5046 tree
5047 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5048 punsignedp, pvolatilep, palignment)
5049 tree exp;
5050 HOST_WIDE_INT *pbitsize;
5051 HOST_WIDE_INT *pbitpos;
5052 tree *poffset;
5053 enum machine_mode *pmode;
5054 int *punsignedp;
5055 int *pvolatilep;
5056 unsigned int *palignment;
5057 {
5058 tree size_tree = 0;
5059 enum machine_mode mode = VOIDmode;
5060 tree offset = size_zero_node;
5061 tree bit_offset = bitsize_zero_node;
5062 unsigned int alignment = BIGGEST_ALIGNMENT;
5063 tree tem;
5064
5065 /* First get the mode, signedness, and size. We do this from just the
5066 outermost expression. */
5067 if (TREE_CODE (exp) == COMPONENT_REF)
5068 {
5069 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5070 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5071 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5072
5073 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5074 }
5075 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5076 {
5077 size_tree = TREE_OPERAND (exp, 1);
5078 *punsignedp = TREE_UNSIGNED (exp);
5079 }
5080 else
5081 {
5082 mode = TYPE_MODE (TREE_TYPE (exp));
5083 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5084
5085 if (mode == BLKmode)
5086 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5087 else
5088 *pbitsize = GET_MODE_BITSIZE (mode);
5089 }
5090
5091 if (size_tree != 0)
5092 {
5093 if (! host_integerp (size_tree, 1))
5094 mode = BLKmode, *pbitsize = -1;
5095 else
5096 *pbitsize = tree_low_cst (size_tree, 1);
5097 }
5098
5099 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5100 and find the ultimate containing object. */
5101 while (1)
5102 {
5103 if (TREE_CODE (exp) == BIT_FIELD_REF)
5104 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5105 else if (TREE_CODE (exp) == COMPONENT_REF)
5106 {
5107 tree field = TREE_OPERAND (exp, 1);
5108 tree this_offset = DECL_FIELD_OFFSET (field);
5109
5110 /* If this field hasn't been filled in yet, don't go
5111 past it. This should only happen when folding expressions
5112 made during type construction. */
5113 if (this_offset == 0)
5114 break;
5115 else if (! TREE_CONSTANT (this_offset)
5116 && contains_placeholder_p (this_offset))
5117 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5118
5119 offset = size_binop (PLUS_EXPR, offset, this_offset);
5120 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5121 DECL_FIELD_BIT_OFFSET (field));
5122
5123 if (! host_integerp (offset, 0))
5124 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5125 }
5126
5127 else if (TREE_CODE (exp) == ARRAY_REF)
5128 {
5129 tree index = TREE_OPERAND (exp, 1);
5130 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5131 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5132 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5133
5134 /* We assume all arrays have sizes that are a multiple of a byte.
5135 First subtract the lower bound, if any, in the type of the
5136 index, then convert to sizetype and multiply by the size of the
5137 array element. */
5138 if (low_bound != 0 && ! integer_zerop (low_bound))
5139 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5140 index, low_bound));
5141
5142 /* If the index has a self-referential type, pass it to a
5143 WITH_RECORD_EXPR; if the component size is self-referential,
5144 pass our component to one. */
5145 if (! TREE_CONSTANT (index)
5146 && contains_placeholder_p (index))
5147 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5148 if (! TREE_CONSTANT (unit_size)
5149 && contains_placeholder_p (unit_size))
5150 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5151 TREE_OPERAND (exp, 0));
5152
5153 offset = size_binop (PLUS_EXPR, offset,
5154 size_binop (MULT_EXPR,
5155 convert (sizetype, index),
5156 unit_size));
5157 }
5158
5159 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5160 && ! ((TREE_CODE (exp) == NOP_EXPR
5161 || TREE_CODE (exp) == CONVERT_EXPR)
5162 && (TYPE_MODE (TREE_TYPE (exp))
5163 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5164 break;
5165
5166 /* If any reference in the chain is volatile, the effect is volatile. */
5167 if (TREE_THIS_VOLATILE (exp))
5168 *pvolatilep = 1;
5169
5170 /* If the offset is non-constant already, then we can't assume any
5171 alignment more than the alignment here. */
5172 if (! TREE_CONSTANT (offset))
5173 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5174
5175 exp = TREE_OPERAND (exp, 0);
5176 }
5177
5178 if (DECL_P (exp))
5179 alignment = MIN (alignment, DECL_ALIGN (exp));
5180 else if (TREE_TYPE (exp) != 0)
5181 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5182
5183 /* If OFFSET is constant, see if we can return the whole thing as a
5184 constant bit position. Otherwise, split it up. */
5185 if (host_integerp (offset, 0)
5186 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5187 bitsize_unit_node))
5188 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5189 && host_integerp (tem, 0))
5190 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5191 else
5192 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5193
5194 *pmode = mode;
5195 *palignment = alignment;
5196 return exp;
5197 }
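
/* Illustrative sketch (not compiled): get_inner_reference splits a
   reference such as p->a[i].f into a base object, a constant bit
   position, and a variable byte offset (here the i * sizeof term).
   The address arithmetic it describes is the ordinary C computation
   below; `struct outer', `struct elt' and the field names are
   hypothetical, and BITS_PER_UNIT is taken to be 8.  */
#if 0
#include <stddef.h>

struct elt { int f; int g; };
struct outer { int count; struct elt a[8]; };

static char *
field_address (struct outer *p, size_t i)
{
  /* Constant part: bit position of a[0].f within *p (the bitpos).  */
  size_t bitpos = (offsetof (struct outer, a)
		   + offsetof (struct elt, f)) * 8;

  /* Variable part: byte offset contributed by the index (the offset).  */
  size_t offset = i * sizeof (struct elt);

  return (char *) p + offset + bitpos / 8;
}
#endif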
5198
5199 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5200
5201 static enum memory_use_mode
5202 get_memory_usage_from_modifier (modifier)
5203 enum expand_modifier modifier;
5204 {
5205 switch (modifier)
5206 {
5207 case EXPAND_NORMAL:
5208 case EXPAND_SUM:
5209 return MEMORY_USE_RO;
5210 break;
5211 case EXPAND_MEMORY_USE_WO:
5212 return MEMORY_USE_WO;
5213 break;
5214 case EXPAND_MEMORY_USE_RW:
5215 return MEMORY_USE_RW;
5216 break;
5217 case EXPAND_MEMORY_USE_DONT:
5218 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5219 MEMORY_USE_DONT, because they are modifiers to a call of
5220 expand_expr in the ADDR_EXPR case of expand_expr. */
5221 case EXPAND_CONST_ADDRESS:
5222 case EXPAND_INITIALIZER:
5223 return MEMORY_USE_DONT;
5224 case EXPAND_MEMORY_USE_BAD:
5225 default:
5226 abort ();
5227 }
5228 }
5229 \f
5230 /* Given an rtx VALUE that may contain additions and multiplications,
5231 return an equivalent value that just refers to a register or memory.
5232 This is done by generating instructions to perform the arithmetic
5233 and returning a pseudo-register containing the value.
5234
5235 The returned value may be a REG, SUBREG, MEM or constant. */
5236
5237 rtx
5238 force_operand (value, target)
5239 rtx value, target;
5240 {
5241 register optab binoptab = 0;
5242 /* Use a temporary to force order of execution of calls to
5243 `force_operand'. */
5244 rtx tmp;
5245 register rtx op2;
5246 /* Use subtarget as the target for operand 0 of a binary operation. */
5247 register rtx subtarget = get_subtarget (target);
5248
5249 /* Check for a PIC address load. */
5250 if (flag_pic
5251 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5252 && XEXP (value, 0) == pic_offset_table_rtx
5253 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5254 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5255 || GET_CODE (XEXP (value, 1)) == CONST))
5256 {
5257 if (!subtarget)
5258 subtarget = gen_reg_rtx (GET_MODE (value));
5259 emit_move_insn (subtarget, value);
5260 return subtarget;
5261 }
5262
5263 if (GET_CODE (value) == PLUS)
5264 binoptab = add_optab;
5265 else if (GET_CODE (value) == MINUS)
5266 binoptab = sub_optab;
5267 else if (GET_CODE (value) == MULT)
5268 {
5269 op2 = XEXP (value, 1);
5270 if (!CONSTANT_P (op2)
5271 && !(GET_CODE (op2) == REG && op2 != subtarget))
5272 subtarget = 0;
5273 tmp = force_operand (XEXP (value, 0), subtarget);
5274 return expand_mult (GET_MODE (value), tmp,
5275 force_operand (op2, NULL_RTX),
5276 target, 0);
5277 }
5278
5279 if (binoptab)
5280 {
5281 op2 = XEXP (value, 1);
5282 if (!CONSTANT_P (op2)
5283 && !(GET_CODE (op2) == REG && op2 != subtarget))
5284 subtarget = 0;
5285 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5286 {
5287 binoptab = add_optab;
5288 op2 = negate_rtx (GET_MODE (value), op2);
5289 }
5290
5291 /* Check for an addition with OP2 a constant integer and our first
5292 operand a PLUS of a virtual register and something else. In that
5293 case, we want to emit the sum of the virtual register and the
5294 constant first and then add the other value. This allows virtual
5295 register instantiation to simply modify the constant rather than
5296 creating another one around this addition. */
5297 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5298 && GET_CODE (XEXP (value, 0)) == PLUS
5299 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5300 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5301 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5302 {
5303 rtx temp = expand_binop (GET_MODE (value), binoptab,
5304 XEXP (XEXP (value, 0), 0), op2,
5305 subtarget, 0, OPTAB_LIB_WIDEN);
5306 return expand_binop (GET_MODE (value), binoptab, temp,
5307 force_operand (XEXP (XEXP (value, 0), 1), 0),
5308 target, 0, OPTAB_LIB_WIDEN);
5309 }
5310
5311 tmp = force_operand (XEXP (value, 0), subtarget);
5312 return expand_binop (GET_MODE (value), binoptab, tmp,
5313 force_operand (op2, NULL_RTX),
5314 target, 0, OPTAB_LIB_WIDEN);
5315 /* We give UNSIGNEDP = 0 to expand_binop
5316 because the only operations we are expanding here are signed ones. */
5317 }
5318 return value;
5319 }
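
/* A hedged usage sketch, not taken from the surrounding code (ADDR is a
hypothetical local): a caller that has built a symbolic address such as

rtx addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (12));

cannot in general feed ADDR straight into an instruction pattern;
passing it through

addr = force_operand (addr, NULL_RTX);

emits the addition and returns a pseudo register (or the original rtx,
if it was already a REG, SUBREG, MEM or constant) that is safe to use
as an operand. */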
5320 \f
5321 /* Subroutine of expand_expr:
5322 save the non-copied parts (LIST) of an expr (LHS), and return a list
5323 which can restore these values to their previous values,
5324 should something modify their storage. */
5325
5326 static tree
5327 save_noncopied_parts (lhs, list)
5328 tree lhs;
5329 tree list;
5330 {
5331 tree tail;
5332 tree parts = 0;
5333
5334 for (tail = list; tail; tail = TREE_CHAIN (tail))
5335 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5336 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5337 else
5338 {
5339 tree part = TREE_VALUE (tail);
5340 tree part_type = TREE_TYPE (part);
5341 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5342 rtx target = assign_temp (part_type, 0, 1, 1);
5343 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5344 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5345 parts = tree_cons (to_be_saved,
5346 build (RTL_EXPR, part_type, NULL_TREE,
5347 (tree) target),
5348 parts);
5349 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5350 }
5351 return parts;
5352 }
5353
5354 /* Subroutine of expand_expr:
5355 record the non-copied parts (LIST) of an expr (LHS), and return a list
5356 which specifies the initial values of these parts. */
5357
5358 static tree
5359 init_noncopied_parts (lhs, list)
5360 tree lhs;
5361 tree list;
5362 {
5363 tree tail;
5364 tree parts = 0;
5365
5366 for (tail = list; tail; tail = TREE_CHAIN (tail))
5367 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5368 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5369 else if (TREE_PURPOSE (tail))
5370 {
5371 tree part = TREE_VALUE (tail);
5372 tree part_type = TREE_TYPE (part);
5373 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5374 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5375 }
5376 return parts;
5377 }
5378
5379 /* Subroutine of expand_expr: return nonzero iff there is no way that
5380 EXP can reference X, which is being modified. TOP_P is nonzero if this
5381 call is going to be used to determine whether we need a temporary
5382 for EXP, as opposed to a recursive call to this function.
5383
5384 It is always safe for this routine to return zero since it merely
5385 searches for optimization opportunities. */
5386
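/* A minimal worked example, offered as an illustration only (X here is a
hypothetical global variable): when expanding the right-hand side of

x = x + f ();

the store target is the MEM holding X, and safe_from_p (that MEM, the
CALL_EXPR for f (), 1) returns 0, because a call with no known result
rtx is assumed to clobber all of memory; expand_expr therefore
evaluates the call into a temporary before storing into X. */
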
5387 static int
5388 safe_from_p (x, exp, top_p)
5389 rtx x;
5390 tree exp;
5391 int top_p;
5392 {
5393 rtx exp_rtl = 0;
5394 int i, nops;
5395 static int save_expr_count;
5396 static int save_expr_size = 0;
5397 static tree *save_expr_rewritten;
5398 static tree save_expr_trees[256];
5399
5400 if (x == 0
5401 /* If EXP has varying size, we MUST use a target since we currently
5402 have no way of allocating temporaries of variable size
5403 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5404 So we assume here that something at a higher level has prevented a
5405 clash. This is somewhat bogus, but the best we can do. Only
5406 do this when X is BLKmode and when we are at the top level. */
5407 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5408 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5409 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5410 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5411 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5412 != INTEGER_CST)
5413 && GET_MODE (x) == BLKmode))
5414 return 1;
5415
5416 if (top_p && save_expr_size == 0)
5417 {
5418 int rtn;
5419
5420 save_expr_count = 0;
5421 save_expr_size = ARRAY_SIZE (save_expr_trees);
5422 save_expr_rewritten = &save_expr_trees[0];
5423
5424 rtn = safe_from_p (x, exp, 1);
5425
5426 for (i = 0; i < save_expr_count; ++i)
5427 {
5428 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5429 abort ();
5430 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5431 }
5432
5433 save_expr_size = 0;
5434
5435 return rtn;
5436 }
5437
5438 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5439 find the underlying pseudo. */
5440 if (GET_CODE (x) == SUBREG)
5441 {
5442 x = SUBREG_REG (x);
5443 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5444 return 0;
5445 }
5446
5447 /* If X is a location in the outgoing argument area, it is always safe. */
5448 if (GET_CODE (x) == MEM
5449 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5450 || (GET_CODE (XEXP (x, 0)) == PLUS
5451 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5452 return 1;
5453
5454 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5455 {
5456 case 'd':
5457 exp_rtl = DECL_RTL (exp);
5458 break;
5459
5460 case 'c':
5461 return 1;
5462
5463 case 'x':
5464 if (TREE_CODE (exp) == TREE_LIST)
5465 return ((TREE_VALUE (exp) == 0
5466 || safe_from_p (x, TREE_VALUE (exp), 0))
5467 && (TREE_CHAIN (exp) == 0
5468 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5469 else if (TREE_CODE (exp) == ERROR_MARK)
5470 return 1; /* An already-visited SAVE_EXPR? */
5471 else
5472 return 0;
5473
5474 case '1':
5475 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5476
5477 case '2':
5478 case '<':
5479 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5480 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5481
5482 case 'e':
5483 case 'r':
5484 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5485 the expression. If it is set, we conflict iff we are that rtx or
5486 both are in memory. Otherwise, we check all operands of the
5487 expression recursively. */
5488
5489 switch (TREE_CODE (exp))
5490 {
5491 case ADDR_EXPR:
5492 return (staticp (TREE_OPERAND (exp, 0))
5493 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5494 || TREE_STATIC (exp));
5495
5496 case INDIRECT_REF:
5497 if (GET_CODE (x) == MEM)
5498 return 0;
5499 break;
5500
5501 case CALL_EXPR:
5502 exp_rtl = CALL_EXPR_RTL (exp);
5503 if (exp_rtl == 0)
5504 {
5505 /* Assume that the call will clobber all hard registers and
5506 all of memory. */
5507 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5508 || GET_CODE (x) == MEM)
5509 return 0;
5510 }
5511
5512 break;
5513
5514 case RTL_EXPR:
5515 /* If a sequence exists, we would have to scan every instruction
5516 in the sequence to see if it was safe. This is probably not
5517 worthwhile. */
5518 if (RTL_EXPR_SEQUENCE (exp))
5519 return 0;
5520
5521 exp_rtl = RTL_EXPR_RTL (exp);
5522 break;
5523
5524 case WITH_CLEANUP_EXPR:
5525 exp_rtl = RTL_EXPR_RTL (exp);
5526 break;
5527
5528 case CLEANUP_POINT_EXPR:
5529 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5530
5531 case SAVE_EXPR:
5532 exp_rtl = SAVE_EXPR_RTL (exp);
5533 if (exp_rtl)
5534 break;
5535
5536 /* This SAVE_EXPR might appear many times in the top-level
5537 safe_from_p() expression, and if it has a complex
5538 subexpression, examining it multiple times could result
5539 in a combinatorial explosion. E.g. on an Alpha
5540 running at least 200MHz, a Fortran test case compiled with
5541 optimization took about 28 minutes to compile -- even though
5542 it was only a few lines long, and the complicated line causing
5543 so much time to be spent in the earlier version of safe_from_p()
5544 had only 293 or so unique nodes.
5545
5546 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5547 where it is so we can turn it back in the top-level safe_from_p()
5548 when we're done. */
5549
5550 /* For now, don't bother re-sizing the array. */
5551 if (save_expr_count >= save_expr_size)
5552 return 0;
5553 save_expr_rewritten[save_expr_count++] = exp;
5554
5555 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5556 for (i = 0; i < nops; i++)
5557 {
5558 tree operand = TREE_OPERAND (exp, i);
5559 if (operand == NULL_TREE)
5560 continue;
5561 TREE_SET_CODE (exp, ERROR_MARK);
5562 if (!safe_from_p (x, operand, 0))
5563 return 0;
5564 TREE_SET_CODE (exp, SAVE_EXPR);
5565 }
5566 TREE_SET_CODE (exp, ERROR_MARK);
5567 return 1;
5568
5569 case BIND_EXPR:
5570 /* The only operand we look at is operand 1. The rest aren't
5571 part of the expression. */
5572 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5573
5574 case METHOD_CALL_EXPR:
5575 /* This takes a rtx argument, but shouldn't appear here. */
5576 abort ();
5577
5578 default:
5579 break;
5580 }
5581
5582 /* If we have an rtx, we do not need to scan our operands. */
5583 if (exp_rtl)
5584 break;
5585
5586 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5587 for (i = 0; i < nops; i++)
5588 if (TREE_OPERAND (exp, i) != 0
5589 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5590 return 0;
5591 }
5592
5593 /* If we have an rtl, find any enclosed object. Then see if we conflict
5594 with it. */
5595 if (exp_rtl)
5596 {
5597 if (GET_CODE (exp_rtl) == SUBREG)
5598 {
5599 exp_rtl = SUBREG_REG (exp_rtl);
5600 if (GET_CODE (exp_rtl) == REG
5601 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5602 return 0;
5603 }
5604
5605 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5606 are memory and EXP is not readonly. */
5607 return ! (rtx_equal_p (x, exp_rtl)
5608 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5609 && ! TREE_READONLY (exp)));
5610 }
5611
5612 /* If we reach here, it is safe. */
5613 return 1;
5614 }
5615
5616 /* Subroutine of expand_expr: return nonzero iff EXP is an
5617 expression whose type is statically determinable. */
5618
5619 static int
5620 fixed_type_p (exp)
5621 tree exp;
5622 {
5623 if (TREE_CODE (exp) == PARM_DECL
5624 || TREE_CODE (exp) == VAR_DECL
5625 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5626 || TREE_CODE (exp) == COMPONENT_REF
5627 || TREE_CODE (exp) == ARRAY_REF)
5628 return 1;
5629 return 0;
5630 }
5631
5632 /* Subroutine of expand_expr: return rtx if EXP is a
5633 variable or parameter; else return 0. */
5634
5635 static rtx
5636 var_rtx (exp)
5637 tree exp;
5638 {
5639 STRIP_NOPS (exp);
5640 switch (TREE_CODE (exp))
5641 {
5642 case PARM_DECL:
5643 case VAR_DECL:
5644 return DECL_RTL (exp);
5645 default:
5646 return 0;
5647 }
5648 }
5649
5650 #ifdef MAX_INTEGER_COMPUTATION_MODE
5651 void
5652 check_max_integer_computation_mode (exp)
5653 tree exp;
5654 {
5655 enum tree_code code;
5656 enum machine_mode mode;
5657
5658 /* Strip any NOPs that don't change the mode. */
5659 STRIP_NOPS (exp);
5660 code = TREE_CODE (exp);
5661
5662 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5663 if (code == NOP_EXPR
5664 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5665 return;
5666
5667 /* First check the type of the overall operation. We need only look at
5668 unary, binary and relational operations. */
5669 if (TREE_CODE_CLASS (code) == '1'
5670 || TREE_CODE_CLASS (code) == '2'
5671 || TREE_CODE_CLASS (code) == '<')
5672 {
5673 mode = TYPE_MODE (TREE_TYPE (exp));
5674 if (GET_MODE_CLASS (mode) == MODE_INT
5675 && mode > MAX_INTEGER_COMPUTATION_MODE)
5676 fatal ("unsupported wide integer operation");
5677 }
5678
5679 /* Check operand of a unary op. */
5680 if (TREE_CODE_CLASS (code) == '1')
5681 {
5682 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5683 if (GET_MODE_CLASS (mode) == MODE_INT
5684 && mode > MAX_INTEGER_COMPUTATION_MODE)
5685 fatal ("unsupported wide integer operation");
5686 }
5687
5688 /* Check operands of a binary/comparison op. */
5689 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5690 {
5691 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5692 if (GET_MODE_CLASS (mode) == MODE_INT
5693 && mode > MAX_INTEGER_COMPUTATION_MODE)
5694 fatal ("unsupported wide integer operation");
5695
5696 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5697 if (GET_MODE_CLASS (mode) == MODE_INT
5698 && mode > MAX_INTEGER_COMPUTATION_MODE)
5699 fatal ("unsupported wide integer operation");
5700 }
5701 }
5702 #endif
5703 \f
5704 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5705 has any readonly fields. If any of the fields have types that
5706 contain readonly fields, return true as well. */
5707
5708 static int
5709 readonly_fields_p (type)
5710 tree type;
5711 {
5712 tree field;
5713
5714 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5715 if (TREE_CODE (field) == FIELD_DECL
5716 && (TREE_READONLY (field)
5717 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5718 && readonly_fields_p (TREE_TYPE (field)))))
5719 return 1;
5720
5721 return 0;
5722 }
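
/* For illustration only (the types below are hypothetical, not part of
GCC): given

struct inner { const int id; };
struct outer { struct inner in; int n; };

readonly_fields_p returns 1 for both RECORD_TYPEs: INNER has a
FIELD_DECL with TREE_READONLY set, and OUTER contains a field whose
type is such a record. */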
5723 \f
5724 /* expand_expr: generate code for computing expression EXP.
5725 An rtx for the computed value is returned. The value is never null.
5726 In the case of a void EXP, const0_rtx is returned.
5727
5728 The value may be stored in TARGET if TARGET is nonzero.
5729 TARGET is just a suggestion; callers must assume that
5730 the rtx returned may not be the same as TARGET.
5731
5732 If TARGET is CONST0_RTX, it means that the value will be ignored.
5733
5734 If TMODE is not VOIDmode, it suggests generating the
5735 result in mode TMODE. But this is done only when convenient.
5736 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5737 TMODE is just a suggestion; callers must assume that
5738 the rtx returned may not have mode TMODE.
5739
5740 Note that TARGET may have neither TMODE nor MODE. In that case, it
5741 probably will not be used.
5742
5743 If MODIFIER is EXPAND_SUM then when EXP is an addition
5744 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5745 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5746 products as above, or REG or MEM, or constant.
5747 Ordinarily in such cases we would output mul or add instructions
5748 and then return a pseudo reg containing the sum.
5749
5750 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5751 it also marks a label as absolutely required (it can't be dead).
5752 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5753 This is used for outputting expressions used in initializers.
5754
5755 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5756 with a constant address even if that address is not normally legitimate.
5757 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5758
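/* A hedged usage sketch (EXP and TEMP are hypothetical locals, not part
of this file): the most common pattern elsewhere in the compiler is

rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

i.e. no preferred target and no preferred mode; the caller must then be
prepared for TEMP to be any REG, SUBREG, MEM or constant in the
expression's natural mode. */
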
5759 rtx
5760 expand_expr (exp, target, tmode, modifier)
5761 register tree exp;
5762 rtx target;
5763 enum machine_mode tmode;
5764 enum expand_modifier modifier;
5765 {
5766 register rtx op0, op1, temp;
5767 tree type = TREE_TYPE (exp);
5768 int unsignedp = TREE_UNSIGNED (type);
5769 register enum machine_mode mode;
5770 register enum tree_code code = TREE_CODE (exp);
5771 optab this_optab;
5772 rtx subtarget, original_target;
5773 int ignore;
5774 tree context;
5775 /* Used by check-memory-usage to make modifier read only. */
5776 enum expand_modifier ro_modifier;
5777
5778 /* Handle ERROR_MARK before anybody tries to access its type. */
5779 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5780 {
5781 op0 = CONST0_RTX (tmode);
5782 if (op0 != 0)
5783 return op0;
5784 return const0_rtx;
5785 }
5786
5787 mode = TYPE_MODE (type);
5788 /* Use subtarget as the target for operand 0 of a binary operation. */
5789 subtarget = get_subtarget (target);
5790 original_target = target;
5791 ignore = (target == const0_rtx
5792 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5793 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5794 || code == COND_EXPR)
5795 && TREE_CODE (type) == VOID_TYPE));
5796
5797 /* Make a read-only version of the modifier. */
5798 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5799 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5800 ro_modifier = modifier;
5801 else
5802 ro_modifier = EXPAND_NORMAL;
5803
5804 /* If we are going to ignore this result, we need only do something
5805 if there is a side-effect somewhere in the expression. If there
5806 is, short-circuit the most common cases here. Note that we must
5807 not call expand_expr with anything but const0_rtx in case this
5808 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5809
5810 if (ignore)
5811 {
5812 if (! TREE_SIDE_EFFECTS (exp))
5813 return const0_rtx;
5814
5815 /* Ensure we reference a volatile object even if value is ignored, but
5816 don't do this if all we are doing is taking its address. */
5817 if (TREE_THIS_VOLATILE (exp)
5818 && TREE_CODE (exp) != FUNCTION_DECL
5819 && mode != VOIDmode && mode != BLKmode
5820 && modifier != EXPAND_CONST_ADDRESS)
5821 {
5822 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5823 if (GET_CODE (temp) == MEM)
5824 temp = copy_to_reg (temp);
5825 return const0_rtx;
5826 }
5827
5828 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5829 || code == INDIRECT_REF || code == BUFFER_REF)
5830 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5831 VOIDmode, ro_modifier);
5832 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5833 || code == ARRAY_REF)
5834 {
5835 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5836 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5837 return const0_rtx;
5838 }
5839 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5840 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5841 /* If the second operand has no side effects, just evaluate
5842 the first. */
5843 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5844 VOIDmode, ro_modifier);
5845 else if (code == BIT_FIELD_REF)
5846 {
5847 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5848 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5849 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5850 return const0_rtx;
5851 }
5853 target = 0;
5854 }
5855
5856 #ifdef MAX_INTEGER_COMPUTATION_MODE
5857 /* Only check stuff here if the mode we want is different from the mode
5858 of the expression; if it's the same, check_max_integer_computation_mode
5859 will handle it. Do we really need to check this stuff at all? */
5860
5861 if (target
5862 && GET_MODE (target) != mode
5863 && TREE_CODE (exp) != INTEGER_CST
5864 && TREE_CODE (exp) != PARM_DECL
5865 && TREE_CODE (exp) != ARRAY_REF
5866 && TREE_CODE (exp) != COMPONENT_REF
5867 && TREE_CODE (exp) != BIT_FIELD_REF
5868 && TREE_CODE (exp) != INDIRECT_REF
5869 && TREE_CODE (exp) != CALL_EXPR
5870 && TREE_CODE (exp) != VAR_DECL
5871 && TREE_CODE (exp) != RTL_EXPR)
5872 {
5873 enum machine_mode mode = GET_MODE (target);
5874
5875 if (GET_MODE_CLASS (mode) == MODE_INT
5876 && mode > MAX_INTEGER_COMPUTATION_MODE)
5877 fatal ("unsupported wide integer operation");
5878 }
5879
5880 if (tmode != mode
5881 && TREE_CODE (exp) != INTEGER_CST
5882 && TREE_CODE (exp) != PARM_DECL
5883 && TREE_CODE (exp) != ARRAY_REF
5884 && TREE_CODE (exp) != COMPONENT_REF
5885 && TREE_CODE (exp) != BIT_FIELD_REF
5886 && TREE_CODE (exp) != INDIRECT_REF
5887 && TREE_CODE (exp) != VAR_DECL
5888 && TREE_CODE (exp) != CALL_EXPR
5889 && TREE_CODE (exp) != RTL_EXPR
5890 && GET_MODE_CLASS (tmode) == MODE_INT
5891 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5892 fatal ("unsupported wide integer operation");
5893
5894 check_max_integer_computation_mode (exp);
5895 #endif
5896
5897 /* If we will do cse, generate all results into pseudo registers
5898 since 1) that allows cse to find more things
5899 and 2) otherwise cse could produce an insn the machine
5900 cannot support. */
5901
5902 if (! cse_not_expected && mode != BLKmode && target
5903 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5904 target = subtarget;
5905
5906 switch (code)
5907 {
5908 case LABEL_DECL:
5909 {
5910 tree function = decl_function_context (exp);
5911 /* Handle using a label in a containing function. */
5912 if (function != current_function_decl
5913 && function != inline_function_decl && function != 0)
5914 {
5915 struct function *p = find_function_data (function);
5916 /* Allocate in the memory associated with the function
5917 that the label is in. */
5918 push_obstacks (p->function_obstack,
5919 p->function_maybepermanent_obstack);
5920
5921 p->expr->x_forced_labels
5922 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5923 p->expr->x_forced_labels);
5924 pop_obstacks ();
5925 }
5926 else
5927 {
5928 if (modifier == EXPAND_INITIALIZER)
5929 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5930 label_rtx (exp),
5931 forced_labels);
5932 }
5933
5934 temp = gen_rtx_MEM (FUNCTION_MODE,
5935 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5936 if (function != current_function_decl
5937 && function != inline_function_decl && function != 0)
5938 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5939 return temp;
5940 }
5941
5942 case PARM_DECL:
5943 if (DECL_RTL (exp) == 0)
5944 {
5945 error_with_decl (exp, "prior parameter's size depends on `%s'");
5946 return CONST0_RTX (mode);
5947 }
5948
5949 /* ... fall through ... */
5950
5951 case VAR_DECL:
5952 /* If a static var's type was incomplete when the decl was written,
5953 but the type is complete now, lay out the decl now. */
5954 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5955 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5956 {
5957 push_obstacks_nochange ();
5958 end_temporary_allocation ();
5959 layout_decl (exp, 0);
5960 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5961 pop_obstacks ();
5962 }
5963
5964 /* Although static-storage variables start off initialized, according to
5965 ANSI C, a memcpy could overwrite them with uninitialized values. So
5966 we check them too. This also lets us check for read-only variables
5967 accessed via a non-const declaration, in case it won't be detected
5968 any other way (e.g., in an embedded system or OS kernel without
5969 memory protection).
5970
5971 Aggregates are not checked here; they're handled elsewhere. */
5972 if (cfun && current_function_check_memory_usage
5973 && code == VAR_DECL
5974 && GET_CODE (DECL_RTL (exp)) == MEM
5975 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5976 {
5977 enum memory_use_mode memory_usage;
5978 memory_usage = get_memory_usage_from_modifier (modifier);
5979
5980 in_check_memory_usage = 1;
5981 if (memory_usage != MEMORY_USE_DONT)
5982 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5983 XEXP (DECL_RTL (exp), 0), Pmode,
5984 GEN_INT (int_size_in_bytes (type)),
5985 TYPE_MODE (sizetype),
5986 GEN_INT (memory_usage),
5987 TYPE_MODE (integer_type_node));
5988 in_check_memory_usage = 0;
5989 }
5990
5991 /* ... fall through ... */
5992
5993 case FUNCTION_DECL:
5994 case RESULT_DECL:
5995 if (DECL_RTL (exp) == 0)
5996 abort ();
5997
5998 /* Ensure the variable is marked as used even if it doesn't go through
5999 a parser. If it hasn't been used yet, write out an external
6000 definition. */
6001 if (! TREE_USED (exp))
6002 {
6003 assemble_external (exp);
6004 TREE_USED (exp) = 1;
6005 }
6006
6007 /* Show we haven't gotten RTL for this yet. */
6008 temp = 0;
6009
6010 /* Handle variables inherited from containing functions. */
6011 context = decl_function_context (exp);
6012
6013 /* We treat inline_function_decl as an alias for the current function
6014 because that is the inline function whose vars, types, etc.
6015 are being merged into the current function.
6016 See expand_inline_function. */
6017
6018 if (context != 0 && context != current_function_decl
6019 && context != inline_function_decl
6020 /* If var is static, we don't need a static chain to access it. */
6021 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6022 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6023 {
6024 rtx addr;
6025
6026 /* Mark as non-local and addressable. */
6027 DECL_NONLOCAL (exp) = 1;
6028 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6029 abort ();
6030 mark_addressable (exp);
6031 if (GET_CODE (DECL_RTL (exp)) != MEM)
6032 abort ();
6033 addr = XEXP (DECL_RTL (exp), 0);
6034 if (GET_CODE (addr) == MEM)
6035 addr = change_address (addr, Pmode,
6036 fix_lexical_addr (XEXP (addr, 0), exp));
6037 else
6038 addr = fix_lexical_addr (addr, exp);
6039
6040 temp = change_address (DECL_RTL (exp), mode, addr);
6041 }
6042
6043 /* This is the case of an array whose size is to be determined
6044 from its initializer, while the initializer is still being parsed.
6045 See expand_decl. */
6046
6047 else if (GET_CODE (DECL_RTL (exp)) == MEM
6048 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6049 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6050 XEXP (DECL_RTL (exp), 0));
6051
6052 /* If DECL_RTL is memory, we are in the normal case and either
6053 the address is not valid or it is not a register and -fforce-addr
6054 is specified, get the address into a register. */
6055
6056 else if (GET_CODE (DECL_RTL (exp)) == MEM
6057 && modifier != EXPAND_CONST_ADDRESS
6058 && modifier != EXPAND_SUM
6059 && modifier != EXPAND_INITIALIZER
6060 && (! memory_address_p (DECL_MODE (exp),
6061 XEXP (DECL_RTL (exp), 0))
6062 || (flag_force_addr
6063 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6064 temp = change_address (DECL_RTL (exp), VOIDmode,
6065 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6066
6067 /* If we got something, return it. But first, set the alignment
6068 if the address is a register. */
6069 if (temp != 0)
6070 {
6071 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6072 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6073
6074 return temp;
6075 }
6076
6077 /* If the mode of DECL_RTL does not match that of the decl, it
6078 must be a promoted value. We return a SUBREG of the wanted mode,
6079 but mark it so that we know that it was already extended. */
6080
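/* Illustrative assumption, not part of the original comment: on a
target whose PROMOTE_MODE widens QImode variables to SImode, DECL_RTL
is an SImode pseudo while MODE is QImode, so the code below returns
(subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set on it. */
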
6081 if (GET_CODE (DECL_RTL (exp)) == REG
6082 && GET_MODE (DECL_RTL (exp)) != mode)
6083 {
6084 /* Get the signedness used for this variable. Ensure we get the
6085 same mode we got when the variable was declared. */
6086 if (GET_MODE (DECL_RTL (exp))
6087 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6088 abort ();
6089
6090 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6091 SUBREG_PROMOTED_VAR_P (temp) = 1;
6092 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6093 return temp;
6094 }
6095
6096 return DECL_RTL (exp);
6097
6098 case INTEGER_CST:
6099 return immed_double_const (TREE_INT_CST_LOW (exp),
6100 TREE_INT_CST_HIGH (exp), mode);
6101
6102 case CONST_DECL:
6103 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6104 EXPAND_MEMORY_USE_BAD);
6105
6106 case REAL_CST:
6107 /* If optimized, generate immediate CONST_DOUBLE
6108 which will be turned into memory by reload if necessary.
6109
6110 We used to force a register so that loop.c could see it. But
6111 this does not allow gen_* patterns to perform optimizations with
6112 the constants. It also produces two insns in cases like "x = 1.0;".
6113 On most machines, floating-point constants are not permitted in
6114 many insns, so we'd end up copying it to a register in any case.
6115
6116 Now, we do the copying in expand_binop, if appropriate. */
6117 return immed_real_const (exp);
6118
6119 case COMPLEX_CST:
6120 case STRING_CST:
6121 if (! TREE_CST_RTL (exp))
6122 output_constant_def (exp);
6123
6124 /* TREE_CST_RTL probably contains a constant address.
6125 On RISC machines where a constant address isn't valid,
6126 make some insns to get that address into a register. */
6127 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6128 && modifier != EXPAND_CONST_ADDRESS
6129 && modifier != EXPAND_INITIALIZER
6130 && modifier != EXPAND_SUM
6131 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6132 || (flag_force_addr
6133 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6134 return change_address (TREE_CST_RTL (exp), VOIDmode,
6135 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6136 return TREE_CST_RTL (exp);
6137
6138 case EXPR_WITH_FILE_LOCATION:
6139 {
6140 rtx to_return;
6141 const char *saved_input_filename = input_filename;
6142 int saved_lineno = lineno;
6143 input_filename = EXPR_WFL_FILENAME (exp);
6144 lineno = EXPR_WFL_LINENO (exp);
6145 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6146 emit_line_note (input_filename, lineno);
6147 /* Possibly avoid switching back and forth here. */
6148 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6149 input_filename = saved_input_filename;
6150 lineno = saved_lineno;
6151 return to_return;
6152 }
6153
6154 case SAVE_EXPR:
6155 context = decl_function_context (exp);
6156
6157 /* If this SAVE_EXPR was at global context, assume we are an
6158 initialization function and move it into our context. */
6159 if (context == 0)
6160 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6161
6162 /* We treat inline_function_decl as an alias for the current function
6163 because that is the inline function whose vars, types, etc.
6164 are being merged into the current function.
6165 See expand_inline_function. */
6166 if (context == current_function_decl || context == inline_function_decl)
6167 context = 0;
6168
6169 /* If this is non-local, handle it. */
6170 if (context)
6171 {
6172 /* The following call just exists to abort if the context is
6173 not of a containing function. */
6174 find_function_data (context);
6175
6176 temp = SAVE_EXPR_RTL (exp);
6177 if (temp && GET_CODE (temp) == REG)
6178 {
6179 put_var_into_stack (exp);
6180 temp = SAVE_EXPR_RTL (exp);
6181 }
6182 if (temp == 0 || GET_CODE (temp) != MEM)
6183 abort ();
6184 return change_address (temp, mode,
6185 fix_lexical_addr (XEXP (temp, 0), exp));
6186 }
6187 if (SAVE_EXPR_RTL (exp) == 0)
6188 {
6189 if (mode == VOIDmode)
6190 temp = const0_rtx;
6191 else
6192 temp = assign_temp (type, 3, 0, 0);
6193
6194 SAVE_EXPR_RTL (exp) = temp;
6195 if (!optimize && GET_CODE (temp) == REG)
6196 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6197 save_expr_regs);
6198
6199 /* If the mode of TEMP does not match that of the expression, it
6200 must be a promoted value. We pass store_expr a SUBREG of the
6201 wanted mode but mark it so that we know that it was already
6202 extended. Note that `unsignedp' was modified above in
6203 this case. */
6204
6205 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6206 {
6207 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6208 SUBREG_PROMOTED_VAR_P (temp) = 1;
6209 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6210 }
6211
6212 if (temp == const0_rtx)
6213 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6214 EXPAND_MEMORY_USE_BAD);
6215 else
6216 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6217
6218 TREE_USED (exp) = 1;
6219 }
6220
6221 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6222 must be a promoted value. We return a SUBREG of the wanted mode,
6223 but mark it so that we know that it was already extended. */
6224
6225 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6226 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6227 {
6228 /* Compute the signedness and make the proper SUBREG. */
6229 promote_mode (type, mode, &unsignedp, 0);
6230 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6231 SUBREG_PROMOTED_VAR_P (temp) = 1;
6232 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6233 return temp;
6234 }
6235
6236 return SAVE_EXPR_RTL (exp);
6237
6238 case UNSAVE_EXPR:
6239 {
6240 rtx temp;
6241 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6242 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6243 return temp;
6244 }
6245
6246 case PLACEHOLDER_EXPR:
6247 {
6248 tree placeholder_expr;
6249
6250 /* If there is an object on the head of the placeholder list,
6251 see if any object in it is of type TYPE or a pointer to it. For
6252 further information, see tree.def. */
6253 for (placeholder_expr = placeholder_list;
6254 placeholder_expr != 0;
6255 placeholder_expr = TREE_CHAIN (placeholder_expr))
6256 {
6257 tree need_type = TYPE_MAIN_VARIANT (type);
6258 tree object = 0;
6259 tree old_list = placeholder_list;
6260 tree elt;
6261
6262 /* Find the outermost reference that is of the type we want.
6263 If none, see if any object has a type that is a pointer to
6264 the type we want. */
6265 for (elt = TREE_PURPOSE (placeholder_expr);
6266 elt != 0 && object == 0;
6267 elt
6268 = ((TREE_CODE (elt) == COMPOUND_EXPR
6269 || TREE_CODE (elt) == COND_EXPR)
6270 ? TREE_OPERAND (elt, 1)
6271 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6272 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6273 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6274 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6275 ? TREE_OPERAND (elt, 0) : 0))
6276 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6277 object = elt;
6278
6279 for (elt = TREE_PURPOSE (placeholder_expr);
6280 elt != 0 && object == 0;
6281 elt
6282 = ((TREE_CODE (elt) == COMPOUND_EXPR
6283 || TREE_CODE (elt) == COND_EXPR)
6284 ? TREE_OPERAND (elt, 1)
6285 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6286 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6287 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6288 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6289 ? TREE_OPERAND (elt, 0) : 0))
6290 if (POINTER_TYPE_P (TREE_TYPE (elt))
6291 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6292 == need_type))
6293 object = build1 (INDIRECT_REF, need_type, elt);
6294
6295 if (object != 0)
6296 {
6297 /* Expand this object skipping the list entries before
6298 it was found in case it is also a PLACEHOLDER_EXPR.
6299 In that case, we want to translate it using subsequent
6300 entries. */
6301 placeholder_list = TREE_CHAIN (placeholder_expr);
6302 temp = expand_expr (object, original_target, tmode,
6303 ro_modifier);
6304 placeholder_list = old_list;
6305 return temp;
6306 }
6307 }
6308 }
6309
6310 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6311 abort ();
6312
6313 case WITH_RECORD_EXPR:
6314 /* Put the object on the placeholder list, expand our first operand,
6315 and pop the list. */
6316 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6317 placeholder_list);
6318 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6319 tmode, ro_modifier);
6320 placeholder_list = TREE_CHAIN (placeholder_list);
6321 return target;
6322
6323 case GOTO_EXPR:
6324 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6325 expand_goto (TREE_OPERAND (exp, 0));
6326 else
6327 expand_computed_goto (TREE_OPERAND (exp, 0));
6328 return const0_rtx;
6329
6330 case EXIT_EXPR:
6331 expand_exit_loop_if_false (NULL_PTR,
6332 invert_truthvalue (TREE_OPERAND (exp, 0)));
6333 return const0_rtx;
6334
6335 case LABELED_BLOCK_EXPR:
6336 if (LABELED_BLOCK_BODY (exp))
6337 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6338 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6339 return const0_rtx;
6340
6341 case EXIT_BLOCK_EXPR:
6342 if (EXIT_BLOCK_RETURN (exp))
6343 sorry ("returned value in block_exit_expr");
6344 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6345 return const0_rtx;
6346
6347 case LOOP_EXPR:
6348 push_temp_slots ();
6349 expand_start_loop (1);
6350 expand_expr_stmt (TREE_OPERAND (exp, 0));
6351 expand_end_loop ();
6352 pop_temp_slots ();
6353
6354 return const0_rtx;
6355
6356 case BIND_EXPR:
6357 {
6358 tree vars = TREE_OPERAND (exp, 0);
6359 int vars_need_expansion = 0;
6360
6361 /* Need to open a binding contour here because
6362 if there are any cleanups they must be contained here. */
6363 expand_start_bindings (2);
6364
6365 /* Mark the corresponding BLOCK for output in its proper place. */
6366 if (TREE_OPERAND (exp, 2) != 0
6367 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6368 insert_block (TREE_OPERAND (exp, 2));
6369
6370 /* If VARS have not yet been expanded, expand them now. */
6371 while (vars)
6372 {
6373 if (DECL_RTL (vars) == 0)
6374 {
6375 vars_need_expansion = 1;
6376 expand_decl (vars);
6377 }
6378 expand_decl_init (vars);
6379 vars = TREE_CHAIN (vars);
6380 }
6381
6382 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6383
6384 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6385
6386 return temp;
6387 }
6388
6389 case RTL_EXPR:
6390 if (RTL_EXPR_SEQUENCE (exp))
6391 {
6392 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6393 abort ();
6394 emit_insns (RTL_EXPR_SEQUENCE (exp));
6395 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6396 }
6397 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6398 free_temps_for_rtl_expr (exp);
6399 return RTL_EXPR_RTL (exp);
6400
6401 case CONSTRUCTOR:
6402 /* If we don't need the result, just ensure we evaluate any
6403 subexpressions. */
6404 if (ignore)
6405 {
6406 tree elt;
6407 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6408 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6409 EXPAND_MEMORY_USE_BAD);
6410 return const0_rtx;
6411 }
6412
6413 /* All elts simple constants => refer to a constant in memory. But
6414 if this is a non-BLKmode mode, let it store a field at a time
6415 since that should make a CONST_INT or CONST_DOUBLE when we
6416 fold. Likewise, if we have a target we can use, it is best to
6417 store directly into the target unless the type is large enough
6418 that memcpy will be used. If we are making an initializer and
6419 all operands are constant, put it in memory as well. */
6420 else if ((TREE_STATIC (exp)
6421 && ((mode == BLKmode
6422 && ! (target != 0 && safe_from_p (target, exp, 1)))
6423 || TREE_ADDRESSABLE (exp)
6424 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6425 && (! MOVE_BY_PIECES_P
6426 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6427 TYPE_ALIGN (type)))
6428 && ! mostly_zeros_p (exp))))
6429 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6430 {
6431 rtx constructor = output_constant_def (exp);
6432
6433 if (modifier != EXPAND_CONST_ADDRESS
6434 && modifier != EXPAND_INITIALIZER
6435 && modifier != EXPAND_SUM
6436 && (! memory_address_p (GET_MODE (constructor),
6437 XEXP (constructor, 0))
6438 || (flag_force_addr
6439 && GET_CODE (XEXP (constructor, 0)) != REG)))
6440 constructor = change_address (constructor, VOIDmode,
6441 XEXP (constructor, 0));
6442 return constructor;
6443 }
6444
6445 else
6446 {
6447 /* Handle calls that pass values in multiple non-contiguous
6448 locations. The Irix 6 ABI has examples of this. */
6449 if (target == 0 || ! safe_from_p (target, exp, 1)
6450 || GET_CODE (target) == PARALLEL)
6451 {
6452 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6453 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6454 else
6455 target = assign_temp (type, 0, 1, 1);
6456 }
6457
6458 if (TREE_READONLY (exp))
6459 {
6460 if (GET_CODE (target) == MEM)
6461 target = copy_rtx (target);
6462
6463 RTX_UNCHANGING_P (target) = 1;
6464 }
6465
6466 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6467 int_size_in_bytes (TREE_TYPE (exp)));
6468 return target;
6469 }
6470
6471 case INDIRECT_REF:
6472 {
6473 tree exp1 = TREE_OPERAND (exp, 0);
6474 tree index;
6475 tree string = string_constant (exp1, &index);
6476
6477 /* Try to optimize reads from const strings. */
6478 if (string
6479 && TREE_CODE (string) == STRING_CST
6480 && TREE_CODE (index) == INTEGER_CST
6481 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6482 && GET_MODE_CLASS (mode) == MODE_INT
6483 && GET_MODE_SIZE (mode) == 1
6484 && modifier != EXPAND_MEMORY_USE_WO)
6485 return
6486 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6487
6488 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6489 op0 = memory_address (mode, op0);
6490
6491 if (cfun && current_function_check_memory_usage
6492 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6493 {
6494 enum memory_use_mode memory_usage;
6495 memory_usage = get_memory_usage_from_modifier (modifier);
6496
6497 if (memory_usage != MEMORY_USE_DONT)
6498 {
6499 in_check_memory_usage = 1;
6500 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6501 op0, Pmode,
6502 GEN_INT (int_size_in_bytes (type)),
6503 TYPE_MODE (sizetype),
6504 GEN_INT (memory_usage),
6505 TYPE_MODE (integer_type_node));
6506 in_check_memory_usage = 0;
6507 }
6508 }
6509
6510 temp = gen_rtx_MEM (mode, op0);
6511 set_mem_attributes (temp, exp, 0);
6512
6513 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6514 here, because, in C and C++, the fact that a location is accessed
6515 through a pointer to const does not mean that the value there can
6516 never change. Languages where it can never change should
6517 also set TREE_STATIC. */
6518 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6519
6520 /* If we are writing to this object and its type is a record with
6521 readonly fields, we must mark it as readonly so it will
6522 conflict with readonly references to those fields. */
6523 if (modifier == EXPAND_MEMORY_USE_WO
6524 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6525 RTX_UNCHANGING_P (temp) = 1;
6526
6527 return temp;
6528 }
6529
6530 case ARRAY_REF:
6531 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6532 abort ();
6533
6534 {
6535 tree array = TREE_OPERAND (exp, 0);
6536 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6537 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6538 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6539 HOST_WIDE_INT i;
6540
6541 /* Optimize the special-case of a zero lower bound.
6542
6543 We convert the low_bound to sizetype to avoid some problems
6544 with constant folding. (E.g. suppose the lower bound is 1,
6545 and its mode is QI. Without the conversion, (ARRAY
6546 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6547 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6548
6549 if (! integer_zerop (low_bound))
6550 index = size_diffop (index, convert (sizetype, low_bound));
6551
6552 /* Fold an expression like: "foo"[2].
6553 This is not done in fold so it won't happen inside &.
6554 Don't fold if this is for wide characters since it's too
6555 difficult to do correctly and this is a very rare case. */
6556
6557 if (TREE_CODE (array) == STRING_CST
6558 && TREE_CODE (index) == INTEGER_CST
6559 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6560 && GET_MODE_CLASS (mode) == MODE_INT
6561 && GET_MODE_SIZE (mode) == 1)
6562 return
6563 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6564
6565 /* If this is a constant index into a constant array,
6566 just get the value from the array. Handle both the cases when
6567 we have an explicit constructor and when our operand is a variable
6568 that was declared const. */
6569
6570 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6571 && TREE_CODE (index) == INTEGER_CST
6572 && 0 > compare_tree_int (index,
6573 list_length (CONSTRUCTOR_ELTS
6574 (TREE_OPERAND (exp, 0)))))
6575 {
6576 tree elem;
6577
6578 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6579 i = TREE_INT_CST_LOW (index);
6580 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6581 ;
6582
6583 if (elem)
6584 return expand_expr (fold (TREE_VALUE (elem)), target,
6585 tmode, ro_modifier);
6586 }
6587
6588 else if (optimize >= 1
6589 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6590 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6591 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6592 {
6593 if (TREE_CODE (index) == INTEGER_CST)
6594 {
6595 tree init = DECL_INITIAL (array);
6596
6597 if (TREE_CODE (init) == CONSTRUCTOR)
6598 {
6599 tree elem;
6600
6601 for (elem = CONSTRUCTOR_ELTS (init);
6602 (elem
6603 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6604 elem = TREE_CHAIN (elem))
6605 ;
6606
6607 if (elem)
6608 return expand_expr (fold (TREE_VALUE (elem)), target,
6609 tmode, ro_modifier);
6610 }
6611 else if (TREE_CODE (init) == STRING_CST
6612 && 0 > compare_tree_int (index,
6613 TREE_STRING_LENGTH (init)))
6614 return (GEN_INT
6615 (TREE_STRING_POINTER
6616 (init)[TREE_INT_CST_LOW (index)]));
6617 }
6618 }
6619 }
6620 /* Fall through. */
6621
6622 case COMPONENT_REF:
6623 case BIT_FIELD_REF:
6624 /* If the operand is a CONSTRUCTOR, we can just extract the
6625 appropriate field if it is present. Don't do this if we have
6626 already written the data since we want to refer to that copy
6627 and varasm.c assumes that's what we'll do. */
6628 if (code != ARRAY_REF
6629 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6630 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6631 {
6632 tree elt;
6633
6634 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6635 elt = TREE_CHAIN (elt))
6636 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6637 /* We can normally use the value of the field in the
6638 CONSTRUCTOR. However, if this is a bitfield in
6639 an integral mode that we can fit in a HOST_WIDE_INT,
6640 we must mask only the number of bits in the bitfield,
6641 since this is done implicitly by the constructor. If
6642 the bitfield does not meet either of those conditions,
6643 we can't do this optimization. */
6644 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6645 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6646 == MODE_INT)
6647 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6648 <= HOST_BITS_PER_WIDE_INT))))
6649 {
6650 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6651 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6652 {
6653 HOST_WIDE_INT bitsize
6654 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6655
6656 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6657 {
6658 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6659 op0 = expand_and (op0, op1, target);
6660 }
6661 else
6662 {
6663 enum machine_mode imode
6664 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6665 tree count
6666 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6667 0);
6668
6669 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6670 target, 0);
6671 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6672 target, 0);
6673 }
6674 }
6675
6676 return op0;
6677 }
6678 }
6679
6680 {
6681 enum machine_mode mode1;
6682 HOST_WIDE_INT bitsize, bitpos;
6683 tree offset;
6684 int volatilep = 0;
6685 unsigned int alignment;
6686 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6687 &mode1, &unsignedp, &volatilep,
6688 &alignment);
6689
6690 /* If we got back the original object, something is wrong. Perhaps
6691 we are evaluating an expression too early. In any event, don't
6692 infinitely recurse. */
6693 if (tem == exp)
6694 abort ();
6695
6696 /* If TEM's type is a union of variable size, pass TARGET to the inner
6697 computation, since it will need a temporary and TARGET is known
6698 to suffice. This occurs in unchecked conversion in Ada. */
6699
6700 op0 = expand_expr (tem,
6701 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6702 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6703 != INTEGER_CST)
6704 ? target : NULL_RTX),
6705 VOIDmode,
6706 (modifier == EXPAND_INITIALIZER
6707 || modifier == EXPAND_CONST_ADDRESS)
6708 ? modifier : EXPAND_NORMAL);
6709
6710 /* If this is a constant, put it into a register if it is a
6711 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6712 if (CONSTANT_P (op0))
6713 {
6714 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6715 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6716 && offset == 0)
6717 op0 = force_reg (mode, op0);
6718 else
6719 op0 = validize_mem (force_const_mem (mode, op0));
6720 }
6721
6722 if (offset != 0)
6723 {
6724 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6725
6726 /* If this object is in memory, put it into a register.
6727 This case can't occur in C, but can in Ada if we have
6728 unchecked conversion of an expression from a scalar type to
6729 an array or record type. */
6730 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6731 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6732 {
6733 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6734
6735 mark_temp_addr_taken (memloc);
6736 emit_move_insn (memloc, op0);
6737 op0 = memloc;
6738 }
6739
6740 if (GET_CODE (op0) != MEM)
6741 abort ();
6742
6743 if (GET_MODE (offset_rtx) != ptr_mode)
6744 {
6745 #ifdef POINTERS_EXTEND_UNSIGNED
6746 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6747 #else
6748 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6749 #endif
6750 }
6751
6752 /* A constant address in OP0 can have VOIDmode; we must not try
6753 to call force_reg for that case, so avoid it. */
6754 if (GET_CODE (op0) == MEM
6755 && GET_MODE (op0) == BLKmode
6756 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6757 && bitsize != 0
6758 && (bitpos % bitsize) == 0
6759 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6760 && alignment == GET_MODE_ALIGNMENT (mode1))
6761 {
6762 rtx temp = change_address (op0, mode1,
6763 plus_constant (XEXP (op0, 0),
6764 (bitpos /
6765 BITS_PER_UNIT)));
6766 if (GET_CODE (XEXP (temp, 0)) == REG)
6767 op0 = temp;
6768 else
6769 op0 = change_address (op0, mode1,
6770 force_reg (GET_MODE (XEXP (temp, 0)),
6771 XEXP (temp, 0)));
6772 bitpos = 0;
6773 }
6774
6775 op0 = change_address (op0, VOIDmode,
6776 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6777 force_reg (ptr_mode,
6778 offset_rtx)));
6779 }
6780
6781 /* Don't forget about volatility even if this is a bitfield. */
6782 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6783 {
6784 op0 = copy_rtx (op0);
6785 MEM_VOLATILE_P (op0) = 1;
6786 }
6787
6788 /* Check the access. */
6789 if (cfun != 0 && current_function_check_memory_usage
6790 && GET_CODE (op0) == MEM)
6791 {
6792 enum memory_use_mode memory_usage;
6793 memory_usage = get_memory_usage_from_modifier (modifier);
6794
6795 if (memory_usage != MEMORY_USE_DONT)
6796 {
6797 rtx to;
6798 int size;
6799
6800 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6801 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6802
6803 /* Check the access right of the pointer. */
6804 in_check_memory_usage = 1;
6805 if (size > BITS_PER_UNIT)
6806 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6807 to, Pmode,
6808 GEN_INT (size / BITS_PER_UNIT),
6809 TYPE_MODE (sizetype),
6810 GEN_INT (memory_usage),
6811 TYPE_MODE (integer_type_node));
6812 in_check_memory_usage = 0;
6813 }
6814 }
6815
6816 /* In cases where an aligned union has an unaligned object
6817 as a field, we might be extracting a BLKmode value from
6818 an integer-mode (e.g., SImode) object. Handle this case
6819 by doing the extract into an object as wide as the field
6820 (which we know to be the width of a basic mode), then
6821 storing into memory, and changing the mode to BLKmode.
6822 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6823 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6824 if (mode1 == VOIDmode
6825 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6826 || (modifier != EXPAND_CONST_ADDRESS
6827 && modifier != EXPAND_INITIALIZER
6828 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6829 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6830 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6831 /* If the field isn't aligned enough to fetch as a memref,
6832 fetch it as a bit field. */
6833 || (mode1 != BLKmode
6834 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6835 && ((TYPE_ALIGN (TREE_TYPE (tem))
6836 < GET_MODE_ALIGNMENT (mode))
6837 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6838 /* If the type and the field are a constant size and the
6839 size of the type isn't the same size as the bitfield,
6840 we must use bitfield operations. */
6841 || ((bitsize >= 0
6842 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6843 == INTEGER_CST)
6844 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6845 bitsize)))))
6846 || (modifier != EXPAND_CONST_ADDRESS
6847 && modifier != EXPAND_INITIALIZER
6848 && mode == BLKmode
6849 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6850 && (TYPE_ALIGN (type) > alignment
6851 || bitpos % TYPE_ALIGN (type) != 0)))
6852 {
6853 enum machine_mode ext_mode = mode;
6854
6855 if (ext_mode == BLKmode
6856 && ! (target != 0 && GET_CODE (op0) == MEM
6857 && GET_CODE (target) == MEM
6858 && bitpos % BITS_PER_UNIT == 0))
6859 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6860
6861 if (ext_mode == BLKmode)
6862 {
6863 /* In this case, BITPOS must start at a byte boundary and
6864 TARGET, if specified, must be a MEM. */
6865 if (GET_CODE (op0) != MEM
6866 || (target != 0 && GET_CODE (target) != MEM)
6867 || bitpos % BITS_PER_UNIT != 0)
6868 abort ();
6869
6870 op0 = change_address (op0, VOIDmode,
6871 plus_constant (XEXP (op0, 0),
6872 bitpos / BITS_PER_UNIT));
6873 if (target == 0)
6874 target = assign_temp (type, 0, 1, 1);
6875
6876 emit_block_move (target, op0,
6877 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6878 / BITS_PER_UNIT),
6879 BITS_PER_UNIT);
6880
6881 return target;
6882 }
6883
6884 op0 = validize_mem (op0);
6885
6886 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6887 mark_reg_pointer (XEXP (op0, 0), alignment);
6888
6889 op0 = extract_bit_field (op0, bitsize, bitpos,
6890 unsignedp, target, ext_mode, ext_mode,
6891 alignment,
6892 int_size_in_bytes (TREE_TYPE (tem)));
6893
6894 /* If the result is a record type and BITSIZE is narrower than
6895 the mode of OP0, an integral mode, and this is a big endian
6896 machine, we must put the field into the high-order bits. */
6897 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6898 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6899 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6900 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6901 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6902 - bitsize),
6903 op0, 1);
6904
6905 if (mode == BLKmode)
6906 {
6907 rtx new = assign_stack_temp (ext_mode,
6908 bitsize / BITS_PER_UNIT, 0);
6909
6910 emit_move_insn (new, op0);
6911 op0 = copy_rtx (new);
6912 PUT_MODE (op0, BLKmode);
6913 MEM_SET_IN_STRUCT_P (op0, 1);
6914 }
6915
6916 return op0;
6917 }
6918
6919 /* If the result is BLKmode, use that to access the object
6920 now as well. */
6921 if (mode == BLKmode)
6922 mode1 = BLKmode;
6923
6924 /* Get a reference to just this component. */
6925 if (modifier == EXPAND_CONST_ADDRESS
6926 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6927 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6928 (bitpos / BITS_PER_UNIT)));
6929 else
6930 op0 = change_address (op0, mode1,
6931 plus_constant (XEXP (op0, 0),
6932 (bitpos / BITS_PER_UNIT)));
6933
6934 set_mem_attributes (op0, exp, 0);
6935 if (GET_CODE (XEXP (op0, 0)) == REG)
6936 mark_reg_pointer (XEXP (op0, 0), alignment);
6937
6938 MEM_VOLATILE_P (op0) |= volatilep;
6939 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6940 || modifier == EXPAND_CONST_ADDRESS
6941 || modifier == EXPAND_INITIALIZER)
6942 return op0;
6943 else if (target == 0)
6944 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6945
6946 convert_move (target, op0, unsignedp);
6947 return target;
6948 }
6949
6950 /* Intended for a reference to a buffer of a file-object in Pascal.
6951 But it's not certain that a special tree code will really be
6952 necessary for these. INDIRECT_REF might work for them. */
6953 case BUFFER_REF:
6954 abort ();
6955
6956 case IN_EXPR:
6957 {
6958 /* Pascal set IN expression.
6959
6960 Algorithm:
6961 rlo = set_low - (set_low%bits_per_word);
6962 the_word = set [ (index - rlo)/bits_per_word ];
6963 bit_index = index % bits_per_word;
6964 bitmask = 1 << bit_index;
6965 return !!(the_word & bitmask); */
6966
6967 tree set = TREE_OPERAND (exp, 0);
6968 tree index = TREE_OPERAND (exp, 1);
6969 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6970 tree set_type = TREE_TYPE (set);
6971 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6972 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6973 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6974 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6975 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6976 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6977 rtx setaddr = XEXP (setval, 0);
6978 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6979 rtx rlow;
6980 rtx diff, quo, rem, addr, bit, result;
6981
6982 preexpand_calls (exp);
6983
6984 /* If domain is empty, answer is no. Likewise if index is constant
6985 and out of bounds. */
6986 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6987 && TREE_CODE (set_low_bound) == INTEGER_CST
6988 && tree_int_cst_lt (set_high_bound, set_low_bound))
6989 || (TREE_CODE (index) == INTEGER_CST
6990 && TREE_CODE (set_low_bound) == INTEGER_CST
6991 && tree_int_cst_lt (index, set_low_bound))
6992 || (TREE_CODE (set_high_bound) == INTEGER_CST
6993 && TREE_CODE (index) == INTEGER_CST
6994 && tree_int_cst_lt (set_high_bound, index))))
6995 return const0_rtx;
6996
6997 if (target == 0)
6998 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6999
7000 /* If we get here, we have to generate the code for both cases
7001 (in range and out of range). */
7002
7003 op0 = gen_label_rtx ();
7004 op1 = gen_label_rtx ();
7005
7006 if (! (GET_CODE (index_val) == CONST_INT
7007 && GET_CODE (lo_r) == CONST_INT))
7008 {
7009 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7010 GET_MODE (index_val), iunsignedp, 0, op1);
7011 }
7012
7013 if (! (GET_CODE (index_val) == CONST_INT
7014 && GET_CODE (hi_r) == CONST_INT))
7015 {
7016 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7017 GET_MODE (index_val), iunsignedp, 0, op1);
7018 }
7019
7020 /* Calculate the element number of bit zero in the first word
7021 of the set. */
7022 if (GET_CODE (lo_r) == CONST_INT)
7023 rlow = GEN_INT (INTVAL (lo_r)
7024 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7025 else
7026 rlow = expand_binop (index_mode, and_optab, lo_r,
7027 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7028 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7029
7030 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7031 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7032
7033 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7034 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7035 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7036 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7037
7038 addr = memory_address (byte_mode,
7039 expand_binop (index_mode, add_optab, diff,
7040 setaddr, NULL_RTX, iunsignedp,
7041 OPTAB_LIB_WIDEN));
7042
7043 /* Extract the bit we want to examine. */
7044 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7045 gen_rtx_MEM (byte_mode, addr),
7046 make_tree (TREE_TYPE (index), rem),
7047 NULL_RTX, 1);
7048 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7049 GET_MODE (target) == byte_mode ? target : 0,
7050 1, OPTAB_LIB_WIDEN);
7051
7052 if (result != target)
7053 convert_move (target, result, 1);
7054
7055 /* Output the code to handle the out-of-range case. */
7056 emit_jump (op0);
7057 emit_label (op1);
7058 emit_move_insn (target, const0_rtx);
7059 emit_label (op0);
7060 return target;
7061 }
7062
7063 case WITH_CLEANUP_EXPR:
7064 if (RTL_EXPR_RTL (exp) == 0)
7065 {
7066 RTL_EXPR_RTL (exp)
7067 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7068 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7069
7070 /* That's it for this cleanup. */
7071 TREE_OPERAND (exp, 2) = 0;
7072 }
7073 return RTL_EXPR_RTL (exp);
7074
7075 case CLEANUP_POINT_EXPR:
7076 {
7077 /* Start a new binding layer that will keep track of all cleanup
7078 actions to be performed. */
7079 expand_start_bindings (2);
7080
7081 target_temp_slot_level = temp_slot_level;
7082
7083 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7084 /* If we're going to use this value, load it up now. */
7085 if (! ignore)
7086 op0 = force_not_mem (op0);
7087 preserve_temp_slots (op0);
7088 expand_end_bindings (NULL_TREE, 0, 0);
7089 }
7090 return op0;
7091
7092 case CALL_EXPR:
7093 /* Check for a built-in function. */
7094 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7095 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7096 == FUNCTION_DECL)
7097 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7098 return expand_builtin (exp, target, subtarget, tmode, ignore);
7099
7100 /* If this call was expanded already by preexpand_calls,
7101 just return the result we got. */
7102 if (CALL_EXPR_RTL (exp) != 0)
7103 return CALL_EXPR_RTL (exp);
7104
7105 return expand_call (exp, target, ignore);
7106
7107 case NON_LVALUE_EXPR:
7108 case NOP_EXPR:
7109 case CONVERT_EXPR:
7110 case REFERENCE_EXPR:
7111 if (TREE_OPERAND (exp, 0) == error_mark_node)
7112 return const0_rtx;
7113
7114 if (TREE_CODE (type) == UNION_TYPE)
7115 {
7116 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7117
7118 /* If both input and output are BLKmode, this conversion
7119 isn't actually doing anything unless we need to make the
7120 alignment stricter. */
7121 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7122 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7123 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7124 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7125 modifier);
7126
7127 if (target == 0)
7128 {
7129 if (mode != BLKmode)
7130 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7131 else
7132 target = assign_temp (type, 0, 1, 1);
7133 }
7134
7135 if (GET_CODE (target) == MEM)
7136 /* Store data into beginning of memory target. */
7137 store_expr (TREE_OPERAND (exp, 0),
7138 change_address (target, TYPE_MODE (valtype), 0), 0);
7139
7140 else if (GET_CODE (target) == REG)
7141 /* Store this field into a union of the proper type. */
7142 store_field (target,
7143 MIN ((int_size_in_bytes (TREE_TYPE
7144 (TREE_OPERAND (exp, 0)))
7145 * BITS_PER_UNIT),
7146 GET_MODE_BITSIZE (mode)),
7147 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7148 VOIDmode, 0, BITS_PER_UNIT,
7149 int_size_in_bytes (type), 0);
7150 else
7151 abort ();
7152
7153 /* Return the entire union. */
7154 return target;
7155 }
7156
7157 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7158 {
7159 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7160 ro_modifier);
7161
7162 /* If the signedness of the conversion differs and OP0 is
7163 a promoted SUBREG, clear that indication since we now
7164 have to do the proper extension. */
7165 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7166 && GET_CODE (op0) == SUBREG)
7167 SUBREG_PROMOTED_VAR_P (op0) = 0;
7168
7169 return op0;
7170 }
7171
7172 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7173 if (GET_MODE (op0) == mode)
7174 return op0;
7175
7176 /* If OP0 is a constant, just convert it into the proper mode. */
7177 if (CONSTANT_P (op0))
7178 return
7179 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7180 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7181
7182 if (modifier == EXPAND_INITIALIZER)
7183 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7184
7185 if (target == 0)
7186 return
7187 convert_to_mode (mode, op0,
7188 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7189 else
7190 convert_move (target, op0,
7191 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7192 return target;
7193
7194 case PLUS_EXPR:
7195 /* We come here from MINUS_EXPR when the second operand is a
7196 constant. */
7197 plus_expr:
7198 this_optab = add_optab;
7199
7200 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7201 something else, make sure we add the register to the constant and
7202 then to the other thing. This case can occur during strength
7203 reduction and doing it this way will produce better code if the
7204 frame pointer or argument pointer is eliminated.
7205
7206 fold-const.c will ensure that the constant is always in the inner
7207 PLUS_EXPR, so the only case we need to do anything about is if
7208 sp, ap, or fp is our second argument, in which case we must swap
7209 the innermost first argument and our second argument. */
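/* A minimal sketch of the rearrangement described above (operand
   names are hypothetical): given (PLUS (PLUS X C) FP), where FP is
   an RTL_EXPR for the frame pointer, we swap X and FP to get
   (PLUS (PLUS FP C) X), so the register-plus-constant part can be
   simplified when the frame or argument pointer is eliminated.  */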
7210
7211 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7212 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7213 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7214 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7215 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7216 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7217 {
7218 tree t = TREE_OPERAND (exp, 1);
7219
7220 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7221 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7222 }
7223
7224 /* If the result is to be ptr_mode and we are adding an integer to
7225 something, we might be forming a constant. So try to use
7226 plus_constant. If it produces a sum and we can't accept it,
7227 use force_operand. This allows P = &ARR[const] to generate
7228 efficient code on machines where a SYMBOL_REF is not a valid
7229 address.
7230
7231 If this is an EXPAND_SUM call, always return the sum. */
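/* For instance (a sketch, not tied to any particular target): for
   P = &ARR[3] with 4-byte elements, plus_constant can fold the
   address into (const (plus (symbol_ref "ARR") 12)) instead of
   emitting an explicit add; if that form is not a valid address,
   force_operand below materializes it into a register.  */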
7232 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7233 || mode == ptr_mode)
7234 {
7235 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7236 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7237 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7238 {
7239 rtx constant_part;
7240
7241 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7242 EXPAND_SUM);
7243 /* Use immed_double_const to ensure that the constant is
7244 truncated according to the mode of OP1, then sign extended
7245 to a HOST_WIDE_INT. Using the constant directly can result
7246 in non-canonical RTL in a 64x32 cross compile. */
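/* Concretely (an illustrative case, assuming a 64-bit HOST_WIDE_INT
   and a 32-bit target): the SImode constant 0x80000000 must become
   (const_int -2147483648), i.e. sign extended to the host word,
   rather than the zero-extended value 0x80000000.  */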
7247 constant_part
7248 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7249 (HOST_WIDE_INT) 0,
7250 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7251 op1 = plus_constant (op1, INTVAL (constant_part));
7252 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7253 op1 = force_operand (op1, target);
7254 return op1;
7255 }
7256
7257 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7258 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7259 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7260 {
7261 rtx constant_part;
7262
7263 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7264 EXPAND_SUM);
7265 if (! CONSTANT_P (op0))
7266 {
7267 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7268 VOIDmode, modifier);
7269 /* Don't go to both_summands if modifier
7270 says it's not right to return a PLUS. */
7271 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7272 goto binop2;
7273 goto both_summands;
7274 }
7275 /* Use immed_double_const to ensure that the constant is
7276 truncated according to the mode of OP0, then sign extended
7277 to a HOST_WIDE_INT. Using the constant directly can result
7278 in non-canonical RTL in a 64x32 cross compile. */
7279 constant_part
7280 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7281 (HOST_WIDE_INT) 0,
7282 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7283 op0 = plus_constant (op0, INTVAL (constant_part));
7284 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7285 op0 = force_operand (op0, target);
7286 return op0;
7287 }
7288 }
7289
7290 /* No sense saving up arithmetic to be done
7291 if it's all in the wrong mode to form part of an address.
7292 And force_operand won't know whether to sign-extend or
7293 zero-extend. */
7294 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7295 || mode != ptr_mode)
7296 goto binop;
7297
7298 preexpand_calls (exp);
7299 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7300 subtarget = 0;
7301
7302 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7303 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7304
7305 both_summands:
7306 /* Make sure any term that's a sum with a constant comes last. */
7307 if (GET_CODE (op0) == PLUS
7308 && CONSTANT_P (XEXP (op0, 1)))
7309 {
7310 temp = op0;
7311 op0 = op1;
7312 op1 = temp;
7313 }
7314 /* If adding to a sum including a constant,
7315 associate it to put the constant outside. */
7316 if (GET_CODE (op1) == PLUS
7317 && CONSTANT_P (XEXP (op1, 1)))
7318 {
7319 rtx constant_term = const0_rtx;
7320
7321 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7322 if (temp != 0)
7323 op0 = temp;
7324 /* Ensure that MULT comes first if there is one. */
7325 else if (GET_CODE (op0) == MULT)
7326 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7327 else
7328 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7329
7330 /* Let's also eliminate constants from op0 if possible. */
7331 op0 = eliminate_constant_term (op0, &constant_term);
7332
7333 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7334 their sum should be a constant. Form it into OP1, since the
7335 result we want will then be OP0 + OP1. */
7336
7337 temp = simplify_binary_operation (PLUS, mode, constant_term,
7338 XEXP (op1, 1));
7339 if (temp != 0)
7340 op1 = temp;
7341 else
7342 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7343 }
7344
7345 /* Put a constant term last and put a multiplication first. */
7346 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7347 temp = op1, op1 = op0, op0 = temp;
7348
7349 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7350 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7351
7352 case MINUS_EXPR:
7353 /* For initializers, we are allowed to return a MINUS of two
7354 symbolic constants; here we handle all cases in which both
7355 operands are constant. */
7358 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7359 && really_constant_p (TREE_OPERAND (exp, 0))
7360 && really_constant_p (TREE_OPERAND (exp, 1)))
7361 {
7362 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7363 VOIDmode, ro_modifier);
7364 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7365 VOIDmode, ro_modifier);
7366
7367 /* If the last operand is a CONST_INT, use plus_constant of
7368 the negated constant. Else make the MINUS. */
7369 if (GET_CODE (op1) == CONST_INT)
7370 return plus_constant (op0, - INTVAL (op1));
7371 else
7372 return gen_rtx_MINUS (mode, op0, op1);
7373 }
7374 /* Convert A - const to A + (-const). */
7375 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7376 {
7377 tree negated = fold (build1 (NEGATE_EXPR, type,
7378 TREE_OPERAND (exp, 1)));
7379
7380 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7381 /* If we can't negate the constant in TYPE, leave it alone and
7382 expand_binop will negate it for us. We used to try to do it
7383 here in the signed version of TYPE, but that doesn't work
7384 on POINTER_TYPEs. */;
7385 else
7386 {
7387 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7388 goto plus_expr;
7389 }
7390 }
7391 this_optab = sub_optab;
7392 goto binop;
7393
7394 case MULT_EXPR:
7395 preexpand_calls (exp);
7396 /* If first operand is constant, swap them.
7397 Thus the following special case checks need only
7398 check the second operand. */
7399 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7400 {
7401 register tree t1 = TREE_OPERAND (exp, 0);
7402 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7403 TREE_OPERAND (exp, 1) = t1;
7404 }
7405
7406 /* Attempt to return something suitable for generating an
7407 indexed address, for machines that support that. */
7408
7409 if (modifier == EXPAND_SUM && mode == ptr_mode
7410 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7411 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7412 {
7413 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7414 EXPAND_SUM);
7415
7416 /* Apply distributive law if OP0 is x+c. */
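/* E.g. (a sketch with made-up values): if OP0 expanded to
   (plus X (const_int 4)) and the multiplier is 3, we return
   (plus (mult X (const_int 3)) (const_int 12)).  */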
7417 if (GET_CODE (op0) == PLUS
7418 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7419 return
7420 gen_rtx_PLUS
7421 (mode,
7422 gen_rtx_MULT
7423 (mode, XEXP (op0, 0),
7424 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7425 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7426 * INTVAL (XEXP (op0, 1))));
7427
7428 if (GET_CODE (op0) != REG)
7429 op0 = force_operand (op0, NULL_RTX);
7430 if (GET_CODE (op0) != REG)
7431 op0 = copy_to_mode_reg (mode, op0);
7432
7433 return
7434 gen_rtx_MULT (mode, op0,
7435 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7436 }
7437
7438 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7439 subtarget = 0;
7440
7441 /* Check for multiplying things that have been extended
7442 from a narrower type. If this machine supports multiplying
7443 in that narrower type with a result in the desired type,
7444 do it that way, and avoid the explicit type-conversion. */
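/* For example (a sketch; whether this applies depends on the
   target's optabs): with 16-bit `short' operands,
     (int) a * (int) b
   can use a signed widening multiply that produces the 32-bit
   product directly, instead of widening each operand and doing a
   full 32x32 multiply.  */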
7445 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7446 && TREE_CODE (type) == INTEGER_TYPE
7447 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7448 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7449 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7450 && int_fits_type_p (TREE_OPERAND (exp, 1),
7451 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7452 /* Don't use a widening multiply if a shift will do. */
7453 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7454 > HOST_BITS_PER_WIDE_INT)
7455 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7456 ||
7457 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7458 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7459 ==
7460 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7461 /* If both operands are extended, they must either both
7462 be zero-extended or both be sign-extended. */
7463 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7464 ==
7465 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7466 {
7467 enum machine_mode innermode
7468 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7469 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7470 ? smul_widen_optab : umul_widen_optab);
7471 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7472 ? umul_widen_optab : smul_widen_optab);
7473 if (mode == GET_MODE_WIDER_MODE (innermode))
7474 {
7475 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7476 {
7477 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7478 NULL_RTX, VOIDmode, 0);
7479 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7480 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7481 VOIDmode, 0);
7482 else
7483 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7484 NULL_RTX, VOIDmode, 0);
7485 goto binop2;
7486 }
7487 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7488 && innermode == word_mode)
7489 {
7490 rtx htem;
7491 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7492 NULL_RTX, VOIDmode, 0);
7493 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7494 op1 = convert_modes (innermode, mode,
7495 expand_expr (TREE_OPERAND (exp, 1),
7496 NULL_RTX, VOIDmode, 0),
7497 unsignedp);
7498 else
7499 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7500 NULL_RTX, VOIDmode, 0);
7501 temp = expand_binop (mode, other_optab, op0, op1, target,
7502 unsignedp, OPTAB_LIB_WIDEN);
7503 htem = expand_mult_highpart_adjust (innermode,
7504 gen_highpart (innermode, temp),
7505 op0, op1,
7506 gen_highpart (innermode, temp),
7507 unsignedp);
7508 emit_move_insn (gen_highpart (innermode, temp), htem);
7509 return temp;
7510 }
7511 }
7512 }
7513 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7514 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7515 return expand_mult (mode, op0, op1, target, unsignedp);
7516
7517 case TRUNC_DIV_EXPR:
7518 case FLOOR_DIV_EXPR:
7519 case CEIL_DIV_EXPR:
7520 case ROUND_DIV_EXPR:
7521 case EXACT_DIV_EXPR:
7522 preexpand_calls (exp);
7523 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7524 subtarget = 0;
7525 /* Possible optimization: compute the dividend with EXPAND_SUM
7526 then if the divisor is constant can optimize the case
7527 where some terms of the dividend have coeffs divisible by it. */
7528 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7529 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7530 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7531
7532 case RDIV_EXPR:
7533 this_optab = flodiv_optab;
7534 goto binop;
7535
7536 case TRUNC_MOD_EXPR:
7537 case FLOOR_MOD_EXPR:
7538 case CEIL_MOD_EXPR:
7539 case ROUND_MOD_EXPR:
7540 preexpand_calls (exp);
7541 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7542 subtarget = 0;
7543 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7544 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7545 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7546
7547 case FIX_ROUND_EXPR:
7548 case FIX_FLOOR_EXPR:
7549 case FIX_CEIL_EXPR:
7550 abort (); /* Not used for C. */
7551
7552 case FIX_TRUNC_EXPR:
7553 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7554 if (target == 0)
7555 target = gen_reg_rtx (mode);
7556 expand_fix (target, op0, unsignedp);
7557 return target;
7558
7559 case FLOAT_EXPR:
7560 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7561 if (target == 0)
7562 target = gen_reg_rtx (mode);
7563 /* expand_float can't figure out what to do if FROM has VOIDmode.
7564 So give it the correct mode. With -O, cse will optimize this. */
7565 if (GET_MODE (op0) == VOIDmode)
7566 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7567 op0);
7568 expand_float (target, op0,
7569 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7570 return target;
7571
7572 case NEGATE_EXPR:
7573 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7574 temp = expand_unop (mode, neg_optab, op0, target, 0);
7575 if (temp == 0)
7576 abort ();
7577 return temp;
7578
7579 case ABS_EXPR:
7580 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7581
7582 /* Handle complex values specially. */
7583 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7584 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7585 return expand_complex_abs (mode, op0, target, unsignedp);
7586
7587 /* Unsigned abs is simply the operand. Testing here means we don't
7588 risk generating incorrect code below. */
7589 if (TREE_UNSIGNED (type))
7590 return op0;
7591
7592 return expand_abs (mode, op0, target,
7593 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7594
7595 case MAX_EXPR:
7596 case MIN_EXPR:
7597 target = original_target;
7598 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7599 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7600 || GET_MODE (target) != mode
7601 || (GET_CODE (target) == REG
7602 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7603 target = gen_reg_rtx (mode);
7604 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7605 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7606
7607 /* First try to do it with a special MIN or MAX instruction.
7608 If that does not win, use a conditional jump to select the proper
7609 value. */
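/* When no such instruction exists, the fallback below amounts to
   (a sketch, for MAX_EXPR with a signed comparison):
     target = op0;
     if (target >= op1) goto done;
     target = op1;
   done: ;  */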
7610 this_optab = (TREE_UNSIGNED (type)
7611 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7612 : (code == MIN_EXPR ? smin_optab : smax_optab));
7613
7614 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7615 OPTAB_WIDEN);
7616 if (temp != 0)
7617 return temp;
7618
7619 /* At this point, a MEM target is no longer useful; we will get better
7620 code without it. */
7621
7622 if (GET_CODE (target) == MEM)
7623 target = gen_reg_rtx (mode);
7624
7625 if (target != op0)
7626 emit_move_insn (target, op0);
7627
7628 op0 = gen_label_rtx ();
7629
7630 /* If this mode is an integer too wide to compare properly,
7631 compare word by word. Rely on cse to optimize constant cases. */
7632 if (GET_MODE_CLASS (mode) == MODE_INT
7633 && ! can_compare_p (GE, mode, ccp_jump))
7634 {
7635 if (code == MAX_EXPR)
7636 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7637 target, op1, NULL_RTX, op0);
7638 else
7639 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7640 op1, target, NULL_RTX, op0);
7641 }
7642 else
7643 {
7644 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7645 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7646 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7647 op0);
7648 }
7649 emit_move_insn (target, op1);
7650 emit_label (op0);
7651 return target;
7652
7653 case BIT_NOT_EXPR:
7654 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7655 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7656 if (temp == 0)
7657 abort ();
7658 return temp;
7659
7660 case FFS_EXPR:
7661 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7662 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7663 if (temp == 0)
7664 abort ();
7665 return temp;
7666
7667 /* ??? Can optimize bitwise operations with one arg constant.
7668 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7669 and (a bitwise1 b) bitwise2 b (etc)
7670 but that is probably not worth while. */
7671
7672 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7673 boolean values when we want in all cases to compute both of them. In
7674 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7675 as actual zero-or-1 values and then bitwise anding. In cases where
7676 there cannot be any side effects, better code would be made by
7677 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7678 how to recognize those cases. */
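/* Illustrative contrast (not code from this file): for `f () && g ()'
   the front end uses TRUTH_ANDIF_EXPR, so g () is skipped when f ()
   is zero; TRUTH_AND_EXPR instead evaluates both operands to
   zero-or-one values and bitwise-ands them, which is what the cases
   below do.  */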
7679
7680 case TRUTH_AND_EXPR:
7681 case BIT_AND_EXPR:
7682 this_optab = and_optab;
7683 goto binop;
7684
7685 case TRUTH_OR_EXPR:
7686 case BIT_IOR_EXPR:
7687 this_optab = ior_optab;
7688 goto binop;
7689
7690 case TRUTH_XOR_EXPR:
7691 case BIT_XOR_EXPR:
7692 this_optab = xor_optab;
7693 goto binop;
7694
7695 case LSHIFT_EXPR:
7696 case RSHIFT_EXPR:
7697 case LROTATE_EXPR:
7698 case RROTATE_EXPR:
7699 preexpand_calls (exp);
7700 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7701 subtarget = 0;
7702 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7703 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7704 unsignedp);
7705
7706 /* Could determine the answer when only additive constants differ. Also,
7707 the addition of one can be handled by changing the condition. */
7708 case LT_EXPR:
7709 case LE_EXPR:
7710 case GT_EXPR:
7711 case GE_EXPR:
7712 case EQ_EXPR:
7713 case NE_EXPR:
7714 case UNORDERED_EXPR:
7715 case ORDERED_EXPR:
7716 case UNLT_EXPR:
7717 case UNLE_EXPR:
7718 case UNGT_EXPR:
7719 case UNGE_EXPR:
7720 case UNEQ_EXPR:
7721 preexpand_calls (exp);
7722 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7723 if (temp != 0)
7724 return temp;
7725
7726 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7727 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7728 && original_target
7729 && GET_CODE (original_target) == REG
7730 && (GET_MODE (original_target)
7731 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7732 {
7733 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7734 VOIDmode, 0);
7735
7736 if (temp != original_target)
7737 temp = copy_to_reg (temp);
7738
7739 op1 = gen_label_rtx ();
7740 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7741 GET_MODE (temp), unsignedp, 0, op1);
7742 emit_move_insn (temp, const1_rtx);
7743 emit_label (op1);
7744 return temp;
7745 }
7746
7747 /* If no set-flag instruction, must generate a conditional
7748 store into a temporary variable. Drop through
7749 and handle this like && and ||. */
7750
7751 case TRUTH_ANDIF_EXPR:
7752 case TRUTH_ORIF_EXPR:
7753 if (! ignore
7754 && (target == 0 || ! safe_from_p (target, exp, 1)
7755 /* Make sure we don't have a hard reg (such as function's return
7756 value) live across basic blocks, if not optimizing. */
7757 || (!optimize && GET_CODE (target) == REG
7758 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7759 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7760
7761 if (target)
7762 emit_clr_insn (target);
7763
7764 op1 = gen_label_rtx ();
7765 jumpifnot (exp, op1);
7766
7767 if (target)
7768 emit_0_to_1_insn (target);
7769
7770 emit_label (op1);
7771 return ignore ? const0_rtx : target;
7772
7773 case TRUTH_NOT_EXPR:
7774 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7775 /* The parser is careful to generate TRUTH_NOT_EXPR
7776 only with operands that are always zero or one. */
7777 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7778 target, 1, OPTAB_LIB_WIDEN);
7779 if (temp == 0)
7780 abort ();
7781 return temp;
7782
7783 case COMPOUND_EXPR:
7784 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7785 emit_queue ();
7786 return expand_expr (TREE_OPERAND (exp, 1),
7787 (ignore ? const0_rtx : target),
7788 VOIDmode, 0);
7789
7790 case COND_EXPR:
7791 /* If we would have a "singleton" (see below) were it not for a
7792 conversion in each arm, bring that conversion back out. */
7793 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7794 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7795 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7796 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7797 {
7798 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7799 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7800
7801 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7802 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7803 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7804 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7805 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7806 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7807 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7808 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7809 return expand_expr (build1 (NOP_EXPR, type,
7810 build (COND_EXPR, TREE_TYPE (true),
7811 TREE_OPERAND (exp, 0),
7812 true, false)),
7813 target, tmode, modifier);
7814 }
7815
7816 {
7817 /* Note that COND_EXPRs whose type is a structure or union
7818 are required to be constructed to contain assignments to
7819 a temporary variable, so that we can evaluate them here
7820 for side effect only. If type is void, we must do likewise. */
7821
7822 /* If an arm of the branch requires a cleanup,
7823 only that cleanup is performed. */
7824
7825 tree singleton = 0;
7826 tree binary_op = 0, unary_op = 0;
7827
7828 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7829 convert it to our mode, if necessary. */
7830 if (integer_onep (TREE_OPERAND (exp, 1))
7831 && integer_zerop (TREE_OPERAND (exp, 2))
7832 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7833 {
7834 if (ignore)
7835 {
7836 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7837 ro_modifier);
7838 return const0_rtx;
7839 }
7840
7841 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7842 if (GET_MODE (op0) == mode)
7843 return op0;
7844
7845 if (target == 0)
7846 target = gen_reg_rtx (mode);
7847 convert_move (target, op0, unsignedp);
7848 return target;
7849 }
7850
7851 /* Check for X ? A + B : A. If we have this, we can copy A to the
7852 output and conditionally add B. Similarly for unary operations.
7853 Don't do this if X has side-effects because those side effects
7854 might affect A or B and the "?" operation is a sequence point in
7855 ANSI. (operand_equal_p tests for side effects.) */
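/* For instance (names are hypothetical): in `x ? a + b : a' the
   singleton is `a' and the binary op is `a + b'; we can store `a'
   into the target unconditionally and add `b' only when `x' is
   true, instead of expanding two full arms.  */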
7856
7857 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7858 && operand_equal_p (TREE_OPERAND (exp, 2),
7859 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7860 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7861 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7862 && operand_equal_p (TREE_OPERAND (exp, 1),
7863 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7864 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7865 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7866 && operand_equal_p (TREE_OPERAND (exp, 2),
7867 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7868 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7869 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7870 && operand_equal_p (TREE_OPERAND (exp, 1),
7871 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7872 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7873
7874 /* If we are not to produce a result, we have no target. Otherwise,
7875 if a target was specified use it; it will not be used as an
7876 intermediate target unless it is safe. If no target, use a
7877 temporary. */
7878
7879 if (ignore)
7880 temp = 0;
7881 else if (original_target
7882 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7883 || (singleton && GET_CODE (original_target) == REG
7884 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7885 && original_target == var_rtx (singleton)))
7886 && GET_MODE (original_target) == mode
7887 #ifdef HAVE_conditional_move
7888 && (! can_conditionally_move_p (mode)
7889 || GET_CODE (original_target) == REG
7890 || TREE_ADDRESSABLE (type))
7891 #endif
7892 && ! (GET_CODE (original_target) == MEM
7893 && MEM_VOLATILE_P (original_target)))
7894 temp = original_target;
7895 else if (TREE_ADDRESSABLE (type))
7896 abort ();
7897 else
7898 temp = assign_temp (type, 0, 0, 1);
7899
7900 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7901 do the test of X as a store-flag operation, do this as
7902 A + ((X != 0) << log C). Similarly for other simple binary
7903 operators. Only do for C == 1 if BRANCH_COST is low. */
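/* Concretely (a sketch): for `x ? a + 4 : a' with BRANCH_COST >= 3,
   this computes a + ((x != 0) << 2) with do_store_flag and a shift,
   avoiding a branch altogether.  */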
7904 if (temp && singleton && binary_op
7905 && (TREE_CODE (binary_op) == PLUS_EXPR
7906 || TREE_CODE (binary_op) == MINUS_EXPR
7907 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7908 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7909 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7910 : integer_onep (TREE_OPERAND (binary_op, 1)))
7911 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7912 {
7913 rtx result;
7914 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7915 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7916 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7917 : xor_optab);
7918
7919 /* If we had X ? A : A + 1, do this as A + (X == 0).
7920
7921 We have to invert the truth value here and then put it
7922 back later if do_store_flag fails. We cannot simply copy
7923 TREE_OPERAND (exp, 0) to another variable and modify that
7924 because invert_truthvalue can modify the tree pointed to
7925 by its argument. */
7926 if (singleton == TREE_OPERAND (exp, 1))
7927 TREE_OPERAND (exp, 0)
7928 = invert_truthvalue (TREE_OPERAND (exp, 0));
7929
7930 result = do_store_flag (TREE_OPERAND (exp, 0),
7931 (safe_from_p (temp, singleton, 1)
7932 ? temp : NULL_RTX),
7933 mode, BRANCH_COST <= 1);
7934
7935 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7936 result = expand_shift (LSHIFT_EXPR, mode, result,
7937 build_int_2 (tree_log2
7938 (TREE_OPERAND
7939 (binary_op, 1)),
7940 0),
7941 (safe_from_p (temp, singleton, 1)
7942 ? temp : NULL_RTX), 0);
7943
7944 if (result)
7945 {
7946 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7947 return expand_binop (mode, boptab, op1, result, temp,
7948 unsignedp, OPTAB_LIB_WIDEN);
7949 }
7950 else if (singleton == TREE_OPERAND (exp, 1))
7951 TREE_OPERAND (exp, 0)
7952 = invert_truthvalue (TREE_OPERAND (exp, 0));
7953 }
7954
7955 do_pending_stack_adjust ();
7956 NO_DEFER_POP;
7957 op0 = gen_label_rtx ();
7958
7959 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7960 {
7961 if (temp != 0)
7962 {
7963 /* If the target conflicts with the other operand of the
7964 binary op, we can't use it. Also, we can't use the target
7965 if it is a hard register, because evaluating the condition
7966 might clobber it. */
7967 if ((binary_op
7968 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7969 || (GET_CODE (temp) == REG
7970 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7971 temp = gen_reg_rtx (mode);
7972 store_expr (singleton, temp, 0);
7973 }
7974 else
7975 expand_expr (singleton,
7976 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7977 if (singleton == TREE_OPERAND (exp, 1))
7978 jumpif (TREE_OPERAND (exp, 0), op0);
7979 else
7980 jumpifnot (TREE_OPERAND (exp, 0), op0);
7981
7982 start_cleanup_deferral ();
7983 if (binary_op && temp == 0)
7984 /* Just touch the other operand. */
7985 expand_expr (TREE_OPERAND (binary_op, 1),
7986 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7987 else if (binary_op)
7988 store_expr (build (TREE_CODE (binary_op), type,
7989 make_tree (type, temp),
7990 TREE_OPERAND (binary_op, 1)),
7991 temp, 0);
7992 else
7993 store_expr (build1 (TREE_CODE (unary_op), type,
7994 make_tree (type, temp)),
7995 temp, 0);
7996 op1 = op0;
7997 }
7998 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7999 comparison operator. If we have one of these cases, set the
8000 output to A, branch on A (cse will merge these two references),
8001 then set the output to FOO. */
8002 else if (temp
8003 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8004 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8005 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8006 TREE_OPERAND (exp, 1), 0)
8007 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8008 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8009 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8010 {
8011 if (GET_CODE (temp) == REG
8012 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8013 temp = gen_reg_rtx (mode);
8014 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8015 jumpif (TREE_OPERAND (exp, 0), op0);
8016
8017 start_cleanup_deferral ();
8018 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8019 op1 = op0;
8020 }
8021 else if (temp
8022 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8023 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8024 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8025 TREE_OPERAND (exp, 2), 0)
8026 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8027 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8028 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8029 {
8030 if (GET_CODE (temp) == REG
8031 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8032 temp = gen_reg_rtx (mode);
8033 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8034 jumpifnot (TREE_OPERAND (exp, 0), op0);
8035
8036 start_cleanup_deferral ();
8037 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8038 op1 = op0;
8039 }
8040 else
8041 {
8042 op1 = gen_label_rtx ();
8043 jumpifnot (TREE_OPERAND (exp, 0), op0);
8044
8045 start_cleanup_deferral ();
8046
8047 /* One branch of the cond can be void, if it never returns. For
8048 example A ? throw : E */
8049 if (temp != 0
8050 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8051 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8052 else
8053 expand_expr (TREE_OPERAND (exp, 1),
8054 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8055 end_cleanup_deferral ();
8056 emit_queue ();
8057 emit_jump_insn (gen_jump (op1));
8058 emit_barrier ();
8059 emit_label (op0);
8060 start_cleanup_deferral ();
8061 if (temp != 0
8062 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8063 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8064 else
8065 expand_expr (TREE_OPERAND (exp, 2),
8066 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8067 }
8068
8069 end_cleanup_deferral ();
8070
8071 emit_queue ();
8072 emit_label (op1);
8073 OK_DEFER_POP;
8074
8075 return temp;
8076 }
8077
8078 case TARGET_EXPR:
8079 {
8080 /* Something needs to be initialized, but we didn't know
8081 where that thing was when building the tree. For example,
8082 it could be the return value of a function, or a parameter
8083 to a function which is laid out on the stack, or a temporary
8084 variable which must be passed by reference.
8085
8086 We guarantee that the expression will either be constructed
8087 or copied into our original target. */
8088
8089 tree slot = TREE_OPERAND (exp, 0);
8090 tree cleanups = NULL_TREE;
8091 tree exp1;
8092
8093 if (TREE_CODE (slot) != VAR_DECL)
8094 abort ();
8095
8096 if (! ignore)
8097 target = original_target;
8098
8099 /* Set this here so that if we get a target that refers to a
8100 register variable that's already been used, put_reg_into_stack
8101 knows that it should fix up those uses. */
8102 TREE_USED (slot) = 1;
8103
8104 if (target == 0)
8105 {
8106 if (DECL_RTL (slot) != 0)
8107 {
8108 target = DECL_RTL (slot);
8109 /* If we have already expanded the slot, don't do
8110 it again. (mrs) */
8111 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8112 return target;
8113 }
8114 else
8115 {
8116 target = assign_temp (type, 2, 0, 1);
8117 /* All temp slots at this level must not conflict. */
8118 preserve_temp_slots (target);
8119 DECL_RTL (slot) = target;
8120 if (TREE_ADDRESSABLE (slot))
8121 put_var_into_stack (slot);
8122
8123 /* Since SLOT is not known to the called function
8124 to belong to its stack frame, we must build an explicit
8125 cleanup. This case occurs when we must build up a reference
8126 to pass the reference as an argument. In this case,
8127 it is very likely that such a reference need not be
8128 built here. */
8129
8130 if (TREE_OPERAND (exp, 2) == 0)
8131 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8132 cleanups = TREE_OPERAND (exp, 2);
8133 }
8134 }
8135 else
8136 {
8137 /* This case does occur when expanding a parameter which
8138 needs to be constructed on the stack. The target
8139 is the actual stack address that we want to initialize.
8140 The function we call will perform the cleanup in this case. */
8141
8142 /* If we have already assigned it space, use that space,
8143 not the target that we were passed in, as our target
8144 parameter is only a hint. */
8145 if (DECL_RTL (slot) != 0)
8146 {
8147 target = DECL_RTL (slot);
8148 /* If we have already expanded the slot, don't do
8149 it again. (mrs) */
8150 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8151 return target;
8152 }
8153 else
8154 {
8155 DECL_RTL (slot) = target;
8156 /* If we must have an addressable slot, then make sure that
8157 the RTL that we just stored in slot is OK. */
8158 if (TREE_ADDRESSABLE (slot))
8159 put_var_into_stack (slot);
8160 }
8161 }
8162
8163 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8164 /* Mark it as expanded. */
8165 TREE_OPERAND (exp, 1) = NULL_TREE;
8166
8167 store_expr (exp1, target, 0);
8168
8169 expand_decl_cleanup (NULL_TREE, cleanups);
8170
8171 return target;
8172 }
8173
8174 case INIT_EXPR:
8175 {
8176 tree lhs = TREE_OPERAND (exp, 0);
8177 tree rhs = TREE_OPERAND (exp, 1);
8178 tree noncopied_parts = 0;
8179 tree lhs_type = TREE_TYPE (lhs);
8180
8181 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8182 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8183 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8184 TYPE_NONCOPIED_PARTS (lhs_type));
8185 while (noncopied_parts != 0)
8186 {
8187 expand_assignment (TREE_VALUE (noncopied_parts),
8188 TREE_PURPOSE (noncopied_parts), 0, 0);
8189 noncopied_parts = TREE_CHAIN (noncopied_parts);
8190 }
8191 return temp;
8192 }
8193
8194 case MODIFY_EXPR:
8195 {
8196 /* If lhs is complex, expand calls in rhs before computing it.
8197 That's so we don't compute a pointer and save it over a call.
8198 If lhs is simple, compute it first so we can give it as a
8199 target if the rhs is just a call. This avoids an extra temp and copy
8200 and that prevents a partial-subsumption which makes bad code.
8201 Actually we could treat component_ref's of vars like vars. */
8202
8203 tree lhs = TREE_OPERAND (exp, 0);
8204 tree rhs = TREE_OPERAND (exp, 1);
8205 tree noncopied_parts = 0;
8206 tree lhs_type = TREE_TYPE (lhs);
8207
8208 temp = 0;
8209
8210 if (TREE_CODE (lhs) != VAR_DECL
8211 && TREE_CODE (lhs) != RESULT_DECL
8212 && TREE_CODE (lhs) != PARM_DECL
8213 && ! (TREE_CODE (lhs) == INDIRECT_REF
8214 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8215 preexpand_calls (exp);
8216
8217 /* Check for |= or &= of a bitfield of size one into another bitfield
8218 of size 1. In this case, (unless we need the result of the
8219 assignment) we can do this more efficiently with a
8220 test followed by an assignment, if necessary.
8221
8222 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8223 things change so we do, this code should be enhanced to
8224 support it. */
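/* A sketch of the transformation (field names are hypothetical):
   for `s.a |= t.b;' with one-bit fields a and b and the result
   unused, we emit the equivalent of `if (t.b) s.a = 1;' (and
   `if (! t.b) s.a = 0;' for &=) rather than reading, or-ing and
   rewriting the destination bit.  */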
8225 if (ignore
8226 && TREE_CODE (lhs) == COMPONENT_REF
8227 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8228 || TREE_CODE (rhs) == BIT_AND_EXPR)
8229 && TREE_OPERAND (rhs, 0) == lhs
8230 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8231 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8232 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8233 {
8234 rtx label = gen_label_rtx ();
8235
8236 do_jump (TREE_OPERAND (rhs, 1),
8237 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8238 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8239 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8240 (TREE_CODE (rhs) == BIT_IOR_EXPR
8241 ? integer_one_node
8242 : integer_zero_node)),
8243 0, 0);
8244 do_pending_stack_adjust ();
8245 emit_label (label);
8246 return const0_rtx;
8247 }
8248
8249 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8250 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8251 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8252 TYPE_NONCOPIED_PARTS (lhs_type));
8253
8254 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8255 while (noncopied_parts != 0)
8256 {
8257 expand_assignment (TREE_PURPOSE (noncopied_parts),
8258 TREE_VALUE (noncopied_parts), 0, 0);
8259 noncopied_parts = TREE_CHAIN (noncopied_parts);
8260 }
8261 return temp;
8262 }
8263
8264 case RETURN_EXPR:
8265 if (!TREE_OPERAND (exp, 0))
8266 expand_null_return ();
8267 else
8268 expand_return (TREE_OPERAND (exp, 0));
8269 return const0_rtx;
8270
8271 case PREINCREMENT_EXPR:
8272 case PREDECREMENT_EXPR:
8273 return expand_increment (exp, 0, ignore);
8274
8275 case POSTINCREMENT_EXPR:
8276 case POSTDECREMENT_EXPR:
8277 /* Faster to treat as pre-increment if result is not used. */
8278 return expand_increment (exp, ! ignore, ignore);
8279
8280 case ADDR_EXPR:
8281 /* If nonzero, TEMP will be set to the address of something that might
8282 be a MEM corresponding to a stack slot. */
8283 temp = 0;
8284
8285 /* Are we taking the address of a nested function? */
8286 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8287 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8288 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8289 && ! TREE_STATIC (exp))
8290 {
8291 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8292 op0 = force_operand (op0, target);
8293 }
8294 /* If we are taking the address of something erroneous, just
8295 return a zero. */
8296 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8297 return const0_rtx;
8298 else
8299 {
8300 /* We make sure to pass const0_rtx down if we came in with
8301 ignore set, to avoid doing the cleanups twice for something. */
8302 op0 = expand_expr (TREE_OPERAND (exp, 0),
8303 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8304 (modifier == EXPAND_INITIALIZER
8305 ? modifier : EXPAND_CONST_ADDRESS));
8306
8307 /* If we are going to ignore the result, OP0 will have been set
8308 to const0_rtx, so just return it. Don't get confused and
8309 think we are taking the address of the constant. */
8310 if (ignore)
8311 return op0;
8312
8313 op0 = protect_from_queue (op0, 0);
8314
8315 /* We would like the object in memory. If it is a constant, we can
8316 have it be statically allocated into memory. For a non-constant,
8317 we need to allocate some memory and store the value into it. */
8318
8319 if (CONSTANT_P (op0))
8320 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8321 op0);
8322 else if (GET_CODE (op0) == MEM)
8323 {
8324 mark_temp_addr_taken (op0);
8325 temp = XEXP (op0, 0);
8326 }
8327
8328 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8329 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8330 {
8331 /* If this object is in a register, it must not
8332 be BLKmode. */
8333 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8334 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8335
8336 mark_temp_addr_taken (memloc);
8337 emit_move_insn (memloc, op0);
8338 op0 = memloc;
8339 }
8340
8341 if (GET_CODE (op0) != MEM)
8342 abort ();
8343
8344 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8345 {
8346 temp = XEXP (op0, 0);
8347 #ifdef POINTERS_EXTEND_UNSIGNED
8348 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8349 && mode == ptr_mode)
8350 temp = convert_memory_address (ptr_mode, temp);
8351 #endif
8352 return temp;
8353 }
8354
8355 op0 = force_operand (XEXP (op0, 0), target);
8356 }
8357
8358 if (flag_force_addr && GET_CODE (op0) != REG)
8359 op0 = force_reg (Pmode, op0);
8360
8361 if (GET_CODE (op0) == REG
8362 && ! REG_USERVAR_P (op0))
8363 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8364
8365 /* If we might have had a temp slot, add an equivalent address
8366 for it. */
8367 if (temp != 0)
8368 update_temp_slot_address (temp, op0);
8369
8370 #ifdef POINTERS_EXTEND_UNSIGNED
8371 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8372 && mode == ptr_mode)
8373 op0 = convert_memory_address (ptr_mode, op0);
8374 #endif
8375
8376 return op0;
8377
8378 case ENTRY_VALUE_EXPR:
8379 abort ();
8380
8381 /* COMPLEX type for Extended Pascal & Fortran */
8382 case COMPLEX_EXPR:
8383 {
8384 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8385 rtx insns;
8386
8387 /* Get the rtx code of the operands. */
8388 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8389 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8390
8391 if (! target)
8392 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8393
8394 start_sequence ();
8395
8396 /* Move the real (op0) and imaginary (op1) parts to their location. */
8397 emit_move_insn (gen_realpart (mode, target), op0);
8398 emit_move_insn (gen_imagpart (mode, target), op1);
8399
8400 insns = get_insns ();
8401 end_sequence ();
8402
8403 /* Complex construction should appear as a single unit. */
8404 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8405 each with a separate pseudo as destination.
8406 It's not correct for flow to treat them as a unit. */
8407 if (GET_CODE (target) != CONCAT)
8408 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8409 else
8410 emit_insns (insns);
8411
8412 return target;
8413 }
8414
8415 case REALPART_EXPR:
8416 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8417 return gen_realpart (mode, op0);
8418
8419 case IMAGPART_EXPR:
8420 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8421 return gen_imagpart (mode, op0);
8422
8423 case CONJ_EXPR:
8424 {
8425 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8426 rtx imag_t;
8427 rtx insns;
8428
8429 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8430
8431 if (! target)
8432 target = gen_reg_rtx (mode);
8433
8434 start_sequence ();
8435
8436 /* Store the realpart and the negated imagpart to target. */
8437 emit_move_insn (gen_realpart (partmode, target),
8438 gen_realpart (partmode, op0));
8439
8440 imag_t = gen_imagpart (partmode, target);
8441 temp = expand_unop (partmode, neg_optab,
8442 gen_imagpart (partmode, op0), imag_t, 0);
8443 if (temp != imag_t)
8444 emit_move_insn (imag_t, temp);
8445
8446 insns = get_insns ();
8447 end_sequence ();
8448
8449 /* Conjugate should appear as a single unit.
8450 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8451 each with a separate pseudo as destination.
8452 It's not correct for flow to treat them as a unit. */
8453 if (GET_CODE (target) != CONCAT)
8454 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8455 else
8456 emit_insns (insns);
8457
8458 return target;
8459 }
8460
8461 case TRY_CATCH_EXPR:
8462 {
8463 tree handler = TREE_OPERAND (exp, 1);
8464
8465 expand_eh_region_start ();
8466
8467 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8468
8469 expand_eh_region_end (handler);
8470
8471 return op0;
8472 }
8473
8474 case TRY_FINALLY_EXPR:
8475 {
8476 tree try_block = TREE_OPERAND (exp, 0);
8477 tree finally_block = TREE_OPERAND (exp, 1);
8478 rtx finally_label = gen_label_rtx ();
8479 rtx done_label = gen_label_rtx ();
8480 rtx return_link = gen_reg_rtx (Pmode);
8481 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8482 (tree) finally_label, (tree) return_link);
8483 TREE_SIDE_EFFECTS (cleanup) = 1;
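/* In outline, the code built below behaves like (a sketch of the
   intended control flow, not literal output):

     <try_block>
     return_link = &&resume; goto finally;   (done by the cleanup)
   resume:
     goto done;
   finally:
     <finally_block>
     goto *return_link;
   done:

   so the finally block runs on the normal path and, through the
   registered cleanup, on exceptional exits as well.  */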
8484
8485 /* Start a new binding layer that will keep track of all cleanup
8486 actions to be performed. */
8487 expand_start_bindings (2);
8488
8489 target_temp_slot_level = temp_slot_level;
8490
8491 expand_decl_cleanup (NULL_TREE, cleanup);
8492 op0 = expand_expr (try_block, target, tmode, modifier);
8493
8494 preserve_temp_slots (op0);
8495 expand_end_bindings (NULL_TREE, 0, 0);
8496 emit_jump (done_label);
8497 emit_label (finally_label);
8498 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8499 emit_indirect_jump (return_link);
8500 emit_label (done_label);
8501 return op0;
8502 }
8503
8504 case GOTO_SUBROUTINE_EXPR:
8505 {
8506 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8507 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8508 rtx return_address = gen_label_rtx ();
8509 emit_move_insn (return_link,
8510 gen_rtx_LABEL_REF (Pmode, return_address));
8511 emit_jump (subr);
8512 emit_label (return_address);
8513 return const0_rtx;
8514 }
8515
8516 case POPDCC_EXPR:
8517 {
8518 rtx dcc = get_dynamic_cleanup_chain ();
8519 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8520 return const0_rtx;
8521 }
8522
8523 case POPDHC_EXPR:
8524 {
8525 rtx dhc = get_dynamic_handler_chain ();
8526 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8527 return const0_rtx;
8528 }
8529
8530 case VA_ARG_EXPR:
8531 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8532
8533 default:
8534 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8535 }
8536
8537 /* Here to do an ordinary binary operator, generating an instruction
8538 from the optab already placed in `this_optab'. */
8539 binop:
8540 preexpand_calls (exp);
8541 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8542 subtarget = 0;
8543 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8544 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8545 binop2:
8546 temp = expand_binop (mode, this_optab, op0, op1, target,
8547 unsignedp, OPTAB_LIB_WIDEN);
8548 if (temp == 0)
8549 abort ();
8550 return temp;
8551 }
8552 \f
8553 /* Similar to expand_expr, except that we don't specify a target, target
8554 mode, or modifier and we return the alignment of the inner type. This is
8555 used in cases where it is not necessary to align the result to the
8556 alignment of its type as long as we know the alignment of the result, for
8557 example for comparisons of BLKmode values. */
8558
8559 static rtx
8560 expand_expr_unaligned (exp, palign)
8561 register tree exp;
8562 unsigned int *palign;
8563 {
8564 register rtx op0;
8565 tree type = TREE_TYPE (exp);
8566 register enum machine_mode mode = TYPE_MODE (type);
8567
8568 /* Default the alignment we return to that of the type. */
8569 *palign = TYPE_ALIGN (type);
8570
8571 /* The only case in which we do anything special is if the resulting mode
8572 is BLKmode. */
8573 if (mode != BLKmode)
8574 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8575
8576 switch (TREE_CODE (exp))
8577 {
8578 case CONVERT_EXPR:
8579 case NOP_EXPR:
8580 case NON_LVALUE_EXPR:
8581 /* Conversions between BLKmode values don't change the underlying
8582 alignment or value. */
8583 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8584 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8585 break;
8586
8587 case ARRAY_REF:
8588 /* Much of the code for this case is copied directly from expand_expr.
8589 We need to duplicate it here because we will do something different
8590 in the fall-through case, so we need to handle the same exceptions
8591 it does. */
8592 {
8593 tree array = TREE_OPERAND (exp, 0);
8594 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8595 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8596 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8597 HOST_WIDE_INT i;
8598
8599 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8600 abort ();
8601
8602 /* Optimize the special-case of a zero lower bound.
8603
8604 We convert the low_bound to sizetype to avoid some problems
8605 with constant folding. (E.g. suppose the lower bound is 1,
8606 and its mode is QI. Without the conversion, (ARRAY
8607 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8608 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8609
8610 if (! integer_zerop (low_bound))
8611 index = size_diffop (index, convert (sizetype, low_bound));
8612
8613 /* If this is a constant index into a constant array,
8614 just get the value from the array. Handle both the cases when
8615 we have an explicit constructor and when our operand is a variable
8616 that was declared const. */
8617
8618 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8619 && 0 > compare_tree_int (index,
8620 list_length (CONSTRUCTOR_ELTS
8621 (TREE_OPERAND (exp, 0)))))
8622 {
8623 tree elem;
8624
8625 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8626 i = TREE_INT_CST_LOW (index);
8627 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8628 ;
8629
8630 if (elem)
8631 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8632 }
8633
8634 else if (optimize >= 1
8635 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8636 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8637 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8638 {
8639 if (TREE_CODE (index) == INTEGER_CST)
8640 {
8641 tree init = DECL_INITIAL (array);
8642
8643 if (TREE_CODE (init) == CONSTRUCTOR)
8644 {
8645 tree elem;
8646
8647 for (elem = CONSTRUCTOR_ELTS (init);
 8648                        elem != 0 && ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8649 elem = TREE_CHAIN (elem))
8650 ;
8651
8652 if (elem)
8653 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8654 palign);
8655 }
8656 }
8657 }
8658 }
8659 /* Fall through. */
8660
8661 case COMPONENT_REF:
8662 case BIT_FIELD_REF:
8663 /* If the operand is a CONSTRUCTOR, we can just extract the
8664 appropriate field if it is present. Don't do this if we have
8665 already written the data since we want to refer to that copy
8666 and varasm.c assumes that's what we'll do. */
8667 if (TREE_CODE (exp) != ARRAY_REF
8668 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8669 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8670 {
8671 tree elt;
8672
8673 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8674 elt = TREE_CHAIN (elt))
8675 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8676 /* Note that unlike the case in expand_expr, we know this is
8677 BLKmode and hence not an integer. */
8678 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8679 }
8680
8681 {
8682 enum machine_mode mode1;
8683 HOST_WIDE_INT bitsize, bitpos;
8684 tree offset;
8685 int volatilep = 0;
8686 unsigned int alignment;
8687 int unsignedp;
8688 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8689 &mode1, &unsignedp, &volatilep,
8690 &alignment);
8691
8692 /* If we got back the original object, something is wrong. Perhaps
8693 we are evaluating an expression too early. In any event, don't
8694 infinitely recurse. */
8695 if (tem == exp)
8696 abort ();
8697
8698 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8699
8700 /* If this is a constant, put it into a register if it is a
8701 legitimate constant and OFFSET is 0 and memory if it isn't. */
8702 if (CONSTANT_P (op0))
8703 {
8704 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8705
8706 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8707 && offset == 0)
8708 op0 = force_reg (inner_mode, op0);
8709 else
8710 op0 = validize_mem (force_const_mem (inner_mode, op0));
8711 }
8712
8713 if (offset != 0)
8714 {
8715 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8716
8717 /* If this object is in a register, put it into memory.
8718 This case can't occur in C, but can in Ada if we have
8719 unchecked conversion of an expression from a scalar type to
8720 an array or record type. */
8721 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8722 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8723 {
8724 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8725
8726 mark_temp_addr_taken (memloc);
8727 emit_move_insn (memloc, op0);
8728 op0 = memloc;
8729 }
8730
8731 if (GET_CODE (op0) != MEM)
8732 abort ();
8733
8734 if (GET_MODE (offset_rtx) != ptr_mode)
8735 {
8736 #ifdef POINTERS_EXTEND_UNSIGNED
8737 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8738 #else
8739 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8740 #endif
8741 }
8742
8743 op0 = change_address (op0, VOIDmode,
8744 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8745 force_reg (ptr_mode,
8746 offset_rtx)));
8747 }
8748
8749 /* Don't forget about volatility even if this is a bitfield. */
8750 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8751 {
8752 op0 = copy_rtx (op0);
8753 MEM_VOLATILE_P (op0) = 1;
8754 }
8755
8756 /* Check the access. */
8757 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8758 {
8759 rtx to;
8760 int size;
8761
8762 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8763 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8764
8765 /* Check the access right of the pointer. */
8766 in_check_memory_usage = 1;
8767 if (size > BITS_PER_UNIT)
8768 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8769 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8770 TYPE_MODE (sizetype),
8771 GEN_INT (MEMORY_USE_RO),
8772 TYPE_MODE (integer_type_node));
8773 in_check_memory_usage = 0;
8774 }
8775
8776 /* In cases where an aligned union has an unaligned object
8777 as a field, we might be extracting a BLKmode value from
8778 an integer-mode (e.g., SImode) object. Handle this case
8779 by doing the extract into an object as wide as the field
8780 (which we know to be the width of a basic mode), then
8781 storing into memory, and changing the mode to BLKmode.
8782 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8783 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8784 if (mode1 == VOIDmode
8785 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8786 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8787 && (TYPE_ALIGN (type) > alignment
8788 || bitpos % TYPE_ALIGN (type) != 0)))
8789 {
8790 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8791
8792 if (ext_mode == BLKmode)
8793 {
 8794             /* In this case, BITPOS must be on a byte boundary.  */
8795 if (GET_CODE (op0) != MEM
8796 || bitpos % BITS_PER_UNIT != 0)
8797 abort ();
8798
8799 op0 = change_address (op0, VOIDmode,
8800 plus_constant (XEXP (op0, 0),
8801 bitpos / BITS_PER_UNIT));
8802 }
8803 else
8804 {
8805 rtx new = assign_stack_temp (ext_mode,
8806 bitsize / BITS_PER_UNIT, 0);
8807
8808 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8809 unsignedp, NULL_RTX, ext_mode,
8810 ext_mode, alignment,
8811 int_size_in_bytes (TREE_TYPE (tem)));
8812
8813 /* If the result is a record type and BITSIZE is narrower than
8814 the mode of OP0, an integral mode, and this is a big endian
8815 machine, we must put the field into the high-order bits. */
8816 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8817 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8818 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8819 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8820 size_int (GET_MODE_BITSIZE
8821 (GET_MODE (op0))
8822 - bitsize),
8823 op0, 1);
8824
8825 emit_move_insn (new, op0);
8826 op0 = copy_rtx (new);
8827 PUT_MODE (op0, BLKmode);
8828 }
8829 }
8830 else
8831 /* Get a reference to just this component. */
8832 op0 = change_address (op0, mode1,
8833 plus_constant (XEXP (op0, 0),
8834 (bitpos / BITS_PER_UNIT)));
8835
8836 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8837
8838 /* Adjust the alignment in case the bit position is not
8839 a multiple of the alignment of the inner object. */
8840 while (bitpos % alignment != 0)
8841 alignment >>= 1;
8842
8843 if (GET_CODE (XEXP (op0, 0)) == REG)
8844 mark_reg_pointer (XEXP (op0, 0), alignment);
8845
8846 MEM_IN_STRUCT_P (op0) = 1;
8847 MEM_VOLATILE_P (op0) |= volatilep;
8848
8849 *palign = alignment;
8850 return op0;
8851 }
8852
8853 default:
8854 break;
8855
8856 }
8857
8858 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8859 }
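
/* Illustrative sketch, not part of the compiler: the usual pattern for a
   caller of expand_expr_unaligned is to expand both sides of a BLKmode
   comparison and hand the smaller of the two alignments to the compare
   code, much as do_compare_and_jump does further down in this file.
   EXP and the two labels below stand for a hypothetical caller's context.  */
#if 0
{
  unsigned int align0, align1;
  rtx x = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  rtx y = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);

  /* Only the guaranteed alignment of the operands matters here, not the
     alignment of their types.  */
  do_compare_rtx_and_jump (x, y, EQ, 1, BLKmode,
			   expr_size (TREE_OPERAND (exp, 0)),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}
#endif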
8860 \f
 8861 /* Return the tree node if ARG corresponds to a string constant, or zero
8862 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8863 in bytes within the string that ARG is accessing. The type of the
8864 offset will be `sizetype'. */
8865
8866 tree
8867 string_constant (arg, ptr_offset)
8868 tree arg;
8869 tree *ptr_offset;
8870 {
8871 STRIP_NOPS (arg);
8872
8873 if (TREE_CODE (arg) == ADDR_EXPR
8874 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8875 {
8876 *ptr_offset = size_zero_node;
8877 return TREE_OPERAND (arg, 0);
8878 }
8879 else if (TREE_CODE (arg) == PLUS_EXPR)
8880 {
8881 tree arg0 = TREE_OPERAND (arg, 0);
8882 tree arg1 = TREE_OPERAND (arg, 1);
8883
8884 STRIP_NOPS (arg0);
8885 STRIP_NOPS (arg1);
8886
8887 if (TREE_CODE (arg0) == ADDR_EXPR
8888 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8889 {
8890 *ptr_offset = convert (sizetype, arg1);
8891 return TREE_OPERAND (arg0, 0);
8892 }
8893 else if (TREE_CODE (arg1) == ADDR_EXPR
8894 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8895 {
8896 *ptr_offset = convert (sizetype, arg0);
8897 return TREE_OPERAND (arg1, 0);
8898 }
8899 }
8900
8901 return 0;
8902 }
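
/* Illustrative sketch, not part of the compiler: string_constant recognizes
   the address of a STRING_CST, possibly with an offset added, e.g. the trees
   built for "abcd" + 2 or &"abcd"[2].  A hypothetical caller that wants a
   constant byte offset into the literal might use it roughly like this
   (ARG stands for the expression being examined).  */
#if 0
{
  tree offset;
  tree str = string_constant (arg, &offset);
  HOST_WIDE_INT off;

  if (str != 0 && TREE_CODE (offset) == INTEGER_CST)
    /* OFF is the byte offset of ARG within the string literal STR.  */
    off = TREE_INT_CST_LOW (offset);
}
#endif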
8903 \f
8904 /* Expand code for a post- or pre- increment or decrement
8905 and return the RTX for the result.
8906 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8907
8908 static rtx
8909 expand_increment (exp, post, ignore)
8910 register tree exp;
8911 int post, ignore;
8912 {
8913 register rtx op0, op1;
8914 register rtx temp, value;
8915 register tree incremented = TREE_OPERAND (exp, 0);
8916 optab this_optab = add_optab;
8917 int icode;
8918 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8919 int op0_is_copy = 0;
8920 int single_insn = 0;
8921 /* 1 means we can't store into OP0 directly,
8922 because it is a subreg narrower than a word,
8923 and we don't dare clobber the rest of the word. */
8924 int bad_subreg = 0;
8925
8926 /* Stabilize any component ref that might need to be
8927 evaluated more than once below. */
8928 if (!post
8929 || TREE_CODE (incremented) == BIT_FIELD_REF
8930 || (TREE_CODE (incremented) == COMPONENT_REF
8931 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8932 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8933 incremented = stabilize_reference (incremented);
8934 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8935 ones into save exprs so that they don't accidentally get evaluated
8936 more than once by the code below. */
8937 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8938 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8939 incremented = save_expr (incremented);
8940
8941 /* Compute the operands as RTX.
8942 Note whether OP0 is the actual lvalue or a copy of it:
8943 I believe it is a copy iff it is a register or subreg
8944 and insns were generated in computing it. */
8945
8946 temp = get_last_insn ();
8947 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8948
8949 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8950 in place but instead must do sign- or zero-extension during assignment,
8951 so we copy it into a new register and let the code below use it as
8952 a copy.
8953
 8954      Note that we can safely modify this SUBREG since it is known not to be
8955 shared (it was made by the expand_expr call above). */
8956
8957 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8958 {
8959 if (post)
8960 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8961 else
8962 bad_subreg = 1;
8963 }
8964 else if (GET_CODE (op0) == SUBREG
8965 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8966 {
8967 /* We cannot increment this SUBREG in place. If we are
8968 post-incrementing, get a copy of the old value. Otherwise,
8969 just mark that we cannot increment in place. */
8970 if (post)
8971 op0 = copy_to_reg (op0);
8972 else
8973 bad_subreg = 1;
8974 }
8975
8976 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8977 && temp != get_last_insn ());
8978 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8979 EXPAND_MEMORY_USE_BAD);
8980
8981 /* Decide whether incrementing or decrementing. */
8982 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8983 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8984 this_optab = sub_optab;
8985
8986 /* Convert decrement by a constant into a negative increment. */
8987 if (this_optab == sub_optab
8988 && GET_CODE (op1) == CONST_INT)
8989 {
8990 op1 = GEN_INT (-INTVAL (op1));
8991 this_optab = add_optab;
8992 }
8993
8994 /* For a preincrement, see if we can do this with a single instruction. */
8995 if (!post)
8996 {
8997 icode = (int) this_optab->handlers[(int) mode].insn_code;
8998 if (icode != (int) CODE_FOR_nothing
8999 /* Make sure that OP0 is valid for operands 0 and 1
 9000 	     of the insn we want to use.  */
9001 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9002 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9003 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9004 single_insn = 1;
9005 }
9006
9007 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9008 then we cannot just increment OP0. We must therefore contrive to
9009 increment the original value. Then, for postincrement, we can return
9010 OP0 since it is a copy of the old value. For preincrement, expand here
9011 unless we can do it with a single insn.
9012
9013 Likewise if storing directly into OP0 would clobber high bits
9014 we need to preserve (bad_subreg). */
9015 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9016 {
9017 /* This is the easiest way to increment the value wherever it is.
9018 Problems with multiple evaluation of INCREMENTED are prevented
9019 because either (1) it is a component_ref or preincrement,
9020 in which case it was stabilized above, or (2) it is an array_ref
9021 with constant index in an array in a register, which is
9022 safe to reevaluate. */
9023 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9024 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9025 ? MINUS_EXPR : PLUS_EXPR),
9026 TREE_TYPE (exp),
9027 incremented,
9028 TREE_OPERAND (exp, 1));
9029
9030 while (TREE_CODE (incremented) == NOP_EXPR
9031 || TREE_CODE (incremented) == CONVERT_EXPR)
9032 {
9033 newexp = convert (TREE_TYPE (incremented), newexp);
9034 incremented = TREE_OPERAND (incremented, 0);
9035 }
9036
 9037       temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9038 return post ? op0 : temp;
9039 }
9040
9041 if (post)
9042 {
9043 /* We have a true reference to the value in OP0.
9044 If there is an insn to add or subtract in this mode, queue it.
9045 Queueing the increment insn avoids the register shuffling
9046 that often results if we must increment now and first save
9047 the old value for subsequent use. */
9048
9049 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9050 op0 = stabilize (op0);
9051 #endif
9052
9053 icode = (int) this_optab->handlers[(int) mode].insn_code;
9054 if (icode != (int) CODE_FOR_nothing
9055 /* Make sure that OP0 is valid for operands 0 and 1
9056 of the insn we want to queue. */
9057 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9058 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9059 {
9060 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9061 op1 = force_reg (mode, op1);
9062
9063 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9064 }
9065 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9066 {
9067 rtx addr = (general_operand (XEXP (op0, 0), mode)
9068 ? force_reg (Pmode, XEXP (op0, 0))
9069 : copy_to_reg (XEXP (op0, 0)));
9070 rtx temp, result;
9071
9072 op0 = change_address (op0, VOIDmode, addr);
9073 temp = force_reg (GET_MODE (op0), op0);
9074 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9075 op1 = force_reg (mode, op1);
9076
9077 /* The increment queue is LIFO, thus we have to `queue'
9078 the instructions in reverse order. */
9079 enqueue_insn (op0, gen_move_insn (op0, temp));
9080 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9081 return result;
9082 }
9083 }
9084
9085 /* Preincrement, or we can't increment with one simple insn. */
9086 if (post)
9087 /* Save a copy of the value before inc or dec, to return it later. */
9088 temp = value = copy_to_reg (op0);
9089 else
9090 /* Arrange to return the incremented value. */
9091 /* Copy the rtx because expand_binop will protect from the queue,
9092 and the results of that would be invalid for us to return
9093 if our caller does emit_queue before using our result. */
9094 temp = copy_rtx (value = op0);
9095
9096 /* Increment however we can. */
9097 op1 = expand_binop (mode, this_optab, value, op1,
9098 current_function_check_memory_usage ? NULL_RTX : op0,
9099 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9100 /* Make sure the value is stored into OP0. */
9101 if (op1 != op0)
9102 emit_move_insn (op0, op1);
9103
9104 return temp;
9105 }
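
/* Illustrative sketch, not part of the compiler: for a postincrement such as
   the C expression "v = i++;", expand_increment returns a copy of the old
   value and queues the add, so the RTL that is ultimately emitted looks
   roughly like

	(set (reg v) (reg i))
	(set (reg i) (plus (reg i) (const_int 1)))

   whereas for the preincrement "v = ++i;" the add is emitted before the
   value is used.  This is a descriptive sketch, not literal output.  */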
9106 \f
9107 /* Expand all function calls contained within EXP, innermost ones first.
9108 But don't look within expressions that have sequence points.
9109 For each CALL_EXPR, record the rtx for its value
9110 in the CALL_EXPR_RTL field. */
9111
9112 static void
9113 preexpand_calls (exp)
9114 tree exp;
9115 {
9116 register int nops, i;
9117 int class = TREE_CODE_CLASS (TREE_CODE (exp));
9118
9119 if (! do_preexpand_calls)
9120 return;
9121
9122 /* Only expressions and references can contain calls. */
9123
9124 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9125 return;
9126
9127 switch (TREE_CODE (exp))
9128 {
9129 case CALL_EXPR:
9130 /* Do nothing if already expanded. */
9131 if (CALL_EXPR_RTL (exp) != 0
9132 /* Do nothing if the call returns a variable-sized object. */
9133 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9134 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9135 /* Do nothing to built-in functions. */
9136 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9137 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9138 == FUNCTION_DECL)
9139 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9140 return;
9141
9142 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9143 return;
9144
9145 case COMPOUND_EXPR:
9146 case COND_EXPR:
9147 case TRUTH_ANDIF_EXPR:
9148 case TRUTH_ORIF_EXPR:
9149 /* If we find one of these, then we can be sure
9150 the adjust will be done for it (since it makes jumps).
9151 Do it now, so that if this is inside an argument
9152 of a function, we don't get the stack adjustment
9153 after some other args have already been pushed. */
9154 do_pending_stack_adjust ();
9155 return;
9156
9157 case BLOCK:
9158 case RTL_EXPR:
9159 case WITH_CLEANUP_EXPR:
9160 case CLEANUP_POINT_EXPR:
9161 case TRY_CATCH_EXPR:
9162 return;
9163
9164 case SAVE_EXPR:
9165 if (SAVE_EXPR_RTL (exp) != 0)
9166 return;
9167
9168 default:
9169 break;
9170 }
9171
9172 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
9173 for (i = 0; i < nops; i++)
9174 if (TREE_OPERAND (exp, i) != 0)
9175 {
9176 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9177 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9178 It doesn't happen before the call is made. */
9179 ;
9180 else
9181 {
9182 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9183 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9184 preexpand_calls (TREE_OPERAND (exp, i));
9185 }
9186 }
9187 }
9188 \f
9189 /* At the start of a function, record that we have no previously-pushed
9190 arguments waiting to be popped. */
9191
9192 void
9193 init_pending_stack_adjust ()
9194 {
9195 pending_stack_adjust = 0;
9196 }
9197
9198 /* When exiting from function, if safe, clear out any pending stack adjust
9199 so the adjustment won't get done.
9200
9201 Note, if the current function calls alloca, then it must have a
9202 frame pointer regardless of the value of flag_omit_frame_pointer. */
9203
9204 void
9205 clear_pending_stack_adjust ()
9206 {
9207 #ifdef EXIT_IGNORE_STACK
9208 if (optimize > 0
9209 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9210 && EXIT_IGNORE_STACK
9211 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9212 && ! flag_inline_functions)
9213 {
 9214       stack_pointer_delta -= pending_stack_adjust;
 9215       pending_stack_adjust = 0;
9216 }
9217 #endif
9218 }
9219
9220 /* Pop any previously-pushed arguments that have not been popped yet. */
9221
9222 void
9223 do_pending_stack_adjust ()
9224 {
9225 if (inhibit_defer_pop == 0)
9226 {
9227 if (pending_stack_adjust != 0)
9228 adjust_stack (GEN_INT (pending_stack_adjust));
9229 pending_stack_adjust = 0;
9230 }
9231 }
9232 \f
9233 /* Expand conditional expressions. */
9234
9235 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9236 LABEL is an rtx of code CODE_LABEL, in this function and all the
9237 functions here. */
9238
9239 void
9240 jumpifnot (exp, label)
9241 tree exp;
9242 rtx label;
9243 {
9244 do_jump (exp, label, NULL_RTX);
9245 }
9246
9247 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9248
9249 void
9250 jumpif (exp, label)
9251 tree exp;
9252 rtx label;
9253 {
9254 do_jump (exp, NULL_RTX, label);
9255 }
9256
9257 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9258 the result is zero, or IF_TRUE_LABEL if the result is one.
9259 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9260 meaning fall through in that case.
9261
9262 do_jump always does any pending stack adjust except when it does not
9263 actually perform a jump. An example where there is no jump
9264 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9265
9266 This function is responsible for optimizing cases such as
9267 &&, || and comparison operators in EXP. */
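
/* For example, for the C statement "if (a && b) stmt;" the front end calls
   jumpifnot on the TRUTH_ANDIF_EXPR, and do_jump emits control flow roughly
   equivalent to

	if (a == 0) goto else_label;
	if (b == 0) goto else_label;
	stmt;
     else_label:

   without ever computing the value of "a && b" into a register.  This is a
   descriptive sketch of the generated jumps, not literal output.  */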
9268
9269 void
9270 do_jump (exp, if_false_label, if_true_label)
9271 tree exp;
9272 rtx if_false_label, if_true_label;
9273 {
9274 register enum tree_code code = TREE_CODE (exp);
9275 /* Some cases need to create a label to jump to
9276 in order to properly fall through.
9277 These cases set DROP_THROUGH_LABEL nonzero. */
9278 rtx drop_through_label = 0;
9279 rtx temp;
9280 int i;
9281 tree type;
9282 enum machine_mode mode;
9283
9284 #ifdef MAX_INTEGER_COMPUTATION_MODE
9285 check_max_integer_computation_mode (exp);
9286 #endif
9287
9288 emit_queue ();
9289
9290 switch (code)
9291 {
9292 case ERROR_MARK:
9293 break;
9294
9295 case INTEGER_CST:
9296 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9297 if (temp)
9298 emit_jump (temp);
9299 break;
9300
9301 #if 0
9302 /* This is not true with #pragma weak */
9303 case ADDR_EXPR:
9304 /* The address of something can never be zero. */
9305 if (if_true_label)
9306 emit_jump (if_true_label);
9307 break;
9308 #endif
9309
9310 case NOP_EXPR:
9311 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9312 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9313 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9314 goto normal;
9315 case CONVERT_EXPR:
9316 /* If we are narrowing the operand, we have to do the compare in the
9317 narrower mode. */
9318 if ((TYPE_PRECISION (TREE_TYPE (exp))
9319 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9320 goto normal;
9321 case NON_LVALUE_EXPR:
9322 case REFERENCE_EXPR:
9323 case ABS_EXPR:
9324 case NEGATE_EXPR:
9325 case LROTATE_EXPR:
9326 case RROTATE_EXPR:
9327 /* These cannot change zero->non-zero or vice versa. */
9328 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9329 break;
9330
9331 case WITH_RECORD_EXPR:
9332 /* Put the object on the placeholder list, recurse through our first
9333 operand, and pop the list. */
9334 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9335 placeholder_list);
9336 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9337 placeholder_list = TREE_CHAIN (placeholder_list);
9338 break;
9339
9340 #if 0
9341 /* This is never less insns than evaluating the PLUS_EXPR followed by
9342 a test and can be longer if the test is eliminated. */
9343 case PLUS_EXPR:
9344 /* Reduce to minus. */
9345 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9346 TREE_OPERAND (exp, 0),
9347 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9348 TREE_OPERAND (exp, 1))));
9349 /* Process as MINUS. */
9350 #endif
9351
9352 case MINUS_EXPR:
9353 /* Non-zero iff operands of minus differ. */
9354 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9355 TREE_OPERAND (exp, 0),
9356 TREE_OPERAND (exp, 1)),
9357 NE, NE, if_false_label, if_true_label);
9358 break;
9359
9360 case BIT_AND_EXPR:
9361 /* If we are AND'ing with a small constant, do this comparison in the
9362 smallest type that fits. If the machine doesn't have comparisons
9363 that small, it will be converted back to the wider comparison.
9364 This helps if we are testing the sign bit of a narrower object.
9365 combine can't do this for us because it can't know whether a
9366 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
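
      /* For example, on a 32-bit int X the test "if (x & 0x80)" can be done
	 as a QImode comparison of the low byte, which is cheaper on machines
	 with byte compare instructions (assuming ! SLOW_BYTE_ACCESS).  */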
9367
9368 if (! SLOW_BYTE_ACCESS
9369 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9370 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9371 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9372 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9373 && (type = type_for_mode (mode, 1)) != 0
9374 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9375 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9376 != CODE_FOR_nothing))
9377 {
9378 do_jump (convert (type, exp), if_false_label, if_true_label);
9379 break;
9380 }
9381 goto normal;
9382
9383 case TRUTH_NOT_EXPR:
9384 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9385 break;
9386
9387 case TRUTH_ANDIF_EXPR:
9388 if (if_false_label == 0)
9389 if_false_label = drop_through_label = gen_label_rtx ();
9390 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9391 start_cleanup_deferral ();
9392 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9393 end_cleanup_deferral ();
9394 break;
9395
9396 case TRUTH_ORIF_EXPR:
9397 if (if_true_label == 0)
9398 if_true_label = drop_through_label = gen_label_rtx ();
9399 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9400 start_cleanup_deferral ();
9401 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9402 end_cleanup_deferral ();
9403 break;
9404
9405 case COMPOUND_EXPR:
9406 push_temp_slots ();
9407 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9408 preserve_temp_slots (NULL_RTX);
9409 free_temp_slots ();
9410 pop_temp_slots ();
9411 emit_queue ();
9412 do_pending_stack_adjust ();
9413 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9414 break;
9415
9416 case COMPONENT_REF:
9417 case BIT_FIELD_REF:
9418 case ARRAY_REF:
9419 {
9420 HOST_WIDE_INT bitsize, bitpos;
9421 int unsignedp;
9422 enum machine_mode mode;
9423 tree type;
9424 tree offset;
9425 int volatilep = 0;
9426 unsigned int alignment;
9427
9428 /* Get description of this reference. We don't actually care
9429 about the underlying object here. */
9430 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9431 &unsignedp, &volatilep, &alignment);
9432
9433 type = type_for_size (bitsize, unsignedp);
9434 if (! SLOW_BYTE_ACCESS
9435 && type != 0 && bitsize >= 0
9436 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9437 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9438 != CODE_FOR_nothing))
9439 {
9440 do_jump (convert (type, exp), if_false_label, if_true_label);
9441 break;
9442 }
9443 goto normal;
9444 }
9445
9446 case COND_EXPR:
9447 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9448 if (integer_onep (TREE_OPERAND (exp, 1))
9449 && integer_zerop (TREE_OPERAND (exp, 2)))
9450 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9451
9452 else if (integer_zerop (TREE_OPERAND (exp, 1))
9453 && integer_onep (TREE_OPERAND (exp, 2)))
9454 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9455
9456 else
9457 {
9458 register rtx label1 = gen_label_rtx ();
9459 drop_through_label = gen_label_rtx ();
9460
9461 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9462
9463 start_cleanup_deferral ();
9464 /* Now the THEN-expression. */
9465 do_jump (TREE_OPERAND (exp, 1),
9466 if_false_label ? if_false_label : drop_through_label,
9467 if_true_label ? if_true_label : drop_through_label);
9468 /* In case the do_jump just above never jumps. */
9469 do_pending_stack_adjust ();
9470 emit_label (label1);
9471
9472 /* Now the ELSE-expression. */
9473 do_jump (TREE_OPERAND (exp, 2),
9474 if_false_label ? if_false_label : drop_through_label,
9475 if_true_label ? if_true_label : drop_through_label);
9476 end_cleanup_deferral ();
9477 }
9478 break;
9479
9480 case EQ_EXPR:
9481 {
9482 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9483
9484 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9485 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9486 {
9487 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9488 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9489 do_jump
9490 (fold
9491 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9492 fold (build (EQ_EXPR, TREE_TYPE (exp),
9493 fold (build1 (REALPART_EXPR,
9494 TREE_TYPE (inner_type),
9495 exp0)),
9496 fold (build1 (REALPART_EXPR,
9497 TREE_TYPE (inner_type),
9498 exp1)))),
9499 fold (build (EQ_EXPR, TREE_TYPE (exp),
9500 fold (build1 (IMAGPART_EXPR,
9501 TREE_TYPE (inner_type),
9502 exp0)),
9503 fold (build1 (IMAGPART_EXPR,
9504 TREE_TYPE (inner_type),
9505 exp1)))))),
9506 if_false_label, if_true_label);
9507 }
9508
9509 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9510 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9511
9512 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9513 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9514 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9515 else
9516 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9517 break;
9518 }
9519
9520 case NE_EXPR:
9521 {
9522 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9523
9524 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9525 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9526 {
9527 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9528 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9529 do_jump
9530 (fold
9531 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9532 fold (build (NE_EXPR, TREE_TYPE (exp),
9533 fold (build1 (REALPART_EXPR,
9534 TREE_TYPE (inner_type),
9535 exp0)),
9536 fold (build1 (REALPART_EXPR,
9537 TREE_TYPE (inner_type),
9538 exp1)))),
9539 fold (build (NE_EXPR, TREE_TYPE (exp),
9540 fold (build1 (IMAGPART_EXPR,
9541 TREE_TYPE (inner_type),
9542 exp0)),
9543 fold (build1 (IMAGPART_EXPR,
9544 TREE_TYPE (inner_type),
9545 exp1)))))),
9546 if_false_label, if_true_label);
9547 }
9548
9549 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9550 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9551
9552 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9553 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9554 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9555 else
9556 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9557 break;
9558 }
9559
9560 case LT_EXPR:
9561 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9562 if (GET_MODE_CLASS (mode) == MODE_INT
9563 && ! can_compare_p (LT, mode, ccp_jump))
9564 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9565 else
9566 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9567 break;
9568
9569 case LE_EXPR:
9570 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9571 if (GET_MODE_CLASS (mode) == MODE_INT
9572 && ! can_compare_p (LE, mode, ccp_jump))
9573 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9574 else
9575 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9576 break;
9577
9578 case GT_EXPR:
9579 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9580 if (GET_MODE_CLASS (mode) == MODE_INT
9581 && ! can_compare_p (GT, mode, ccp_jump))
9582 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9583 else
9584 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9585 break;
9586
9587 case GE_EXPR:
9588 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9589 if (GET_MODE_CLASS (mode) == MODE_INT
9590 && ! can_compare_p (GE, mode, ccp_jump))
9591 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9592 else
9593 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9594 break;
9595
9596 case UNORDERED_EXPR:
9597 case ORDERED_EXPR:
9598 {
9599 enum rtx_code cmp, rcmp;
9600 int do_rev;
9601
9602 if (code == UNORDERED_EXPR)
9603 cmp = UNORDERED, rcmp = ORDERED;
9604 else
9605 cmp = ORDERED, rcmp = UNORDERED;
9606 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9607
9608 do_rev = 0;
9609 if (! can_compare_p (cmp, mode, ccp_jump)
9610 && (can_compare_p (rcmp, mode, ccp_jump)
9611 /* If the target doesn't provide either UNORDERED or ORDERED
9612 comparisons, canonicalize on UNORDERED for the library. */
9613 || rcmp == UNORDERED))
9614 do_rev = 1;
9615
9616 if (! do_rev)
9617 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9618 else
9619 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9620 }
9621 break;
9622
9623 {
9624 enum rtx_code rcode1;
9625 enum tree_code tcode2;
9626
9627 case UNLT_EXPR:
9628 rcode1 = UNLT;
9629 tcode2 = LT_EXPR;
9630 goto unordered_bcc;
9631 case UNLE_EXPR:
9632 rcode1 = UNLE;
9633 tcode2 = LE_EXPR;
9634 goto unordered_bcc;
9635 case UNGT_EXPR:
9636 rcode1 = UNGT;
9637 tcode2 = GT_EXPR;
9638 goto unordered_bcc;
9639 case UNGE_EXPR:
9640 rcode1 = UNGE;
9641 tcode2 = GE_EXPR;
9642 goto unordered_bcc;
9643 case UNEQ_EXPR:
9644 rcode1 = UNEQ;
9645 tcode2 = EQ_EXPR;
9646 goto unordered_bcc;
9647
9648 unordered_bcc:
9649 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9650 if (can_compare_p (rcode1, mode, ccp_jump))
9651 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9652 if_true_label);
9653 else
9654 {
9655 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9656 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9657 tree cmp0, cmp1;
9658
9659 /* If the target doesn't support combined unordered
9660 compares, decompose into UNORDERED + comparison. */
9661 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9662 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9663 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9664 do_jump (exp, if_false_label, if_true_label);
9665 }
9666 }
9667 break;
9668
9669 default:
9670 normal:
9671 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9672 #if 0
9673 /* This is not needed any more and causes poor code since it causes
9674 comparisons and tests from non-SI objects to have different code
9675 sequences. */
9676 /* Copy to register to avoid generating bad insns by cse
9677 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9678 if (!cse_not_expected && GET_CODE (temp) == MEM)
9679 temp = copy_to_reg (temp);
9680 #endif
9681 do_pending_stack_adjust ();
9682 /* Do any postincrements in the expression that was tested. */
9683 emit_queue ();
9684
9685 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9686 {
9687 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9688 if (target)
9689 emit_jump (target);
9690 }
9691 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9692 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9693 /* Note swapping the labels gives us not-equal. */
9694 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9695 else if (GET_MODE (temp) != VOIDmode)
9696 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9697 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9698 GET_MODE (temp), NULL_RTX, 0,
9699 if_false_label, if_true_label);
9700 else
9701 abort ();
9702 }
9703
9704 if (drop_through_label)
9705 {
9706 /* If do_jump produces code that might be jumped around,
9707 do any stack adjusts from that code, before the place
9708 where control merges in. */
9709 do_pending_stack_adjust ();
9710 emit_label (drop_through_label);
9711 }
9712 }
9713 \f
9714 /* Given a comparison expression EXP for values too wide to be compared
9715 with one insn, test the comparison and jump to the appropriate label.
9716 The code of EXP is ignored; we always test GT if SWAP is 0,
9717 and LT if SWAP is 1. */
9718
9719 static void
9720 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9721 tree exp;
9722 int swap;
9723 rtx if_false_label, if_true_label;
9724 {
9725 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9726 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9727 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9728 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9729
9730 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9731 }
9732
9733 /* Compare OP0 with OP1, word at a time, in mode MODE.
9734 UNSIGNEDP says to do unsigned comparison.
9735 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9736
9737 void
9738 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9739 enum machine_mode mode;
9740 int unsignedp;
9741 rtx op0, op1;
9742 rtx if_false_label, if_true_label;
9743 {
9744 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9745 rtx drop_through_label = 0;
9746 int i;
9747
9748 if (! if_true_label || ! if_false_label)
9749 drop_through_label = gen_label_rtx ();
9750 if (! if_true_label)
9751 if_true_label = drop_through_label;
9752 if (! if_false_label)
9753 if_false_label = drop_through_label;
9754
9755 /* Compare a word at a time, high order first. */
9756 for (i = 0; i < nwords; i++)
9757 {
9758 rtx op0_word, op1_word;
9759
9760 if (WORDS_BIG_ENDIAN)
9761 {
9762 op0_word = operand_subword_force (op0, i, mode);
9763 op1_word = operand_subword_force (op1, i, mode);
9764 }
9765 else
9766 {
9767 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9768 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9769 }
9770
9771 /* All but high-order word must be compared as unsigned. */
9772 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9773 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9774 NULL_RTX, if_true_label);
9775
9776 /* Consider lower words only if these are equal. */
9777 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9778 NULL_RTX, 0, NULL_RTX, if_false_label);
9779 }
9780
9781 if (if_false_label)
9782 emit_jump (if_false_label);
9783 if (drop_through_label)
9784 emit_label (drop_through_label);
9785 }
9786
9787 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9788 with one insn, test the comparison and jump to the appropriate label. */
9789
9790 static void
9791 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9792 tree exp;
9793 rtx if_false_label, if_true_label;
9794 {
9795 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9796 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9797 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9798 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9799 int i;
9800 rtx drop_through_label = 0;
9801
9802 if (! if_false_label)
9803 drop_through_label = if_false_label = gen_label_rtx ();
9804
9805 for (i = 0; i < nwords; i++)
9806 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9807 operand_subword_force (op1, i, mode),
9808 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9809 word_mode, NULL_RTX, 0, if_false_label,
9810 NULL_RTX);
9811
9812 if (if_true_label)
9813 emit_jump (if_true_label);
9814 if (drop_through_label)
9815 emit_label (drop_through_label);
9816 }
9817 \f
9818 /* Jump according to whether OP0 is 0.
9819 We assume that OP0 has an integer mode that is too wide
9820 for the available compare insns. */
9821
9822 void
9823 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9824 rtx op0;
9825 rtx if_false_label, if_true_label;
9826 {
9827 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9828 rtx part;
9829 int i;
9830 rtx drop_through_label = 0;
9831
9832 /* The fastest way of doing this comparison on almost any machine is to
9833 "or" all the words and compare the result. If all have to be loaded
9834 from memory and this is a very wide item, it's possible this may
9835 be slower, but that's highly unlikely. */
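
  /* For example, testing a DImode value for zero on a 32-bit target becomes
     roughly "if ((lo | hi) == 0) ...": one IOR and one compare instead of
     two compares and two conditional branches.  */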
9836
9837 part = gen_reg_rtx (word_mode);
9838 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9839 for (i = 1; i < nwords && part != 0; i++)
9840 part = expand_binop (word_mode, ior_optab, part,
9841 operand_subword_force (op0, i, GET_MODE (op0)),
9842 part, 1, OPTAB_WIDEN);
9843
9844 if (part != 0)
9845 {
9846 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9847 NULL_RTX, 0, if_false_label, if_true_label);
9848
9849 return;
9850 }
9851
9852 /* If we couldn't do the "or" simply, do this with a series of compares. */
9853 if (! if_false_label)
9854 drop_through_label = if_false_label = gen_label_rtx ();
9855
9856 for (i = 0; i < nwords; i++)
9857 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9858 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9859 if_false_label, NULL_RTX);
9860
9861 if (if_true_label)
9862 emit_jump (if_true_label);
9863
9864 if (drop_through_label)
9865 emit_label (drop_through_label);
9866 }
9867 \f
9868 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9869 (including code to compute the values to be compared)
9870 and set (CC0) according to the result.
9871 The decision as to signed or unsigned comparison must be made by the caller.
9872
9873 We force a stack adjustment unless there are currently
9874 things pushed on the stack that aren't yet used.
9875
9876 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9877 compared.
9878
9879 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9880 size of MODE should be used. */
9881
9882 rtx
9883 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9884 register rtx op0, op1;
9885 enum rtx_code code;
9886 int unsignedp;
9887 enum machine_mode mode;
9888 rtx size;
9889 unsigned int align;
9890 {
9891 rtx tem;
9892
9893 /* If one operand is constant, make it the second one. Only do this
9894 if the other operand is not constant as well. */
9895
9896 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9897 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9898 {
9899 tem = op0;
9900 op0 = op1;
9901 op1 = tem;
9902 code = swap_condition (code);
9903 }
9904
9905 if (flag_force_mem)
9906 {
9907 op0 = force_not_mem (op0);
9908 op1 = force_not_mem (op1);
9909 }
9910
9911 do_pending_stack_adjust ();
9912
9913 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9914 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9915 return tem;
9916
9917 #if 0
9918 /* There's no need to do this now that combine.c can eliminate lots of
9919 sign extensions. This can be less efficient in certain cases on other
9920 machines. */
9921
9922 /* If this is a signed equality comparison, we can do it as an
9923 unsigned comparison since zero-extension is cheaper than sign
9924 extension and comparisons with zero are done as unsigned. This is
9925 the case even on machines that can do fast sign extension, since
9926 zero-extension is easier to combine with other operations than
9927 sign-extension is. If we are comparing against a constant, we must
9928 convert it to what it would look like unsigned. */
9929 if ((code == EQ || code == NE) && ! unsignedp
9930 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9931 {
9932 if (GET_CODE (op1) == CONST_INT
9933 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9934 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9935 unsignedp = 1;
9936 }
9937 #endif
9938
9939 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9940
9941 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9942 }
9943
9944 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9945 The decision as to signed or unsigned comparison must be made by the caller.
9946
9947 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9948 compared.
9949
9950 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9951 size of MODE should be used. */
9952
9953 void
9954 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9955 if_false_label, if_true_label)
9956 register rtx op0, op1;
9957 enum rtx_code code;
9958 int unsignedp;
9959 enum machine_mode mode;
9960 rtx size;
9961 unsigned int align;
9962 rtx if_false_label, if_true_label;
9963 {
9964 rtx tem;
9965 int dummy_true_label = 0;
9966
9967 /* Reverse the comparison if that is safe and we want to jump if it is
9968 false. */
9969 if (! if_true_label && ! FLOAT_MODE_P (mode))
9970 {
9971 if_true_label = if_false_label;
9972 if_false_label = 0;
9973 code = reverse_condition (code);
9974 }
9975
9976 /* If one operand is constant, make it the second one. Only do this
9977 if the other operand is not constant as well. */
9978
9979 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9980 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9981 {
9982 tem = op0;
9983 op0 = op1;
9984 op1 = tem;
9985 code = swap_condition (code);
9986 }
9987
9988 if (flag_force_mem)
9989 {
9990 op0 = force_not_mem (op0);
9991 op1 = force_not_mem (op1);
9992 }
9993
9994 do_pending_stack_adjust ();
9995
9996 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9997 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9998 {
9999 if (tem == const_true_rtx)
10000 {
10001 if (if_true_label)
10002 emit_jump (if_true_label);
10003 }
10004 else
10005 {
10006 if (if_false_label)
10007 emit_jump (if_false_label);
10008 }
10009 return;
10010 }
10011
10012 #if 0
10013 /* There's no need to do this now that combine.c can eliminate lots of
10014 sign extensions. This can be less efficient in certain cases on other
10015 machines. */
10016
10017 /* If this is a signed equality comparison, we can do it as an
10018 unsigned comparison since zero-extension is cheaper than sign
10019 extension and comparisons with zero are done as unsigned. This is
10020 the case even on machines that can do fast sign extension, since
10021 zero-extension is easier to combine with other operations than
10022 sign-extension is. If we are comparing against a constant, we must
10023 convert it to what it would look like unsigned. */
10024 if ((code == EQ || code == NE) && ! unsignedp
10025 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10026 {
10027 if (GET_CODE (op1) == CONST_INT
10028 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10029 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10030 unsignedp = 1;
10031 }
10032 #endif
10033
10034 if (! if_true_label)
10035 {
10036 dummy_true_label = 1;
10037 if_true_label = gen_label_rtx ();
10038 }
10039
10040 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10041 if_true_label);
10042
10043 if (if_false_label)
10044 emit_jump (if_false_label);
10045 if (dummy_true_label)
10046 emit_label (if_true_label);
10047 }
10048
10049 /* Generate code for a comparison expression EXP (including code to compute
10050 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10051 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10052 generated code will drop through.
10053 SIGNED_CODE should be the rtx operation for this comparison for
10054 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10055
10056 We force a stack adjustment unless there are currently
10057 things pushed on the stack that aren't yet used. */
10058
10059 static void
10060 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10061 if_true_label)
10062 register tree exp;
10063 enum rtx_code signed_code, unsigned_code;
10064 rtx if_false_label, if_true_label;
10065 {
10066 unsigned int align0, align1;
10067 register rtx op0, op1;
10068 register tree type;
10069 register enum machine_mode mode;
10070 int unsignedp;
10071 enum rtx_code code;
10072
10073 /* Don't crash if the comparison was erroneous. */
10074 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10075 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10076 return;
10077
10078 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10079 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10080 mode = TYPE_MODE (type);
10081 unsignedp = TREE_UNSIGNED (type);
10082 code = unsignedp ? unsigned_code : signed_code;
10083
10084 #ifdef HAVE_canonicalize_funcptr_for_compare
10085 /* If function pointers need to be "canonicalized" before they can
10086 be reliably compared, then canonicalize them. */
10087 if (HAVE_canonicalize_funcptr_for_compare
10088 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10089 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10090 == FUNCTION_TYPE))
10091 {
10092 rtx new_op0 = gen_reg_rtx (mode);
10093
10094 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10095 op0 = new_op0;
10096 }
10097
10098 if (HAVE_canonicalize_funcptr_for_compare
10099 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10100 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10101 == FUNCTION_TYPE))
10102 {
10103 rtx new_op1 = gen_reg_rtx (mode);
10104
10105 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10106 op1 = new_op1;
10107 }
10108 #endif
10109
10110 /* Do any postincrements in the expression that was tested. */
10111 emit_queue ();
10112
10113 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10114 ((mode == BLKmode)
10115 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10116 MIN (align0, align1),
10117 if_false_label, if_true_label);
10118 }
10119 \f
10120 /* Generate code to calculate EXP using a store-flag instruction
10121 and return an rtx for the result. EXP is either a comparison
10122 or a TRUTH_NOT_EXPR whose operand is a comparison.
10123
10124 If TARGET is nonzero, store the result there if convenient.
10125
10126 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10127 cheap.
10128
10129 Return zero if there is no suitable set-flag instruction
10130 available on this machine.
10131
10132 Once expand_expr has been called on the arguments of the comparison,
10133 we are committed to doing the store flag, since it is not safe to
10134 re-evaluate the expression. We emit the store-flag insn by calling
10135 emit_store_flag, but only expand the arguments if we have a reason
10136 to believe that emit_store_flag will be successful. If we think that
10137 it will, but it isn't, we have to simulate the store-flag with a
10138 set/jump/set sequence. */
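
/* Illustrative sketch, not literal output: the set/jump/set fallback at the
   end of this function emits code of roughly this shape for "target = (x < y)"
   when INVERT is zero:

	(set target (const_int 1))
	... compare x with y ...
	(jump_insn ... branch to LABEL if x < y ...)
	(set target (const_int 0))
     LABEL:
 */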
10139
10140 static rtx
10141 do_store_flag (exp, target, mode, only_cheap)
10142 tree exp;
10143 rtx target;
10144 enum machine_mode mode;
10145 int only_cheap;
10146 {
10147 enum rtx_code code;
10148 tree arg0, arg1, type;
10149 tree tem;
10150 enum machine_mode operand_mode;
10151 int invert = 0;
10152 int unsignedp;
10153 rtx op0, op1;
10154 enum insn_code icode;
10155 rtx subtarget = target;
10156 rtx result, label;
10157
10158 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10159 result at the end. We can't simply invert the test since it would
10160 have already been inverted if it were valid. This case occurs for
10161 some floating-point comparisons. */
10162
10163 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10164 invert = 1, exp = TREE_OPERAND (exp, 0);
10165
10166 arg0 = TREE_OPERAND (exp, 0);
10167 arg1 = TREE_OPERAND (exp, 1);
10168 type = TREE_TYPE (arg0);
10169 operand_mode = TYPE_MODE (type);
10170 unsignedp = TREE_UNSIGNED (type);
10171
10172 /* We won't bother with BLKmode store-flag operations because it would mean
10173 passing a lot of information to emit_store_flag. */
10174 if (operand_mode == BLKmode)
10175 return 0;
10176
10177 /* We won't bother with store-flag operations involving function pointers
10178 when function pointers must be canonicalized before comparisons. */
10179 #ifdef HAVE_canonicalize_funcptr_for_compare
10180 if (HAVE_canonicalize_funcptr_for_compare
10181 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10182 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10183 == FUNCTION_TYPE))
10184 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10185 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10186 == FUNCTION_TYPE))))
10187 return 0;
10188 #endif
10189
10190 STRIP_NOPS (arg0);
10191 STRIP_NOPS (arg1);
10192
10193 /* Get the rtx comparison code to use. We know that EXP is a comparison
10194 operation of some type. Some comparisons against 1 and -1 can be
10195 converted to comparisons with zero. Do so here so that the tests
10196 below will be aware that we have a comparison with zero. These
10197 tests will not catch constants in the first operand, but constants
10198 are rarely passed as the first operand. */
10199
10200 switch (TREE_CODE (exp))
10201 {
10202 case EQ_EXPR:
10203 code = EQ;
10204 break;
10205 case NE_EXPR:
10206 code = NE;
10207 break;
10208 case LT_EXPR:
10209 if (integer_onep (arg1))
10210 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10211 else
10212 code = unsignedp ? LTU : LT;
10213 break;
10214 case LE_EXPR:
10215 if (! unsignedp && integer_all_onesp (arg1))
10216 arg1 = integer_zero_node, code = LT;
10217 else
10218 code = unsignedp ? LEU : LE;
10219 break;
10220 case GT_EXPR:
10221 if (! unsignedp && integer_all_onesp (arg1))
10222 arg1 = integer_zero_node, code = GE;
10223 else
10224 code = unsignedp ? GTU : GT;
10225 break;
10226 case GE_EXPR:
10227 if (integer_onep (arg1))
10228 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10229 else
10230 code = unsignedp ? GEU : GE;
10231 break;
10232
10233 case UNORDERED_EXPR:
10234 code = UNORDERED;
10235 break;
10236 case ORDERED_EXPR:
10237 code = ORDERED;
10238 break;
10239 case UNLT_EXPR:
10240 code = UNLT;
10241 break;
10242 case UNLE_EXPR:
10243 code = UNLE;
10244 break;
10245 case UNGT_EXPR:
10246 code = UNGT;
10247 break;
10248 case UNGE_EXPR:
10249 code = UNGE;
10250 break;
10251 case UNEQ_EXPR:
10252 code = UNEQ;
10253 break;
10254
10255 default:
10256 abort ();
10257 }
10258
10259 /* Put a constant second. */
10260 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10261 {
10262 tem = arg0; arg0 = arg1; arg1 = tem;
10263 code = swap_condition (code);
10264 }
10265
10266 /* If this is an equality or inequality test of a single bit, we can
10267 do this by shifting the bit being tested to the low-order bit and
10268 masking the result with the constant 1. If the condition was EQ,
10269 we xor it with 1. This does not require an scc insn and is faster
10270 than an scc insn even if we have it. */
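
  /* For example, "(x & 0x20) != 0" becomes "(x >> 5) & 1", and
     "(x & 0x20) == 0" becomes "((x >> 5) & 1) ^ 1".  */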
10271
10272 if ((code == NE || code == EQ)
10273 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10274 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10275 {
10276 tree inner = TREE_OPERAND (arg0, 0);
10277 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10278 int ops_unsignedp;
10279
 10280       /* If INNER is a right shift of a constant and the shift count plus BITNUM
 10281 	 stays within the precision of the type, adjust BITNUM and INNER.  */
10282
10283 if (TREE_CODE (inner) == RSHIFT_EXPR
10284 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10285 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10286 && bitnum < TYPE_PRECISION (type)
10287 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10288 bitnum - TYPE_PRECISION (type)))
10289 {
10290 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10291 inner = TREE_OPERAND (inner, 0);
10292 }
10293
10294 /* If we are going to be able to omit the AND below, we must do our
10295 operations as unsigned. If we must use the AND, we have a choice.
10296 Normally unsigned is faster, but for some machines signed is. */
10297 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10298 #ifdef LOAD_EXTEND_OP
10299 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10300 #else
10301 : 1
10302 #endif
10303 );
10304
10305 if (! get_subtarget (subtarget)
10306 || GET_MODE (subtarget) != operand_mode
10307 || ! safe_from_p (subtarget, inner, 1))
10308 subtarget = 0;
10309
10310 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10311
10312 if (bitnum != 0)
10313 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10314 size_int (bitnum), subtarget, ops_unsignedp);
10315
10316 if (GET_MODE (op0) != mode)
10317 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10318
10319 if ((code == EQ && ! invert) || (code == NE && invert))
10320 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10321 ops_unsignedp, OPTAB_LIB_WIDEN);
10322
10323 /* Put the AND last so it can combine with more things. */
10324 if (bitnum != TYPE_PRECISION (type) - 1)
10325 op0 = expand_and (op0, const1_rtx, subtarget);
10326
10327 return op0;
10328 }
10329
10330 /* Now see if we are likely to be able to do this. Return if not. */
10331 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10332 return 0;
10333
10334 icode = setcc_gen_code[(int) code];
10335 if (icode == CODE_FOR_nothing
10336 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10337 {
10338 /* We can only do this if it is one of the special cases that
10339 can be handled without an scc insn. */
10340 if ((code == LT && integer_zerop (arg1))
10341 || (! only_cheap && code == GE && integer_zerop (arg1)))
10342 ;
10343 else if (BRANCH_COST >= 0
10344 && ! only_cheap && (code == NE || code == EQ)
10345 && TREE_CODE (type) != REAL_TYPE
10346 && ((abs_optab->handlers[(int) operand_mode].insn_code
10347 != CODE_FOR_nothing)
10348 || (ffs_optab->handlers[(int) operand_mode].insn_code
10349 != CODE_FOR_nothing)))
10350 ;
10351 else
10352 return 0;
10353 }
10354
10355 preexpand_calls (exp);
10356 if (! get_subtarget (target)
10357 || GET_MODE (subtarget) != operand_mode
10358 || ! safe_from_p (subtarget, arg1, 1))
10359 subtarget = 0;
10360
10361 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10362 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10363
10364 if (target == 0)
10365 target = gen_reg_rtx (mode);
10366
10367 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10368      because, if emit_store_flag does anything at all, it will succeed and
10369      OP0 and OP1 will not be used subsequently.  */
10370
10371 result = emit_store_flag (target, code,
10372 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10373 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10374 operand_mode, unsignedp, 1);
10375
10376 if (result)
10377 {
10378 if (invert)
10379 result = expand_binop (mode, xor_optab, result, const1_rtx,
10380 result, 0, OPTAB_LIB_WIDEN);
10381 return result;
10382 }
10383
10384 /* If this failed, we have to do this with set/compare/jump/set code. */
10385 if (GET_CODE (target) != REG
10386 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10387 target = gen_reg_rtx (GET_MODE (target));
10388
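  /* Emit the sequence: set TARGET to 1, compare OP0 and OP1, branch past
     the final move if the condition holds, and otherwise store 0 into
     TARGET (with the two constants exchanged when INVERT is set).  If the
     comparison folds to a constant, just return the known result.  */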
10389 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10390 result = compare_from_rtx (op0, op1, code, unsignedp,
10391 operand_mode, NULL_RTX, 0);
10392 if (GET_CODE (result) == CONST_INT)
10393 return (((result == const0_rtx && ! invert)
10394 || (result != const0_rtx && invert))
10395 ? const0_rtx : const1_rtx);
10396
10397 label = gen_label_rtx ();
10398 if (bcc_gen_fctn[(int) code] == 0)
10399 abort ();
10400
10401 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10402 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10403 emit_label (label);
10404
10405 return target;
10406 }
10407 \f
10408 /* Generate a tablejump instruction (used for switch statements). */
10409
10410 #ifdef HAVE_tablejump
10411
10412 /* INDEX is the value being switched on, with the lowest value
10413 in the table already subtracted.
10414 MODE is its expected mode (needed if INDEX is constant).
10415 RANGE is the length of the jump table.
10416 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10417
10418 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10419 index value is out of range. */
10420
10421 void
10422 do_tablejump (index, mode, range, table_label, default_label)
10423 rtx index, range, table_label, default_label;
10424 enum machine_mode mode;
10425 {
10426 register rtx temp, vector;
10427
10428   /* Do an unsigned comparison (in the proper mode) between the index
10429      expression and the value that represents the length of the range.
10430      Since we have just subtracted the lower bound of the range from the
10431      index expression, this single comparison checks that the original
10432      index value is both greater than or equal to the minimum value of
10433      the range and less than or equal to the maximum value of the range.  */
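  /* A value below the lower bound wraps around to a very large unsigned
     number, so this one comparison rejects values on either side of the
     table.  */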
10435
10436 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10437 0, default_label);
10438
10439 /* If index is in range, it must fit in Pmode.
10440 Convert to Pmode so we can index with it. */
10441 if (mode != Pmode)
10442 index = convert_to_mode (Pmode, index, 1);
10443
10444   /* Don't let a MEM slip through, because then the INDEX that comes
10445      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10446      and break_out_memory_refs will go to work on it and mess it up.  */
10447 #ifdef PIC_CASE_VECTOR_ADDRESS
10448 if (flag_pic && GET_CODE (index) != REG)
10449 index = copy_to_mode_reg (Pmode, index);
10450 #endif
10451
10452 /* If flag_force_addr were to affect this address
10453 it could interfere with the tricky assumptions made
10454 about addresses that contain label-refs,
10455 which may be valid only very near the tablejump itself. */
10456 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10457 GET_MODE_SIZE, because this indicates how large insns are. The other
10458 uses should all be Pmode, because they are addresses. This code
10459 could fail if addresses and insns are not the same size. */
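  /* The address of the desired table entry is the address of TABLE_LABEL
     plus INDEX scaled by the size of one table entry.  */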
10460 index = gen_rtx_PLUS (Pmode,
10461 gen_rtx_MULT (Pmode, index,
10462 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10463 gen_rtx_LABEL_REF (Pmode, table_label));
10464 #ifdef PIC_CASE_VECTOR_ADDRESS
10465 if (flag_pic)
10466 index = PIC_CASE_VECTOR_ADDRESS (index);
10467 else
10468 #endif
10469 index = memory_address_noforce (CASE_VECTOR_MODE, index);
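  /* Load the selected table entry into a fresh register.  The dispatch
     table is never modified, so the MEM that reads it can be marked
     unchanging.  */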
10470 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10471 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10472 RTX_UNCHANGING_P (vector) = 1;
10473 convert_move (temp, vector, 0);
10474
10475 emit_jump_insn (gen_tablejump (temp, table_label));
10476
10477 /* If we are generating PIC code or if the table is PC-relative, the
10478 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10479 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10480 emit_barrier ();
10481 }
10482
10483 #endif /* HAVE_tablejump */