* expr.c (expand_expr): Return const0_rtx, not error_mark_node.
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "defaults.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 #ifndef ACCUMULATE_OUTGOING_ARGS
50 #define ACCUMULATE_OUTGOING_ARGS 0
51 #endif
52
53 /* Supply a default definition for PUSH_ARGS. */
54 #ifndef PUSH_ARGS
55 #ifdef PUSH_ROUNDING
56 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
57 #else
58 #define PUSH_ARGS 0
59 #endif
60 #endif
61
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
64
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
67
68 #ifdef PUSH_ROUNDING
69
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first */
72 #endif
73
74 #endif
75
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
83
84 /* Assume that case vectors are not pc-relative. */
85 #ifndef CASE_VECTOR_PC_RELATIVE
86 #define CASE_VECTOR_PC_RELATIVE 0
87 #endif
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Nonzero to generate code for all the subroutines within an
98 expression before generating the upper levels of the expression.
99 Nowadays this is never zero. */
100 int do_preexpand_calls = 1;
101
102 /* Don't check memory usage, since code is being emitted to check memory
103 usage. Used when current_function_check_memory_usage is true, to avoid
104 infinite recursion. */
105 static int in_check_memory_usage;
106
107 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
108 static tree placeholder_list = 0;
109
110 /* This structure is used by move_by_pieces to describe the move to
111 be performed. */
112 struct move_by_pieces
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 rtx from;
119 rtx from_addr;
120 int autinc_from;
121 int explicit_inc_from;
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 int reverse;
125 };
126
127 /* This structure is used by clear_by_pieces to describe the clear to
128 be performed. */
129
130 struct clear_by_pieces
131 {
132 rtx to;
133 rtx to_addr;
134 int autinc_to;
135 int explicit_inc_to;
136 unsigned HOST_WIDE_INT len;
137 HOST_WIDE_INT offset;
138 int reverse;
139 };
140
141 extern struct obstack permanent_obstack;
142
143 static rtx get_push_address PARAMS ((int));
144
145 static rtx enqueue_insn PARAMS ((rtx, rtx));
146 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
147 PARAMS ((unsigned HOST_WIDE_INT,
148 unsigned int));
149 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
150 struct move_by_pieces *));
151 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
152 unsigned int));
153 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
154 enum machine_mode,
155 struct clear_by_pieces *));
156 static rtx get_subtarget PARAMS ((rtx));
157 static int is_zeros_p PARAMS ((tree));
158 static int mostly_zeros_p PARAMS ((tree));
159 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
160 HOST_WIDE_INT, enum machine_mode,
161 tree, tree, unsigned int, int));
162 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
163 HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int,
167 unsigned int, HOST_WIDE_INT, int));
168 static enum memory_use_mode
169 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
170 static tree save_noncopied_parts PARAMS ((tree, tree));
171 static tree init_noncopied_parts PARAMS ((tree, tree));
172 static int safe_from_p PARAMS ((rtx, tree, int));
173 static int fixed_type_p PARAMS ((tree));
174 static rtx var_rtx PARAMS ((tree));
175 static int readonly_fields_p PARAMS ((tree));
176 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
177 static rtx expand_increment PARAMS ((tree, int, int));
178 static void preexpand_calls PARAMS ((tree));
179 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
180 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
181 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
182 rtx, rtx));
183 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
184
185 /* Record for each mode whether we can move a register directly to or
186 from an object of that mode in memory. If we can't, we won't try
187 to use that mode directly when accessing a field of that mode. */
188
189 static char direct_load[NUM_MACHINE_MODES];
190 static char direct_store[NUM_MACHINE_MODES];
191
192 /* If a memory-to-memory move would take MOVE_RATIO or more simple
193 move-instruction sequences, we will do a movstr or libcall instead. */
194
195 #ifndef MOVE_RATIO
196 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
197 #define MOVE_RATIO 2
198 #else
199 /* If we are optimizing for space (-Os), cut down the default move ratio */
200 #define MOVE_RATIO (optimize_size ? 3 : 15)
201 #endif
202 #endif
203
204 /* This macro is used to determine whether move_by_pieces should be called
205 to perform a structure copy. */
206 #ifndef MOVE_BY_PIECES_P
207 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
208 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
209 #endif
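/* For instance, with a MOVE_RATIO of 15 (the speed-optimized default when no
   movstr pattern is available), a constant-size copy that
   move_by_pieces_ninsns estimates at 14 or fewer single-mode moves is
   open-coded by move_by_pieces; anything larger falls through to a movstr
   pattern or a library call (see emit_block_move below).  */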
210
211 /* This array records the insn_code of insns to perform block moves. */
212 enum insn_code movstr_optab[NUM_MACHINE_MODES];
213
214 /* This array records the insn_code of insns to perform block clears. */
215 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
216
217 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
218
219 #ifndef SLOW_UNALIGNED_ACCESS
220 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
221 #endif
222 \f
223 /* This is run once per compilation to set up which modes can be used
224 directly in memory and to initialize the block move optab. */
225
226 void
227 init_expr_once ()
228 {
229 rtx insn, pat;
230 enum machine_mode mode;
231 int num_clobbers;
232 rtx mem, mem1;
233 char *free_point;
234
235 start_sequence ();
236
237 /* Since we are on the permanent obstack, we must be sure we save this
238 spot AFTER we call start_sequence, since it will reuse the rtl it
239 makes. */
240 free_point = (char *) oballoc (0);
241
242 /* Try indexing by frame ptr and try by stack ptr.
243 It is known that on the Convex the stack ptr isn't a valid index.
244 With luck, one or the other is valid on any machine. */
245 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
246 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
247
248 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
249 pat = PATTERN (insn);
250
251 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
252 mode = (enum machine_mode) ((int) mode + 1))
253 {
254 int regno;
255 rtx reg;
256
257 direct_load[(int) mode] = direct_store[(int) mode] = 0;
258 PUT_MODE (mem, mode);
259 PUT_MODE (mem1, mode);
260
261 /* See if there is some register that can be used in this mode and
262 directly loaded or stored from memory. */
263
264 if (mode != VOIDmode && mode != BLKmode)
265 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
266 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
267 regno++)
268 {
269 if (! HARD_REGNO_MODE_OK (regno, mode))
270 continue;
271
272 reg = gen_rtx_REG (mode, regno);
273
274 SET_SRC (pat) = mem;
275 SET_DEST (pat) = reg;
276 if (recog (pat, insn, &num_clobbers) >= 0)
277 direct_load[(int) mode] = 1;
278
279 SET_SRC (pat) = mem1;
280 SET_DEST (pat) = reg;
281 if (recog (pat, insn, &num_clobbers) >= 0)
282 direct_load[(int) mode] = 1;
283
284 SET_SRC (pat) = reg;
285 SET_DEST (pat) = mem;
286 if (recog (pat, insn, &num_clobbers) >= 0)
287 direct_store[(int) mode] = 1;
288
289 SET_SRC (pat) = reg;
290 SET_DEST (pat) = mem1;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_store[(int) mode] = 1;
293 }
294 }
295
296 end_sequence ();
297 obfree (free_point);
298 }
299
300 /* This is run at the start of compiling a function. */
301
302 void
303 init_expr ()
304 {
305 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
306
307 pending_chain = 0;
308 pending_stack_adjust = 0;
309 stack_pointer_delta = 0;
310 inhibit_defer_pop = 0;
311 saveregs_value = 0;
312 apply_args_value = 0;
313 forced_labels = 0;
314 }
315
316 void
317 mark_expr_status (p)
318 struct expr_status *p;
319 {
320 if (p == NULL)
321 return;
322
323 ggc_mark_rtx (p->x_saveregs_value);
324 ggc_mark_rtx (p->x_apply_args_value);
325 ggc_mark_rtx (p->x_forced_labels);
326 }
327
328 void
329 free_expr_status (f)
330 struct function *f;
331 {
332 free (f->expr);
333 f->expr = NULL;
334 }
335
336 /* Small sanity check that the queue is empty at the end of a function. */
337
338 void
339 finish_expr_for_function ()
340 {
341 if (pending_chain)
342 abort ();
343 }
344 \f
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
347
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
351
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
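/* A sketch of a typical use: queueing a post-increment of register R by one
   might look like

	enqueue_insn (r, gen_add2_insn (r, const1_rtx));

   the QUEUED rtx returned then stands for the pre-increment value of R until
   emit_queue flushes the pending chain.  */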
354
355 static rtx
356 enqueue_insn (var, body)
357 rtx var, body;
358 {
359 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
360 body, pending_chain);
361 return pending_chain;
362 }
363
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
370
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
374
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
378
379 rtx
380 protect_from_queue (x, modify)
381 register rtx x;
382 int modify;
383 {
384 register RTX_CODE code = GET_CODE (x);
385
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain == 0)
389 return x;
390 #endif
391
392 if (code != QUEUED)
393 {
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
398 shared. */
399 if (code == MEM && GET_MODE (x) != BLKmode
400 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
401 {
402 register rtx y = XEXP (x, 0);
403 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
404
405 MEM_COPY_ATTRIBUTES (new, x);
406
407 if (QUEUED_INSN (y))
408 {
409 register rtx temp = gen_reg_rtx (GET_MODE (new));
410 emit_insn_before (gen_move_insn (temp, new),
411 QUEUED_INSN (y));
412 return temp;
413 }
414 return new;
415 }
416 /* Otherwise, recursively protect the subexpressions of all
417 the kinds of rtx's that can contain a QUEUED. */
418 if (code == MEM)
419 {
420 rtx tem = protect_from_queue (XEXP (x, 0), 0);
421 if (tem != XEXP (x, 0))
422 {
423 x = copy_rtx (x);
424 XEXP (x, 0) = tem;
425 }
426 }
427 else if (code == PLUS || code == MULT)
428 {
429 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
430 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
431 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
432 {
433 x = copy_rtx (x);
434 XEXP (x, 0) = new0;
435 XEXP (x, 1) = new1;
436 }
437 }
438 return x;
439 }
440 /* If the increment has not happened, use the variable itself. */
441 if (QUEUED_INSN (x) == 0)
442 return QUEUED_VAR (x);
443 /* If the increment has happened and a pre-increment copy exists,
444 use that copy. */
445 if (QUEUED_COPY (x) != 0)
446 return QUEUED_COPY (x);
447 /* The increment has happened but we haven't set up a pre-increment copy.
448 Set one up now, and use it. */
449 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
450 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
451 QUEUED_INSN (x));
452 return QUEUED_COPY (x);
453 }
454
455 /* Return nonzero if X contains a QUEUED expression:
456 if it contains anything that will be altered by a queued increment.
457 We handle only combinations of MEM, PLUS, MINUS and MULT operators
458 since memory addresses generally contain only those. */
459
460 int
461 queued_subexp_p (x)
462 rtx x;
463 {
464 register enum rtx_code code = GET_CODE (x);
465 switch (code)
466 {
467 case QUEUED:
468 return 1;
469 case MEM:
470 return queued_subexp_p (XEXP (x, 0));
471 case MULT:
472 case PLUS:
473 case MINUS:
474 return (queued_subexp_p (XEXP (x, 0))
475 || queued_subexp_p (XEXP (x, 1)));
476 default:
477 return 0;
478 }
479 }
480
481 /* Perform all the pending incrementations. */
482
483 void
484 emit_queue ()
485 {
486 register rtx p;
487 while ((p = pending_chain))
488 {
489 rtx body = QUEUED_BODY (p);
490
491 if (GET_CODE (body) == SEQUENCE)
492 {
493 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
494 emit_insn (QUEUED_BODY (p));
495 }
496 else
497 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
498 pending_chain = QUEUED_NEXT (p);
499 }
500 }
501 \f
502 /* Copy data from FROM to TO, where the machine modes are not the same.
503 Both modes may be integer, or both may be floating.
504 UNSIGNEDP should be nonzero if FROM is an unsigned type.
505 This causes zero-extension instead of sign-extension. */
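/* An illustrative sketch: to widen a QImode value R into a fresh SImode
   pseudo with sign extension, a caller could write

	rtx wide = gen_reg_rtx (SImode);
	convert_move (wide, r, 0);

   passing 1 for UNSIGNEDP instead requests zero extension.  */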
506
507 void
508 convert_move (to, from, unsignedp)
509 register rtx to, from;
510 int unsignedp;
511 {
512 enum machine_mode to_mode = GET_MODE (to);
513 enum machine_mode from_mode = GET_MODE (from);
514 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
515 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
516 enum insn_code code;
517 rtx libcall;
518
519 /* rtx code for making an equivalent value. */
520 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
521
522 to = protect_from_queue (to, 1);
523 from = protect_from_queue (from, 0);
524
525 if (to_real != from_real)
526 abort ();
527
528 /* If FROM is a SUBREG that indicates that we have already done at least
529 the required extension, strip it. We don't handle such SUBREGs as
530 TO here. */
531
532 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
533 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
534 >= GET_MODE_SIZE (to_mode))
535 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
536 from = gen_lowpart (to_mode, from), from_mode = to_mode;
537
538 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
539 abort ();
540
541 if (to_mode == from_mode
542 || (from_mode == VOIDmode && CONSTANT_P (from)))
543 {
544 emit_move_insn (to, from);
545 return;
546 }
547
548 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
549 {
550 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
551 abort ();
552
553 if (VECTOR_MODE_P (to_mode))
554 from = gen_rtx_SUBREG (to_mode, from, 0);
555 else
556 to = gen_rtx_SUBREG (from_mode, to, 0);
557
558 emit_move_insn (to, from);
559 return;
560 }
561
562 if (to_real != from_real)
563 abort ();
564
565 if (to_real)
566 {
567 rtx value;
568
569 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
570 {
571 /* Try converting directly if the insn is supported. */
572 if ((code = can_extend_p (to_mode, from_mode, 0))
573 != CODE_FOR_nothing)
574 {
575 emit_unop_insn (code, to, from, UNKNOWN);
576 return;
577 }
578 }
579
580 #ifdef HAVE_trunchfqf2
581 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
582 {
583 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
584 return;
585 }
586 #endif
587 #ifdef HAVE_trunctqfqf2
588 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
589 {
590 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
591 return;
592 }
593 #endif
594 #ifdef HAVE_truncsfqf2
595 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
596 {
597 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
598 return;
599 }
600 #endif
601 #ifdef HAVE_truncdfqf2
602 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
603 {
604 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
605 return;
606 }
607 #endif
608 #ifdef HAVE_truncxfqf2
609 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
610 {
611 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615 #ifdef HAVE_trunctfqf2
616 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
617 {
618 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622
623 #ifdef HAVE_trunctqfhf2
624 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
625 {
626 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
627 return;
628 }
629 #endif
630 #ifdef HAVE_truncsfhf2
631 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
632 {
633 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
634 return;
635 }
636 #endif
637 #ifdef HAVE_truncdfhf2
638 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
639 {
640 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
641 return;
642 }
643 #endif
644 #ifdef HAVE_truncxfhf2
645 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
646 {
647 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651 #ifdef HAVE_trunctfhf2
652 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
653 {
654 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658
659 #ifdef HAVE_truncsftqf2
660 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
661 {
662 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
663 return;
664 }
665 #endif
666 #ifdef HAVE_truncdftqf2
667 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
668 {
669 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
670 return;
671 }
672 #endif
673 #ifdef HAVE_truncxftqf2
674 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
675 {
676 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_trunctftqf2
681 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
682 {
683 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687
688 #ifdef HAVE_truncdfsf2
689 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
690 {
691 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
692 return;
693 }
694 #endif
695 #ifdef HAVE_truncxfsf2
696 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
697 {
698 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
699 return;
700 }
701 #endif
702 #ifdef HAVE_trunctfsf2
703 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
704 {
705 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
706 return;
707 }
708 #endif
709 #ifdef HAVE_truncxfdf2
710 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
711 {
712 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
713 return;
714 }
715 #endif
716 #ifdef HAVE_trunctfdf2
717 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
718 {
719 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
720 return;
721 }
722 #endif
723
724 libcall = (rtx) 0;
725 switch (from_mode)
726 {
727 case SFmode:
728 switch (to_mode)
729 {
730 case DFmode:
731 libcall = extendsfdf2_libfunc;
732 break;
733
734 case XFmode:
735 libcall = extendsfxf2_libfunc;
736 break;
737
738 case TFmode:
739 libcall = extendsftf2_libfunc;
740 break;
741
742 default:
743 break;
744 }
745 break;
746
747 case DFmode:
748 switch (to_mode)
749 {
750 case SFmode:
751 libcall = truncdfsf2_libfunc;
752 break;
753
754 case XFmode:
755 libcall = extenddfxf2_libfunc;
756 break;
757
758 case TFmode:
759 libcall = extenddftf2_libfunc;
760 break;
761
762 default:
763 break;
764 }
765 break;
766
767 case XFmode:
768 switch (to_mode)
769 {
770 case SFmode:
771 libcall = truncxfsf2_libfunc;
772 break;
773
774 case DFmode:
775 libcall = truncxfdf2_libfunc;
776 break;
777
778 default:
779 break;
780 }
781 break;
782
783 case TFmode:
784 switch (to_mode)
785 {
786 case SFmode:
787 libcall = trunctfsf2_libfunc;
788 break;
789
790 case DFmode:
791 libcall = trunctfdf2_libfunc;
792 break;
793
794 default:
795 break;
796 }
797 break;
798
799 default:
800 break;
801 }
802
803 if (libcall == (rtx) 0)
804 /* This conversion is not implemented yet. */
805 abort ();
806
807 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
808 1, from, from_mode);
809 emit_move_insn (to, value);
810 return;
811 }
812
813 /* Now both modes are integers. */
814
815 /* Handle expanding beyond a word. */
816 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
817 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
818 {
819 rtx insns;
820 rtx lowpart;
821 rtx fill_value;
822 rtx lowfrom;
823 int i;
824 enum machine_mode lowpart_mode;
825 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
826
827 /* Try converting directly if the insn is supported. */
828 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
829 != CODE_FOR_nothing)
830 {
831 /* If FROM is a SUBREG, put it into a register. Do this
832 so that we always generate the same set of insns for
833 better cse'ing; if an intermediate assignment occurred,
834 we won't be doing the operation directly on the SUBREG. */
835 if (optimize > 0 && GET_CODE (from) == SUBREG)
836 from = force_reg (from_mode, from);
837 emit_unop_insn (code, to, from, equiv_code);
838 return;
839 }
840 /* Next, try converting via full word. */
841 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
842 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
843 != CODE_FOR_nothing))
844 {
845 if (GET_CODE (to) == REG)
846 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
847 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
848 emit_unop_insn (code, to,
849 gen_lowpart (word_mode, to), equiv_code);
850 return;
851 }
852
853 /* No special multiword conversion insn; do it by hand. */
854 start_sequence ();
855
856 /* Since we will turn this into a no conflict block, we must ensure
857 that the source does not overlap the target. */
858
859 if (reg_overlap_mentioned_p (to, from))
860 from = force_reg (from_mode, from);
861
862 /* Get a copy of FROM widened to a word, if necessary. */
863 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
864 lowpart_mode = word_mode;
865 else
866 lowpart_mode = from_mode;
867
868 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
869
870 lowpart = gen_lowpart (lowpart_mode, to);
871 emit_move_insn (lowpart, lowfrom);
872
873 /* Compute the value to put in each remaining word. */
874 if (unsignedp)
875 fill_value = const0_rtx;
876 else
877 {
878 #ifdef HAVE_slt
879 if (HAVE_slt
880 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
881 && STORE_FLAG_VALUE == -1)
882 {
883 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
884 lowpart_mode, 0, 0);
885 fill_value = gen_reg_rtx (word_mode);
886 emit_insn (gen_slt (fill_value));
887 }
888 else
889 #endif
890 {
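/* Sign-extend by hand: an arithmetic right shift of the low part by its
   width minus one yields 0 or -1, exactly the value that every remaining
   higher-order word of the result must hold.  */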
891 fill_value
892 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
893 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
894 NULL_RTX, 0);
895 fill_value = convert_to_mode (word_mode, fill_value, 1);
896 }
897 }
898
899 /* Fill the remaining words. */
900 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
901 {
902 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
903 rtx subword = operand_subword (to, index, 1, to_mode);
904
905 if (subword == 0)
906 abort ();
907
908 if (fill_value != subword)
909 emit_move_insn (subword, fill_value);
910 }
911
912 insns = get_insns ();
913 end_sequence ();
914
915 emit_no_conflict_block (insns, to, from, NULL_RTX,
916 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
917 return;
918 }
919
920 /* Truncating multi-word to a word or less. */
921 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
922 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
923 {
924 if (!((GET_CODE (from) == MEM
925 && ! MEM_VOLATILE_P (from)
926 && direct_load[(int) to_mode]
927 && ! mode_dependent_address_p (XEXP (from, 0)))
928 || GET_CODE (from) == REG
929 || GET_CODE (from) == SUBREG))
930 from = force_reg (from_mode, from);
931 convert_move (to, gen_lowpart (word_mode, from), 0);
932 return;
933 }
934
935 /* Handle pointer conversion */ /* SPEE 900220 */
936 if (to_mode == PQImode)
937 {
938 if (from_mode != QImode)
939 from = convert_to_mode (QImode, from, unsignedp);
940
941 #ifdef HAVE_truncqipqi2
942 if (HAVE_truncqipqi2)
943 {
944 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
945 return;
946 }
947 #endif /* HAVE_truncqipqi2 */
948 abort ();
949 }
950
951 if (from_mode == PQImode)
952 {
953 if (to_mode != QImode)
954 {
955 from = convert_to_mode (QImode, from, unsignedp);
956 from_mode = QImode;
957 }
958 else
959 {
960 #ifdef HAVE_extendpqiqi2
961 if (HAVE_extendpqiqi2)
962 {
963 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
964 return;
965 }
966 #endif /* HAVE_extendpqiqi2 */
967 abort ();
968 }
969 }
970
971 if (to_mode == PSImode)
972 {
973 if (from_mode != SImode)
974 from = convert_to_mode (SImode, from, unsignedp);
975
976 #ifdef HAVE_truncsipsi2
977 if (HAVE_truncsipsi2)
978 {
979 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
980 return;
981 }
982 #endif /* HAVE_truncsipsi2 */
983 abort ();
984 }
985
986 if (from_mode == PSImode)
987 {
988 if (to_mode != SImode)
989 {
990 from = convert_to_mode (SImode, from, unsignedp);
991 from_mode = SImode;
992 }
993 else
994 {
995 #ifdef HAVE_extendpsisi2
996 if (HAVE_extendpsisi2)
997 {
998 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
999 return;
1000 }
1001 #endif /* HAVE_extendpsisi2 */
1002 abort ();
1003 }
1004 }
1005
1006 if (to_mode == PDImode)
1007 {
1008 if (from_mode != DImode)
1009 from = convert_to_mode (DImode, from, unsignedp);
1010
1011 #ifdef HAVE_truncdipdi2
1012 if (HAVE_truncdipdi2)
1013 {
1014 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1015 return;
1016 }
1017 #endif /* HAVE_truncdipdi2 */
1018 abort ();
1019 }
1020
1021 if (from_mode == PDImode)
1022 {
1023 if (to_mode != DImode)
1024 {
1025 from = convert_to_mode (DImode, from, unsignedp);
1026 from_mode = DImode;
1027 }
1028 else
1029 {
1030 #ifdef HAVE_extendpdidi2
1031 if (HAVE_extendpdidi2)
1032 {
1033 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1034 return;
1035 }
1036 #endif /* HAVE_extendpdidi2 */
1037 abort ();
1038 }
1039 }
1040
1041 /* Now follow all the conversions between integers
1042 no more than a word long. */
1043
1044 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1045 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1046 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1047 GET_MODE_BITSIZE (from_mode)))
1048 {
1049 if (!((GET_CODE (from) == MEM
1050 && ! MEM_VOLATILE_P (from)
1051 && direct_load[(int) to_mode]
1052 && ! mode_dependent_address_p (XEXP (from, 0)))
1053 || GET_CODE (from) == REG
1054 || GET_CODE (from) == SUBREG))
1055 from = force_reg (from_mode, from);
1056 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1057 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1058 from = copy_to_reg (from);
1059 emit_move_insn (to, gen_lowpart (to_mode, from));
1060 return;
1061 }
1062
1063 /* Handle extension. */
1064 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1065 {
1066 /* Convert directly if that works. */
1067 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1068 != CODE_FOR_nothing)
1069 {
1070 emit_unop_insn (code, to, from, equiv_code);
1071 return;
1072 }
1073 else
1074 {
1075 enum machine_mode intermediate;
1076 rtx tmp;
1077 tree shift_amount;
1078
1079 /* Search for a mode to convert via. */
1080 for (intermediate = from_mode; intermediate != VOIDmode;
1081 intermediate = GET_MODE_WIDER_MODE (intermediate))
1082 if (((can_extend_p (to_mode, intermediate, unsignedp)
1083 != CODE_FOR_nothing)
1084 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1085 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1086 GET_MODE_BITSIZE (intermediate))))
1087 && (can_extend_p (intermediate, from_mode, unsignedp)
1088 != CODE_FOR_nothing))
1089 {
1090 convert_move (to, convert_to_mode (intermediate, from,
1091 unsignedp), unsignedp);
1092 return;
1093 }
1094
1095 /* No suitable intermediate mode.
1096 Generate what we need with shifts. */
1097 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1098 - GET_MODE_BITSIZE (from_mode), 0);
1099 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1100 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1101 to, unsignedp);
1102 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1103 to, unsignedp);
1104 if (tmp != to)
1105 emit_move_insn (to, tmp);
1106 return;
1107 }
1108 }
1109
1110 /* Support special truncate insns for certain modes. */
1111
1112 if (from_mode == DImode && to_mode == SImode)
1113 {
1114 #ifdef HAVE_truncdisi2
1115 if (HAVE_truncdisi2)
1116 {
1117 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1118 return;
1119 }
1120 #endif
1121 convert_move (to, force_reg (from_mode, from), unsignedp);
1122 return;
1123 }
1124
1125 if (from_mode == DImode && to_mode == HImode)
1126 {
1127 #ifdef HAVE_truncdihi2
1128 if (HAVE_truncdihi2)
1129 {
1130 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1131 return;
1132 }
1133 #endif
1134 convert_move (to, force_reg (from_mode, from), unsignedp);
1135 return;
1136 }
1137
1138 if (from_mode == DImode && to_mode == QImode)
1139 {
1140 #ifdef HAVE_truncdiqi2
1141 if (HAVE_truncdiqi2)
1142 {
1143 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1144 return;
1145 }
1146 #endif
1147 convert_move (to, force_reg (from_mode, from), unsignedp);
1148 return;
1149 }
1150
1151 if (from_mode == SImode && to_mode == HImode)
1152 {
1153 #ifdef HAVE_truncsihi2
1154 if (HAVE_truncsihi2)
1155 {
1156 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1157 return;
1158 }
1159 #endif
1160 convert_move (to, force_reg (from_mode, from), unsignedp);
1161 return;
1162 }
1163
1164 if (from_mode == SImode && to_mode == QImode)
1165 {
1166 #ifdef HAVE_truncsiqi2
1167 if (HAVE_truncsiqi2)
1168 {
1169 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1170 return;
1171 }
1172 #endif
1173 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 return;
1175 }
1176
1177 if (from_mode == HImode && to_mode == QImode)
1178 {
1179 #ifdef HAVE_trunchiqi2
1180 if (HAVE_trunchiqi2)
1181 {
1182 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1183 return;
1184 }
1185 #endif
1186 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 return;
1188 }
1189
1190 if (from_mode == TImode && to_mode == DImode)
1191 {
1192 #ifdef HAVE_trunctidi2
1193 if (HAVE_trunctidi2)
1194 {
1195 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1196 return;
1197 }
1198 #endif
1199 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 return;
1201 }
1202
1203 if (from_mode == TImode && to_mode == SImode)
1204 {
1205 #ifdef HAVE_trunctisi2
1206 if (HAVE_trunctisi2)
1207 {
1208 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1209 return;
1210 }
1211 #endif
1212 convert_move (to, force_reg (from_mode, from), unsignedp);
1213 return;
1214 }
1215
1216 if (from_mode == TImode && to_mode == HImode)
1217 {
1218 #ifdef HAVE_trunctihi2
1219 if (HAVE_trunctihi2)
1220 {
1221 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1222 return;
1223 }
1224 #endif
1225 convert_move (to, force_reg (from_mode, from), unsignedp);
1226 return;
1227 }
1228
1229 if (from_mode == TImode && to_mode == QImode)
1230 {
1231 #ifdef HAVE_trunctiqi2
1232 if (HAVE_trunctiqi2)
1233 {
1234 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1235 return;
1236 }
1237 #endif
1238 convert_move (to, force_reg (from_mode, from), unsignedp);
1239 return;
1240 }
1241
1242 /* Handle truncation of volatile memrefs, and so on;
1243 the things that couldn't be truncated directly,
1244 and for which there was no special instruction. */
1245 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1246 {
1247 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1248 emit_move_insn (to, temp);
1249 return;
1250 }
1251
1252 /* Mode combination is not recognized. */
1253 abort ();
1254 }
1255
1256 /* Return an rtx for a value that would result
1257 from converting X to mode MODE.
1258 Both X and MODE may be floating, or both integer.
1259 UNSIGNEDP is nonzero if X is an unsigned value.
1260 This can be done by referring to a part of X in place
1261 or by copying to a new temporary with conversion.
1262
1263 This function *must not* call protect_from_queue
1264 except when putting X into an insn (in which case convert_move does it). */
1265
1266 rtx
1267 convert_to_mode (mode, x, unsignedp)
1268 enum machine_mode mode;
1269 rtx x;
1270 int unsignedp;
1271 {
1272 return convert_modes (mode, VOIDmode, x, unsignedp);
1273 }
1274
1275 /* Return an rtx for a value that would result
1276 from converting X from mode OLDMODE to mode MODE.
1277 Both modes may be floating, or both integer.
1278 UNSIGNEDP is nonzero if X is an unsigned value.
1279
1280 This can be done by referring to a part of X in place
1281 or by copying to a new temporary with conversion.
1282
1283 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1284
1285 This function *must not* call protect_from_queue
1286 except when putting X into an insn (in which case convert_move does it). */
1287
1288 rtx
1289 convert_modes (mode, oldmode, x, unsignedp)
1290 enum machine_mode mode, oldmode;
1291 rtx x;
1292 int unsignedp;
1293 {
1294 register rtx temp;
1295
1296 /* If FROM is a SUBREG that indicates that we have already done at least
1297 the required extension, strip it. */
1298
1299 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1300 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1301 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1302 x = gen_lowpart (mode, x);
1303
1304 if (GET_MODE (x) != VOIDmode)
1305 oldmode = GET_MODE (x);
1306
1307 if (mode == oldmode)
1308 return x;
1309
1310 /* There is one case that we must handle specially: If we are converting
1311 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1312 we are to interpret the constant as unsigned, gen_lowpart will do
1313 the wrong if the constant appears negative. What we want to do is
1314 make the high-order word of the constant zero, not all ones. */
1315
1316 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1317 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1318 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1319 {
1320 HOST_WIDE_INT val = INTVAL (x);
1321
1322 if (oldmode != VOIDmode
1323 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1324 {
1325 int width = GET_MODE_BITSIZE (oldmode);
1326
1327 /* We need to zero extend VAL. */
1328 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1329 }
1330
1331 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1332 }
1333
1334 /* We can do this with a gen_lowpart if both desired and current modes
1335 are integer, and this is either a constant integer, a register, or a
1336 non-volatile MEM. Except for the constant case where MODE is no
1337 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1338
1339 if ((GET_CODE (x) == CONST_INT
1340 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1341 || (GET_MODE_CLASS (mode) == MODE_INT
1342 && GET_MODE_CLASS (oldmode) == MODE_INT
1343 && (GET_CODE (x) == CONST_DOUBLE
1344 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1345 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1346 && direct_load[(int) mode])
1347 || (GET_CODE (x) == REG
1348 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1349 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1350 {
1351 /* ?? If we don't know OLDMODE, we have to assume here that
1352 X does not need sign- or zero-extension. This may not be
1353 the case, but it's the best we can do. */
1354 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1355 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1356 {
1357 HOST_WIDE_INT val = INTVAL (x);
1358 int width = GET_MODE_BITSIZE (oldmode);
1359
1360 /* We must sign or zero-extend in this case. Start by
1361 zero-extending, then sign extend if we need to. */
1362 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1363 if (! unsignedp
1364 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1365 val |= (HOST_WIDE_INT) (-1) << width;
1366
1367 return GEN_INT (val);
1368 }
1369
1370 return gen_lowpart (mode, x);
1371 }
1372
1373 temp = gen_reg_rtx (mode);
1374 convert_move (temp, x, unsignedp);
1375 return temp;
1376 }
1377 \f
1378
1379 /* This macro determines the largest unit size that
1380 move_by_pieces can use. */
1381
1382 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1383 move efficiently, as opposed to MOVE_MAX which is the maximum
1384 number of bytes we can move with a single instruction. */
1385
1386 #ifndef MOVE_MAX_PIECES
1387 #define MOVE_MAX_PIECES MOVE_MAX
1388 #endif
1389
1390 /* Generate several move instructions to copy LEN bytes
1391 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1392 The caller must pass FROM and TO
1393 through protect_from_queue before calling.
1394 ALIGN is the maximum alignment, in bits, that we can assume. */
1395
1396 void
1397 move_by_pieces (to, from, len, align)
1398 rtx to, from;
1399 unsigned HOST_WIDE_INT len;
1400 unsigned int align;
1401 {
1402 struct move_by_pieces data;
1403 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1404 unsigned int max_size = MOVE_MAX_PIECES + 1;
1405 enum machine_mode mode = VOIDmode, tmode;
1406 enum insn_code icode;
1407
1408 data.offset = 0;
1409 data.to_addr = to_addr;
1410 data.from_addr = from_addr;
1411 data.to = to;
1412 data.from = from;
1413 data.autinc_to
1414 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1415 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1416 data.autinc_from
1417 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1418 || GET_CODE (from_addr) == POST_INC
1419 || GET_CODE (from_addr) == POST_DEC);
1420
1421 data.explicit_inc_from = 0;
1422 data.explicit_inc_to = 0;
1423 data.reverse
1424 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1425 if (data.reverse) data.offset = len;
1426 data.len = len;
1427
1428 /* If copying requires more than two move insns,
1429 copy addresses to registers (to make displacements shorter)
1430 and use post-increment if available. */
1431 if (!(data.autinc_from && data.autinc_to)
1432 && move_by_pieces_ninsns (len, align) > 2)
1433 {
1434 /* Find the mode of the largest move... */
1435 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1436 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1437 if (GET_MODE_SIZE (tmode) < max_size)
1438 mode = tmode;
1439
1440 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1441 {
1442 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1443 data.autinc_from = 1;
1444 data.explicit_inc_from = -1;
1445 }
1446 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1447 {
1448 data.from_addr = copy_addr_to_reg (from_addr);
1449 data.autinc_from = 1;
1450 data.explicit_inc_from = 1;
1451 }
1452 if (!data.autinc_from && CONSTANT_P (from_addr))
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1455 {
1456 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1457 data.autinc_to = 1;
1458 data.explicit_inc_to = -1;
1459 }
1460 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1461 {
1462 data.to_addr = copy_addr_to_reg (to_addr);
1463 data.autinc_to = 1;
1464 data.explicit_inc_to = 1;
1465 }
1466 if (!data.autinc_to && CONSTANT_P (to_addr))
1467 data.to_addr = copy_addr_to_reg (to_addr);
1468 }
1469
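/* If unaligned accesses are cheap, or the known alignment is already at
   least as large as anything a move insn could exploit, treat the data as
   maximally aligned.  */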
1470 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1471 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1472 align = MOVE_MAX * BITS_PER_UNIT;
1473
1474 /* First move what we can in the largest integer mode, then go to
1475 successively smaller modes. */
1476
1477 while (max_size > 1)
1478 {
1479 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1480 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1481 if (GET_MODE_SIZE (tmode) < max_size)
1482 mode = tmode;
1483
1484 if (mode == VOIDmode)
1485 break;
1486
1487 icode = mov_optab->handlers[(int) mode].insn_code;
1488 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1489 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1490
1491 max_size = GET_MODE_SIZE (mode);
1492 }
1493
1494 /* The code above should have handled everything. */
1495 if (data.len > 0)
1496 abort ();
1497 }
1498
1499 /* Return number of insns required to move L bytes by pieces.
1500 ALIGN (in bits) is the maximum alignment we can assume. */
1501
1502 static unsigned HOST_WIDE_INT
1503 move_by_pieces_ninsns (l, align)
1504 unsigned HOST_WIDE_INT l;
1505 unsigned int align;
1506 {
1507 unsigned HOST_WIDE_INT n_insns = 0;
1508 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1509
1510 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1511 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1512 align = MOVE_MAX * BITS_PER_UNIT;
1513
1514 while (max_size > 1)
1515 {
1516 enum machine_mode mode = VOIDmode, tmode;
1517 enum insn_code icode;
1518
1519 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1520 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1521 if (GET_MODE_SIZE (tmode) < max_size)
1522 mode = tmode;
1523
1524 if (mode == VOIDmode)
1525 break;
1526
1527 icode = mov_optab->handlers[(int) mode].insn_code;
1528 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1529 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1530
1531 max_size = GET_MODE_SIZE (mode);
1532 }
1533
1534 return n_insns;
1535 }
1536
1537 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1538 with move instructions for mode MODE. GENFUN is the gen_... function
1539 to make a move insn for that mode. DATA has all the other info. */
1540
1541 static void
1542 move_by_pieces_1 (genfun, mode, data)
1543 rtx (*genfun) PARAMS ((rtx, ...));
1544 enum machine_mode mode;
1545 struct move_by_pieces *data;
1546 {
1547 unsigned int size = GET_MODE_SIZE (mode);
1548 rtx to1, from1;
1549
1550 while (data->len >= size)
1551 {
1552 if (data->reverse)
1553 data->offset -= size;
1554
1555 if (data->autinc_to)
1556 {
1557 to1 = gen_rtx_MEM (mode, data->to_addr);
1558 MEM_COPY_ATTRIBUTES (to1, data->to);
1559 }
1560 else
1561 to1 = change_address (data->to, mode,
1562 plus_constant (data->to_addr, data->offset));
1563
1564 if (data->autinc_from)
1565 {
1566 from1 = gen_rtx_MEM (mode, data->from_addr);
1567 MEM_COPY_ATTRIBUTES (from1, data->from);
1568 }
1569 else
1570 from1 = change_address (data->from, mode,
1571 plus_constant (data->from_addr, data->offset));
1572
1573 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1574 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1575 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1576 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1577
1578 emit_insn ((*genfun) (to1, from1));
1579
1580 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1581 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1582 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1583 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1584
1585 if (! data->reverse)
1586 data->offset += size;
1587
1588 data->len -= size;
1589 }
1590 }
1591 \f
1592 /* Emit code to move a block Y to a block X.
1593 This may be done with string-move instructions,
1594 with multiple scalar move instructions, or with a library call.
1595
1596 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1597 with mode BLKmode.
1598 SIZE is an rtx that says how long they are.
1599 ALIGN is the maximum alignment we can assume they have.
1600
1601 Return the address of the new block, if memcpy is called and returns it,
1602 0 otherwise. */
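/* An illustrative call (a sketch): expanding a BLKmode structure copy of SIZE
   constant bytes typically ends up here as

	emit_block_move (to_mem, from_mem, GEN_INT (size), align);

   with ALIGN in bits; small constant sizes are open-coded via move_by_pieces,
   and everything else goes through a movstr pattern or a memcpy/bcopy call.  */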
1603
1604 rtx
1605 emit_block_move (x, y, size, align)
1606 rtx x, y;
1607 rtx size;
1608 unsigned int align;
1609 {
1610 rtx retval = 0;
1611 #ifdef TARGET_MEM_FUNCTIONS
1612 static tree fn;
1613 tree call_expr, arg_list;
1614 #endif
1615
1616 if (GET_MODE (x) != BLKmode)
1617 abort ();
1618
1619 if (GET_MODE (y) != BLKmode)
1620 abort ();
1621
1622 x = protect_from_queue (x, 1);
1623 y = protect_from_queue (y, 0);
1624 size = protect_from_queue (size, 0);
1625
1626 if (GET_CODE (x) != MEM)
1627 abort ();
1628 if (GET_CODE (y) != MEM)
1629 abort ();
1630 if (size == 0)
1631 abort ();
1632
1633 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1634 move_by_pieces (x, y, INTVAL (size), align);
1635 else
1636 {
1637 /* Try the most limited insn first, because there's no point
1638 including more than one in the machine description unless
1639 the more limited one has some advantage. */
1640
1641 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1642 enum machine_mode mode;
1643
1644 /* Since this is a move insn, we don't care about volatility. */
1645 volatile_ok = 1;
1646
1647 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1648 mode = GET_MODE_WIDER_MODE (mode))
1649 {
1650 enum insn_code code = movstr_optab[(int) mode];
1651 insn_operand_predicate_fn pred;
1652
1653 if (code != CODE_FOR_nothing
1654 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1655 here because if SIZE is less than the mode mask, as it is
1656 returned by the macro, it will definitely be less than the
1657 actual mode mask. */
1658 && ((GET_CODE (size) == CONST_INT
1659 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1660 <= (GET_MODE_MASK (mode) >> 1)))
1661 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1662 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1663 || (*pred) (x, BLKmode))
1664 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1665 || (*pred) (y, BLKmode))
1666 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1667 || (*pred) (opalign, VOIDmode)))
1668 {
1669 rtx op2;
1670 rtx last = get_last_insn ();
1671 rtx pat;
1672
1673 op2 = convert_to_mode (mode, size, 1);
1674 pred = insn_data[(int) code].operand[2].predicate;
1675 if (pred != 0 && ! (*pred) (op2, mode))
1676 op2 = copy_to_mode_reg (mode, op2);
1677
1678 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1679 if (pat)
1680 {
1681 emit_insn (pat);
1682 volatile_ok = 0;
1683 return 0;
1684 }
1685 else
1686 delete_insns_since (last);
1687 }
1688 }
1689
1690 volatile_ok = 0;
1691
1692 /* X, Y, or SIZE may have been passed through protect_from_queue.
1693
1694 It is unsafe to save the value generated by protect_from_queue
1695 and reuse it later. Consider what happens if emit_queue is
1696 called before the return value from protect_from_queue is used.
1697
1698 Expansion of the CALL_EXPR below will call emit_queue before
1699 we are finished emitting RTL for argument setup. So if we are
1700 not careful we could get the wrong value for an argument.
1701
1702 To avoid this problem we go ahead and emit code to copy X, Y &
1703 SIZE into new pseudos. We can then place those new pseudos
1704 into an RTL_EXPR and use them later, even after a call to
1705 emit_queue.
1706
1707 Note this is not strictly needed for library calls since they
1708 do not call emit_queue before loading their arguments. However,
1709 we may need to have library calls call emit_queue in the future
1710 since failing to do so could cause problems for targets which
1711 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1712 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1713 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1714
1715 #ifdef TARGET_MEM_FUNCTIONS
1716 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1717 #else
1718 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1719 TREE_UNSIGNED (integer_type_node));
1720 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1721 #endif
1722
1723 #ifdef TARGET_MEM_FUNCTIONS
1724 /* It is incorrect to use the libcall calling conventions to call
1725 memcpy in this context.
1726
1727 This could be a user call to memcpy and the user may wish to
1728 examine the return value from memcpy.
1729
1730 For targets where libcalls and normal calls have different conventions
1731 for returning pointers, we could end up generating incorrect code.
1732
1733 So instead of using a libcall sequence we build up a suitable
1734 CALL_EXPR and expand the call in the normal fashion. */
1735 if (fn == NULL_TREE)
1736 {
1737 tree fntype;
1738
1739 /* This was copied from except.c; I don't know if all this is
1740 necessary in this context or not. */
1741 fn = get_identifier ("memcpy");
1742 push_obstacks_nochange ();
1743 end_temporary_allocation ();
1744 fntype = build_pointer_type (void_type_node);
1745 fntype = build_function_type (fntype, NULL_TREE);
1746 fn = build_decl (FUNCTION_DECL, fn, fntype);
1747 ggc_add_tree_root (&fn, 1);
1748 DECL_EXTERNAL (fn) = 1;
1749 TREE_PUBLIC (fn) = 1;
1750 DECL_ARTIFICIAL (fn) = 1;
1751 make_decl_rtl (fn, NULL_PTR, 1);
1752 assemble_external (fn);
1753 pop_obstacks ();
1754 }
1755
1756 /* We need to make an argument list for the function call.
1757
1758 memcpy has three arguments: the first two are void * addresses and
1759 the last is a size_t byte count for the copy. */
1760 arg_list
1761 = build_tree_list (NULL_TREE,
1762 make_tree (build_pointer_type (void_type_node), x));
1763 TREE_CHAIN (arg_list)
1764 = build_tree_list (NULL_TREE,
1765 make_tree (build_pointer_type (void_type_node), y));
1766 TREE_CHAIN (TREE_CHAIN (arg_list))
1767 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1768 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1769
1770 /* Now we have to build up the CALL_EXPR itself. */
1771 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1772 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1773 call_expr, arg_list, NULL_TREE);
1774 TREE_SIDE_EFFECTS (call_expr) = 1;
1775
1776 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1777 #else
1778 emit_library_call (bcopy_libfunc, 0,
1779 VOIDmode, 3, y, Pmode, x, Pmode,
1780 convert_to_mode (TYPE_MODE (integer_type_node), size,
1781 TREE_UNSIGNED (integer_type_node)),
1782 TYPE_MODE (integer_type_node));
1783 #endif
1784 }
1785
1786 return retval;
1787 }
1788 \f
1789 /* Copy all or part of a value X into registers starting at REGNO.
1790 The number of registers to be filled is NREGS. */
1791
1792 void
1793 move_block_to_reg (regno, x, nregs, mode)
1794 int regno;
1795 rtx x;
1796 int nregs;
1797 enum machine_mode mode;
1798 {
1799 int i;
1800 #ifdef HAVE_load_multiple
1801 rtx pat;
1802 rtx last;
1803 #endif
1804
1805 if (nregs == 0)
1806 return;
1807
1808 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1809 x = validize_mem (force_const_mem (mode, x));
1810
1811 /* See if the machine can do this with a load multiple insn. */
1812 #ifdef HAVE_load_multiple
1813 if (HAVE_load_multiple)
1814 {
1815 last = get_last_insn ();
1816 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1817 GEN_INT (nregs));
1818 if (pat)
1819 {
1820 emit_insn (pat);
1821 return;
1822 }
1823 else
1824 delete_insns_since (last);
1825 }
1826 #endif
1827
1828 for (i = 0; i < nregs; i++)
1829 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1830 operand_subword_force (x, i, mode));
1831 }
1832
1833 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1834 The number of registers to be filled is NREGS. SIZE indicates the number
1835 of bytes in the object X. */
1836
1837
1838 void
1839 move_block_from_reg (regno, x, nregs, size)
1840 int regno;
1841 rtx x;
1842 int nregs;
1843 int size;
1844 {
1845 int i;
1846 #ifdef HAVE_store_multiple
1847 rtx pat;
1848 rtx last;
1849 #endif
1850 enum machine_mode mode;
1851
1852 /* If SIZE is that of a mode no bigger than a word, just use that
1853 mode's store operation. */
1854 if (size <= UNITS_PER_WORD
1855 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1856 {
1857 emit_move_insn (change_address (x, mode, NULL),
1858 gen_rtx_REG (mode, regno));
1859 return;
1860 }
1861
1862 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1863 to the left before storing to memory. Note that the previous test
1864 doesn't handle all cases (e.g. SIZE == 3). */
1865 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1866 {
1867 rtx tem = operand_subword (x, 0, 1, BLKmode);
1868 rtx shift;
1869
1870 if (tem == 0)
1871 abort ();
1872
1873 shift = expand_shift (LSHIFT_EXPR, word_mode,
1874 gen_rtx_REG (word_mode, regno),
1875 build_int_2 ((UNITS_PER_WORD - size)
1876 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1877 emit_move_insn (tem, shift);
1878 return;
1879 }
1880
1881 /* See if the machine can do this with a store multiple insn. */
1882 #ifdef HAVE_store_multiple
1883 if (HAVE_store_multiple)
1884 {
1885 last = get_last_insn ();
1886 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1887 GEN_INT (nregs));
1888 if (pat)
1889 {
1890 emit_insn (pat);
1891 return;
1892 }
1893 else
1894 delete_insns_since (last);
1895 }
1896 #endif
1897
1898 for (i = 0; i < nregs; i++)
1899 {
1900 rtx tem = operand_subword (x, i, 1, BLKmode);
1901
1902 if (tem == 0)
1903 abort ();
1904
1905 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1906 }
1907 }
1908
1909 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1910 registers represented by a PARALLEL. SSIZE represents the total size of
1911 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1912 SRC in bits. */
1913 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1914 the balance will be in what would be the low-order memory addresses, i.e.
1915 left justified for big endian, right justified for little endian. This
1916 happens to be true for the targets currently using this support. If this
1917 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1918 would be needed. */
1919
1920 void
1921 emit_group_load (dst, orig_src, ssize, align)
1922 rtx dst, orig_src;
1923 unsigned int align;
1924 int ssize;
1925 {
1926 rtx *tmps, src;
1927 int start, i;
1928
1929 if (GET_CODE (dst) != PARALLEL)
1930 abort ();
1931
1932 /* Check for a NULL entry, used to indicate that the parameter goes
1933 both on the stack and in registers. */
1934 if (XEXP (XVECEXP (dst, 0, 0), 0))
1935 start = 0;
1936 else
1937 start = 1;
1938
1939 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1940
1941 /* If we won't be loading directly from memory, protect the real source
1942 from strange tricks we might play. */
1943 src = orig_src;
1944 if (GET_CODE (src) != MEM)
1945 {
1946 if (GET_MODE (src) == VOIDmode)
1947 src = gen_reg_rtx (GET_MODE (dst));
1948 else
1949 src = gen_reg_rtx (GET_MODE (orig_src));
1950 emit_move_insn (src, orig_src);
1951 }
1952
1953 /* Process the pieces. */
1954 for (i = start; i < XVECLEN (dst, 0); i++)
1955 {
1956 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1957 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1958 unsigned int bytelen = GET_MODE_SIZE (mode);
1959 int shift = 0;
1960
1961 /* Handle trailing fragments that run over the size of the struct. */
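/* For example, a DImode piece covering bytes 0..7 of a 6-byte object gets
   SHIFT == 16: only 6 bytes are read, and on a big-endian target they are
   later shifted left so they sit at the high-order end of the register.  */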
1962 if (ssize >= 0 && bytepos + bytelen > ssize)
1963 {
1964 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1965 bytelen = ssize - bytepos;
1966 if (bytelen <= 0)
1967 abort ();
1968 }
1969
1970 /* Optimize the access just a bit. */
1971 if (GET_CODE (src) == MEM
1972 && align >= GET_MODE_ALIGNMENT (mode)
1973 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1974 && bytelen == GET_MODE_SIZE (mode))
1975 {
1976 tmps[i] = gen_reg_rtx (mode);
1977 emit_move_insn (tmps[i],
1978 change_address (src, mode,
1979 plus_constant (XEXP (src, 0),
1980 bytepos)));
1981 }
1982 else if (GET_CODE (src) == CONCAT)
1983 {
1984 if (bytepos == 0
1985 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1986 tmps[i] = XEXP (src, 0);
1987 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1988 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1989 tmps[i] = XEXP (src, 1);
1990 else
1991 abort ();
1992 }
1993 else
1994 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1995 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1996 mode, mode, align, ssize);
1997
1998 if (BYTES_BIG_ENDIAN && shift)
1999 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2000 tmps[i], 0, OPTAB_WIDEN);
2001 }
2002
2003 emit_queue ();
2004
2005 /* Copy the extracted pieces into the proper (probable) hard regs. */
2006 for (i = start; i < XVECLEN (dst, 0); i++)
2007 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2008 }
2009
2010 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2011 registers represented by a PARALLEL. SSIZE represents the total size of
2012 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2013
2014 void
2015 emit_group_store (orig_dst, src, ssize, align)
2016 rtx orig_dst, src;
2017 int ssize;
2018 unsigned int align;
2019 {
2020 rtx *tmps, dst;
2021 int start, i;
2022
2023 if (GET_CODE (src) != PARALLEL)
2024 abort ();
2025
2026 /* Check for a NULL entry, used to indicate that the parameter goes
2027 both on the stack and in registers. */
2028 if (XEXP (XVECEXP (src, 0, 0), 0))
2029 start = 0;
2030 else
2031 start = 1;
2032
2033 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2034
2035 /* Copy the (probable) hard regs into pseudos. */
2036 for (i = start; i < XVECLEN (src, 0); i++)
2037 {
2038 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2039 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2040 emit_move_insn (tmps[i], reg);
2041 }
2042 emit_queue ();
2043
2044 /* If we won't be storing directly into memory, protect the real destination
2045 from strange tricks we might play. */
2046 dst = orig_dst;
2047 if (GET_CODE (dst) == PARALLEL)
2048 {
2049 rtx temp;
2050
2051 /* We can get a PARALLEL dst if there is a conditional expression in
2052 a return statement. In that case, the dst and src are the same,
2053 so no action is necessary. */
2054 if (rtx_equal_p (dst, src))
2055 return;
2056
2057 /* It is unclear if we can ever reach here, but we may as well handle
2058 it. Allocate a temporary, and split this into a store/load to/from
2059 the temporary. */
2060
2061 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2062 emit_group_store (temp, src, ssize, align);
2063 emit_group_load (dst, temp, ssize, align);
2064 return;
2065 }
2066 else if (GET_CODE (dst) != MEM)
2067 {
2068 dst = gen_reg_rtx (GET_MODE (orig_dst));
2069 /* Make life a bit easier for combine. */
2070 emit_move_insn (dst, const0_rtx);
2071 }
2072 else if (! MEM_IN_STRUCT_P (dst))
2073 {
2074 /* store_bit_field requires that memory operations have
2075 mem_in_struct_p set; we might not. */
2076
2077 dst = copy_rtx (orig_dst);
2078 MEM_SET_IN_STRUCT_P (dst, 1);
2079 }
2080
2081 /* Process the pieces. */
2082 for (i = start; i < XVECLEN (src, 0); i++)
2083 {
2084 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2085 enum machine_mode mode = GET_MODE (tmps[i]);
2086 unsigned int bytelen = GET_MODE_SIZE (mode);
2087
2088 /* Handle trailing fragments that run over the size of the struct. */
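/* In that case only SSIZE - BYTEPOS bytes belong to the object; on a
   big-endian target the data sits at the high-order end of the register,
   so shift it down before the partial store below.  */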
2089 if (ssize >= 0 && bytepos + bytelen > ssize)
2090 {
2091 if (BYTES_BIG_ENDIAN)
2092 {
2093 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2094 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2095 tmps[i], 0, OPTAB_WIDEN);
2096 }
2097 bytelen = ssize - bytepos;
2098 }
2099
2100 /* Optimize the access just a bit. */
2101 if (GET_CODE (dst) == MEM
2102 && align >= GET_MODE_ALIGNMENT (mode)
2103 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2104 && bytelen == GET_MODE_SIZE (mode))
2105 emit_move_insn (change_address (dst, mode,
2106 plus_constant (XEXP (dst, 0),
2107 bytepos)),
2108 tmps[i]);
2109 else
2110 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2111 mode, tmps[i], align, ssize);
2112 }
2113
2114 emit_queue ();
2115
2116 /* Copy from the pseudo into the (probable) hard reg. */
2117 if (GET_CODE (dst) == REG)
2118 emit_move_insn (orig_dst, dst);
2119 }
2120
2121 /* Generate code to copy a BLKmode object of TYPE out of a
2122 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2123 is null, a stack temporary is created. TGTBLK is returned.
2124
2125 The primary purpose of this routine is to handle functions
2126 that return BLKmode structures in registers. Some machines
2127 (the PA for example) want to return all small structures
2128 in registers regardless of the structure's alignment. */
2129
2130 rtx
2131 copy_blkmode_from_reg (tgtblk, srcreg, type)
2132 rtx tgtblk;
2133 rtx srcreg;
2134 tree type;
2135 {
2136 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2137 rtx src = NULL, dst = NULL;
2138 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2139 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2140
2141 if (tgtblk == 0)
2142 {
2143 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2144 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2145 preserve_temp_slots (tgtblk);
2146 }
2147
2148 /* This code assumes srcreg is at least a full word. If it isn't,
2149 copy it into a new pseudo which is a full word. */
2150 if (GET_MODE (srcreg) != BLKmode
2151 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2152 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2153
2154 /* Structures whose size is not a multiple of a word are aligned
2155 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2156 machine, this means we must skip the empty high order bytes when
2157 calculating the bit offset. */
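/* For example, a 6-byte structure on a 32-bit big-endian target leaves
   bytes % UNITS_PER_WORD == 2, so the correction is 32 - 16 = 16 bits of
   empty high-order space to skip in the first source word.  */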
2158 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2159 big_endian_correction
2160 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2161
2162 /* Copy the structure BITSIZE bits at a time.
2163
2164 We could probably emit more efficient code for machines which do not use
2165 strict alignment, but it doesn't seem worth the effort at the current
2166 time. */
2167 for (bitpos = 0, xbitpos = big_endian_correction;
2168 bitpos < bytes * BITS_PER_UNIT;
2169 bitpos += bitsize, xbitpos += bitsize)
2170 {
2171 /* We need a new source operand each time xbitpos is on a
2172 word boundary and when xbitpos == big_endian_correction
2173 (the first time through). */
2174 if (xbitpos % BITS_PER_WORD == 0
2175 || xbitpos == big_endian_correction)
2176 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2177
2178 /* We need a new destination operand each time bitpos is on
2179 a word boundary. */
2180 if (bitpos % BITS_PER_WORD == 0)
2181 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2182
2183 /* Use xbitpos for the source extraction (right justified) and
2184 bitpos for the destination store (left justified). */
2185 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2186 extract_bit_field (src, bitsize,
2187 xbitpos % BITS_PER_WORD, 1,
2188 NULL_RTX, word_mode, word_mode,
2189 bitsize, BITS_PER_WORD),
2190 bitsize, BITS_PER_WORD);
2191 }
2192
2193 return tgtblk;
2194 }
2195
2196 /* Add a USE expression for REG to the (possibly empty) list pointed
2197 to by CALL_FUSAGE. REG must denote a hard register. */
2198
2199 void
2200 use_reg (call_fusage, reg)
2201 rtx *call_fusage, reg;
2202 {
2203 if (GET_CODE (reg) != REG
2204 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2205 abort();
2206
2207 *call_fusage
2208 = gen_rtx_EXPR_LIST (VOIDmode,
2209 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2210 }
2211
2212 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2213 starting at REGNO. All of these registers must be hard registers. */
2214
2215 void
2216 use_regs (call_fusage, regno, nregs)
2217 rtx *call_fusage;
2218 int regno;
2219 int nregs;
2220 {
2221 int i;
2222
2223 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2224 abort ();
2225
2226 for (i = 0; i < nregs; i++)
2227 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2228 }
2229
2230 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2231 PARALLEL REGS. This is for calls that pass values in multiple
2232 non-contiguous locations. The Irix 6 ABI has examples of this. */
2233
2234 void
2235 use_group_regs (call_fusage, regs)
2236 rtx *call_fusage;
2237 rtx regs;
2238 {
2239 int i;
2240
2241 for (i = 0; i < XVECLEN (regs, 0); i++)
2242 {
2243 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2244
2245 /* A NULL entry means the parameter goes both on the stack and in
2246 registers. This can also be a MEM for targets that pass values
2247 partially on the stack and partially in registers. */
2248 if (reg != 0 && GET_CODE (reg) == REG)
2249 use_reg (call_fusage, reg);
2250 }
2251 }
2252 \f
2253 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2254 rtx with BLKmode). The caller must pass TO through protect_from_queue
2255 before calling. ALIGN is the maximum alignment we can assume. */
2256
2257 static void
2258 clear_by_pieces (to, len, align)
2259 rtx to;
2260 unsigned HOST_WIDE_INT len;
2261 unsigned int align;
2262 {
2263 struct clear_by_pieces data;
2264 rtx to_addr = XEXP (to, 0);
2265 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2266 enum machine_mode mode = VOIDmode, tmode;
2267 enum insn_code icode;
2268
2269 data.offset = 0;
2270 data.to_addr = to_addr;
2271 data.to = to;
2272 data.autinc_to
2273 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2274 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2275
2276 data.explicit_inc_to = 0;
2277 data.reverse
2278 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2279 if (data.reverse) data.offset = len;
2280 data.len = len;
2281
2282 /* If copying requires more than two move insns,
2283 copy addresses to registers (to make displacements shorter)
2284 and use post-increment if available. */
2285 if (!data.autinc_to
2286 && move_by_pieces_ninsns (len, align) > 2)
2287 {
2288 /* Determine the main mode we'll be using. */
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2290 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) < max_size)
2292 mode = tmode;
2293
2294 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2295 {
2296 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2297 data.autinc_to = 1;
2298 data.explicit_inc_to = -1;
2299 }
2300
2301 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2302 && ! data.autinc_to)
2303 {
2304 data.to_addr = copy_addr_to_reg (to_addr);
2305 data.autinc_to = 1;
2306 data.explicit_inc_to = 1;
2307 }
2308
2309 if (! data.autinc_to && CONSTANT_P (to_addr))
2310 data.to_addr = copy_addr_to_reg (to_addr);
2311 }
2312
2313 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2314 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2315 align = MOVE_MAX * BITS_PER_UNIT;
2316
2317 /* First move what we can in the largest integer mode, then go to
2318 successively smaller modes. */
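/* For example, clearing 7 bytes emits one SImode, one HImode and one QImode
   store on a 32-bit target, provided the alignment and the available mov
   patterns allow each of those modes.  */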
2319
2320 while (max_size > 1)
2321 {
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2323 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2324 if (GET_MODE_SIZE (tmode) < max_size)
2325 mode = tmode;
2326
2327 if (mode == VOIDmode)
2328 break;
2329
2330 icode = mov_optab->handlers[(int) mode].insn_code;
2331 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2332 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2333
2334 max_size = GET_MODE_SIZE (mode);
2335 }
2336
2337 /* The code above should have handled everything. */
2338 if (data.len != 0)
2339 abort ();
2340 }
2341
2342 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2343 with move instructions for mode MODE. GENFUN is the gen_... function
2344 to make a move insn for that mode. DATA has all the other info. */
2345
2346 static void
2347 clear_by_pieces_1 (genfun, mode, data)
2348 rtx (*genfun) PARAMS ((rtx, ...));
2349 enum machine_mode mode;
2350 struct clear_by_pieces *data;
2351 {
2352 unsigned int size = GET_MODE_SIZE (mode);
2353 rtx to1;
2354
2355 while (data->len >= size)
2356 {
2357 if (data->reverse)
2358 data->offset -= size;
2359
2360 if (data->autinc_to)
2361 {
2362 to1 = gen_rtx_MEM (mode, data->to_addr);
2363 MEM_COPY_ATTRIBUTES (to1, data->to);
2364 }
2365 else
2366 to1 = change_address (data->to, mode,
2367 plus_constant (data->to_addr, data->offset));
2368
2369 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2370 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2371
2372 emit_insn ((*genfun) (to1, const0_rtx));
2373
2374 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2375 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2376
2377 if (! data->reverse)
2378 data->offset += size;
2379
2380 data->len -= size;
2381 }
2382 }
2383 \f
2384 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2385 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2386
2387 If we call a function that returns the length of the block, return it. */
2388
2389 rtx
2390 clear_storage (object, size, align)
2391 rtx object;
2392 rtx size;
2393 unsigned int align;
2394 {
2395 #ifdef TARGET_MEM_FUNCTIONS
2396 static tree fn;
2397 tree call_expr, arg_list;
2398 #endif
2399 rtx retval = 0;
2400
2401 if (GET_MODE (object) == BLKmode)
2402 {
2403 object = protect_from_queue (object, 1);
2404 size = protect_from_queue (size, 0);
2405
2406 if (GET_CODE (size) == CONST_INT
2407 && MOVE_BY_PIECES_P (INTVAL (size), align))
2408 clear_by_pieces (object, INTVAL (size), align);
2409 else
2410 {
2411 /* Try the most limited insn first, because there's no point
2412 including more than one in the machine description unless
2413 the more limited one has some advantage. */
2414
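/* ALIGN is in bits; OPALIGN passes the same alignment to the clrstr
   pattern measured in bytes.  */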
2415 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2416 enum machine_mode mode;
2417
2418 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2419 mode = GET_MODE_WIDER_MODE (mode))
2420 {
2421 enum insn_code code = clrstr_optab[(int) mode];
2422 insn_operand_predicate_fn pred;
2423
2424 if (code != CODE_FOR_nothing
2425 /* We don't need MODE to be narrower than
2426 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2427 the mode mask, as it is returned by the macro, it will
2428 definitely be less than the actual mode mask. */
2429 && ((GET_CODE (size) == CONST_INT
2430 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2431 <= (GET_MODE_MASK (mode) >> 1)))
2432 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2433 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2434 || (*pred) (object, BLKmode))
2435 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2436 || (*pred) (opalign, VOIDmode)))
2437 {
2438 rtx op1;
2439 rtx last = get_last_insn ();
2440 rtx pat;
2441
2442 op1 = convert_to_mode (mode, size, 1);
2443 pred = insn_data[(int) code].operand[1].predicate;
2444 if (pred != 0 && ! (*pred) (op1, mode))
2445 op1 = copy_to_mode_reg (mode, op1);
2446
2447 pat = GEN_FCN ((int) code) (object, op1, opalign);
2448 if (pat)
2449 {
2450 emit_insn (pat);
2451 return 0;
2452 }
2453 else
2454 delete_insns_since (last);
2455 }
2456 }
2457
2458 /* OBJECT or SIZE may have been passed through protect_from_queue.
2459
2460 It is unsafe to save the value generated by protect_from_queue
2461 and reuse it later. Consider what happens if emit_queue is
2462 called before the return value from protect_from_queue is used.
2463
2464 Expansion of the CALL_EXPR below will call emit_queue before
2465 we are finished emitting RTL for argument setup. So if we are
2466 not careful we could get the wrong value for an argument.
2467
2468 To avoid this problem we go ahead and emit code to copy OBJECT
2469 and SIZE into new pseudos. We can then place those new pseudos
2470 into an RTL_EXPR and use them later, even after a call to
2471 emit_queue.
2472
2473 Note this is not strictly needed for library calls since they
2474 do not call emit_queue before loading their arguments. However,
2475 we may need to have library calls call emit_queue in the future
2476 since failing to do so could cause problems for targets which
2477 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2478 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2479
2480 #ifdef TARGET_MEM_FUNCTIONS
2481 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2482 #else
2483 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2484 TREE_UNSIGNED (integer_type_node));
2485 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2486 #endif
2487
2488
2489 #ifdef TARGET_MEM_FUNCTIONS
2490 /* It is incorrect to use the libcall calling conventions to call
2491 memset in this context.
2492
2493 This could be a user call to memset and the user may wish to
2494 examine the return value from memset.
2495
2496 For targets where libcalls and normal calls have different
2497 conventions for returning pointers, we could end up generating
2498 incorrect code.
2499
2500 So instead of using a libcall sequence we build up a suitable
2501 CALL_EXPR and expand the call in the normal fashion. */
2502 if (fn == NULL_TREE)
2503 {
2504 tree fntype;
2505
2506 /* This was copied from except.c; I don't know if all this is
2507 necessary in this context or not. */
2508 fn = get_identifier ("memset");
2509 push_obstacks_nochange ();
2510 end_temporary_allocation ();
2511 fntype = build_pointer_type (void_type_node);
2512 fntype = build_function_type (fntype, NULL_TREE);
2513 fn = build_decl (FUNCTION_DECL, fn, fntype);
2514 ggc_add_tree_root (&fn, 1);
2515 DECL_EXTERNAL (fn) = 1;
2516 TREE_PUBLIC (fn) = 1;
2517 DECL_ARTIFICIAL (fn) = 1;
2518 make_decl_rtl (fn, NULL_PTR, 1);
2519 assemble_external (fn);
2520 pop_obstacks ();
2521 }
2522
2523 /* We need to make an argument list for the function call.
2524
2525 memset takes three arguments: the first is a void * address, the
2526 second an integer with the initialization value, and the last is a
2527 size_t byte count for the copy. */
2528 arg_list
2529 = build_tree_list (NULL_TREE,
2530 make_tree (build_pointer_type (void_type_node),
2531 object));
2532 TREE_CHAIN (arg_list)
2533 = build_tree_list (NULL_TREE,
2534 make_tree (integer_type_node, const0_rtx));
2535 TREE_CHAIN (TREE_CHAIN (arg_list))
2536 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2537 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2538
2539 /* Now we have to build up the CALL_EXPR itself. */
2540 call_expr = build1 (ADDR_EXPR,
2541 build_pointer_type (TREE_TYPE (fn)), fn);
2542 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2543 call_expr, arg_list, NULL_TREE);
2544 TREE_SIDE_EFFECTS (call_expr) = 1;
2545
2546 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2547 #else
2548 emit_library_call (bzero_libfunc, 0,
2549 VOIDmode, 2, object, Pmode, size,
2550 TYPE_MODE (integer_type_node));
2551 #endif
2552 }
2553 }
2554 else
2555 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2556
2557 return retval;
2558 }
2559
2560 /* Generate code to copy Y into X.
2561 Both Y and X must have the same mode, except that
2562 Y can be a constant with VOIDmode.
2563 This mode cannot be BLKmode; use emit_block_move for that.
2564
2565 Return the last instruction emitted. */
2566
2567 rtx
2568 emit_move_insn (x, y)
2569 rtx x, y;
2570 {
2571 enum machine_mode mode = GET_MODE (x);
2572
2573 x = protect_from_queue (x, 1);
2574 y = protect_from_queue (y, 0);
2575
2576 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2577 abort ();
2578
2579 /* Never force constant_p_rtx to memory. */
2580 if (GET_CODE (y) == CONSTANT_P_RTX)
2581 ;
2582 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2583 y = force_const_mem (mode, y);
2584
2585 /* If X or Y are memory references, verify that their addresses are valid
2586 for the machine. */
2587 if (GET_CODE (x) == MEM
2588 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2589 && ! push_operand (x, GET_MODE (x)))
2590 || (flag_force_addr
2591 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2592 x = change_address (x, VOIDmode, XEXP (x, 0));
2593
2594 if (GET_CODE (y) == MEM
2595 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2596 || (flag_force_addr
2597 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2598 y = change_address (y, VOIDmode, XEXP (y, 0));
2599
2600 if (mode == BLKmode)
2601 abort ();
2602
2603 return emit_move_insn_1 (x, y);
2604 }
2605
2606 /* Low level part of emit_move_insn.
2607 Called just like emit_move_insn, but assumes X and Y
2608 are basically valid. */
2609
2610 rtx
2611 emit_move_insn_1 (x, y)
2612 rtx x, y;
2613 {
2614 enum machine_mode mode = GET_MODE (x);
2615 enum machine_mode submode;
2616 enum mode_class class = GET_MODE_CLASS (mode);
2617 unsigned int i;
2618
2619 if (mode >= MAX_MACHINE_MODE)
2620 abort ();
2621
2622 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2623 return
2624 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2625
2626 /* Expand complex moves by moving real part and imag part, if possible. */
2627 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2628 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2629 * BITS_PER_UNIT),
2630 (class == MODE_COMPLEX_INT
2631 ? MODE_INT : MODE_FLOAT),
2632 0))
2633 && (mov_optab->handlers[(int) submode].insn_code
2634 != CODE_FOR_nothing))
2635 {
2636 /* Don't split destination if it is a stack push. */
2637 int stack = push_operand (x, GET_MODE (x));
2638
2639 /* If this is a stack, push the highpart first, so it
2640 will be in the argument order.
2641
2642 In that case, change_address is used only to convert
2643 the mode, not to change the address. */
2644 if (stack)
2645 {
2646 /* Note that the real part always precedes the imag part in memory
2647 regardless of machine's endianness. */
2648 #ifdef STACK_GROWS_DOWNWARD
2649 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2650 (gen_rtx_MEM (submode, XEXP (x, 0)),
2651 gen_imagpart (submode, y)));
2652 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2653 (gen_rtx_MEM (submode, XEXP (x, 0)),
2654 gen_realpart (submode, y)));
2655 #else
2656 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2657 (gen_rtx_MEM (submode, XEXP (x, 0)),
2658 gen_realpart (submode, y)));
2659 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2660 (gen_rtx_MEM (submode, XEXP (x, 0)),
2661 gen_imagpart (submode, y)));
2662 #endif
2663 }
2664 else
2665 {
2666 rtx realpart_x, realpart_y;
2667 rtx imagpart_x, imagpart_y;
2668
2669 /* If this is a complex value with each part being smaller than a
2670 word, the usual calling sequence will likely pack the pieces into
2671 a single register. Unfortunately, SUBREG of hard registers only
2672 deals in terms of words, so we have a problem converting input
2673 arguments to the CONCAT of two registers that is used elsewhere
2674 for complex values. If this is before reload, we can copy it into
2675 memory and reload. FIXME, we should see about using extract and
2676 insert on integer registers, but complex short and complex char
2677 variables should be rarely used. */
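/* For example, a complex char value is only 2 * BITS_PER_UNIT wide; if
   either operand is a hard register holding it packed, the value is
   bounced through a stack temporary of an equally wide scalar mode
   (reg_mode) instead of being split into word-sized SUBREGs.  */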
2678 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2679 && (reload_in_progress | reload_completed) == 0)
2680 {
2681 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2682 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2683
2684 if (packed_dest_p || packed_src_p)
2685 {
2686 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2687 ? MODE_FLOAT : MODE_INT);
2688
2689 enum machine_mode reg_mode =
2690 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2691
2692 if (reg_mode != BLKmode)
2693 {
2694 rtx mem = assign_stack_temp (reg_mode,
2695 GET_MODE_SIZE (mode), 0);
2696
2697 rtx cmem = change_address (mem, mode, NULL_RTX);
2698
2699 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2700
2701 if (packed_dest_p)
2702 {
2703 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2704 emit_move_insn_1 (cmem, y);
2705 return emit_move_insn_1 (sreg, mem);
2706 }
2707 else
2708 {
2709 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2710 emit_move_insn_1 (mem, sreg);
2711 return emit_move_insn_1 (x, cmem);
2712 }
2713 }
2714 }
2715 }
2716
2717 realpart_x = gen_realpart (submode, x);
2718 realpart_y = gen_realpart (submode, y);
2719 imagpart_x = gen_imagpart (submode, x);
2720 imagpart_y = gen_imagpart (submode, y);
2721
2722 /* Show the output dies here. This is necessary for SUBREGs
2723 of pseudos since we cannot track their lifetimes correctly;
2724 hard regs shouldn't appear here except as return values.
2725 We never want to emit such a clobber after reload. */
2726 if (x != y
2727 && ! (reload_in_progress || reload_completed)
2728 && (GET_CODE (realpart_x) == SUBREG
2729 || GET_CODE (imagpart_x) == SUBREG))
2730 {
2731 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2732 }
2733
2734 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2735 (realpart_x, realpart_y));
2736 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2737 (imagpart_x, imagpart_y));
2738 }
2739
2740 return get_last_insn ();
2741 }
2742
2743 /* This will handle any multi-word mode that lacks a move_insn pattern.
2744 However, you will get better code if you define such patterns,
2745 even if they must turn into multiple assembler instructions. */
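/* The strategy is to move the value one word at a time via operand_subword,
   collect the moves into a SEQUENCE, and precede them with a CLOBBER of the
   destination when any piece is a SUBREG (see the comment next to the
   CLOBBER below).  */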
2746 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2747 {
2748 rtx last_insn = 0;
2749 rtx seq, inner;
2750 int need_clobber;
2751
2752 #ifdef PUSH_ROUNDING
2753
2754 /* If X is a push on the stack, do the push now and replace
2755 X with a reference to the stack pointer. */
2756 if (push_operand (x, GET_MODE (x)))
2757 {
2758 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2759 x = change_address (x, VOIDmode, stack_pointer_rtx);
2760 }
2761 #endif
2762
2763 /* If we are in reload, see if either operand is a MEM whose address
2764 is scheduled for replacement. */
2765 if (reload_in_progress && GET_CODE (x) == MEM
2766 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2767 {
2768 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2769
2770 MEM_COPY_ATTRIBUTES (new, x);
2771 x = new;
2772 }
2773 if (reload_in_progress && GET_CODE (y) == MEM
2774 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2775 {
2776 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2777
2778 MEM_COPY_ATTRIBUTES (new, y);
2779 y = new;
2780 }
2781
2782 start_sequence ();
2783
2784 need_clobber = 0;
2785 for (i = 0;
2786 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2787 i++)
2788 {
2789 rtx xpart = operand_subword (x, i, 1, mode);
2790 rtx ypart = operand_subword (y, i, 1, mode);
2791
2792 /* If we can't get a part of Y, put Y into memory if it is a
2793 constant. Otherwise, force it into a register. If we still
2794 can't get a part of Y, abort. */
2795 if (ypart == 0 && CONSTANT_P (y))
2796 {
2797 y = force_const_mem (mode, y);
2798 ypart = operand_subword (y, i, 1, mode);
2799 }
2800 else if (ypart == 0)
2801 ypart = operand_subword_force (y, i, mode);
2802
2803 if (xpart == 0 || ypart == 0)
2804 abort ();
2805
2806 need_clobber |= (GET_CODE (xpart) == SUBREG);
2807
2808 last_insn = emit_move_insn (xpart, ypart);
2809 }
2810
2811 seq = gen_sequence ();
2812 end_sequence ();
2813
2814 /* Show the output dies here. This is necessary for SUBREGs
2815 of pseudos since we cannot track their lifetimes correctly;
2816 hard regs shouldn't appear here except as return values.
2817 We never want to emit such a clobber after reload. */
2818 if (x != y
2819 && ! (reload_in_progress || reload_completed)
2820 && need_clobber != 0)
2821 {
2822 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2823 }
2824
2825 emit_insn (seq);
2826
2827 return last_insn;
2828 }
2829 else
2830 abort ();
2831 }
2832 \f
2833 /* Pushing data onto the stack. */
2834
2835 /* Push a block of length SIZE (perhaps variable)
2836 and return an rtx to address the beginning of the block.
2837 Note that it is not possible for the value returned to be a QUEUED.
2838 The value may be virtual_outgoing_args_rtx.
2839
2840 EXTRA is the number of bytes of padding to push in addition to SIZE.
2841 BELOW nonzero means this padding comes at low addresses;
2842 otherwise, the padding comes at high addresses. */
2843
2844 rtx
2845 push_block (size, extra, below)
2846 rtx size;
2847 int extra, below;
2848 {
2849 register rtx temp;
2850
2851 size = convert_modes (Pmode, ptr_mode, size, 1);
2852 if (CONSTANT_P (size))
2853 anti_adjust_stack (plus_constant (size, extra));
2854 else if (GET_CODE (size) == REG && extra == 0)
2855 anti_adjust_stack (size);
2856 else
2857 {
2858 temp = copy_to_mode_reg (Pmode, size);
2859 if (extra != 0)
2860 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2861 temp, 0, OPTAB_LIB_WIDEN);
2862 anti_adjust_stack (temp);
2863 }
2864
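/* The preprocessor conditionals below reduce to "if (1)" when the stack
   grows downward, to "if (!ACCUMULATE_OUTGOING_ARGS)" when only the args
   grow downward, and to "if (0)" otherwise.  */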
2865 #ifndef STACK_GROWS_DOWNWARD
2866 #ifdef ARGS_GROW_DOWNWARD
2867 if (!ACCUMULATE_OUTGOING_ARGS)
2868 #else
2869 if (0)
2870 #endif
2871 #else
2872 if (1)
2873 #endif
2874 {
2875 /* Return the lowest stack address when STACK or ARGS grow downward and
2876 we are not accumulating outgoing arguments (the c4x port uses such
2877 conventions). */
2878 temp = virtual_outgoing_args_rtx;
2879 if (extra != 0 && below)
2880 temp = plus_constant (temp, extra);
2881 }
2882 else
2883 {
2884 if (GET_CODE (size) == CONST_INT)
2885 temp = plus_constant (virtual_outgoing_args_rtx,
2886 - INTVAL (size) - (below ? 0 : extra));
2887 else if (extra != 0 && !below)
2888 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2889 negate_rtx (Pmode, plus_constant (size, extra)));
2890 else
2891 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2892 negate_rtx (Pmode, size));
2893 }
2894
2895 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2896 }
2897
2898 rtx
2899 gen_push_operand ()
2900 {
2901 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2902 }
2903
2904 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2905 block of SIZE bytes. */
2906
2907 static rtx
2908 get_push_address (size)
2909 int size;
2910 {
2911 register rtx temp;
2912
2913 if (STACK_PUSH_CODE == POST_DEC)
2914 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2915 else if (STACK_PUSH_CODE == POST_INC)
2916 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2917 else
2918 temp = stack_pointer_rtx;
2919
2920 return copy_to_reg (temp);
2921 }
2922
2923 /* Generate code to push X onto the stack, assuming it has mode MODE and
2924 type TYPE.
2925 MODE is redundant except when X is a CONST_INT (since they don't
2926 carry mode info).
2927 SIZE is an rtx for the size of data to be copied (in bytes),
2928 needed only if X is BLKmode.
2929
2930 ALIGN is the maximum alignment we can assume.
2931
2932 If PARTIAL and REG are both nonzero, then copy that many of the first
2933 words of X into registers starting with REG, and push the rest of X.
2934 The amount of space pushed is decreased by PARTIAL words,
2935 rounded *down* to a multiple of PARM_BOUNDARY.
2936 REG must be a hard register in this case.
2937 If REG is zero but PARTIAL is not, take all other actions for an
2938 argument partially in registers, but do not actually load any
2939 registers.
2940
2941 EXTRA is the amount in bytes of extra space to leave next to this arg.
2942 This is ignored if an argument block has already been allocated.
2943
2944 On a machine that lacks real push insns, ARGS_ADDR is the address of
2945 the bottom of the argument block for this call. We use indexing off there
2946 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2947 argument block has not been preallocated.
2948
2949 ARGS_SO_FAR is the size of args previously pushed for this call.
2950
2951 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2952 for arguments passed in registers. If nonzero, it will be the number
2953 of bytes required. */
2954
2955 void
2956 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2957 args_addr, args_so_far, reg_parm_stack_space,
2958 alignment_pad)
2959 register rtx x;
2960 enum machine_mode mode;
2961 tree type;
2962 rtx size;
2963 unsigned int align;
2964 int partial;
2965 rtx reg;
2966 int extra;
2967 rtx args_addr;
2968 rtx args_so_far;
2969 int reg_parm_stack_space;
2970 rtx alignment_pad;
2971 {
2972 rtx xinner;
2973 enum direction stack_direction
2974 #ifdef STACK_GROWS_DOWNWARD
2975 = downward;
2976 #else
2977 = upward;
2978 #endif
2979
2980 /* Decide where to pad the argument: `downward' for below,
2981 `upward' for above, or `none' for don't pad it.
2982 Default is below for small data on big-endian machines; else above. */
2983 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2984
2985 /* Invert direction if stack is post-update. */
2986 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2987 if (where_pad != none)
2988 where_pad = (where_pad == downward ? upward : downward);
2989
2990 xinner = x = protect_from_queue (x, 0);
2991
2992 if (mode == BLKmode)
2993 {
2994 /* Copy a block into the stack, entirely or partially. */
2995
2996 register rtx temp;
2997 int used = partial * UNITS_PER_WORD;
2998 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2999 int skip;
3000
3001 if (size == 0)
3002 abort ();
3003
3004 used -= offset;
3005
3006 /* USED is now the # of bytes we need not copy to the stack
3007 because registers will take care of them. */
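/* For example, with 4-byte words, PARTIAL == 3 and a 64-bit PARM_BOUNDARY,
   USED starts at 12 bytes and is rounded down to 8, so the copy below
   begins 8 bytes into X.  */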
3008
3009 if (partial != 0)
3010 xinner = change_address (xinner, BLKmode,
3011 plus_constant (XEXP (xinner, 0), used));
3012
3013 /* If the partial register-part of the arg counts in its stack size,
3014 skip the part of stack space corresponding to the registers.
3015 Otherwise, start copying to the beginning of the stack space,
3016 by setting SKIP to 0. */
3017 skip = (reg_parm_stack_space == 0) ? 0 : used;
3018
3019 #ifdef PUSH_ROUNDING
3020 /* Do it with several push insns if that doesn't take lots of insns
3021 and if there is no difficulty with push insns that skip bytes
3022 on the stack for alignment purposes. */
3023 if (args_addr == 0
3024 && PUSH_ARGS
3025 && GET_CODE (size) == CONST_INT
3026 && skip == 0
3027 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3028 /* Here we avoid the case of a structure whose weak alignment
3029 forces many pushes of a small amount of data,
3030 and such small pushes do rounding that causes trouble. */
3031 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3032 || align >= BIGGEST_ALIGNMENT
3033 || PUSH_ROUNDING (align) == align)
3034 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3035 {
3036 /* Push padding now if padding above and stack grows down,
3037 or if padding below and stack grows up.
3038 But if space already allocated, this has already been done. */
3039 if (extra && args_addr == 0
3040 && where_pad != none && where_pad != stack_direction)
3041 anti_adjust_stack (GEN_INT (extra));
3042
3043 stack_pointer_delta += INTVAL (size) - used;
3044 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3045 INTVAL (size) - used, align);
3046
3047 if (current_function_check_memory_usage && ! in_check_memory_usage)
3048 {
3049 rtx temp;
3050
3051 in_check_memory_usage = 1;
3052 temp = get_push_address (INTVAL(size) - used);
3053 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3054 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3055 temp, Pmode,
3056 XEXP (xinner, 0), Pmode,
3057 GEN_INT (INTVAL(size) - used),
3058 TYPE_MODE (sizetype));
3059 else
3060 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3061 temp, Pmode,
3062 GEN_INT (INTVAL(size) - used),
3063 TYPE_MODE (sizetype),
3064 GEN_INT (MEMORY_USE_RW),
3065 TYPE_MODE (integer_type_node));
3066 in_check_memory_usage = 0;
3067 }
3068 }
3069 else
3070 #endif /* PUSH_ROUNDING */
3071 {
3072 rtx target;
3073
3074 /* Otherwise make space on the stack and copy the data
3075 to the address of that space. */
3076
3077 /* Deduct words put into registers from the size we must copy. */
3078 if (partial != 0)
3079 {
3080 if (GET_CODE (size) == CONST_INT)
3081 size = GEN_INT (INTVAL (size) - used);
3082 else
3083 size = expand_binop (GET_MODE (size), sub_optab, size,
3084 GEN_INT (used), NULL_RTX, 0,
3085 OPTAB_LIB_WIDEN);
3086 }
3087
3088 /* Get the address of the stack space.
3089 In this case, we do not deal with EXTRA separately.
3090 A single stack adjust will do. */
3091 if (! args_addr)
3092 {
3093 temp = push_block (size, extra, where_pad == downward);
3094 extra = 0;
3095 }
3096 else if (GET_CODE (args_so_far) == CONST_INT)
3097 temp = memory_address (BLKmode,
3098 plus_constant (args_addr,
3099 skip + INTVAL (args_so_far)));
3100 else
3101 temp = memory_address (BLKmode,
3102 plus_constant (gen_rtx_PLUS (Pmode,
3103 args_addr,
3104 args_so_far),
3105 skip));
3106 if (current_function_check_memory_usage && ! in_check_memory_usage)
3107 {
3108 in_check_memory_usage = 1;
3109 target = copy_to_reg (temp);
3110 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3111 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3112 target, Pmode,
3113 XEXP (xinner, 0), Pmode,
3114 size, TYPE_MODE (sizetype));
3115 else
3116 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3117 target, Pmode,
3118 size, TYPE_MODE (sizetype),
3119 GEN_INT (MEMORY_USE_RW),
3120 TYPE_MODE (integer_type_node));
3121 in_check_memory_usage = 0;
3122 }
3123
3124 target = gen_rtx_MEM (BLKmode, temp);
3125
3126 if (type != 0)
3127 {
3128 set_mem_attributes (target, type, 1);
3129 /* Function incoming arguments may overlap with sibling call
3130 outgoing arguments and we cannot allow reordering of reads
3131 from function arguments with stores to outgoing arguments
3132 of sibling calls. */
3133 MEM_ALIAS_SET (target) = 0;
3134 }
3135
3136 /* TEMP is the address of the block. Copy the data there. */
3137 if (GET_CODE (size) == CONST_INT
3138 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3139 {
3140 move_by_pieces (target, xinner, INTVAL (size), align);
3141 goto ret;
3142 }
3143 else
3144 {
3145 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3146 enum machine_mode mode;
3147
3148 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3149 mode != VOIDmode;
3150 mode = GET_MODE_WIDER_MODE (mode))
3151 {
3152 enum insn_code code = movstr_optab[(int) mode];
3153 insn_operand_predicate_fn pred;
3154
3155 if (code != CODE_FOR_nothing
3156 && ((GET_CODE (size) == CONST_INT
3157 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3158 <= (GET_MODE_MASK (mode) >> 1)))
3159 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3160 && (!(pred = insn_data[(int) code].operand[0].predicate)
3161 || ((*pred) (target, BLKmode)))
3162 && (!(pred = insn_data[(int) code].operand[1].predicate)
3163 || ((*pred) (xinner, BLKmode)))
3164 && (!(pred = insn_data[(int) code].operand[3].predicate)
3165 || ((*pred) (opalign, VOIDmode))))
3166 {
3167 rtx op2 = convert_to_mode (mode, size, 1);
3168 rtx last = get_last_insn ();
3169 rtx pat;
3170
3171 pred = insn_data[(int) code].operand[2].predicate;
3172 if (pred != 0 && ! (*pred) (op2, mode))
3173 op2 = copy_to_mode_reg (mode, op2);
3174
3175 pat = GEN_FCN ((int) code) (target, xinner,
3176 op2, opalign);
3177 if (pat)
3178 {
3179 emit_insn (pat);
3180 goto ret;
3181 }
3182 else
3183 delete_insns_since (last);
3184 }
3185 }
3186 }
3187
3188 if (!ACCUMULATE_OUTGOING_ARGS)
3189 {
3190 /* If the source is referenced relative to the stack pointer,
3191 copy it to another register to stabilize it. We do not need
3192 to do this if we know that we won't be changing sp. */
3193
3194 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3195 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3196 temp = copy_to_reg (temp);
3197 }
3198
3199 /* Make inhibit_defer_pop nonzero around the library call
3200 to force it to pop the bcopy-arguments right away. */
3201 NO_DEFER_POP;
3202 #ifdef TARGET_MEM_FUNCTIONS
3203 emit_library_call (memcpy_libfunc, 0,
3204 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3205 convert_to_mode (TYPE_MODE (sizetype),
3206 size, TREE_UNSIGNED (sizetype)),
3207 TYPE_MODE (sizetype));
3208 #else
3209 emit_library_call (bcopy_libfunc, 0,
3210 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3211 convert_to_mode (TYPE_MODE (integer_type_node),
3212 size,
3213 TREE_UNSIGNED (integer_type_node)),
3214 TYPE_MODE (integer_type_node));
3215 #endif
3216 OK_DEFER_POP;
3217 }
3218 }
3219 else if (partial > 0)
3220 {
3221 /* Scalar partly in registers. */
3222
3223 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3224 int i;
3225 int not_stack;
3226 /* # words of start of argument
3227 that we must make space for but need not store. */
3228 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3229 int args_offset = INTVAL (args_so_far);
3230 int skip;
3231
3232 /* Push padding now if padding above and stack grows down,
3233 or if padding below and stack grows up.
3234 But if space already allocated, this has already been done. */
3235 if (extra && args_addr == 0
3236 && where_pad != none && where_pad != stack_direction)
3237 anti_adjust_stack (GEN_INT (extra));
3238
3239 /* If we make space by pushing it, we might as well push
3240 the real data. Otherwise, we can leave OFFSET nonzero
3241 and leave the space uninitialized. */
3242 if (args_addr == 0)
3243 offset = 0;
3244
3245 /* Now NOT_STACK gets the number of words that we don't need to
3246 allocate on the stack. */
3247 not_stack = partial - offset;
3248
3249 /* If the partial register-part of the arg counts in its stack size,
3250 skip the part of stack space corresponding to the registers.
3251 Otherwise, start copying to the beginning of the stack space,
3252 by setting SKIP to 0. */
3253 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3254
3255 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3256 x = validize_mem (force_const_mem (mode, x));
3257
3258 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3259 SUBREGs of such registers are not allowed. */
3260 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3261 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3262 x = copy_to_reg (x);
3263
3264 /* Loop over all the words allocated on the stack for this arg. */
3265 /* We can do it by words, because any scalar bigger than a word
3266 has a size a multiple of a word. */
3267 #ifndef PUSH_ARGS_REVERSED
3268 for (i = not_stack; i < size; i++)
3269 #else
3270 for (i = size - 1; i >= not_stack; i--)
3271 #endif
3272 if (i >= not_stack + offset)
3273 emit_push_insn (operand_subword_force (x, i, mode),
3274 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3275 0, args_addr,
3276 GEN_INT (args_offset + ((i - not_stack + skip)
3277 * UNITS_PER_WORD)),
3278 reg_parm_stack_space, alignment_pad);
3279 }
3280 else
3281 {
3282 rtx addr;
3283 rtx target = NULL_RTX;
3284 rtx dest;
3285
3286 /* Push padding now if padding above and stack grows down,
3287 or if padding below and stack grows up.
3288 But if space already allocated, this has already been done. */
3289 if (extra && args_addr == 0
3290 && where_pad != none && where_pad != stack_direction)
3291 anti_adjust_stack (GEN_INT (extra));
3292
3293 #ifdef PUSH_ROUNDING
3294 if (args_addr == 0 && PUSH_ARGS)
3295 {
3296 addr = gen_push_operand ();
3297 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3298 }
3299 else
3300 #endif
3301 {
3302 if (GET_CODE (args_so_far) == CONST_INT)
3303 addr
3304 = memory_address (mode,
3305 plus_constant (args_addr,
3306 INTVAL (args_so_far)));
3307 else
3308 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3309 args_so_far));
3310 target = addr;
3311 }
3312
3313 dest = gen_rtx_MEM (mode, addr);
3314 if (type != 0)
3315 {
3316 set_mem_attributes (dest, type, 1);
3317 /* Function incoming arguments may overlap with sibling call
3318 outgoing arguments and we cannot allow reordering of reads
3319 from function arguments with stores to outgoing arguments
3320 of sibling calls. */
3321 MEM_ALIAS_SET (dest) = 0;
3322 }
3323
3324 emit_move_insn (dest, x);
3325
3326 if (current_function_check_memory_usage && ! in_check_memory_usage)
3327 {
3328 in_check_memory_usage = 1;
3329 if (target == 0)
3330 target = get_push_address (GET_MODE_SIZE (mode));
3331
3332 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3333 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3334 target, Pmode,
3335 XEXP (x, 0), Pmode,
3336 GEN_INT (GET_MODE_SIZE (mode)),
3337 TYPE_MODE (sizetype));
3338 else
3339 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3340 target, Pmode,
3341 GEN_INT (GET_MODE_SIZE (mode)),
3342 TYPE_MODE (sizetype),
3343 GEN_INT (MEMORY_USE_RW),
3344 TYPE_MODE (integer_type_node));
3345 in_check_memory_usage = 0;
3346 }
3347 }
3348
3349 ret:
3350 /* If part should go in registers, copy that part
3351 into the appropriate registers. Do this now, at the end,
3352 since mem-to-mem copies above may do function calls. */
3353 if (partial > 0 && reg != 0)
3354 {
3355 /* Handle calls that pass values in multiple non-contiguous locations.
3356 The Irix 6 ABI has examples of this. */
3357 if (GET_CODE (reg) == PARALLEL)
3358 emit_group_load (reg, x, -1, align); /* ??? size? */
3359 else
3360 move_block_to_reg (REGNO (reg), x, partial, mode);
3361 }
3362
3363 if (extra && args_addr == 0 && where_pad == stack_direction)
3364 anti_adjust_stack (GEN_INT (extra));
3365
3366 if (alignment_pad && args_addr == 0)
3367 anti_adjust_stack (alignment_pad);
3368 }
3369 \f
3370 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3371 operations. */
3372
3373 static rtx
3374 get_subtarget (x)
3375 rtx x;
3376 {
3377 return ((x == 0
3378 /* Only registers can be subtargets. */
3379 || GET_CODE (x) != REG
3380 /* If the register is readonly, it can't be set more than once. */
3381 || RTX_UNCHANGING_P (x)
3382 /* Don't use hard regs to avoid extending their life. */
3383 || REGNO (x) < FIRST_PSEUDO_REGISTER
3384 /* Avoid subtargets inside loops,
3385 since they hide some invariant expressions. */
3386 || preserve_subexpressions_p ())
3387 ? 0 : x);
3388 }
3389
3390 /* Expand an assignment that stores the value of FROM into TO.
3391 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3392 (This may contain a QUEUED rtx;
3393 if the value is constant, this rtx is a constant.)
3394 Otherwise, the returned value is NULL_RTX.
3395
3396 SUGGEST_REG is no longer actually used.
3397 It used to mean, copy the value through a register
3398 and return that register, if that is possible.
3399 We now use WANT_VALUE to decide whether to do this. */
3400
3401 rtx
3402 expand_assignment (to, from, want_value, suggest_reg)
3403 tree to, from;
3404 int want_value;
3405 int suggest_reg ATTRIBUTE_UNUSED;
3406 {
3407 register rtx to_rtx = 0;
3408 rtx result;
3409
3410 /* Don't crash if the lhs of the assignment was erroneous. */
3411
3412 if (TREE_CODE (to) == ERROR_MARK)
3413 {
3414 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3415 return want_value ? result : NULL_RTX;
3416 }
3417
3418 /* Assignment of a structure component needs special treatment
3419 if the structure component's rtx is not simply a MEM.
3420 Assignment of an array element at a constant index, and assignment of
3421 an array element in an unaligned packed structure field, have the same
3422 problem. */
3423
3424 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3425 || TREE_CODE (to) == ARRAY_REF)
3426 {
3427 enum machine_mode mode1;
3428 HOST_WIDE_INT bitsize, bitpos;
3429 tree offset;
3430 int unsignedp;
3431 int volatilep = 0;
3432 tree tem;
3433 unsigned int alignment;
3434
3435 push_temp_slots ();
3436 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3437 &unsignedp, &volatilep, &alignment);
3438
3439 /* If we are going to use store_bit_field and extract_bit_field,
3440 make sure to_rtx will be safe for multiple use. */
3441
3442 if (mode1 == VOIDmode && want_value)
3443 tem = stabilize_reference (tem);
3444
3445 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3446 if (offset != 0)
3447 {
3448 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3449
3450 if (GET_CODE (to_rtx) != MEM)
3451 abort ();
3452
3453 if (GET_MODE (offset_rtx) != ptr_mode)
3454 {
3455 #ifdef POINTERS_EXTEND_UNSIGNED
3456 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3457 #else
3458 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3459 #endif
3460 }
3461
3462 /* A constant address in TO_RTX can have VOIDmode; we must not try
3463 to call force_reg in that case, so avoid it. */
3464 if (GET_CODE (to_rtx) == MEM
3465 && GET_MODE (to_rtx) == BLKmode
3466 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3467 && bitsize
3468 && (bitpos % bitsize) == 0
3469 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3470 && alignment == GET_MODE_ALIGNMENT (mode1))
3471 {
3472 rtx temp = change_address (to_rtx, mode1,
3473 plus_constant (XEXP (to_rtx, 0),
3474 (bitpos /
3475 BITS_PER_UNIT)));
3476 if (GET_CODE (XEXP (temp, 0)) == REG)
3477 to_rtx = temp;
3478 else
3479 to_rtx = change_address (to_rtx, mode1,
3480 force_reg (GET_MODE (XEXP (temp, 0)),
3481 XEXP (temp, 0)));
3482 bitpos = 0;
3483 }
3484
3485 to_rtx = change_address (to_rtx, VOIDmode,
3486 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3487 force_reg (ptr_mode,
3488 offset_rtx)));
3489 }
3490
3491 if (volatilep)
3492 {
3493 if (GET_CODE (to_rtx) == MEM)
3494 {
3495 /* When the offset is zero, to_rtx is the address of the
3496 structure we are storing into, and hence may be shared.
3497 We must make a new MEM before setting the volatile bit. */
3498 if (offset == 0)
3499 to_rtx = copy_rtx (to_rtx);
3500
3501 MEM_VOLATILE_P (to_rtx) = 1;
3502 }
3503 #if 0 /* This was turned off because, when a field is volatile
3504 in an object which is not volatile, the object may be in a register,
3505 and then we would abort over here. */
3506 else
3507 abort ();
3508 #endif
3509 }
3510
3511 if (TREE_CODE (to) == COMPONENT_REF
3512 && TREE_READONLY (TREE_OPERAND (to, 1)))
3513 {
3514 if (offset == 0)
3515 to_rtx = copy_rtx (to_rtx);
3516
3517 RTX_UNCHANGING_P (to_rtx) = 1;
3518 }
3519
3520 /* Check the access. */
3521 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3522 {
3523 rtx to_addr;
3524 int size;
3525 int best_mode_size;
3526 enum machine_mode best_mode;
3527
3528 best_mode = get_best_mode (bitsize, bitpos,
3529 TYPE_ALIGN (TREE_TYPE (tem)),
3530 mode1, volatilep);
3531 if (best_mode == VOIDmode)
3532 best_mode = QImode;
3533
3534 best_mode_size = GET_MODE_BITSIZE (best_mode);
3535 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3536 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3537 size *= GET_MODE_SIZE (best_mode);
3538
3539 /* Check the access right of the pointer. */
3540 in_check_memory_usage = 1;
3541 if (size)
3542 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3543 to_addr, Pmode,
3544 GEN_INT (size), TYPE_MODE (sizetype),
3545 GEN_INT (MEMORY_USE_WO),
3546 TYPE_MODE (integer_type_node));
3547 in_check_memory_usage = 0;
3548 }
3549
3550 /* If this is a varying-length object, we must get the address of
3551 the source and do an explicit block move. */
3552 if (bitsize < 0)
3553 {
3554 unsigned int from_align;
3555 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3556 rtx inner_to_rtx
3557 = change_address (to_rtx, VOIDmode,
3558 plus_constant (XEXP (to_rtx, 0),
3559 bitpos / BITS_PER_UNIT));
3560
3561 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3562 MIN (alignment, from_align));
3563 free_temp_slots ();
3564 pop_temp_slots ();
3565 return to_rtx;
3566 }
3567 else
3568 {
3569 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3570 (want_value
3571 /* Spurious cast for HPUX compiler. */
3572 ? ((enum machine_mode)
3573 TYPE_MODE (TREE_TYPE (to)))
3574 : VOIDmode),
3575 unsignedp,
3576 alignment,
3577 int_size_in_bytes (TREE_TYPE (tem)),
3578 get_alias_set (to));
3579
3580 preserve_temp_slots (result);
3581 free_temp_slots ();
3582 pop_temp_slots ();
3583
3584 /* If the value is meaningful, convert RESULT to the proper mode.
3585 Otherwise, return nothing. */
3586 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3587 TYPE_MODE (TREE_TYPE (from)),
3588 result,
3589 TREE_UNSIGNED (TREE_TYPE (to)))
3590 : NULL_RTX);
3591 }
3592 }
3593
3594 /* If the rhs is a function call and its value is not an aggregate,
3595 call the function before we start to compute the lhs.
3596 This is needed for correct code for cases such as
3597 val = setjmp (buf) on machines where reference to val
3598 requires loading up part of an address in a separate insn.
3599
3600 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3601 since it might be a promoted variable where the zero- or sign- extension
3602 needs to be done. Handling this in the normal way is safe because no
3603 computation is done before the call. */
3604 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3605 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3606 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3607 && GET_CODE (DECL_RTL (to)) == REG))
3608 {
3609 rtx value;
3610
3611 push_temp_slots ();
3612 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3613 if (to_rtx == 0)
3614 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3615
3616 /* Handle calls that return values in multiple non-contiguous locations.
3617 The Irix 6 ABI has examples of this. */
3618 if (GET_CODE (to_rtx) == PARALLEL)
3619 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3620 TYPE_ALIGN (TREE_TYPE (from)));
3621 else if (GET_MODE (to_rtx) == BLKmode)
3622 emit_block_move (to_rtx, value, expr_size (from),
3623 TYPE_ALIGN (TREE_TYPE (from)));
3624 else
3625 {
3626 #ifdef POINTERS_EXTEND_UNSIGNED
3627 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3628 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3629 value = convert_memory_address (GET_MODE (to_rtx), value);
3630 #endif
3631 emit_move_insn (to_rtx, value);
3632 }
3633 preserve_temp_slots (to_rtx);
3634 free_temp_slots ();
3635 pop_temp_slots ();
3636 return want_value ? to_rtx : NULL_RTX;
3637 }
3638
3639 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3640 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3641
3642 if (to_rtx == 0)
3643 {
3644 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3645 if (GET_CODE (to_rtx) == MEM)
3646 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3647 }
3648
3649 /* Don't move directly into a return register. */
3650 if (TREE_CODE (to) == RESULT_DECL
3651 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3652 {
3653 rtx temp;
3654
3655 push_temp_slots ();
3656 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3657
3658 if (GET_CODE (to_rtx) == PARALLEL)
3659 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3660 TYPE_ALIGN (TREE_TYPE (from)));
3661 else
3662 emit_move_insn (to_rtx, temp);
3663
3664 preserve_temp_slots (to_rtx);
3665 free_temp_slots ();
3666 pop_temp_slots ();
3667 return want_value ? to_rtx : NULL_RTX;
3668 }
3669
3670 /* In case we are returning the contents of an object which overlaps
3671 the place the value is being stored, use a safe function when copying
3672 a value through a pointer into a structure value return block. */
3673 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3674 && current_function_returns_struct
3675 && !current_function_returns_pcc_struct)
3676 {
3677 rtx from_rtx, size;
3678
3679 push_temp_slots ();
3680 size = expr_size (from);
3681 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3682 EXPAND_MEMORY_USE_DONT);
3683
3684 /* Copy the rights of the bitmap. */
3685 if (current_function_check_memory_usage)
3686 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3687 XEXP (to_rtx, 0), Pmode,
3688 XEXP (from_rtx, 0), Pmode,
3689 convert_to_mode (TYPE_MODE (sizetype),
3690 size, TREE_UNSIGNED (sizetype)),
3691 TYPE_MODE (sizetype));
3692
3693 #ifdef TARGET_MEM_FUNCTIONS
3694 emit_library_call (memcpy_libfunc, 0,
3695 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3696 XEXP (from_rtx, 0), Pmode,
3697 convert_to_mode (TYPE_MODE (sizetype),
3698 size, TREE_UNSIGNED (sizetype)),
3699 TYPE_MODE (sizetype));
3700 #else
3701 emit_library_call (bcopy_libfunc, 0,
3702 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3703 XEXP (to_rtx, 0), Pmode,
3704 convert_to_mode (TYPE_MODE (integer_type_node),
3705 size, TREE_UNSIGNED (integer_type_node)),
3706 TYPE_MODE (integer_type_node));
3707 #endif
3708
3709 preserve_temp_slots (to_rtx);
3710 free_temp_slots ();
3711 pop_temp_slots ();
3712 return want_value ? to_rtx : NULL_RTX;
3713 }
3714
3715 /* Compute FROM and store the value in the rtx we got. */
3716
3717 push_temp_slots ();
3718 result = store_expr (from, to_rtx, want_value);
3719 preserve_temp_slots (result);
3720 free_temp_slots ();
3721 pop_temp_slots ();
3722 return want_value ? result : NULL_RTX;
3723 }
3724
3725 /* Generate code for computing expression EXP,
3726 and storing the value into TARGET.
3727 TARGET may contain a QUEUED rtx.
3728
3729 If WANT_VALUE is nonzero, return a copy of the value
3730 not in TARGET, so that we can be sure to use the proper
3731 value in a containing expression even if TARGET has something
3732 else stored in it. If possible, we copy the value through a pseudo
3733 and return that pseudo. Or, if the value is constant, we try to
3734 return the constant. In some cases, we return a pseudo
3735 copied *from* TARGET.
3736
3737 If the mode is BLKmode then we may return TARGET itself.
3738 It turns out that in BLKmode it doesn't cause a problem,
3739 because C has no operators that could combine two different
3740 assignments into the same BLKmode object with different values
3741 with no sequence point. Will other languages need this to
3742 be more thorough?
3743
3744 If WANT_VALUE is 0, we return NULL, to make sure
3745 to catch quickly any cases where the caller uses the value
3746 and fails to set WANT_VALUE. */
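/* A rough sketch of the contract above, for illustration only: a caller
   that needs the assignment's value, as when expanding "a = (b = c)",
   does roughly

	temp = store_expr (rhs, to_rtx, 1);

   and then uses TEMP, while a caller expanding a plain expression
   statement "b = c;" passes WANT_VALUE == 0 and ignores the NULL_RTX
   result.  RHS and TO_RTX are stand-ins for the actual arguments.  */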
3747
3748 rtx
3749 store_expr (exp, target, want_value)
3750 register tree exp;
3751 register rtx target;
3752 int want_value;
3753 {
3754 register rtx temp;
3755 int dont_return_target = 0;
3756
3757 if (TREE_CODE (exp) == COMPOUND_EXPR)
3758 {
3759 /* Perform first part of compound expression, then assign from second
3760 part. */
3761 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3762 emit_queue ();
3763 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3764 }
3765 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3766 {
3767 /* For conditional expression, get safe form of the target. Then
3768 test the condition, doing the appropriate assignment on either
3769 side. This avoids the creation of unnecessary temporaries.
3770 For non-BLKmode, it is more efficient not to do this. */
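/* For instance, a structure assignment "s = flag ? s1 : s2;" expands
   here as a test of FLAG followed by a direct block store of either
   S1 or S2 into S, rather than building the chosen value in a
   temporary and copying it afterwards (names illustrative).  */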
3771
3772 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3773
3774 emit_queue ();
3775 target = protect_from_queue (target, 1);
3776
3777 do_pending_stack_adjust ();
3778 NO_DEFER_POP;
3779 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3780 start_cleanup_deferral ();
3781 store_expr (TREE_OPERAND (exp, 1), target, 0);
3782 end_cleanup_deferral ();
3783 emit_queue ();
3784 emit_jump_insn (gen_jump (lab2));
3785 emit_barrier ();
3786 emit_label (lab1);
3787 start_cleanup_deferral ();
3788 store_expr (TREE_OPERAND (exp, 2), target, 0);
3789 end_cleanup_deferral ();
3790 emit_queue ();
3791 emit_label (lab2);
3792 OK_DEFER_POP;
3793
3794 return want_value ? target : NULL_RTX;
3795 }
3796 else if (queued_subexp_p (target))
3797 /* If target contains a postincrement, let's not risk
3798 using it as the place to generate the rhs. */
3799 {
3800 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3801 {
3802 /* Expand EXP into a new pseudo. */
3803 temp = gen_reg_rtx (GET_MODE (target));
3804 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3805 }
3806 else
3807 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3808
3809 /* If target is volatile, ANSI requires accessing the value
3810 *from* the target, if it is accessed. So make that happen.
3811 In no case return the target itself. */
3812 if (! MEM_VOLATILE_P (target) && want_value)
3813 dont_return_target = 1;
3814 }
3815 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3816 && GET_MODE (target) != BLKmode)
3817 /* If target is in memory and caller wants value in a register instead,
3818 arrange that. Pass TARGET as target for expand_expr so that,
3819 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3820 We know expand_expr will not use the target in that case.
3821 Don't do this if TARGET is volatile because we are supposed
3822 to write it and then read it. */
3823 {
3824 temp = expand_expr (exp, target, GET_MODE (target), 0);
3825 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3826 temp = copy_to_reg (temp);
3827 dont_return_target = 1;
3828 }
3829 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3830 /* If this is a scalar in a register that is stored in a wider mode
3831 than the declared mode, compute the result into its declared mode
3832 and then convert to the wider mode. Our value is the computed
3833 expression. */
3834 {
3835 /* If we don't want a value, we can do the conversion inside EXP,
3836 which will often result in some optimizations. Do the conversion
3837 in two steps: first change the signedness, if needed, then
3838 the extend. But don't do this if the type of EXP is a subtype
3839 of something else since then the conversion might involve
3840 more than just converting modes. */
3841 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3842 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3843 {
3844 if (TREE_UNSIGNED (TREE_TYPE (exp))
3845 != SUBREG_PROMOTED_UNSIGNED_P (target))
3846 exp
3847 = convert
3848 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3849 TREE_TYPE (exp)),
3850 exp);
3851
3852 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3853 SUBREG_PROMOTED_UNSIGNED_P (target)),
3854 exp);
3855 }
3856
3857 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3858
3859 /* If TEMP is a volatile MEM and we want a result value, make
3860 the access now so it gets done only once. Likewise if
3861 it contains TARGET. */
3862 if (GET_CODE (temp) == MEM && want_value
3863 && (MEM_VOLATILE_P (temp)
3864 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3865 temp = copy_to_reg (temp);
3866
3867 /* If TEMP is a VOIDmode constant, use convert_modes to make
3868 sure that we properly convert it. */
3869 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3870 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3871 TYPE_MODE (TREE_TYPE (exp)), temp,
3872 SUBREG_PROMOTED_UNSIGNED_P (target));
3873
3874 convert_move (SUBREG_REG (target), temp,
3875 SUBREG_PROMOTED_UNSIGNED_P (target));
3876
3877 /* If we promoted a constant, change the mode back down to match
3878 target. Otherwise, the caller might get confused by a result whose
3879 mode is larger than expected. */
3880
3881 if (want_value && GET_MODE (temp) != GET_MODE (target)
3882 && GET_MODE (temp) != VOIDmode)
3883 {
3884 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3885 SUBREG_PROMOTED_VAR_P (temp) = 1;
3886 SUBREG_PROMOTED_UNSIGNED_P (temp)
3887 = SUBREG_PROMOTED_UNSIGNED_P (target);
3888 }
3889
3890 return want_value ? temp : NULL_RTX;
3891 }
3892 else
3893 {
3894 temp = expand_expr (exp, target, GET_MODE (target), 0);
3895 /* Return TARGET if it's a specified hardware register.
3896 If TARGET is a volatile mem ref, either return TARGET
3897 or return a reg copied *from* TARGET; ANSI requires this.
3898
3899 Otherwise, if TEMP is not TARGET, return TEMP
3900 if it is constant (for efficiency),
3901 or if we really want the correct value. */
3902 if (!(target && GET_CODE (target) == REG
3903 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3904 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3905 && ! rtx_equal_p (temp, target)
3906 && (CONSTANT_P (temp) || want_value))
3907 dont_return_target = 1;
3908 }
3909
3910 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3911 the same as that of TARGET, adjust the constant. This is needed, for
3912 example, in case it is a CONST_DOUBLE and we want only a word-sized
3913 value. */
3914 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3915 && TREE_CODE (exp) != ERROR_MARK
3916 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3917 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3918 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3919
3920 if (current_function_check_memory_usage
3921 && GET_CODE (target) == MEM
3922 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3923 {
3924 in_check_memory_usage = 1;
3925 if (GET_CODE (temp) == MEM)
3926 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3927 XEXP (target, 0), Pmode,
3928 XEXP (temp, 0), Pmode,
3929 expr_size (exp), TYPE_MODE (sizetype));
3930 else
3931 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3932 XEXP (target, 0), Pmode,
3933 expr_size (exp), TYPE_MODE (sizetype),
3934 GEN_INT (MEMORY_USE_WO),
3935 TYPE_MODE (integer_type_node));
3936 in_check_memory_usage = 0;
3937 }
3938
3939 /* If value was not generated in the target, store it there.
3940 Convert the value to TARGET's type first if necessary. */
3941 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3942 one or both of them are volatile memory refs, we have to distinguish
3943 two cases:
3944 - expand_expr has used TARGET. In this case, we must not generate
3945 another copy. This can be detected by TEMP being TARGET itself
3946 (equal according to ==).
3947 - expand_expr has not used TARGET - that means that the source just
3948 happens to have the same RTX form. Since temp will have been created
3949 by expand_expr, it will compare unequal according to == .
3950 We must generate a copy in this case, to reach the correct number
3951 of volatile memory references. */
3952
3953 if ((! rtx_equal_p (temp, target)
3954 || (temp != target && (side_effects_p (temp)
3955 || side_effects_p (target))))
3956 && TREE_CODE (exp) != ERROR_MARK)
3957 {
3958 target = protect_from_queue (target, 1);
3959 if (GET_MODE (temp) != GET_MODE (target)
3960 && GET_MODE (temp) != VOIDmode)
3961 {
3962 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3963 if (dont_return_target)
3964 {
3965 /* In this case, we will return TEMP,
3966 so make sure it has the proper mode.
3967 But don't forget to store the value into TARGET. */
3968 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3969 emit_move_insn (target, temp);
3970 }
3971 else
3972 convert_move (target, temp, unsignedp);
3973 }
3974
3975 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3976 {
3977 /* Handle copying a string constant into an array.
3978 The string constant may be shorter than the array.
3979 So copy just the string's actual length, and clear the rest. */
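/* For example, with "char buf[10]" initialized from the string
   constant "hi" (3 bytes counting the terminating NUL), the 3 bytes
   are block-copied and the remaining 7 bytes of the target are
   cleared by the code below.  */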
3980 rtx size;
3981 rtx addr;
3982
3983 /* Get the size of the data type of the string,
3984 which is actually the size of the target. */
3985 size = expr_size (exp);
3986 if (GET_CODE (size) == CONST_INT
3987 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3988 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3989 else
3990 {
3991 /* Compute the size of the data to copy from the string. */
3992 tree copy_size
3993 = size_binop (MIN_EXPR,
3994 make_tree (sizetype, size),
3995 size_int (TREE_STRING_LENGTH (exp)));
3996 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3997 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3998 VOIDmode, 0);
3999 rtx label = 0;
4000
4001 /* Copy that much. */
4002 emit_block_move (target, temp, copy_size_rtx,
4003 TYPE_ALIGN (TREE_TYPE (exp)));
4004
4005 /* Figure out how much is left in TARGET that we have to clear.
4006 Do all calculations in ptr_mode. */
4007
4008 addr = XEXP (target, 0);
4009 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4010
4011 if (GET_CODE (copy_size_rtx) == CONST_INT)
4012 {
4013 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4014 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
4015 align = MIN (align, (BITS_PER_UNIT
4016 * (INTVAL (copy_size_rtx)
4017 & - INTVAL (copy_size_rtx))));
4018 }
4019 else
4020 {
4021 addr = force_reg (ptr_mode, addr);
4022 addr = expand_binop (ptr_mode, add_optab, addr,
4023 copy_size_rtx, NULL_RTX, 0,
4024 OPTAB_LIB_WIDEN);
4025
4026 size = expand_binop (ptr_mode, sub_optab, size,
4027 copy_size_rtx, NULL_RTX, 0,
4028 OPTAB_LIB_WIDEN);
4029
4030 align = BITS_PER_UNIT;
4031 label = gen_label_rtx ();
4032 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4033 GET_MODE (size), 0, 0, label);
4034 }
4035 align = MIN (align, expr_align (copy_size));
4036
4037 if (size != const0_rtx)
4038 {
4039 rtx dest = gen_rtx_MEM (BLKmode, addr);
4040
4041 MEM_COPY_ATTRIBUTES (dest, target);
4042
4043 /* Be sure we can write on ADDR. */
4044 in_check_memory_usage = 1;
4045 if (current_function_check_memory_usage)
4046 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4047 addr, Pmode,
4048 size, TYPE_MODE (sizetype),
4049 GEN_INT (MEMORY_USE_WO),
4050 TYPE_MODE (integer_type_node));
4051 in_check_memory_usage = 0;
4052 clear_storage (dest, size, align);
4053 }
4054
4055 if (label)
4056 emit_label (label);
4057 }
4058 }
4059 /* Handle calls that return values in multiple non-contiguous locations.
4060 The Irix 6 ABI has examples of this. */
4061 else if (GET_CODE (target) == PARALLEL)
4062 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4063 TYPE_ALIGN (TREE_TYPE (exp)));
4064 else if (GET_MODE (temp) == BLKmode)
4065 emit_block_move (target, temp, expr_size (exp),
4066 TYPE_ALIGN (TREE_TYPE (exp)));
4067 else
4068 emit_move_insn (target, temp);
4069 }
4070
4071 /* If we don't want a value, return NULL_RTX. */
4072 if (! want_value)
4073 return NULL_RTX;
4074
4075 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4076 ??? The latter test doesn't seem to make sense. */
4077 else if (dont_return_target && GET_CODE (temp) != MEM)
4078 return temp;
4079
4080 /* Return TARGET itself if it is a hard register. */
4081 else if (want_value && GET_MODE (target) != BLKmode
4082 && ! (GET_CODE (target) == REG
4083 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4084 return copy_to_reg (target);
4085
4086 else
4087 return target;
4088 }
4089 \f
4090 /* Return 1 if EXP just contains zeros. */
4091
4092 static int
4093 is_zeros_p (exp)
4094 tree exp;
4095 {
4096 tree elt;
4097
4098 switch (TREE_CODE (exp))
4099 {
4100 case CONVERT_EXPR:
4101 case NOP_EXPR:
4102 case NON_LVALUE_EXPR:
4103 return is_zeros_p (TREE_OPERAND (exp, 0));
4104
4105 case INTEGER_CST:
4106 return integer_zerop (exp);
4107
4108 case COMPLEX_CST:
4109 return
4110 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4111
4112 case REAL_CST:
4113 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4114
4115 case CONSTRUCTOR:
4116 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4117 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4118 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4119 if (! is_zeros_p (TREE_VALUE (elt)))
4120 return 0;
4121
4122 return 1;
4123
4124 default:
4125 return 0;
4126 }
4127 }
4128
4129 /* Return 1 if EXP contains mostly (3/4) zeros. */
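/* For example, a constructor for an 8-element array with a single
   nonzero element has 7 zero elements out of 8; since 4*7 >= 3*8,
   it is considered mostly zero, and callers will typically clear the
   whole object first and then store only the nonzero element.  */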
4130
4131 static int
4132 mostly_zeros_p (exp)
4133 tree exp;
4134 {
4135 if (TREE_CODE (exp) == CONSTRUCTOR)
4136 {
4137 int elts = 0, zeros = 0;
4138 tree elt = CONSTRUCTOR_ELTS (exp);
4139 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4140 {
4141 /* If there are no ranges of true bits, it is all zero. */
4142 return elt == NULL_TREE;
4143 }
4144 for (; elt; elt = TREE_CHAIN (elt))
4145 {
4146 /* We do not handle the case where the index is a RANGE_EXPR,
4147 so the statistic will be somewhat inaccurate.
4148 We do make a more accurate count in store_constructor itself,
4149 so, since this function is only used for nested array elements,
4150 this should be close enough. */
4151 if (mostly_zeros_p (TREE_VALUE (elt)))
4152 zeros++;
4153 elts++;
4154 }
4155
4156 return 4 * zeros >= 3 * elts;
4157 }
4158
4159 return is_zeros_p (exp);
4160 }
4161 \f
4162 /* Helper function for store_constructor.
4163 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4164 TYPE is the type of the CONSTRUCTOR, not the element type.
4165 ALIGN and CLEARED are as for store_constructor.
4166
4167 This provides a recursive shortcut back to store_constructor when it isn't
4168 necessary to go through store_field. This is so that we can pass through
4169 the cleared field to let store_constructor know that we may not have to
4170 clear a substructure if the outer structure has already been cleared. */
4171
4172 static void
4173 store_constructor_field (target, bitsize, bitpos,
4174 mode, exp, type, align, cleared)
4175 rtx target;
4176 unsigned HOST_WIDE_INT bitsize;
4177 HOST_WIDE_INT bitpos;
4178 enum machine_mode mode;
4179 tree exp, type;
4180 unsigned int align;
4181 int cleared;
4182 {
4183 if (TREE_CODE (exp) == CONSTRUCTOR
4184 && bitpos % BITS_PER_UNIT == 0
4185 /* If we have a non-zero bitpos for a register target, then we just
4186 let store_field do the bitfield handling. This is unlikely to
4187 generate unnecessary clear instructions anyway. */
4188 && (bitpos == 0 || GET_CODE (target) == MEM))
4189 {
4190 if (bitpos != 0)
4191 target
4192 = change_address (target,
4193 GET_MODE (target) == BLKmode
4194 || 0 != (bitpos
4195 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4196 ? BLKmode : VOIDmode,
4197 plus_constant (XEXP (target, 0),
4198 bitpos / BITS_PER_UNIT));
4199 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4200 }
4201 else
4202 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4203 int_size_in_bytes (type), 0);
4204 }
4205
4206 /* Store the value of constructor EXP into the rtx TARGET.
4207 TARGET is either a REG or a MEM.
4208 ALIGN is the maximum known alignment for TARGET.
4209 CLEARED is true if TARGET is known to have been zero'd.
4210 SIZE is the number of bytes of TARGET we are allowed to modify: this
4211 may not be the same as the size of EXP if we are assigning to a field
4212 which has been packed to exclude padding bits. */
4213
4214 static void
4215 store_constructor (exp, target, align, cleared, size)
4216 tree exp;
4217 rtx target;
4218 unsigned int align;
4219 int cleared;
4220 HOST_WIDE_INT size;
4221 {
4222 tree type = TREE_TYPE (exp);
4223 #ifdef WORD_REGISTER_OPERATIONS
4224 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4225 #endif
4226
4227 /* We know our target cannot conflict, since safe_from_p has been called. */
4228 #if 0
4229 /* Don't try copying piece by piece into a hard register
4230 since that is vulnerable to being clobbered by EXP.
4231 Instead, construct in a pseudo register and then copy it all. */
4232 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4233 {
4234 rtx temp = gen_reg_rtx (GET_MODE (target));
4235 store_constructor (exp, temp, align, cleared, size);
4236 emit_move_insn (target, temp);
4237 return;
4238 }
4239 #endif
4240
4241 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4242 || TREE_CODE (type) == QUAL_UNION_TYPE)
4243 {
4244 register tree elt;
4245
4246 /* Inform later passes that the whole union value is dead. */
4247 if ((TREE_CODE (type) == UNION_TYPE
4248 || TREE_CODE (type) == QUAL_UNION_TYPE)
4249 && ! cleared)
4250 {
4251 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4252
4253 /* If the constructor is empty, clear the union. */
4254 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4255 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4256 }
4257
4258 /* If we are building a static constructor into a register,
4259 set the initial value as zero so we can fold the value into
4260 a constant. But if more than one register is involved,
4261 this probably loses. */
4262 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4263 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4264 {
4265 if (! cleared)
4266 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4267
4268 cleared = 1;
4269 }
4270
4271 /* If the constructor has fewer fields than the structure
4272 or if we are initializing the structure to mostly zeros,
4273 clear the whole structure first. */
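/* For example, "struct { int a, b, c, d; } x = { 1 };" names only one
   of the four fields, so X is cleared as a block first and then just
   the field A is stored.  */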
4274 else if (size > 0
4275 && ((list_length (CONSTRUCTOR_ELTS (exp))
4276 != fields_length (type))
4277 || mostly_zeros_p (exp)))
4278 {
4279 if (! cleared)
4280 clear_storage (target, GEN_INT (size), align);
4281
4282 cleared = 1;
4283 }
4284 else if (! cleared)
4285 /* Inform later passes that the old value is dead. */
4286 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4287
4288 /* Store each element of the constructor into
4289 the corresponding field of TARGET. */
4290
4291 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4292 {
4293 register tree field = TREE_PURPOSE (elt);
4294 #ifdef WORD_REGISTER_OPERATIONS
4295 tree value = TREE_VALUE (elt);
4296 #endif
4297 register enum machine_mode mode;
4298 HOST_WIDE_INT bitsize;
4299 HOST_WIDE_INT bitpos = 0;
4300 int unsignedp;
4301 tree offset;
4302 rtx to_rtx = target;
4303
4304 /* Just ignore missing fields.
4305 We cleared the whole structure, above,
4306 if any fields are missing. */
4307 if (field == 0)
4308 continue;
4309
4310 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4311 continue;
4312
4313 if (host_integerp (DECL_SIZE (field), 1))
4314 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4315 else
4316 bitsize = -1;
4317
4318 unsignedp = TREE_UNSIGNED (field);
4319 mode = DECL_MODE (field);
4320 if (DECL_BIT_FIELD (field))
4321 mode = VOIDmode;
4322
4323 offset = DECL_FIELD_OFFSET (field);
4324 if (host_integerp (offset, 0)
4325 && host_integerp (bit_position (field), 0))
4326 {
4327 bitpos = int_bit_position (field);
4328 offset = 0;
4329 }
4330 else
4331 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4332
4333 if (offset)
4334 {
4335 rtx offset_rtx;
4336
4337 if (contains_placeholder_p (offset))
4338 offset = build (WITH_RECORD_EXPR, sizetype,
4339 offset, make_tree (TREE_TYPE (exp), target));
4340
4341 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4342 if (GET_CODE (to_rtx) != MEM)
4343 abort ();
4344
4345 if (GET_MODE (offset_rtx) != ptr_mode)
4346 {
4347 #ifdef POINTERS_EXTEND_UNSIGNED
4348 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4349 #else
4350 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4351 #endif
4352 }
4353
4354 to_rtx
4355 = change_address (to_rtx, VOIDmode,
4356 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4357 force_reg (ptr_mode,
4358 offset_rtx)));
4359 align = DECL_OFFSET_ALIGN (field);
4360 }
4361
4362 if (TREE_READONLY (field))
4363 {
4364 if (GET_CODE (to_rtx) == MEM)
4365 to_rtx = copy_rtx (to_rtx);
4366
4367 RTX_UNCHANGING_P (to_rtx) = 1;
4368 }
4369
4370 #ifdef WORD_REGISTER_OPERATIONS
4371 /* If this initializes a field that is smaller than a word, at the
4372 start of a word, try to widen it to a full word.
4373 This special case allows us to output C++ member function
4374 initializations in a form that the optimizers can understand. */
4375 if (GET_CODE (target) == REG
4376 && bitsize < BITS_PER_WORD
4377 && bitpos % BITS_PER_WORD == 0
4378 && GET_MODE_CLASS (mode) == MODE_INT
4379 && TREE_CODE (value) == INTEGER_CST
4380 && exp_size >= 0
4381 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4382 {
4383 tree type = TREE_TYPE (value);
4384 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4385 {
4386 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4387 value = convert (type, value);
4388 }
4389 if (BYTES_BIG_ENDIAN)
4390 value
4391 = fold (build (LSHIFT_EXPR, type, value,
4392 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4393 bitsize = BITS_PER_WORD;
4394 mode = word_mode;
4395 }
4396 #endif
4397 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4398 TREE_VALUE (elt), type, align, cleared);
4399 }
4400 }
4401 else if (TREE_CODE (type) == ARRAY_TYPE)
4402 {
4403 register tree elt;
4404 register int i;
4405 int need_to_clear;
4406 tree domain = TYPE_DOMAIN (type);
4407 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4408 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4409 tree elttype = TREE_TYPE (type);
4410
4411 /* If the constructor has fewer elements than the array,
4412 clear the whole array first. Similarly if this is
4413 a static constructor of a non-BLKmode object. */
4414 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4415 need_to_clear = 1;
4416 else
4417 {
4418 HOST_WIDE_INT count = 0, zero_count = 0;
4419 need_to_clear = 0;
4420 /* This loop is a more accurate version of the loop in
4421 mostly_zeros_p (it handles RANGE_EXPR in an index).
4422 It is also needed to check for missing elements. */
4423 for (elt = CONSTRUCTOR_ELTS (exp);
4424 elt != NULL_TREE;
4425 elt = TREE_CHAIN (elt))
4426 {
4427 tree index = TREE_PURPOSE (elt);
4428 HOST_WIDE_INT this_node_count;
4429
4430 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4431 {
4432 tree lo_index = TREE_OPERAND (index, 0);
4433 tree hi_index = TREE_OPERAND (index, 1);
4434
4435 if (! host_integerp (lo_index, 1)
4436 || ! host_integerp (hi_index, 1))
4437 {
4438 need_to_clear = 1;
4439 break;
4440 }
4441
4442 this_node_count = (tree_low_cst (hi_index, 1)
4443 - tree_low_cst (lo_index, 1) + 1);
4444 }
4445 else
4446 this_node_count = 1;
4447 count += this_node_count;
4448 if (mostly_zeros_p (TREE_VALUE (elt)))
4449 zero_count += this_node_count;
4450 }
4451 /* Clear the entire array first if there are any missing elements,
4452 or if the incidence of zero elements is >= 75%. */
4453 if (count < maxelt - minelt + 1
4454 || 4 * zero_count >= 3 * count)
4455 need_to_clear = 1;
4456 }
4457 if (need_to_clear && size > 0)
4458 {
4459 if (! cleared)
4460 clear_storage (target, GEN_INT (size), align);
4461 cleared = 1;
4462 }
4463 else
4464 /* Inform later passes that the old value is dead. */
4465 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4466
4467 /* Store each element of the constructor into
4468 the corresponding element of TARGET, determined
4469 by counting the elements. */
4470 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4471 elt;
4472 elt = TREE_CHAIN (elt), i++)
4473 {
4474 register enum machine_mode mode;
4475 HOST_WIDE_INT bitsize;
4476 HOST_WIDE_INT bitpos;
4477 int unsignedp;
4478 tree value = TREE_VALUE (elt);
4479 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4480 tree index = TREE_PURPOSE (elt);
4481 rtx xtarget = target;
4482
4483 if (cleared && is_zeros_p (value))
4484 continue;
4485
4486 unsignedp = TREE_UNSIGNED (elttype);
4487 mode = TYPE_MODE (elttype);
4488 if (mode == BLKmode)
4489 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4490 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4491 : -1);
4492 else
4493 bitsize = GET_MODE_BITSIZE (mode);
4494
4495 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4496 {
4497 tree lo_index = TREE_OPERAND (index, 0);
4498 tree hi_index = TREE_OPERAND (index, 1);
4499 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4500 struct nesting *loop;
4501 HOST_WIDE_INT lo, hi, count;
4502 tree position;
4503
4504 /* If the range is constant and "small", unroll the loop. */
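/* "Small" here means the target is not in memory, the range covers at
   most two elements, or the unrolled stores touch at most 40 bytes
   (40 * 8 bits).  E.g. the GNU C initializer
   "int v[20] = { [0 ... 9] = 7 };" covers 40 bytes on a target with
   32-bit int and is still unrolled; anything larger falls through to
   the runtime loop in the else branch.  */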
4505 if (host_integerp (lo_index, 0)
4506 && host_integerp (hi_index, 0)
4507 && (lo = tree_low_cst (lo_index, 0),
4508 hi = tree_low_cst (hi_index, 0),
4509 count = hi - lo + 1,
4510 (GET_CODE (target) != MEM
4511 || count <= 2
4512 || (host_integerp (TYPE_SIZE (elttype), 1)
4513 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4514 <= 40 * 8)))))
4515 {
4516 lo -= minelt; hi -= minelt;
4517 for (; lo <= hi; lo++)
4518 {
4519 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4520 store_constructor_field (target, bitsize, bitpos, mode,
4521 value, type, align, cleared);
4522 }
4523 }
4524 else
4525 {
4526 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4527 loop_top = gen_label_rtx ();
4528 loop_end = gen_label_rtx ();
4529
4530 unsignedp = TREE_UNSIGNED (domain);
4531
4532 index = build_decl (VAR_DECL, NULL_TREE, domain);
4533
4534 DECL_RTL (index) = index_r
4535 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4536 &unsignedp, 0));
4537
4538 if (TREE_CODE (value) == SAVE_EXPR
4539 && SAVE_EXPR_RTL (value) == 0)
4540 {
4541 /* Make sure value gets expanded once before the
4542 loop. */
4543 expand_expr (value, const0_rtx, VOIDmode, 0);
4544 emit_queue ();
4545 }
4546 store_expr (lo_index, index_r, 0);
4547 loop = expand_start_loop (0);
4548
4549 /* Assign value to element index. */
4550 position
4551 = convert (ssizetype,
4552 fold (build (MINUS_EXPR, TREE_TYPE (index),
4553 index, TYPE_MIN_VALUE (domain))));
4554 position = size_binop (MULT_EXPR, position,
4555 convert (ssizetype,
4556 TYPE_SIZE_UNIT (elttype)));
4557
4558 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4559 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4560 xtarget = change_address (target, mode, addr);
4561 if (TREE_CODE (value) == CONSTRUCTOR)
4562 store_constructor (value, xtarget, align, cleared,
4563 bitsize / BITS_PER_UNIT);
4564 else
4565 store_expr (value, xtarget, 0);
4566
4567 expand_exit_loop_if_false (loop,
4568 build (LT_EXPR, integer_type_node,
4569 index, hi_index));
4570
4571 expand_increment (build (PREINCREMENT_EXPR,
4572 TREE_TYPE (index),
4573 index, integer_one_node), 0, 0);
4574 expand_end_loop ();
4575 emit_label (loop_end);
4576 }
4577 }
4578 else if ((index != 0 && ! host_integerp (index, 0))
4579 || ! host_integerp (TYPE_SIZE (elttype), 1))
4580 {
4581 rtx pos_rtx, addr;
4582 tree position;
4583
4584 if (index == 0)
4585 index = ssize_int (1);
4586
4587 if (minelt)
4588 index = convert (ssizetype,
4589 fold (build (MINUS_EXPR, index,
4590 TYPE_MIN_VALUE (domain))));
4591
4592 position = size_binop (MULT_EXPR, index,
4593 convert (ssizetype,
4594 TYPE_SIZE_UNIT (elttype)));
4595 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4596 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4597 xtarget = change_address (target, mode, addr);
4598 store_expr (value, xtarget, 0);
4599 }
4600 else
4601 {
4602 if (index != 0)
4603 bitpos = ((tree_low_cst (index, 0) - minelt)
4604 * tree_low_cst (TYPE_SIZE (elttype), 1));
4605 else
4606 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4607
4608 store_constructor_field (target, bitsize, bitpos, mode, value,
4609 type, align, cleared);
4610 }
4611 }
4612 }
4613
4614 /* Set constructor assignments. */
4615 else if (TREE_CODE (type) == SET_TYPE)
4616 {
4617 tree elt = CONSTRUCTOR_ELTS (exp);
4618 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4619 tree domain = TYPE_DOMAIN (type);
4620 tree domain_min, domain_max, bitlength;
4621
4622 /* The default implementation strategy is to extract the constant
4623 parts of the constructor, use that to initialize the target,
4624 and then "or" in whatever non-constant ranges we need in addition.
4625
4626 If a large set is all zero or all ones, it is
4627 probably better to set it using memset (if available) or bzero.
4628 Also, if a large set has just a single range, it may also be
4629 better to first clear the whole set (using bzero/memset)
4630 and then set the bits we want. */
4631
4632 /* Check for all zeros. */
4633 if (elt == NULL_TREE && size > 0)
4634 {
4635 if (!cleared)
4636 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4637 return;
4638 }
4639
4640 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4641 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4642 bitlength = size_binop (PLUS_EXPR,
4643 size_diffop (domain_max, domain_min),
4644 ssize_int (1));
4645
4646 nbits = tree_low_cst (bitlength, 1);
4647
4648 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4649 are "complicated" (more than one range), initialize (the
4650 constant parts) by copying from a constant. */
4651 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4652 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4653 {
4654 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4655 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4656 char *bit_buffer = (char *) alloca (nbits);
4657 HOST_WIDE_INT word = 0;
4658 unsigned int bit_pos = 0;
4659 unsigned int ibit = 0;
4660 unsigned int offset = 0; /* In bytes from beginning of set. */
4661
4662 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4663 for (;;)
4664 {
4665 if (bit_buffer[ibit])
4666 {
4667 if (BYTES_BIG_ENDIAN)
4668 word |= (1 << (set_word_size - 1 - bit_pos));
4669 else
4670 word |= 1 << bit_pos;
4671 }
4672
4673 bit_pos++; ibit++;
4674 if (bit_pos >= set_word_size || ibit == nbits)
4675 {
4676 if (word != 0 || ! cleared)
4677 {
4678 rtx datum = GEN_INT (word);
4679 rtx to_rtx;
4680
4681 /* The assumption here is that it is safe to use
4682 XEXP if the set is multi-word, but not if
4683 it's single-word. */
4684 if (GET_CODE (target) == MEM)
4685 {
4686 to_rtx = plus_constant (XEXP (target, 0), offset);
4687 to_rtx = change_address (target, mode, to_rtx);
4688 }
4689 else if (offset == 0)
4690 to_rtx = target;
4691 else
4692 abort ();
4693 emit_move_insn (to_rtx, datum);
4694 }
4695
4696 if (ibit == nbits)
4697 break;
4698 word = 0;
4699 bit_pos = 0;
4700 offset += set_word_size / BITS_PER_UNIT;
4701 }
4702 }
4703 }
4704 else if (!cleared)
4705 /* Don't bother clearing storage if the set is all ones. */
4706 if (TREE_CHAIN (elt) != NULL_TREE
4707 || (TREE_PURPOSE (elt) == NULL_TREE
4708 ? nbits != 1
4709 : ( ! host_integerp (TREE_VALUE (elt), 0)
4710 || ! host_integerp (TREE_PURPOSE (elt), 0)
4711 || (tree_low_cst (TREE_VALUE (elt), 0)
4712 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4713 != (HOST_WIDE_INT) nbits))))
4714 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4715
4716 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4717 {
4718 /* start of range of element or NULL */
4719 tree startbit = TREE_PURPOSE (elt);
4720 /* end of range of element, or element value */
4721 tree endbit = TREE_VALUE (elt);
4722 #ifdef TARGET_MEM_FUNCTIONS
4723 HOST_WIDE_INT startb, endb;
4724 #endif
4725 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4726
4727 bitlength_rtx = expand_expr (bitlength,
4728 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4729
4730 /* handle non-range tuple element like [ expr ] */
4731 if (startbit == NULL_TREE)
4732 {
4733 startbit = save_expr (endbit);
4734 endbit = startbit;
4735 }
4736
4737 startbit = convert (sizetype, startbit);
4738 endbit = convert (sizetype, endbit);
4739 if (! integer_zerop (domain_min))
4740 {
4741 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4742 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4743 }
4744 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4745 EXPAND_CONST_ADDRESS);
4746 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4747 EXPAND_CONST_ADDRESS);
4748
4749 if (REG_P (target))
4750 {
4751 targetx = assign_stack_temp (GET_MODE (target),
4752 GET_MODE_SIZE (GET_MODE (target)),
4753 0);
4754 emit_move_insn (targetx, target);
4755 }
4756
4757 else if (GET_CODE (target) == MEM)
4758 targetx = target;
4759 else
4760 abort ();
4761
4762 #ifdef TARGET_MEM_FUNCTIONS
4763 /* Optimization: If startbit and endbit are
4764 constants divisible by BITS_PER_UNIT,
4765 call memset instead. */
4766 if (TREE_CODE (startbit) == INTEGER_CST
4767 && TREE_CODE (endbit) == INTEGER_CST
4768 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4769 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4770 {
4771 emit_library_call (memset_libfunc, 0,
4772 VOIDmode, 3,
4773 plus_constant (XEXP (targetx, 0),
4774 startb / BITS_PER_UNIT),
4775 Pmode,
4776 constm1_rtx, TYPE_MODE (integer_type_node),
4777 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4778 TYPE_MODE (sizetype));
4779 }
4780 else
4781 #endif
4782 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4783 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4784 bitlength_rtx, TYPE_MODE (sizetype),
4785 startbit_rtx, TYPE_MODE (sizetype),
4786 endbit_rtx, TYPE_MODE (sizetype));
4787
4788 if (REG_P (target))
4789 emit_move_insn (target, targetx);
4790 }
4791 }
4792
4793 else
4794 abort ();
4795 }
4796
4797 /* Store the value of EXP (an expression tree)
4798 into a subfield of TARGET which has mode MODE and occupies
4799 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4800 If MODE is VOIDmode, it means that we are storing into a bit-field.
4801
4802 If VALUE_MODE is VOIDmode, return nothing in particular.
4803 UNSIGNEDP is not used in this case.
4804
4805 Otherwise, return an rtx for the value stored. This rtx
4806 has mode VALUE_MODE if that is convenient to do.
4807 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4808
4809 ALIGN is the alignment that TARGET is known to have.
4810 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4811
4812 ALIAS_SET is the alias set for the destination. This value will
4813 (in general) be different from that for TARGET, since TARGET is a
4814 reference to the containing structure. */
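/* An illustrative reading of the parameters: a store into a C
   bit-field such as "int b : 5" arrives with BITSIZE == 5 and
   MODE == VOIDmode and is handled with store_bit_field below, while
   an ordinary word-sized field arrives with its DECL_MODE (SImode,
   say) and is stored through a narrowed memory reference in the
   final else branch.  */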
4815
4816 static rtx
4817 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4818 unsignedp, align, total_size, alias_set)
4819 rtx target;
4820 HOST_WIDE_INT bitsize;
4821 HOST_WIDE_INT bitpos;
4822 enum machine_mode mode;
4823 tree exp;
4824 enum machine_mode value_mode;
4825 int unsignedp;
4826 unsigned int align;
4827 HOST_WIDE_INT total_size;
4828 int alias_set;
4829 {
4830 HOST_WIDE_INT width_mask = 0;
4831
4832 if (TREE_CODE (exp) == ERROR_MARK)
4833 return const0_rtx;
4834
4835 if (bitsize < HOST_BITS_PER_WIDE_INT)
4836 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4837
4838 /* If we are storing into an unaligned field of an aligned union that is
4839 in a register, we may have the mode of TARGET being an integer mode but
4840 MODE == BLKmode. In that case, get an aligned object whose size and
4841 alignment are the same as TARGET and store TARGET into it (we can avoid
4842 the store if the field being stored is the entire width of TARGET). Then
4843 call ourselves recursively to store the field into a BLKmode version of
4844 that object. Finally, load from the object into TARGET. This is not
4845 very efficient in general, but should only be slightly more expensive
4846 than the otherwise-required unaligned accesses. Perhaps this can be
4847 cleaned up later. */
4848
4849 if (mode == BLKmode
4850 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4851 {
4852 rtx object = assign_stack_temp (GET_MODE (target),
4853 GET_MODE_SIZE (GET_MODE (target)), 0);
4854 rtx blk_object = copy_rtx (object);
4855
4856 MEM_SET_IN_STRUCT_P (object, 1);
4857 MEM_SET_IN_STRUCT_P (blk_object, 1);
4858 PUT_MODE (blk_object, BLKmode);
4859
4860 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4861 emit_move_insn (object, target);
4862
4863 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4864 align, total_size, alias_set);
4865
4866 /* Even though we aren't returning target, we need to
4867 give it the updated value. */
4868 emit_move_insn (target, object);
4869
4870 return blk_object;
4871 }
4872
4873 if (GET_CODE (target) == CONCAT)
4874 {
4875 /* We're storing into a struct containing a single __complex. */
4876
4877 if (bitpos != 0)
4878 abort ();
4879 return store_expr (exp, target, 0);
4880 }
4881
4882 /* If the structure is in a register or if the component
4883 is a bit field, we cannot use addressing to access it.
4884 Use bit-field techniques or SUBREG to store in it. */
4885
4886 if (mode == VOIDmode
4887 || (mode != BLKmode && ! direct_store[(int) mode]
4888 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4889 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4890 || GET_CODE (target) == REG
4891 || GET_CODE (target) == SUBREG
4892 /* If the field isn't aligned enough to store as an ordinary memref,
4893 store it as a bit field. */
4894 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4895 && (align < GET_MODE_ALIGNMENT (mode)
4896 || bitpos % GET_MODE_ALIGNMENT (mode)))
4897 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4898 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4899 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4900 /* If the RHS and field are a constant size and the size of the
4901 RHS isn't the same size as the bitfield, we must use bitfield
4902 operations. */
4903 || (bitsize >= 0
4904 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4905 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4906 {
4907 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4908
4909 /* If BITSIZE is narrower than the size of the type of EXP
4910 we will be narrowing TEMP. Normally, what's wanted are the
4911 low-order bits. However, if EXP's type is a record and this is
4912 a big-endian machine, we want the upper BITSIZE bits. */
4913 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4914 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4915 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4916 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4917 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4918 - bitsize),
4919 temp, 1);
4920
4921 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4922 MODE. */
4923 if (mode != VOIDmode && mode != BLKmode
4924 && mode != TYPE_MODE (TREE_TYPE (exp)))
4925 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4926
4927 /* If the modes of TARGET and TEMP are both BLKmode, both
4928 must be in memory and BITPOS must be aligned on a byte
4929 boundary. If so, we simply do a block copy. */
4930 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4931 {
4932 unsigned int exp_align = expr_align (exp);
4933
4934 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4935 || bitpos % BITS_PER_UNIT != 0)
4936 abort ();
4937
4938 target = change_address (target, VOIDmode,
4939 plus_constant (XEXP (target, 0),
4940 bitpos / BITS_PER_UNIT));
4941
4942 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4943 align = MIN (exp_align, align);
4944
4945 /* Find an alignment that is consistent with the bit position. */
4946 while ((bitpos % align) != 0)
4947 align >>= 1;
4948
4949 emit_block_move (target, temp,
4950 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4951 / BITS_PER_UNIT),
4952 align);
4953
4954 return value_mode == VOIDmode ? const0_rtx : target;
4955 }
4956
4957 /* Store the value in the bitfield. */
4958 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4959 if (value_mode != VOIDmode)
4960 {
4961 /* The caller wants an rtx for the value. */
4962 /* If possible, avoid refetching from the bitfield itself. */
4963 if (width_mask != 0
4964 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4965 {
4966 tree count;
4967 enum machine_mode tmode;
4968
4969 if (unsignedp)
4970 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4971 tmode = GET_MODE (temp);
4972 if (tmode == VOIDmode)
4973 tmode = value_mode;
4974 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4975 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4976 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4977 }
4978 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4979 NULL_RTX, value_mode, 0, align,
4980 total_size);
4981 }
4982 return const0_rtx;
4983 }
4984 else
4985 {
4986 rtx addr = XEXP (target, 0);
4987 rtx to_rtx;
4988
4989 /* If a value is wanted, it must be the lhs;
4990 so make the address stable for multiple use. */
4991
4992 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4993 && ! CONSTANT_ADDRESS_P (addr)
4994 /* A frame-pointer reference is already stable. */
4995 && ! (GET_CODE (addr) == PLUS
4996 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4997 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4998 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4999 addr = copy_to_reg (addr);
5000
5001 /* Now build a reference to just the desired component. */
5002
5003 to_rtx = copy_rtx (change_address (target, mode,
5004 plus_constant (addr,
5005 (bitpos
5006 / BITS_PER_UNIT))));
5007 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5008 MEM_ALIAS_SET (to_rtx) = alias_set;
5009
5010 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5011 }
5012 }
5013 \f
5014 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5015 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5016 ARRAY_REFs and find the ultimate containing object, which we return.
5017
5018 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5019 bit position, and *PUNSIGNEDP to the signedness of the field.
5020 If the position of the field is variable, we store a tree
5021 giving the variable offset (in units) in *POFFSET.
5022 This offset is in addition to the bit position.
5023 If the position is not variable, we store 0 in *POFFSET.
5024 We set *PALIGNMENT to the alignment of the address that will be
5025 computed. This is the alignment of the thing we return if *POFFSET
5026 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5027
5028 If any of the extraction expressions is volatile,
5029 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5030
5031 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5032 is a mode that can be used to access the field. In that case, *PBITSIZE
5033 is redundant.
5034
5035 If the field describes a variable-sized object, *PMODE is set to
5036 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5037 this case, but the address of the object can be found. */
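/* Worked example, with illustrative values: for an access "x.b" where
   B is a 5-bit bit-field starting 3 bits into X, this returns the
   decl for X with *PBITSIZE == 5, *PBITPOS == 3, *POFFSET == 0 and
   *PMODE == VOIDmode.  For "a[i].f" with a variable index I, *POFFSET
   instead holds a tree for the variable byte offset (roughly
   "i * sizeof (a[0])") and *PBITPOS holds the constant bit offset of
   F within the element.  */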
5038
5039 tree
5040 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5041 punsignedp, pvolatilep, palignment)
5042 tree exp;
5043 HOST_WIDE_INT *pbitsize;
5044 HOST_WIDE_INT *pbitpos;
5045 tree *poffset;
5046 enum machine_mode *pmode;
5047 int *punsignedp;
5048 int *pvolatilep;
5049 unsigned int *palignment;
5050 {
5051 tree size_tree = 0;
5052 enum machine_mode mode = VOIDmode;
5053 tree offset = size_zero_node;
5054 tree bit_offset = bitsize_zero_node;
5055 unsigned int alignment = BIGGEST_ALIGNMENT;
5056 tree tem;
5057
5058 /* First get the mode, signedness, and size. We do this from just the
5059 outermost expression. */
5060 if (TREE_CODE (exp) == COMPONENT_REF)
5061 {
5062 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5063 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5064 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5065
5066 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5067 }
5068 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5069 {
5070 size_tree = TREE_OPERAND (exp, 1);
5071 *punsignedp = TREE_UNSIGNED (exp);
5072 }
5073 else
5074 {
5075 mode = TYPE_MODE (TREE_TYPE (exp));
5076 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5077
5078 if (mode == BLKmode)
5079 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5080 else
5081 *pbitsize = GET_MODE_BITSIZE (mode);
5082 }
5083
5084 if (size_tree != 0)
5085 {
5086 if (! host_integerp (size_tree, 1))
5087 mode = BLKmode, *pbitsize = -1;
5088 else
5089 *pbitsize = tree_low_cst (size_tree, 1);
5090 }
5091
5092 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5093 and find the ultimate containing object. */
5094 while (1)
5095 {
5096 if (TREE_CODE (exp) == BIT_FIELD_REF)
5097 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5098 else if (TREE_CODE (exp) == COMPONENT_REF)
5099 {
5100 tree field = TREE_OPERAND (exp, 1);
5101 tree this_offset = DECL_FIELD_OFFSET (field);
5102
5103 /* If this field hasn't been filled in yet, don't go
5104 past it. This should only happen when folding expressions
5105 made during type construction. */
5106 if (this_offset == 0)
5107 break;
5108 else if (! TREE_CONSTANT (this_offset)
5109 && contains_placeholder_p (this_offset))
5110 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5111
5112 offset = size_binop (PLUS_EXPR, offset, this_offset);
5113 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5114 DECL_FIELD_BIT_OFFSET (field));
5115
5116 if (! host_integerp (offset, 0))
5117 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5118 }
5119
5120 else if (TREE_CODE (exp) == ARRAY_REF)
5121 {
5122 tree index = TREE_OPERAND (exp, 1);
5123 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5124 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5125 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5126
5127 /* We assume all arrays have sizes that are a multiple of a byte.
5128 First subtract the lower bound, if any, in the type of the
5129 index, then convert to sizetype and multiply by the size of the
5130 array element. */
5131 if (low_bound != 0 && ! integer_zerop (low_bound))
5132 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5133 index, low_bound));
5134
5135 /* If the index has a self-referential type, pass it to a
5136 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5137 component to one. */
5138 if (! TREE_CONSTANT (index)
5139 && contains_placeholder_p (index))
5140 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5141 if (! TREE_CONSTANT (unit_size)
5142 && contains_placeholder_p (unit_size))
5143 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5144 TREE_OPERAND (exp, 0));
5145
5146 offset = size_binop (PLUS_EXPR, offset,
5147 size_binop (MULT_EXPR,
5148 convert (sizetype, index),
5149 unit_size));
5150 }
5151
5152 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5153 && ! ((TREE_CODE (exp) == NOP_EXPR
5154 || TREE_CODE (exp) == CONVERT_EXPR)
5155 && (TYPE_MODE (TREE_TYPE (exp))
5156 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5157 break;
5158
5159 /* If any reference in the chain is volatile, the effect is volatile. */
5160 if (TREE_THIS_VOLATILE (exp))
5161 *pvolatilep = 1;
5162
5163 /* If the offset is non-constant already, then we can't assume any
5164 alignment stricter than the alignment of this node's type. */
5165 if (! TREE_CONSTANT (offset))
5166 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5167
5168 exp = TREE_OPERAND (exp, 0);
5169 }
5170
5171 if (DECL_P (exp))
5172 alignment = MIN (alignment, DECL_ALIGN (exp));
5173 else if (TREE_TYPE (exp) != 0)
5174 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5175
5176 /* If OFFSET is constant, see if we can return the whole thing as a
5177 constant bit position. Otherwise, split it up. */
5178 if (host_integerp (offset, 0)
5179 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5180 bitsize_unit_node))
5181 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5182 && host_integerp (tem, 0))
5183 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5184 else
5185 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5186
5187 *pmode = mode;
5188 *palignment = alignment;
5189 return exp;
5190 }
5191
5192 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5193
5194 static enum memory_use_mode
5195 get_memory_usage_from_modifier (modifier)
5196 enum expand_modifier modifier;
5197 {
5198 switch (modifier)
5199 {
5200 case EXPAND_NORMAL:
5201 case EXPAND_SUM:
5202 return MEMORY_USE_RO;
5203 break;
5204 case EXPAND_MEMORY_USE_WO:
5205 return MEMORY_USE_WO;
5206 break;
5207 case EXPAND_MEMORY_USE_RW:
5208 return MEMORY_USE_RW;
5209 break;
5210 case EXPAND_MEMORY_USE_DONT:
5211 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5212 MEMORY_USE_DONT, because they are modifiers to a call of
5213 expand_expr in the ADDR_EXPR case of expand_expr. */
5214 case EXPAND_CONST_ADDRESS:
5215 case EXPAND_INITIALIZER:
5216 return MEMORY_USE_DONT;
5217 case EXPAND_MEMORY_USE_BAD:
5218 default:
5219 abort ();
5220 }
5221 }
5222 \f
5223 /* Given an rtx VALUE that may contain additions and multiplications,
5224 return an equivalent value that just refers to a register or memory.
5225 This is done by generating instructions to perform the arithmetic
5226 and returning a pseudo-register containing the value.
5227
5228 The returned value may be a REG, SUBREG, MEM or constant. */
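/* For instance, given (plus:SI (reg:SI 100) (const_int 4)) this emits
   an add instruction (into TARGET if convenient) and returns the
   register holding the sum, so the caller is left with a plain REG
   rather than an arithmetic expression.  */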
5229
5230 rtx
5231 force_operand (value, target)
5232 rtx value, target;
5233 {
5234 register optab binoptab = 0;
5235 /* Use a temporary to force order of execution of calls to
5236 `force_operand'. */
5237 rtx tmp;
5238 register rtx op2;
5239 /* Use subtarget as the target for operand 0 of a binary operation. */
5240 register rtx subtarget = get_subtarget (target);
5241
5242 /* Check for a PIC address load. */
5243 if (flag_pic
5244 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5245 && XEXP (value, 0) == pic_offset_table_rtx
5246 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5247 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5248 || GET_CODE (XEXP (value, 1)) == CONST))
5249 {
5250 if (!subtarget)
5251 subtarget = gen_reg_rtx (GET_MODE (value));
5252 emit_move_insn (subtarget, value);
5253 return subtarget;
5254 }
5255
5256 if (GET_CODE (value) == PLUS)
5257 binoptab = add_optab;
5258 else if (GET_CODE (value) == MINUS)
5259 binoptab = sub_optab;
5260 else if (GET_CODE (value) == MULT)
5261 {
5262 op2 = XEXP (value, 1);
5263 if (!CONSTANT_P (op2)
5264 && !(GET_CODE (op2) == REG && op2 != subtarget))
5265 subtarget = 0;
5266 tmp = force_operand (XEXP (value, 0), subtarget);
5267 return expand_mult (GET_MODE (value), tmp,
5268 force_operand (op2, NULL_RTX),
5269 target, 0);
5270 }
5271
5272 if (binoptab)
5273 {
5274 op2 = XEXP (value, 1);
5275 if (!CONSTANT_P (op2)
5276 && !(GET_CODE (op2) == REG && op2 != subtarget))
5277 subtarget = 0;
5278 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5279 {
5280 binoptab = add_optab;
5281 op2 = negate_rtx (GET_MODE (value), op2);
5282 }
5283
5284 /* Check for an addition with OP2 a constant integer and our first
5285 operand a PLUS of a virtual register and something else. In that
5286 case, we want to emit the sum of the virtual register and the
5287 constant first and then add the other value. This allows virtual
5288 register instantiation to simply modify the constant rather than
5289 creating another one around this addition. */
5290 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5291 && GET_CODE (XEXP (value, 0)) == PLUS
5292 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5293 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5294 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5295 {
5296 rtx temp = expand_binop (GET_MODE (value), binoptab,
5297 XEXP (XEXP (value, 0), 0), op2,
5298 subtarget, 0, OPTAB_LIB_WIDEN);
5299 return expand_binop (GET_MODE (value), binoptab, temp,
5300 force_operand (XEXP (XEXP (value, 0), 1), 0),
5301 target, 0, OPTAB_LIB_WIDEN);
5302 }
5303
5304 tmp = force_operand (XEXP (value, 0), subtarget);
5305 return expand_binop (GET_MODE (value), binoptab, tmp,
5306 force_operand (op2, NULL_RTX),
5307 target, 0, OPTAB_LIB_WIDEN);
5308 /* We give UNSIGNEDP = 0 to expand_binop
5309 because the only operations we are expanding here are signed ones. */
5310 }
5311 return value;
5312 }
5313 \f
5314 /* Subroutine of expand_expr:
5315 save the non-copied parts (LIST) of an expr (LHS), and return a list
5316 which can restore these values to their previous values,
5317 should something modify their storage. */
5318
5319 static tree
5320 save_noncopied_parts (lhs, list)
5321 tree lhs;
5322 tree list;
5323 {
5324 tree tail;
5325 tree parts = 0;
5326
5327 for (tail = list; tail; tail = TREE_CHAIN (tail))
5328 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5329 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5330 else
5331 {
5332 tree part = TREE_VALUE (tail);
5333 tree part_type = TREE_TYPE (part);
5334 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5335 rtx target = assign_temp (part_type, 0, 1, 1);
5336 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5337 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5338 parts = tree_cons (to_be_saved,
5339 build (RTL_EXPR, part_type, NULL_TREE,
5340 (tree) target),
5341 parts);
5342 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5343 }
5344 return parts;
5345 }
5346
5347 /* Subroutine of expand_expr:
5348 record the non-copied parts (LIST) of an expr (LHS), and return a list
5349 which specifies the initial values of these parts. */
5350
5351 static tree
5352 init_noncopied_parts (lhs, list)
5353 tree lhs;
5354 tree list;
5355 {
5356 tree tail;
5357 tree parts = 0;
5358
5359 for (tail = list; tail; tail = TREE_CHAIN (tail))
5360 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5361 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5362 else if (TREE_PURPOSE (tail))
5363 {
5364 tree part = TREE_VALUE (tail);
5365 tree part_type = TREE_TYPE (part);
5366 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5367 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5368 }
5369 return parts;
5370 }
5371
5372 /* Subroutine of expand_expr: return nonzero iff there is no way that
5373 EXP can reference X, which is being modified. TOP_P is nonzero if this
5374 call is going to be used to determine whether we need a temporary
5375 for EXP, as opposed to a recursive call to this function.
5376
5377 It is always safe for this routine to return zero since it merely
5378 searches for optimization opportunities. */
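/* For illustration only: when expanding a hypothetical assignment such as

	x = a[i] + f ();

   the expander can ask safe_from_p (DECL_RTL (x), rhs, 1) to decide
   whether the right-hand side may be computed directly into X's rtl or
   whether a temporary is needed, since the call to f might clobber X
   before the addition is complete.  Returning zero is always safe; it
   merely forces use of a temporary.  */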
5379
5380 static int
5381 safe_from_p (x, exp, top_p)
5382 rtx x;
5383 tree exp;
5384 int top_p;
5385 {
5386 rtx exp_rtl = 0;
5387 int i, nops;
5388 static int save_expr_count;
5389 static int save_expr_size = 0;
5390 static tree *save_expr_rewritten;
5391 static tree save_expr_trees[256];
5392
5393 if (x == 0
5394 /* If EXP has varying size, we MUST use a target since we currently
5395 have no way of allocating temporaries of variable size
5396 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5397 So we assume here that something at a higher level has prevented a
5398 clash. This is somewhat bogus, but the best we can do. Only
5399 do this when X is BLKmode and when we are at the top level. */
5400 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5401 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5402 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5403 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5404 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5405 != INTEGER_CST)
5406 && GET_MODE (x) == BLKmode))
5407 return 1;
5408
5409 if (top_p && save_expr_size == 0)
5410 {
5411 int rtn;
5412
5413 save_expr_count = 0;
5414 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5415 save_expr_rewritten = &save_expr_trees[0];
5416
5417 rtn = safe_from_p (x, exp, 1);
5418
5419 for (i = 0; i < save_expr_count; ++i)
5420 {
5421 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5422 abort ();
5423 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5424 }
5425
5426 save_expr_size = 0;
5427
5428 return rtn;
5429 }
5430
5431 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5432 find the underlying pseudo. */
5433 if (GET_CODE (x) == SUBREG)
5434 {
5435 x = SUBREG_REG (x);
5436 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5437 return 0;
5438 }
5439
5440 /* If X is a location in the outgoing argument area, it is always safe. */
5441 if (GET_CODE (x) == MEM
5442 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5443 || (GET_CODE (XEXP (x, 0)) == PLUS
5444 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5445 return 1;
5446
5447 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5448 {
5449 case 'd':
5450 exp_rtl = DECL_RTL (exp);
5451 break;
5452
5453 case 'c':
5454 return 1;
5455
5456 case 'x':
5457 if (TREE_CODE (exp) == TREE_LIST)
5458 return ((TREE_VALUE (exp) == 0
5459 || safe_from_p (x, TREE_VALUE (exp), 0))
5460 && (TREE_CHAIN (exp) == 0
5461 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5462 else if (TREE_CODE (exp) == ERROR_MARK)
5463 return 1; /* An already-visited SAVE_EXPR? */
5464 else
5465 return 0;
5466
5467 case '1':
5468 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5469
5470 case '2':
5471 case '<':
5472 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5473 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5474
5475 case 'e':
5476 case 'r':
5477 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5478 the expression. If it is set, we conflict iff we are that rtx or
5479 both are in memory. Otherwise, we check all operands of the
5480 expression recursively. */
5481
5482 switch (TREE_CODE (exp))
5483 {
5484 case ADDR_EXPR:
5485 return (staticp (TREE_OPERAND (exp, 0))
5486 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5487 || TREE_STATIC (exp));
5488
5489 case INDIRECT_REF:
5490 if (GET_CODE (x) == MEM)
5491 return 0;
5492 break;
5493
5494 case CALL_EXPR:
5495 exp_rtl = CALL_EXPR_RTL (exp);
5496 if (exp_rtl == 0)
5497 {
5498 /* Assume that the call will clobber all hard registers and
5499 all of memory. */
5500 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5501 || GET_CODE (x) == MEM)
5502 return 0;
5503 }
5504
5505 break;
5506
5507 case RTL_EXPR:
5508 /* If a sequence exists, we would have to scan every instruction
5509 in the sequence to see if it was safe. This is probably not
5510 worthwhile. */
5511 if (RTL_EXPR_SEQUENCE (exp))
5512 return 0;
5513
5514 exp_rtl = RTL_EXPR_RTL (exp);
5515 break;
5516
5517 case WITH_CLEANUP_EXPR:
5518 exp_rtl = RTL_EXPR_RTL (exp);
5519 break;
5520
5521 case CLEANUP_POINT_EXPR:
5522 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5523
5524 case SAVE_EXPR:
5525 exp_rtl = SAVE_EXPR_RTL (exp);
5526 if (exp_rtl)
5527 break;
5528
5529 /* This SAVE_EXPR might appear many times in the top-level
5530 safe_from_p() expression, and if it has a complex
5531 subexpression, examining it multiple times could result
5532 in a combinatorial explosion. E.g. on an Alpha
5533 running at least 200MHz, a Fortran test case compiled with
5534 optimization took about 28 minutes to compile -- even though
5535 it was only a few lines long, and the complicated line causing
5536 so much time to be spent in the earlier version of safe_from_p()
5537 had only 293 or so unique nodes.
5538
5539 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5540 where it is so we can turn it back in the top-level safe_from_p()
5541 when we're done. */
5542
5543 /* For now, don't bother re-sizing the array. */
5544 if (save_expr_count >= save_expr_size)
5545 return 0;
5546 save_expr_rewritten[save_expr_count++] = exp;
5547
5548 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5549 for (i = 0; i < nops; i++)
5550 {
5551 tree operand = TREE_OPERAND (exp, i);
5552 if (operand == NULL_TREE)
5553 continue;
5554 TREE_SET_CODE (exp, ERROR_MARK);
5555 if (!safe_from_p (x, operand, 0))
5556 return 0;
5557 TREE_SET_CODE (exp, SAVE_EXPR);
5558 }
5559 TREE_SET_CODE (exp, ERROR_MARK);
5560 return 1;
5561
5562 case BIND_EXPR:
5563 /* The only operand we look at is operand 1. The rest aren't
5564 part of the expression. */
5565 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5566
5567 case METHOD_CALL_EXPR:
5568 	  /* This takes an rtx argument, but shouldn't appear here.  */
5569 abort ();
5570
5571 default:
5572 break;
5573 }
5574
5575 /* If we have an rtx, we do not need to scan our operands. */
5576 if (exp_rtl)
5577 break;
5578
5579 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5580 for (i = 0; i < nops; i++)
5581 if (TREE_OPERAND (exp, i) != 0
5582 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5583 return 0;
5584 }
5585
5586 /* If we have an rtl, find any enclosed object. Then see if we conflict
5587 with it. */
5588 if (exp_rtl)
5589 {
5590 if (GET_CODE (exp_rtl) == SUBREG)
5591 {
5592 exp_rtl = SUBREG_REG (exp_rtl);
5593 if (GET_CODE (exp_rtl) == REG
5594 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5595 return 0;
5596 }
5597
5598 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5599 are memory and EXP is not readonly. */
5600 return ! (rtx_equal_p (x, exp_rtl)
5601 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5602 && ! TREE_READONLY (exp)));
5603 }
5604
5605 /* If we reach here, it is safe. */
5606 return 1;
5607 }
5608
5609 /* Subroutine of expand_expr: return nonzero iff EXP is an
5610 expression whose type is statically determinable. */
5611
5612 static int
5613 fixed_type_p (exp)
5614 tree exp;
5615 {
5616 if (TREE_CODE (exp) == PARM_DECL
5617 || TREE_CODE (exp) == VAR_DECL
5618 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5619 || TREE_CODE (exp) == COMPONENT_REF
5620 || TREE_CODE (exp) == ARRAY_REF)
5621 return 1;
5622 return 0;
5623 }
5624
5625 /* Subroutine of expand_expr: return rtx if EXP is a
5626 variable or parameter; else return 0. */
5627
5628 static rtx
5629 var_rtx (exp)
5630 tree exp;
5631 {
5632 STRIP_NOPS (exp);
5633 switch (TREE_CODE (exp))
5634 {
5635 case PARM_DECL:
5636 case VAR_DECL:
5637 return DECL_RTL (exp);
5638 default:
5639 return 0;
5640 }
5641 }
5642
5643 #ifdef MAX_INTEGER_COMPUTATION_MODE
5644 void
5645 check_max_integer_computation_mode (exp)
5646 tree exp;
5647 {
5648 enum tree_code code;
5649 enum machine_mode mode;
5650
5651 /* Strip any NOPs that don't change the mode. */
5652 STRIP_NOPS (exp);
5653 code = TREE_CODE (exp);
5654
5655 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5656 if (code == NOP_EXPR
5657 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5658 return;
5659
5660 /* First check the type of the overall operation. We need only look at
5661 unary, binary and relational operations. */
5662 if (TREE_CODE_CLASS (code) == '1'
5663 || TREE_CODE_CLASS (code) == '2'
5664 || TREE_CODE_CLASS (code) == '<')
5665 {
5666 mode = TYPE_MODE (TREE_TYPE (exp));
5667 if (GET_MODE_CLASS (mode) == MODE_INT
5668 && mode > MAX_INTEGER_COMPUTATION_MODE)
5669 fatal ("unsupported wide integer operation");
5670 }
5671
5672 /* Check operand of a unary op. */
5673 if (TREE_CODE_CLASS (code) == '1')
5674 {
5675 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5676 if (GET_MODE_CLASS (mode) == MODE_INT
5677 && mode > MAX_INTEGER_COMPUTATION_MODE)
5678 fatal ("unsupported wide integer operation");
5679 }
5680
5681 /* Check operands of a binary/comparison op. */
5682 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5683 {
5684 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5685 if (GET_MODE_CLASS (mode) == MODE_INT
5686 && mode > MAX_INTEGER_COMPUTATION_MODE)
5687 fatal ("unsupported wide integer operation");
5688
5689 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5690 if (GET_MODE_CLASS (mode) == MODE_INT
5691 && mode > MAX_INTEGER_COMPUTATION_MODE)
5692 fatal ("unsupported wide integer operation");
5693 }
5694 }
5695 #endif
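/* For illustration only: on a hypothetical target that defines
   MAX_INTEGER_COMPUTATION_MODE as SImode, a tree such as

	long long a, b;
	... a + b ...		(a DImode PLUS_EXPR on a 32-bit target)

   is rejected by the checks above with "unsupported wide integer
   operation", while NOP_EXPRs of INTEGER_CST nodes are exempted so that
   constants can still be converted to the maximum mode.  */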
5696
5697 \f
5698 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5699 has any readonly fields. If any of the fields have types that
5700 contain readonly fields, return true as well. */
5701
5702 static int
5703 readonly_fields_p (type)
5704 tree type;
5705 {
5706 tree field;
5707
5708 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5709 if (TREE_CODE (field) == FIELD_DECL
5710 && (TREE_READONLY (field)
5711 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5712 && readonly_fields_p (TREE_TYPE (field)))))
5713 return 1;
5714
5715 return 0;
5716 }
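/* For illustration only: given hypothetical types

	struct inner { const int id; };
	struct outer { struct inner in; char buf[8]; };

   readonly_fields_p returns 1 for `struct inner' (the field `id' is
   readonly) and also for `struct outer' (it contains a field whose record
   type itself has a readonly field).  */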
5717 \f
5718 /* expand_expr: generate code for computing expression EXP.
5719 An rtx for the computed value is returned. The value is never null.
5720 In the case of a void EXP, const0_rtx is returned.
5721
5722 The value may be stored in TARGET if TARGET is nonzero.
5723 TARGET is just a suggestion; callers must assume that
5724 the rtx returned may not be the same as TARGET.
5725
5726 If TARGET is CONST0_RTX, it means that the value will be ignored.
5727
5728 If TMODE is not VOIDmode, it suggests generating the
5729 result in mode TMODE. But this is done only when convenient.
5730    Otherwise, TMODE is ignored and the value is generated in its natural mode.
5731 TMODE is just a suggestion; callers must assume that
5732 the rtx returned may not have mode TMODE.
5733
5734 Note that TARGET may have neither TMODE nor MODE. In that case, it
5735 probably will not be used.
5736
5737 If MODIFIER is EXPAND_SUM then when EXP is an addition
5738 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5739 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5740 products as above, or REG or MEM, or constant.
5741 Ordinarily in such cases we would output mul or add instructions
5742 and then return a pseudo reg containing the sum.
5743
5744 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5745 it also marks a label as absolutely required (it can't be dead).
5746 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5747 This is used for outputting expressions used in initializers.
5748
5749 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5750 with a constant address even if that address is not normally legitimate.
5751 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
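/* Hypothetical usage sketch (for illustration only): a caller that simply
   needs the value of a subtree somewhere writes

	op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

   i.e. no suggested target, no suggested mode, EXPAND_NORMAL.  Address
   arithmetic that may profitably stay in (PLUS ...)/(MULT ...) form is
   expanded with EXPAND_SUM instead, as the INDIRECT_REF case below does
   before calling memory_address.  */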
5752
5753 rtx
5754 expand_expr (exp, target, tmode, modifier)
5755 register tree exp;
5756 rtx target;
5757 enum machine_mode tmode;
5758 enum expand_modifier modifier;
5759 {
5760 register rtx op0, op1, temp;
5761 tree type = TREE_TYPE (exp);
5762 int unsignedp = TREE_UNSIGNED (type);
5763 register enum machine_mode mode;
5764 register enum tree_code code = TREE_CODE (exp);
5765 optab this_optab;
5766 rtx subtarget, original_target;
5767 int ignore;
5768 tree context;
5769 /* Used by check-memory-usage to make modifier read only. */
5770 enum expand_modifier ro_modifier;
5771
5772 /* Handle ERROR_MARK before anybody tries to access its type. */
5773 if (TREE_CODE (exp) == ERROR_MARK)
5774 {
5775 op0 = CONST0_RTX (tmode);
5776 if (op0 != 0)
5777 return op0;
5778 return const0_rtx;
5779 }
5780
5781 mode = TYPE_MODE (type);
5782 /* Use subtarget as the target for operand 0 of a binary operation. */
5783 subtarget = get_subtarget (target);
5784 original_target = target;
5785 ignore = (target == const0_rtx
5786 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5787 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5788 || code == COND_EXPR)
5789 && TREE_CODE (type) == VOID_TYPE));
5790
5791 /* Make a read-only version of the modifier. */
5792 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5793 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5794 ro_modifier = modifier;
5795 else
5796 ro_modifier = EXPAND_NORMAL;
5797
5798 /* If we are going to ignore this result, we need only do something
5799 if there is a side-effect somewhere in the expression. If there
5800 is, short-circuit the most common cases here. Note that we must
5801 not call expand_expr with anything but const0_rtx in case this
5802 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5803
5804 if (ignore)
5805 {
5806 if (! TREE_SIDE_EFFECTS (exp))
5807 return const0_rtx;
5808
5809 /* Ensure we reference a volatile object even if value is ignored, but
5810 don't do this if all we are doing is taking its address. */
5811 if (TREE_THIS_VOLATILE (exp)
5812 && TREE_CODE (exp) != FUNCTION_DECL
5813 && mode != VOIDmode && mode != BLKmode
5814 && modifier != EXPAND_CONST_ADDRESS)
5815 {
5816 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5817 if (GET_CODE (temp) == MEM)
5818 temp = copy_to_reg (temp);
5819 return const0_rtx;
5820 }
5821
5822 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5823 || code == INDIRECT_REF || code == BUFFER_REF)
5824 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5825 VOIDmode, ro_modifier);
5826 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5827 || code == ARRAY_REF)
5828 {
5829 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5830 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5831 return const0_rtx;
5832 }
5833 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5834 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5835 /* If the second operand has no side effects, just evaluate
5836 the first. */
5837 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5838 VOIDmode, ro_modifier);
5839 else if (code == BIT_FIELD_REF)
5840 {
5841 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5842 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5843 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5844 return const0_rtx;
5845 }
5846
5847 target = 0;
5848 }
5849
5850 #ifdef MAX_INTEGER_COMPUTATION_MODE
5851 /* Only check stuff here if the mode we want is different from the mode
5852      of the expression; if it's the same, check_max_integer_computation_mode
5853 will handle it. Do we really need to check this stuff at all? */
5854
5855 if (target
5856 && GET_MODE (target) != mode
5857 && TREE_CODE (exp) != INTEGER_CST
5858 && TREE_CODE (exp) != PARM_DECL
5859 && TREE_CODE (exp) != ARRAY_REF
5860 && TREE_CODE (exp) != COMPONENT_REF
5861 && TREE_CODE (exp) != BIT_FIELD_REF
5862 && TREE_CODE (exp) != INDIRECT_REF
5863 && TREE_CODE (exp) != CALL_EXPR
5864 && TREE_CODE (exp) != VAR_DECL
5865 && TREE_CODE (exp) != RTL_EXPR)
5866 {
5867 enum machine_mode mode = GET_MODE (target);
5868
5869 if (GET_MODE_CLASS (mode) == MODE_INT
5870 && mode > MAX_INTEGER_COMPUTATION_MODE)
5871 fatal ("unsupported wide integer operation");
5872 }
5873
5874 if (tmode != mode
5875 && TREE_CODE (exp) != INTEGER_CST
5876 && TREE_CODE (exp) != PARM_DECL
5877 && TREE_CODE (exp) != ARRAY_REF
5878 && TREE_CODE (exp) != COMPONENT_REF
5879 && TREE_CODE (exp) != BIT_FIELD_REF
5880 && TREE_CODE (exp) != INDIRECT_REF
5881 && TREE_CODE (exp) != VAR_DECL
5882 && TREE_CODE (exp) != CALL_EXPR
5883 && TREE_CODE (exp) != RTL_EXPR
5884 && GET_MODE_CLASS (tmode) == MODE_INT
5885 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5886 fatal ("unsupported wide integer operation");
5887
5888 check_max_integer_computation_mode (exp);
5889 #endif
5890
5891   /* If we will do cse, generate all results into pseudo registers
5892 since 1) that allows cse to find more things
5893 and 2) otherwise cse could produce an insn the machine
5894 cannot support. */
5895
5896 if (! cse_not_expected && mode != BLKmode && target
5897 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5898 target = subtarget;
5899
5900 switch (code)
5901 {
5902 case LABEL_DECL:
5903 {
5904 tree function = decl_function_context (exp);
5905 /* Handle using a label in a containing function. */
5906 if (function != current_function_decl
5907 && function != inline_function_decl && function != 0)
5908 {
5909 struct function *p = find_function_data (function);
5910 /* Allocate in the memory associated with the function
5911 that the label is in. */
5912 push_obstacks (p->function_obstack,
5913 p->function_maybepermanent_obstack);
5914
5915 p->expr->x_forced_labels
5916 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5917 p->expr->x_forced_labels);
5918 pop_obstacks ();
5919 }
5920 else
5921 {
5922 if (modifier == EXPAND_INITIALIZER)
5923 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5924 label_rtx (exp),
5925 forced_labels);
5926 }
5927
5928 temp = gen_rtx_MEM (FUNCTION_MODE,
5929 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5930 if (function != current_function_decl
5931 && function != inline_function_decl && function != 0)
5932 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5933 return temp;
5934 }
5935
5936 case PARM_DECL:
5937 if (DECL_RTL (exp) == 0)
5938 {
5939 error_with_decl (exp, "prior parameter's size depends on `%s'");
5940 return CONST0_RTX (mode);
5941 }
5942
5943 /* ... fall through ... */
5944
5945 case VAR_DECL:
5946 /* If a static var's type was incomplete when the decl was written,
5947 but the type is complete now, lay out the decl now. */
5948 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5949 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5950 {
5951 push_obstacks_nochange ();
5952 end_temporary_allocation ();
5953 layout_decl (exp, 0);
5954 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5955 pop_obstacks ();
5956 }
5957
5958 /* Although static-storage variables start off initialized, according to
5959 ANSI C, a memcpy could overwrite them with uninitialized values. So
5960 we check them too. This also lets us check for read-only variables
5961 accessed via a non-const declaration, in case it won't be detected
5962 any other way (e.g., in an embedded system or OS kernel without
5963 memory protection).
5964
5965 Aggregates are not checked here; they're handled elsewhere. */
5966 if (cfun && current_function_check_memory_usage
5967 && code == VAR_DECL
5968 && GET_CODE (DECL_RTL (exp)) == MEM
5969 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5970 {
5971 enum memory_use_mode memory_usage;
5972 memory_usage = get_memory_usage_from_modifier (modifier);
5973
5974 in_check_memory_usage = 1;
5975 if (memory_usage != MEMORY_USE_DONT)
5976 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5977 XEXP (DECL_RTL (exp), 0), Pmode,
5978 GEN_INT (int_size_in_bytes (type)),
5979 TYPE_MODE (sizetype),
5980 GEN_INT (memory_usage),
5981 TYPE_MODE (integer_type_node));
5982 in_check_memory_usage = 0;
5983 }
5984
5985 /* ... fall through ... */
5986
5987 case FUNCTION_DECL:
5988 case RESULT_DECL:
5989 if (DECL_RTL (exp) == 0)
5990 abort ();
5991
5992       /* Ensure the variable is marked as used even if it doesn't go through
5993 	 a parser.  If it hasn't been used yet, write out an external
5994 	 definition.  */
5995 if (! TREE_USED (exp))
5996 {
5997 assemble_external (exp);
5998 TREE_USED (exp) = 1;
5999 }
6000
6001 /* Show we haven't gotten RTL for this yet. */
6002 temp = 0;
6003
6004 /* Handle variables inherited from containing functions. */
6005 context = decl_function_context (exp);
6006
6007 /* We treat inline_function_decl as an alias for the current function
6008 because that is the inline function whose vars, types, etc.
6009 are being merged into the current function.
6010 See expand_inline_function. */
6011
6012 if (context != 0 && context != current_function_decl
6013 && context != inline_function_decl
6014 /* If var is static, we don't need a static chain to access it. */
6015 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6016 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6017 {
6018 rtx addr;
6019
6020 /* Mark as non-local and addressable. */
6021 DECL_NONLOCAL (exp) = 1;
6022 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6023 abort ();
6024 mark_addressable (exp);
6025 if (GET_CODE (DECL_RTL (exp)) != MEM)
6026 abort ();
6027 addr = XEXP (DECL_RTL (exp), 0);
6028 if (GET_CODE (addr) == MEM)
6029 addr = change_address (addr, Pmode,
6030 fix_lexical_addr (XEXP (addr, 0), exp));
6031 else
6032 addr = fix_lexical_addr (addr, exp);
6033
6034 temp = change_address (DECL_RTL (exp), mode, addr);
6035 }
6036
6037 /* This is the case of an array whose size is to be determined
6038 from its initializer, while the initializer is still being parsed.
6039 See expand_decl. */
6040
6041 else if (GET_CODE (DECL_RTL (exp)) == MEM
6042 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6043 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6044 XEXP (DECL_RTL (exp), 0));
6045
6046       /* If DECL_RTL is memory, we are in the normal case: if the address
6047 	 is not valid, or it is not a register and -fforce-addr is specified,
6048 	 get the address into a register.  */
6049
6050 else if (GET_CODE (DECL_RTL (exp)) == MEM
6051 && modifier != EXPAND_CONST_ADDRESS
6052 && modifier != EXPAND_SUM
6053 && modifier != EXPAND_INITIALIZER
6054 && (! memory_address_p (DECL_MODE (exp),
6055 XEXP (DECL_RTL (exp), 0))
6056 || (flag_force_addr
6057 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6058 temp = change_address (DECL_RTL (exp), VOIDmode,
6059 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6060
6061       /* If we got something, return it.  But first, set the alignment
6062 	 if the address is a register.  */
6063 if (temp != 0)
6064 {
6065 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6066 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6067
6068 return temp;
6069 }
6070
6071 /* If the mode of DECL_RTL does not match that of the decl, it
6072 must be a promoted value. We return a SUBREG of the wanted mode,
6073 but mark it so that we know that it was already extended. */
6074
6075 if (GET_CODE (DECL_RTL (exp)) == REG
6076 && GET_MODE (DECL_RTL (exp)) != mode)
6077 {
6078 /* Get the signedness used for this variable. Ensure we get the
6079 same mode we got when the variable was declared. */
6080 if (GET_MODE (DECL_RTL (exp))
6081 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6082 abort ();
6083
6084 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6085 SUBREG_PROMOTED_VAR_P (temp) = 1;
6086 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6087 return temp;
6088 }
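      /* For illustration only: on a target whose PROMOTE_MODE macro widens
	 QImode variables to SImode registers, a `char' variable has
	 DECL_RTL (reg:SI N) while its declared mode is QImode; the code
	 above then returns (subreg:QI (reg:SI N) 0) with
	 SUBREG_PROMOTED_VAR_P set, telling later users that the value is
	 already sign- or zero-extended.  */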
6089
6090 return DECL_RTL (exp);
6091
6092 case INTEGER_CST:
6093 return immed_double_const (TREE_INT_CST_LOW (exp),
6094 TREE_INT_CST_HIGH (exp), mode);
6095
6096 case CONST_DECL:
6097 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6098 EXPAND_MEMORY_USE_BAD);
6099
6100 case REAL_CST:
6101 /* If optimized, generate immediate CONST_DOUBLE
6102 which will be turned into memory by reload if necessary.
6103
6104 We used to force a register so that loop.c could see it. But
6105 this does not allow gen_* patterns to perform optimizations with
6106 the constants. It also produces two insns in cases like "x = 1.0;".
6107 On most machines, floating-point constants are not permitted in
6108 many insns, so we'd end up copying it to a register in any case.
6109
6110 Now, we do the copying in expand_binop, if appropriate. */
6111 return immed_real_const (exp);
6112
6113 case COMPLEX_CST:
6114 case STRING_CST:
6115 if (! TREE_CST_RTL (exp))
6116 output_constant_def (exp);
6117
6118 /* TREE_CST_RTL probably contains a constant address.
6119 On RISC machines where a constant address isn't valid,
6120 make some insns to get that address into a register. */
6121 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6122 && modifier != EXPAND_CONST_ADDRESS
6123 && modifier != EXPAND_INITIALIZER
6124 && modifier != EXPAND_SUM
6125 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6126 || (flag_force_addr
6127 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6128 return change_address (TREE_CST_RTL (exp), VOIDmode,
6129 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6130 return TREE_CST_RTL (exp);
6131
6132 case EXPR_WITH_FILE_LOCATION:
6133 {
6134 rtx to_return;
6135 const char *saved_input_filename = input_filename;
6136 int saved_lineno = lineno;
6137 input_filename = EXPR_WFL_FILENAME (exp);
6138 lineno = EXPR_WFL_LINENO (exp);
6139 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6140 emit_line_note (input_filename, lineno);
6141 	/* Possibly avoid switching back and forth here.  */
6142 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6143 input_filename = saved_input_filename;
6144 lineno = saved_lineno;
6145 return to_return;
6146 }
6147
6148 case SAVE_EXPR:
6149 context = decl_function_context (exp);
6150
6151 /* If this SAVE_EXPR was at global context, assume we are an
6152 initialization function and move it into our context. */
6153 if (context == 0)
6154 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6155
6156 /* We treat inline_function_decl as an alias for the current function
6157 because that is the inline function whose vars, types, etc.
6158 are being merged into the current function.
6159 See expand_inline_function. */
6160 if (context == current_function_decl || context == inline_function_decl)
6161 context = 0;
6162
6163 /* If this is non-local, handle it. */
6164 if (context)
6165 {
6166 /* The following call just exists to abort if the context is
6167 not of a containing function. */
6168 find_function_data (context);
6169
6170 temp = SAVE_EXPR_RTL (exp);
6171 if (temp && GET_CODE (temp) == REG)
6172 {
6173 put_var_into_stack (exp);
6174 temp = SAVE_EXPR_RTL (exp);
6175 }
6176 if (temp == 0 || GET_CODE (temp) != MEM)
6177 abort ();
6178 return change_address (temp, mode,
6179 fix_lexical_addr (XEXP (temp, 0), exp));
6180 }
6181 if (SAVE_EXPR_RTL (exp) == 0)
6182 {
6183 if (mode == VOIDmode)
6184 temp = const0_rtx;
6185 else
6186 temp = assign_temp (type, 3, 0, 0);
6187
6188 SAVE_EXPR_RTL (exp) = temp;
6189 if (!optimize && GET_CODE (temp) == REG)
6190 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6191 save_expr_regs);
6192
6193 /* If the mode of TEMP does not match that of the expression, it
6194 must be a promoted value. We pass store_expr a SUBREG of the
6195 wanted mode but mark it so that we know that it was already
6196 extended. Note that `unsignedp' was modified above in
6197 this case. */
6198
6199 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6200 {
6201 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6202 SUBREG_PROMOTED_VAR_P (temp) = 1;
6203 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6204 }
6205
6206 if (temp == const0_rtx)
6207 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6208 EXPAND_MEMORY_USE_BAD);
6209 else
6210 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6211
6212 TREE_USED (exp) = 1;
6213 }
6214
6215 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6216 must be a promoted value. We return a SUBREG of the wanted mode,
6217 but mark it so that we know that it was already extended. */
6218
6219 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6220 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6221 {
6222 /* Compute the signedness and make the proper SUBREG. */
6223 promote_mode (type, mode, &unsignedp, 0);
6224 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6225 SUBREG_PROMOTED_VAR_P (temp) = 1;
6226 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6227 return temp;
6228 }
6229
6230 return SAVE_EXPR_RTL (exp);
6231
6232 case UNSAVE_EXPR:
6233 {
6234 rtx temp;
6235 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6236 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6237 return temp;
6238 }
6239
6240 case PLACEHOLDER_EXPR:
6241 {
6242 tree placeholder_expr;
6243
6244 /* If there is an object on the head of the placeholder list,
6245 	   see if some object in it is of type TYPE or a pointer to it.  For
6246 further information, see tree.def. */
6247 for (placeholder_expr = placeholder_list;
6248 placeholder_expr != 0;
6249 placeholder_expr = TREE_CHAIN (placeholder_expr))
6250 {
6251 tree need_type = TYPE_MAIN_VARIANT (type);
6252 tree object = 0;
6253 tree old_list = placeholder_list;
6254 tree elt;
6255
6256 /* Find the outermost reference that is of the type we want.
6257 If none, see if any object has a type that is a pointer to
6258 the type we want. */
6259 for (elt = TREE_PURPOSE (placeholder_expr);
6260 elt != 0 && object == 0;
6261 elt
6262 = ((TREE_CODE (elt) == COMPOUND_EXPR
6263 || TREE_CODE (elt) == COND_EXPR)
6264 ? TREE_OPERAND (elt, 1)
6265 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6266 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6267 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6268 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6269 ? TREE_OPERAND (elt, 0) : 0))
6270 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6271 object = elt;
6272
6273 for (elt = TREE_PURPOSE (placeholder_expr);
6274 elt != 0 && object == 0;
6275 elt
6276 = ((TREE_CODE (elt) == COMPOUND_EXPR
6277 || TREE_CODE (elt) == COND_EXPR)
6278 ? TREE_OPERAND (elt, 1)
6279 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6280 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6281 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6282 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6283 ? TREE_OPERAND (elt, 0) : 0))
6284 if (POINTER_TYPE_P (TREE_TYPE (elt))
6285 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6286 == need_type))
6287 object = build1 (INDIRECT_REF, need_type, elt);
6288
6289 if (object != 0)
6290 {
6291 /* Expand this object skipping the list entries before
6292 it was found in case it is also a PLACEHOLDER_EXPR.
6293 In that case, we want to translate it using subsequent
6294 entries. */
6295 placeholder_list = TREE_CHAIN (placeholder_expr);
6296 temp = expand_expr (object, original_target, tmode,
6297 ro_modifier);
6298 placeholder_list = old_list;
6299 return temp;
6300 }
6301 }
6302 }
6303
6304 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6305 abort ();
6306
6307 case WITH_RECORD_EXPR:
6308 /* Put the object on the placeholder list, expand our first operand,
6309 and pop the list. */
6310 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6311 placeholder_list);
6312 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6313 tmode, ro_modifier);
6314 placeholder_list = TREE_CHAIN (placeholder_list);
6315 return target;
6316
6317 case GOTO_EXPR:
6318 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6319 expand_goto (TREE_OPERAND (exp, 0));
6320 else
6321 expand_computed_goto (TREE_OPERAND (exp, 0));
6322 return const0_rtx;
6323
6324 case EXIT_EXPR:
6325 expand_exit_loop_if_false (NULL_PTR,
6326 invert_truthvalue (TREE_OPERAND (exp, 0)));
6327 return const0_rtx;
6328
6329 case LABELED_BLOCK_EXPR:
6330 if (LABELED_BLOCK_BODY (exp))
6331 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6332 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6333 return const0_rtx;
6334
6335 case EXIT_BLOCK_EXPR:
6336 if (EXIT_BLOCK_RETURN (exp))
6337 sorry ("returned value in block_exit_expr");
6338 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6339 return const0_rtx;
6340
6341 case LOOP_EXPR:
6342 push_temp_slots ();
6343 expand_start_loop (1);
6344 expand_expr_stmt (TREE_OPERAND (exp, 0));
6345 expand_end_loop ();
6346 pop_temp_slots ();
6347
6348 return const0_rtx;
6349
6350 case BIND_EXPR:
6351 {
6352 tree vars = TREE_OPERAND (exp, 0);
6353 int vars_need_expansion = 0;
6354
6355 /* Need to open a binding contour here because
6356 if there are any cleanups they must be contained here. */
6357 expand_start_bindings (2);
6358
6359 /* Mark the corresponding BLOCK for output in its proper place. */
6360 if (TREE_OPERAND (exp, 2) != 0
6361 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6362 insert_block (TREE_OPERAND (exp, 2));
6363
6364 /* If VARS have not yet been expanded, expand them now. */
6365 while (vars)
6366 {
6367 if (DECL_RTL (vars) == 0)
6368 {
6369 vars_need_expansion = 1;
6370 expand_decl (vars);
6371 }
6372 expand_decl_init (vars);
6373 vars = TREE_CHAIN (vars);
6374 }
6375
6376 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6377
6378 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6379
6380 return temp;
6381 }
6382
6383 case RTL_EXPR:
6384 if (RTL_EXPR_SEQUENCE (exp))
6385 {
6386 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6387 abort ();
6388 emit_insns (RTL_EXPR_SEQUENCE (exp));
6389 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6390 }
6391 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6392 free_temps_for_rtl_expr (exp);
6393 return RTL_EXPR_RTL (exp);
6394
6395 case CONSTRUCTOR:
6396 /* If we don't need the result, just ensure we evaluate any
6397 subexpressions. */
6398 if (ignore)
6399 {
6400 tree elt;
6401 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6402 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6403 EXPAND_MEMORY_USE_BAD);
6404 return const0_rtx;
6405 }
6406
6407 /* All elts simple constants => refer to a constant in memory. But
6408 if this is a non-BLKmode mode, let it store a field at a time
6409 since that should make a CONST_INT or CONST_DOUBLE when we
6410 fold. Likewise, if we have a target we can use, it is best to
6411 store directly into the target unless the type is large enough
6412 that memcpy will be used. If we are making an initializer and
6413 all operands are constant, put it in memory as well. */
6414 else if ((TREE_STATIC (exp)
6415 && ((mode == BLKmode
6416 && ! (target != 0 && safe_from_p (target, exp, 1)))
6417 || TREE_ADDRESSABLE (exp)
6418 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6419 && (! MOVE_BY_PIECES_P
6420 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6421 TYPE_ALIGN (type)))
6422 && ! mostly_zeros_p (exp))))
6423 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6424 {
6425 rtx constructor = output_constant_def (exp);
6426
6427 if (modifier != EXPAND_CONST_ADDRESS
6428 && modifier != EXPAND_INITIALIZER
6429 && modifier != EXPAND_SUM
6430 && (! memory_address_p (GET_MODE (constructor),
6431 XEXP (constructor, 0))
6432 || (flag_force_addr
6433 && GET_CODE (XEXP (constructor, 0)) != REG)))
6434 constructor = change_address (constructor, VOIDmode,
6435 XEXP (constructor, 0));
6436 return constructor;
6437 }
6438
6439 else
6440 {
6441 /* Handle calls that pass values in multiple non-contiguous
6442 locations. The Irix 6 ABI has examples of this. */
6443 if (target == 0 || ! safe_from_p (target, exp, 1)
6444 || GET_CODE (target) == PARALLEL)
6445 {
6446 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6447 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6448 else
6449 target = assign_temp (type, 0, 1, 1);
6450 }
6451
6452 if (TREE_READONLY (exp))
6453 {
6454 if (GET_CODE (target) == MEM)
6455 target = copy_rtx (target);
6456
6457 RTX_UNCHANGING_P (target) = 1;
6458 }
6459
6460 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6461 int_size_in_bytes (TREE_TYPE (exp)));
6462 return target;
6463 }
6464
6465 case INDIRECT_REF:
6466 {
6467 tree exp1 = TREE_OPERAND (exp, 0);
6468 tree index;
6469 tree string = string_constant (exp1, &index);
6470
6471 /* Try to optimize reads from const strings. */
6472 if (string
6473 && TREE_CODE (string) == STRING_CST
6474 && TREE_CODE (index) == INTEGER_CST
6475 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6476 && GET_MODE_CLASS (mode) == MODE_INT
6477 && GET_MODE_SIZE (mode) == 1
6478 && modifier != EXPAND_MEMORY_USE_WO)
6479 return
6480 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
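	    /* For illustration only: for a read such as

		   char c = *("abc" + 1);

	       STRING is "abc" and INDEX is 1, so the reference folds to
	       GEN_INT ('b') at compile time instead of emitting a load.  */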
6481
6482 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6483 op0 = memory_address (mode, op0);
6484
6485 if (cfun && current_function_check_memory_usage
6486 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6487 {
6488 enum memory_use_mode memory_usage;
6489 memory_usage = get_memory_usage_from_modifier (modifier);
6490
6491 if (memory_usage != MEMORY_USE_DONT)
6492 {
6493 in_check_memory_usage = 1;
6494 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6495 op0, Pmode,
6496 GEN_INT (int_size_in_bytes (type)),
6497 TYPE_MODE (sizetype),
6498 GEN_INT (memory_usage),
6499 TYPE_MODE (integer_type_node));
6500 in_check_memory_usage = 0;
6501 }
6502 }
6503
6504 temp = gen_rtx_MEM (mode, op0);
6505 set_mem_attributes (temp, exp, 0);
6506
6507 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6508 here, because, in C and C++, the fact that a location is accessed
6509 through a pointer to const does not mean that the value there can
6510 never change. Languages where it can never change should
6511 also set TREE_STATIC. */
6512 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
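	  /* For illustration only: in C, given

		 const int *p = &counter;

	     a read through *p typically has TREE_READONLY set but not
	     TREE_STATIC, so the MEM is not marked unchanging; `counter'
	     itself may still be modified through other names.  Front ends
	     for languages where such a value genuinely cannot change set
	     TREE_STATIC as well.  */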
6513
6514 /* If we are writing to this object and its type is a record with
6515 readonly fields, we must mark it as readonly so it will
6516 conflict with readonly references to those fields. */
6517 if (modifier == EXPAND_MEMORY_USE_WO
6518 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6519 RTX_UNCHANGING_P (temp) = 1;
6520
6521 return temp;
6522 }
6523
6524 case ARRAY_REF:
6525 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6526 abort ();
6527
6528 {
6529 tree array = TREE_OPERAND (exp, 0);
6530 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6531 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6532 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6533 HOST_WIDE_INT i;
6534
6535 /* Optimize the special-case of a zero lower bound.
6536
6537 We convert the low_bound to sizetype to avoid some problems
6538 with constant folding. (E.g. suppose the lower bound is 1,
6539 and its mode is QI. Without the conversion, (ARRAY
6540 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6541 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6542
6543 if (! integer_zerop (low_bound))
6544 index = size_diffop (index, convert (sizetype, low_bound));
6545
6546 /* Fold an expression like: "foo"[2].
6547 This is not done in fold so it won't happen inside &.
6548 Don't fold if this is for wide characters since it's too
6549 difficult to do correctly and this is a very rare case. */
6550
6551 if (TREE_CODE (array) == STRING_CST
6552 && TREE_CODE (index) == INTEGER_CST
6553 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6554 && GET_MODE_CLASS (mode) == MODE_INT
6555 && GET_MODE_SIZE (mode) == 1)
6556 return
6557 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6558
6559 /* If this is a constant index into a constant array,
6560 just get the value from the array. Handle both the cases when
6561 we have an explicit constructor and when our operand is a variable
6562 that was declared const. */
6563
6564 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6565 && TREE_CODE (index) == INTEGER_CST
6566 && 0 > compare_tree_int (index,
6567 list_length (CONSTRUCTOR_ELTS
6568 (TREE_OPERAND (exp, 0)))))
6569 {
6570 tree elem;
6571
6572 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6573 i = TREE_INT_CST_LOW (index);
6574 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6575 ;
6576
6577 if (elem)
6578 return expand_expr (fold (TREE_VALUE (elem)), target,
6579 tmode, ro_modifier);
6580 }
6581
6582 else if (optimize >= 1
6583 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6584 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6585 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6586 {
6587 if (TREE_CODE (index) == INTEGER_CST)
6588 {
6589 tree init = DECL_INITIAL (array);
6590
6591 if (TREE_CODE (init) == CONSTRUCTOR)
6592 {
6593 tree elem;
6594
6595 for (elem = CONSTRUCTOR_ELTS (init);
6596 (elem
6597 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6598 elem = TREE_CHAIN (elem))
6599 ;
6600
6601 if (elem)
6602 return expand_expr (fold (TREE_VALUE (elem)), target,
6603 tmode, ro_modifier);
6604 }
6605 else if (TREE_CODE (init) == STRING_CST
6606 && 0 > compare_tree_int (index,
6607 TREE_STRING_LENGTH (init)))
6608 return (GEN_INT
6609 (TREE_STRING_POINTER
6610 (init)[TREE_INT_CST_LOW (index)]));
6611 }
6612 }
6613 }
6614
6615 /* ... fall through ... */
6616
6617 case COMPONENT_REF:
6618 case BIT_FIELD_REF:
6619 /* If the operand is a CONSTRUCTOR, we can just extract the
6620 appropriate field if it is present. Don't do this if we have
6621 already written the data since we want to refer to that copy
6622 and varasm.c assumes that's what we'll do. */
6623 if (code != ARRAY_REF
6624 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6625 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6626 {
6627 tree elt;
6628
6629 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6630 elt = TREE_CHAIN (elt))
6631 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6632 /* We can normally use the value of the field in the
6633 CONSTRUCTOR. However, if this is a bitfield in
6634 an integral mode that we can fit in a HOST_WIDE_INT,
6635 we must mask only the number of bits in the bitfield,
6636 since this is done implicitly by the constructor. If
6637 the bitfield does not meet either of those conditions,
6638 we can't do this optimization. */
6639 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6640 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6641 == MODE_INT)
6642 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6643 <= HOST_BITS_PER_WIDE_INT))))
6644 {
6645 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6646 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6647 {
6648 HOST_WIDE_INT bitsize
6649 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6650
6651 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6652 {
6653 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6654 op0 = expand_and (op0, op1, target);
6655 }
6656 else
6657 {
6658 enum machine_mode imode
6659 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6660 tree count
6661 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6662 0);
6663
6664 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6665 target, 0);
6666 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6667 target, 0);
6668 }
6669 }
6670
6671 return op0;
6672 }
6673 }
6674
6675 {
6676 enum machine_mode mode1;
6677 HOST_WIDE_INT bitsize, bitpos;
6678 tree offset;
6679 int volatilep = 0;
6680 unsigned int alignment;
6681 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6682 &mode1, &unsignedp, &volatilep,
6683 &alignment);
6684
6685 /* If we got back the original object, something is wrong. Perhaps
6686 we are evaluating an expression too early. In any event, don't
6687 infinitely recurse. */
6688 if (tem == exp)
6689 abort ();
6690
6691 /* If TEM's type is a union of variable size, pass TARGET to the inner
6692 computation, since it will need a temporary and TARGET is known
6693 to have to do. This occurs in unchecked conversion in Ada. */
6694
6695 op0 = expand_expr (tem,
6696 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6697 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6698 != INTEGER_CST)
6699 ? target : NULL_RTX),
6700 VOIDmode,
6701 (modifier == EXPAND_INITIALIZER
6702 || modifier == EXPAND_CONST_ADDRESS)
6703 ? modifier : EXPAND_NORMAL);
6704
6705 /* If this is a constant, put it into a register if it is a
6706 	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
6707 if (CONSTANT_P (op0))
6708 {
6709 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6710 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6711 && offset == 0)
6712 op0 = force_reg (mode, op0);
6713 else
6714 op0 = validize_mem (force_const_mem (mode, op0));
6715 }
6716
6717 if (offset != 0)
6718 {
6719 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6720
6721 /* If this object is in memory, put it into a register.
6722 This case can't occur in C, but can in Ada if we have
6723 unchecked conversion of an expression from a scalar type to
6724 an array or record type. */
6725 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6726 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6727 {
6728 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6729
6730 mark_temp_addr_taken (memloc);
6731 emit_move_insn (memloc, op0);
6732 op0 = memloc;
6733 }
6734
6735 if (GET_CODE (op0) != MEM)
6736 abort ();
6737
6738 if (GET_MODE (offset_rtx) != ptr_mode)
6739 {
6740 #ifdef POINTERS_EXTEND_UNSIGNED
6741 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6742 #else
6743 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6744 #endif
6745 }
6746
6747 	    /* A constant address in OP0 can have VOIDmode; we must not try
6748 	       to call force_reg in that case, so avoid it.  */
6749 if (GET_CODE (op0) == MEM
6750 && GET_MODE (op0) == BLKmode
6751 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6752 && bitsize != 0
6753 && (bitpos % bitsize) == 0
6754 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6755 && alignment == GET_MODE_ALIGNMENT (mode1))
6756 {
6757 rtx temp = change_address (op0, mode1,
6758 plus_constant (XEXP (op0, 0),
6759 (bitpos /
6760 BITS_PER_UNIT)));
6761 if (GET_CODE (XEXP (temp, 0)) == REG)
6762 op0 = temp;
6763 else
6764 op0 = change_address (op0, mode1,
6765 force_reg (GET_MODE (XEXP (temp, 0)),
6766 XEXP (temp, 0)));
6767 bitpos = 0;
6768 }
6769
6770
6771 op0 = change_address (op0, VOIDmode,
6772 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6773 force_reg (ptr_mode,
6774 offset_rtx)));
6775 }
6776
6777 /* Don't forget about volatility even if this is a bitfield. */
6778 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6779 {
6780 op0 = copy_rtx (op0);
6781 MEM_VOLATILE_P (op0) = 1;
6782 }
6783
6784 /* Check the access. */
6785 if (cfun != 0 && current_function_check_memory_usage
6786 && GET_CODE (op0) == MEM)
6787 {
6788 enum memory_use_mode memory_usage;
6789 memory_usage = get_memory_usage_from_modifier (modifier);
6790
6791 if (memory_usage != MEMORY_USE_DONT)
6792 {
6793 rtx to;
6794 int size;
6795
6796 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6797 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6798
6799 /* Check the access right of the pointer. */
6800 in_check_memory_usage = 1;
6801 if (size > BITS_PER_UNIT)
6802 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6803 to, Pmode,
6804 GEN_INT (size / BITS_PER_UNIT),
6805 TYPE_MODE (sizetype),
6806 GEN_INT (memory_usage),
6807 TYPE_MODE (integer_type_node));
6808 in_check_memory_usage = 0;
6809 }
6810 }
6811
6812 /* In cases where an aligned union has an unaligned object
6813 as a field, we might be extracting a BLKmode value from
6814 an integer-mode (e.g., SImode) object. Handle this case
6815 by doing the extract into an object as wide as the field
6816 (which we know to be the width of a basic mode), then
6817 storing into memory, and changing the mode to BLKmode.
6818 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6819 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6820 if (mode1 == VOIDmode
6821 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6822 || (modifier != EXPAND_CONST_ADDRESS
6823 && modifier != EXPAND_INITIALIZER
6824 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6825 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6826 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6827 /* If the field isn't aligned enough to fetch as a memref,
6828 fetch it as a bit field. */
6829 || (mode1 != BLKmode
6830 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6831 && ((TYPE_ALIGN (TREE_TYPE (tem))
6832 < GET_MODE_ALIGNMENT (mode))
6833 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6834 /* If the type and the field are a constant size and the
6835 size of the type isn't the same size as the bitfield,
6836 we must use bitfield operations. */
6837 || ((bitsize >= 0
6838 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6839 == INTEGER_CST)
6840 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6841 bitsize)))))
6842 || (modifier != EXPAND_CONST_ADDRESS
6843 && modifier != EXPAND_INITIALIZER
6844 && mode == BLKmode
6845 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6846 && (TYPE_ALIGN (type) > alignment
6847 || bitpos % TYPE_ALIGN (type) != 0)))
6848 {
6849 enum machine_mode ext_mode = mode;
6850
6851 if (ext_mode == BLKmode
6852 && ! (target != 0 && GET_CODE (op0) == MEM
6853 && GET_CODE (target) == MEM
6854 && bitpos % BITS_PER_UNIT == 0))
6855 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6856
6857 if (ext_mode == BLKmode)
6858 {
6859 /* In this case, BITPOS must start at a byte boundary and
6860 TARGET, if specified, must be a MEM. */
6861 if (GET_CODE (op0) != MEM
6862 || (target != 0 && GET_CODE (target) != MEM)
6863 || bitpos % BITS_PER_UNIT != 0)
6864 abort ();
6865
6866 op0 = change_address (op0, VOIDmode,
6867 plus_constant (XEXP (op0, 0),
6868 bitpos / BITS_PER_UNIT));
6869 if (target == 0)
6870 target = assign_temp (type, 0, 1, 1);
6871
6872 emit_block_move (target, op0,
6873 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6874 / BITS_PER_UNIT),
6875 BITS_PER_UNIT);
6876
6877 return target;
6878 }
6879
6880 op0 = validize_mem (op0);
6881
6882 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6883 mark_reg_pointer (XEXP (op0, 0), alignment);
6884
6885 op0 = extract_bit_field (op0, bitsize, bitpos,
6886 unsignedp, target, ext_mode, ext_mode,
6887 alignment,
6888 int_size_in_bytes (TREE_TYPE (tem)));
6889
6890 /* If the result is a record type and BITSIZE is narrower than
6891 the mode of OP0, an integral mode, and this is a big endian
6892 machine, we must put the field into the high-order bits. */
6893 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6894 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6895 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6896 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6897 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6898 - bitsize),
6899 op0, 1);
6900
6901 if (mode == BLKmode)
6902 {
6903 rtx new = assign_stack_temp (ext_mode,
6904 bitsize / BITS_PER_UNIT, 0);
6905
6906 emit_move_insn (new, op0);
6907 op0 = copy_rtx (new);
6908 PUT_MODE (op0, BLKmode);
6909 MEM_SET_IN_STRUCT_P (op0, 1);
6910 }
6911
6912 return op0;
6913 }
6914
6915 /* If the result is BLKmode, use that to access the object
6916 now as well. */
6917 if (mode == BLKmode)
6918 mode1 = BLKmode;
6919
6920 /* Get a reference to just this component. */
6921 if (modifier == EXPAND_CONST_ADDRESS
6922 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6923 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6924 (bitpos / BITS_PER_UNIT)));
6925 else
6926 op0 = change_address (op0, mode1,
6927 plus_constant (XEXP (op0, 0),
6928 (bitpos / BITS_PER_UNIT)));
6929
6930 set_mem_attributes (op0, exp, 0);
6931 if (GET_CODE (XEXP (op0, 0)) == REG)
6932 mark_reg_pointer (XEXP (op0, 0), alignment);
6933
6934 MEM_VOLATILE_P (op0) |= volatilep;
6935 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6936 || modifier == EXPAND_CONST_ADDRESS
6937 || modifier == EXPAND_INITIALIZER)
6938 return op0;
6939 else if (target == 0)
6940 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6941
6942 convert_move (target, op0, unsignedp);
6943 return target;
6944 }
6945
6946 /* Intended for a reference to a buffer of a file-object in Pascal.
6947 But it's not certain that a special tree code will really be
6948 necessary for these. INDIRECT_REF might work for them. */
6949 case BUFFER_REF:
6950 abort ();
6951
6952 case IN_EXPR:
6953 {
6954 /* Pascal set IN expression.
6955
6956 Algorithm:
6957 rlo = set_low - (set_low%bits_per_word);
6958 the_word = set [ (index - rlo)/bits_per_word ];
6959 bit_index = index % bits_per_word;
6960 bitmask = 1 << bit_index;
6961 return !!(the_word & bitmask); */
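	  /* A worked instance of the algorithm above, for illustration:
	     with bits_per_word == 8, set_low == 3 and index == 11,

		 rlo       = 3 - (3 % 8)       = 0
		 the_word  = set[(11 - 0) / 8] = set[1]
		 bit_index = 11 % 8            = 3
		 bitmask   = 1 << 3            = 8

	     so the result is !!(set[1] & 8).  */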
6962
6963 tree set = TREE_OPERAND (exp, 0);
6964 tree index = TREE_OPERAND (exp, 1);
6965 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6966 tree set_type = TREE_TYPE (set);
6967 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6968 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6969 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6970 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6971 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6972 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6973 rtx setaddr = XEXP (setval, 0);
6974 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6975 rtx rlow;
6976 rtx diff, quo, rem, addr, bit, result;
6977
6978 preexpand_calls (exp);
6979
6980 /* If domain is empty, answer is no. Likewise if index is constant
6981 and out of bounds. */
6982 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6983 && TREE_CODE (set_low_bound) == INTEGER_CST
6984 && tree_int_cst_lt (set_high_bound, set_low_bound))
6985 || (TREE_CODE (index) == INTEGER_CST
6986 && TREE_CODE (set_low_bound) == INTEGER_CST
6987 && tree_int_cst_lt (index, set_low_bound))
6988 || (TREE_CODE (set_high_bound) == INTEGER_CST
6989 && TREE_CODE (index) == INTEGER_CST
6990 && tree_int_cst_lt (set_high_bound, index))))
6991 return const0_rtx;
6992
6993 if (target == 0)
6994 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6995
6996 /* If we get here, we have to generate the code for both cases
6997 (in range and out of range). */
6998
6999 op0 = gen_label_rtx ();
7000 op1 = gen_label_rtx ();
7001
7002 if (! (GET_CODE (index_val) == CONST_INT
7003 && GET_CODE (lo_r) == CONST_INT))
7004 {
7005 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7006 GET_MODE (index_val), iunsignedp, 0, op1);
7007 }
7008
7009 if (! (GET_CODE (index_val) == CONST_INT
7010 && GET_CODE (hi_r) == CONST_INT))
7011 {
7012 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7013 GET_MODE (index_val), iunsignedp, 0, op1);
7014 }
7015
7016 /* Calculate the element number of bit zero in the first word
7017 of the set. */
7018 if (GET_CODE (lo_r) == CONST_INT)
7019 rlow = GEN_INT (INTVAL (lo_r)
7020 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7021 else
7022 rlow = expand_binop (index_mode, and_optab, lo_r,
7023 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7024 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7025
7026 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7027 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7028
7029 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7030 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7031 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7032 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7033
7034 addr = memory_address (byte_mode,
7035 expand_binop (index_mode, add_optab, diff,
7036 setaddr, NULL_RTX, iunsignedp,
7037 OPTAB_LIB_WIDEN));
7038
7039 /* Extract the bit we want to examine. */
7040 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7041 gen_rtx_MEM (byte_mode, addr),
7042 make_tree (TREE_TYPE (index), rem),
7043 NULL_RTX, 1);
7044 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7045 GET_MODE (target) == byte_mode ? target : 0,
7046 1, OPTAB_LIB_WIDEN);
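/* RESULT now holds the 0/1 answer for the in-range case. */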
7047
7048 if (result != target)
7049 convert_move (target, result, 1);
7050
7051 /* Output the code to handle the out-of-range case. */
7052 emit_jump (op0);
7053 emit_label (op1);
7054 emit_move_insn (target, const0_rtx);
7055 emit_label (op0);
7056 return target;
7057 }
7058
7059 case WITH_CLEANUP_EXPR:
7060 if (RTL_EXPR_RTL (exp) == 0)
7061 {
7062 RTL_EXPR_RTL (exp)
7063 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7064 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7065
7066 /* That's it for this cleanup. */
7067 TREE_OPERAND (exp, 2) = 0;
7068 }
7069 return RTL_EXPR_RTL (exp);
7070
7071 case CLEANUP_POINT_EXPR:
7072 {
7073 /* Start a new binding layer that will keep track of all cleanup
7074 actions to be performed. */
7075 expand_start_bindings (2);
7076
7077 target_temp_slot_level = temp_slot_level;
7078
7079 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7080 /* If we're going to use this value, load it up now. */
7081 if (! ignore)
7082 op0 = force_not_mem (op0);
7083 preserve_temp_slots (op0);
7084 expand_end_bindings (NULL_TREE, 0, 0);
7085 }
7086 return op0;
7087
7088 case CALL_EXPR:
7089 /* Check for a built-in function. */
7090 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7091 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7092 == FUNCTION_DECL)
7093 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7094 return expand_builtin (exp, target, subtarget, tmode, ignore);
7095
7096 /* If this call was expanded already by preexpand_calls,
7097 just return the result we got. */
7098 if (CALL_EXPR_RTL (exp) != 0)
7099 return CALL_EXPR_RTL (exp);
7100
7101 return expand_call (exp, target, ignore);
7102
7103 case NON_LVALUE_EXPR:
7104 case NOP_EXPR:
7105 case CONVERT_EXPR:
7106 case REFERENCE_EXPR:
7107 if (TREE_OPERAND (exp, 0) == error_mark_node)
7108 return const0_rtx;
7109
7110 if (TREE_CODE (type) == UNION_TYPE)
7111 {
7112 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7113
7114 /* If both input and output are BLKmode, this conversion
7115 isn't actually doing anything unless we need to make the
7116 alignment stricter. */
7117 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7118 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7119 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7120 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7121 modifier);
7122
7123 if (target == 0)
7124 {
7125 if (mode != BLKmode)
7126 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7127 else
7128 target = assign_temp (type, 0, 1, 1);
7129 }
7130
7131 if (GET_CODE (target) == MEM)
7132 /* Store data into beginning of memory target. */
7133 store_expr (TREE_OPERAND (exp, 0),
7134 change_address (target, TYPE_MODE (valtype), 0), 0);
7135
7136 else if (GET_CODE (target) == REG)
7137 /* Store this field into a union of the proper type. */
7138 store_field (target,
7139 MIN ((int_size_in_bytes (TREE_TYPE
7140 (TREE_OPERAND (exp, 0)))
7141 * BITS_PER_UNIT),
7142 GET_MODE_BITSIZE (mode)),
7143 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7144 VOIDmode, 0, BITS_PER_UNIT,
7145 int_size_in_bytes (type), 0);
7146 else
7147 abort ();
7148
7149 /* Return the entire union. */
7150 return target;
7151 }
7152
7153 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7154 {
7155 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7156 ro_modifier);
7157
7158 /* If the signedness of the conversion differs and OP0 is
7159 a promoted SUBREG, clear that indication since we now
7160 have to do the proper extension. */
7161 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7162 && GET_CODE (op0) == SUBREG)
7163 SUBREG_PROMOTED_VAR_P (op0) = 0;
7164
7165 return op0;
7166 }
7167
7168 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7169 if (GET_MODE (op0) == mode)
7170 return op0;
7171
7172 /* If OP0 is a constant, just convert it into the proper mode. */
7173 if (CONSTANT_P (op0))
7174 return
7175 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7176 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7177
7178 if (modifier == EXPAND_INITIALIZER)
7179 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7180
7181 if (target == 0)
7182 return
7183 convert_to_mode (mode, op0,
7184 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7185 else
7186 convert_move (target, op0,
7187 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7188 return target;
7189
7190 case PLUS_EXPR:
7191 /* We come here from MINUS_EXPR when the second operand is a
7192 constant. */
7193 plus_expr:
7194 this_optab = add_optab;
7195
7196 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7197 something else, make sure we add the register to the constant and
7198 then to the other thing. This case can occur during strength
7199 reduction and doing it this way will produce better code if the
7200 frame pointer or argument pointer is eliminated.
7201
7202 fold-const.c will ensure that the constant is always in the inner
7203 PLUS_EXPR, so the only case we need to do anything about is if
7204 sp, ap, or fp is our second argument, in which case we must swap
7205 the innermost first argument and our second argument. */
7206
7207 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7208 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7209 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7210 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7211 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7212 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7213 {
7214 tree t = TREE_OPERAND (exp, 1);
7215
7216 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7217 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7218 }
7219
7220 /* If the result is to be ptr_mode and we are adding an integer to
7221 something, we might be forming a constant. So try to use
7222 plus_constant. If it produces a sum and we can't accept it,
7223 use force_operand. This allows P = &ARR[const] to generate
7224 efficient code on machines where a SYMBOL_REF is not a valid
7225 address.
7226
7227 If this is an EXPAND_SUM call, always return the sum. */
7228 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7229 || mode == ptr_mode)
7230 {
7231 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7232 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7233 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7234 {
7235 rtx constant_part;
7236
7237 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7238 EXPAND_SUM);
7239 /* Use immed_double_const to ensure that the constant is
7240 truncated according to the mode of OP1, then sign extended
7241 to a HOST_WIDE_INT. Using the constant directly can result
7242 in non-canonical RTL in a 64x32 cross compile. */
7243 constant_part
7244 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7245 (HOST_WIDE_INT) 0,
7246 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7247 op1 = plus_constant (op1, INTVAL (constant_part));
7248 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7249 op1 = force_operand (op1, target);
7250 return op1;
7251 }
7252
7253 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7254 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7255 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7256 {
7257 rtx constant_part;
7258
7259 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7260 EXPAND_SUM);
7261 if (! CONSTANT_P (op0))
7262 {
7263 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7264 VOIDmode, modifier);
7265 /* Don't go to both_summands if modifier
7266 says it's not right to return a PLUS. */
7267 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7268 goto binop2;
7269 goto both_summands;
7270 }
7271 /* Use immed_double_const to ensure that the constant is
7272 truncated according to the mode of OP0, then sign extended
7273 to a HOST_WIDE_INT. Using the constant directly can result
7274 in non-canonical RTL in a 64x32 cross compile. */
7275 constant_part
7276 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7277 (HOST_WIDE_INT) 0,
7278 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7279 op0 = plus_constant (op0, INTVAL (constant_part));
7280 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7281 op0 = force_operand (op0, target);
7282 return op0;
7283 }
7284 }
7285
7286 /* No sense saving up arithmetic to be done
7287 if it's all in the wrong mode to form part of an address.
7288 And force_operand won't know whether to sign-extend or
7289 zero-extend. */
7290 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7291 || mode != ptr_mode)
7292 goto binop;
7293
7294 preexpand_calls (exp);
7295 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7296 subtarget = 0;
7297
7298 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7299 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7300
7301 both_summands:
7302 /* Make sure any term that's a sum with a constant comes last. */
7303 if (GET_CODE (op0) == PLUS
7304 && CONSTANT_P (XEXP (op0, 1)))
7305 {
7306 temp = op0;
7307 op0 = op1;
7308 op1 = temp;
7309 }
7310 /* If adding to a sum including a constant,
7311 associate it to put the constant outside. */
7312 if (GET_CODE (op1) == PLUS
7313 && CONSTANT_P (XEXP (op1, 1)))
7314 {
7315 rtx constant_term = const0_rtx;
7316
7317 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7318 if (temp != 0)
7319 op0 = temp;
7320 /* Ensure that MULT comes first if there is one. */
7321 else if (GET_CODE (op0) == MULT)
7322 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7323 else
7324 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7325
7326 /* Let's also eliminate constants from op0 if possible. */
7327 op0 = eliminate_constant_term (op0, &constant_term);
7328
7329 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7330 their sum should be a constant. Form it into OP1, since the
7331 result we want will then be OP0 + OP1. */
7332
7333 temp = simplify_binary_operation (PLUS, mode, constant_term,
7334 XEXP (op1, 1));
7335 if (temp != 0)
7336 op1 = temp;
7337 else
7338 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7339 }
7340
7341 /* Put a constant term last and put a multiplication first. */
7342 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7343 temp = op1, op1 = op0, op0 = temp;
7344
7345 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7346 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7347
7348 case MINUS_EXPR:
7349 /* For initializers, we are allowed to return a MINUS of two
7350 symbolic constants. Here we handle all cases when both operands
7351 are constant. */
7352 /* Handle difference of two symbolic constants,
7353 for the sake of an initializer. */
7354 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7355 && really_constant_p (TREE_OPERAND (exp, 0))
7356 && really_constant_p (TREE_OPERAND (exp, 1)))
7357 {
7358 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7359 VOIDmode, ro_modifier);
7360 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7361 VOIDmode, ro_modifier);
7362
7363 /* If the last operand is a CONST_INT, use plus_constant of
7364 the negated constant. Else make the MINUS. */
7365 if (GET_CODE (op1) == CONST_INT)
7366 return plus_constant (op0, - INTVAL (op1));
7367 else
7368 return gen_rtx_MINUS (mode, op0, op1);
7369 }
7370 /* Convert A - const to A + (-const). */
7371 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7372 {
7373 tree negated = fold (build1 (NEGATE_EXPR, type,
7374 TREE_OPERAND (exp, 1)));
7375
7376 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7377 /* If we can't negate the constant in TYPE, leave it alone and
7378 expand_binop will negate it for us. We used to try to do it
7379 here in the signed version of TYPE, but that doesn't work
7380 on POINTER_TYPEs. */;
7381 else
7382 {
7383 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7384 goto plus_expr;
7385 }
7386 }
7387 this_optab = sub_optab;
7388 goto binop;
7389
7390 case MULT_EXPR:
7391 preexpand_calls (exp);
7392 /* If first operand is constant, swap them.
7393 Thus the following special case checks need only
7394 check the second operand. */
7395 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7396 {
7397 register tree t1 = TREE_OPERAND (exp, 0);
7398 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7399 TREE_OPERAND (exp, 1) = t1;
7400 }
7401
7402 /* Attempt to return something suitable for generating an
7403 indexed address, for machines that support that. */
7404
7405 if (modifier == EXPAND_SUM && mode == ptr_mode
7406 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7407 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7408 {
7409 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7410 EXPAND_SUM);
7411
7412 /* Apply distributive law if OP0 is x+c. */
7413 if (GET_CODE (op0) == PLUS
7414 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7415 return
7416 gen_rtx_PLUS
7417 (mode,
7418 gen_rtx_MULT
7419 (mode, XEXP (op0, 0),
7420 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7421 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7422 * INTVAL (XEXP (op0, 1))));
7423
7424 if (GET_CODE (op0) != REG)
7425 op0 = force_operand (op0, NULL_RTX);
7426 if (GET_CODE (op0) != REG)
7427 op0 = copy_to_mode_reg (mode, op0);
7428
7429 return
7430 gen_rtx_MULT (mode, op0,
7431 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7432 }
7433
7434 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7435 subtarget = 0;
7436
7437 /* Check for multiplying things that have been extended
7438 from a narrower type. If this machine supports multiplying
7439 in that narrower type with a result in the desired type,
7440 do it that way, and avoid the explicit type-conversion. */
7441 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7442 && TREE_CODE (type) == INTEGER_TYPE
7443 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7444 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7445 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7446 && int_fits_type_p (TREE_OPERAND (exp, 1),
7447 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7448 /* Don't use a widening multiply if a shift will do. */
7449 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7450 > HOST_BITS_PER_WIDE_INT)
7451 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7452 ||
7453 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7454 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7455 ==
7456 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7457 /* If both operands are extended, they must either both
7458 be zero-extended or both be sign-extended. */
7459 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7460 ==
7461 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7462 {
7463 enum machine_mode innermode
7464 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7465 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7466 ? smul_widen_optab : umul_widen_optab);
7467 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7468 ? umul_widen_optab : smul_widen_optab);
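/* THIS_OPTAB is the widening multiply matching the operands' signedness; OTHER_OPTAB is the opposite-signedness one, usable only if the high part is fixed up afterward via expand_mult_highpart_adjust. */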
7469 if (mode == GET_MODE_WIDER_MODE (innermode))
7470 {
7471 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7472 {
7473 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7474 NULL_RTX, VOIDmode, 0);
7475 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7476 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7477 VOIDmode, 0);
7478 else
7479 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7480 NULL_RTX, VOIDmode, 0);
7481 goto binop2;
7482 }
7483 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7484 && innermode == word_mode)
7485 {
7486 rtx htem;
7487 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7488 NULL_RTX, VOIDmode, 0);
7489 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7490 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7491 VOIDmode, 0);
7492 else
7493 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7494 NULL_RTX, VOIDmode, 0);
7495 temp = expand_binop (mode, other_optab, op0, op1, target,
7496 unsignedp, OPTAB_LIB_WIDEN);
7497 htem = expand_mult_highpart_adjust (innermode,
7498 gen_highpart (innermode, temp),
7499 op0, op1,
7500 gen_highpart (innermode, temp),
7501 unsignedp);
7502 emit_move_insn (gen_highpart (innermode, temp), htem);
7503 return temp;
7504 }
7505 }
7506 }
7507 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7508 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7509 return expand_mult (mode, op0, op1, target, unsignedp);
7510
7511 case TRUNC_DIV_EXPR:
7512 case FLOOR_DIV_EXPR:
7513 case CEIL_DIV_EXPR:
7514 case ROUND_DIV_EXPR:
7515 case EXACT_DIV_EXPR:
7516 preexpand_calls (exp);
7517 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7518 subtarget = 0;
7519 /* Possible optimization: compute the dividend with EXPAND_SUM;
7520 then, if the divisor is constant, we can optimize the case
7521 where some terms of the dividend have coefficients divisible by it. */
7522 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7523 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7524 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7525
7526 case RDIV_EXPR:
7527 this_optab = flodiv_optab;
7528 goto binop;
7529
7530 case TRUNC_MOD_EXPR:
7531 case FLOOR_MOD_EXPR:
7532 case CEIL_MOD_EXPR:
7533 case ROUND_MOD_EXPR:
7534 preexpand_calls (exp);
7535 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7536 subtarget = 0;
7537 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7538 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7539 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7540
7541 case FIX_ROUND_EXPR:
7542 case FIX_FLOOR_EXPR:
7543 case FIX_CEIL_EXPR:
7544 abort (); /* Not used for C. */
7545
7546 case FIX_TRUNC_EXPR:
7547 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7548 if (target == 0)
7549 target = gen_reg_rtx (mode);
7550 expand_fix (target, op0, unsignedp);
7551 return target;
7552
7553 case FLOAT_EXPR:
7554 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7555 if (target == 0)
7556 target = gen_reg_rtx (mode);
7557 /* expand_float can't figure out what to do if FROM has VOIDmode.
7558 So give it the correct mode. With -O, cse will optimize this. */
7559 if (GET_MODE (op0) == VOIDmode)
7560 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7561 op0);
7562 expand_float (target, op0,
7563 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7564 return target;
7565
7566 case NEGATE_EXPR:
7567 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7568 temp = expand_unop (mode, neg_optab, op0, target, 0);
7569 if (temp == 0)
7570 abort ();
7571 return temp;
7572
7573 case ABS_EXPR:
7574 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7575
7576 /* Handle complex values specially. */
7577 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7578 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7579 return expand_complex_abs (mode, op0, target, unsignedp);
7580
7581 /* Unsigned abs is simply the operand. Testing here means we don't
7582 risk generating incorrect code below. */
7583 if (TREE_UNSIGNED (type))
7584 return op0;
7585
7586 return expand_abs (mode, op0, target,
7587 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7588
7589 case MAX_EXPR:
7590 case MIN_EXPR:
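/* Reuse ORIGINAL_TARGET only when it is safe and usable here (right mode, not a volatile MEM or a hard register); otherwise compute into a fresh pseudo. */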
7591 target = original_target;
7592 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7593 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7594 || GET_MODE (target) != mode
7595 || (GET_CODE (target) == REG
7596 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7597 target = gen_reg_rtx (mode);
7598 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7599 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7600
7601 /* First try to do it with a special MIN or MAX instruction.
7602 If that does not win, use a conditional jump to select the proper
7603 value. */
7604 this_optab = (TREE_UNSIGNED (type)
7605 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7606 : (code == MIN_EXPR ? smin_optab : smax_optab));
7607
7608 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7609 OPTAB_WIDEN);
7610 if (temp != 0)
7611 return temp;
7612
7613 /* At this point, a MEM target is no longer useful; we will get better
7614 code without it. */
7615
7616 if (GET_CODE (target) == MEM)
7617 target = gen_reg_rtx (mode);
7618
7619 if (target != op0)
7620 emit_move_insn (target, op0);
7621
7622 op0 = gen_label_rtx ();
7623
7624 /* If this mode is an integer too wide to compare properly,
7625 compare word by word. Rely on cse to optimize constant cases. */
7626 if (GET_MODE_CLASS (mode) == MODE_INT
7627 && ! can_compare_p (GE, mode, ccp_jump))
7628 {
7629 if (code == MAX_EXPR)
7630 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7631 target, op1, NULL_RTX, op0);
7632 else
7633 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7634 op1, target, NULL_RTX, op0);
7635 }
7636 else
7637 {
7638 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7639 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7640 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7641 op0);
7642 }
7643 emit_move_insn (target, op1);
7644 emit_label (op0);
7645 return target;
7646
7647 case BIT_NOT_EXPR:
7648 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7649 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7650 if (temp == 0)
7651 abort ();
7652 return temp;
7653
7654 case FFS_EXPR:
7655 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7656 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7657 if (temp == 0)
7658 abort ();
7659 return temp;
7660
7661 /* ??? Can optimize bitwise operations with one arg constant.
7662 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7663 and (a bitwise1 b) bitwise2 b (etc)
7664 but that is probably not worthwhile. */
7665
7666 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7667 boolean values when we want in all cases to compute both of them. In
7668 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7669 as actual zero-or-1 values and then bitwise anding. In cases where
7670 there cannot be any side effects, better code would be made by
7671 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7672 how to recognize those cases. */
7673
7674 case TRUTH_AND_EXPR:
7675 case BIT_AND_EXPR:
7676 this_optab = and_optab;
7677 goto binop;
7678
7679 case TRUTH_OR_EXPR:
7680 case BIT_IOR_EXPR:
7681 this_optab = ior_optab;
7682 goto binop;
7683
7684 case TRUTH_XOR_EXPR:
7685 case BIT_XOR_EXPR:
7686 this_optab = xor_optab;
7687 goto binop;
7688
7689 case LSHIFT_EXPR:
7690 case RSHIFT_EXPR:
7691 case LROTATE_EXPR:
7692 case RROTATE_EXPR:
7693 preexpand_calls (exp);
7694 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7695 subtarget = 0;
7696 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7697 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7698 unsignedp);
7699
7700 /* Could determine the answer when only additive constants differ. Also,
7701 the addition of one can be handled by changing the condition. */
7702 case LT_EXPR:
7703 case LE_EXPR:
7704 case GT_EXPR:
7705 case GE_EXPR:
7706 case EQ_EXPR:
7707 case NE_EXPR:
7708 case UNORDERED_EXPR:
7709 case ORDERED_EXPR:
7710 case UNLT_EXPR:
7711 case UNLE_EXPR:
7712 case UNGT_EXPR:
7713 case UNGE_EXPR:
7714 case UNEQ_EXPR:
7715 preexpand_calls (exp);
7716 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7717 if (temp != 0)
7718 return temp;
7719
7720 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7721 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7722 && original_target
7723 && GET_CODE (original_target) == REG
7724 && (GET_MODE (original_target)
7725 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7726 {
7727 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7728 VOIDmode, 0);
7729
7730 if (temp != original_target)
7731 temp = copy_to_reg (temp);
7732
7733 op1 = gen_label_rtx ();
7734 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7735 GET_MODE (temp), unsignedp, 0, op1);
7736 emit_move_insn (temp, const1_rtx);
7737 emit_label (op1);
7738 return temp;
7739 }
7740
7741 /* If no set-flag instruction, must generate a conditional
7742 store into a temporary variable. Drop through
7743 and handle this like && and ||. */
7744
7745 case TRUTH_ANDIF_EXPR:
7746 case TRUTH_ORIF_EXPR:
7747 if (! ignore
7748 && (target == 0 || ! safe_from_p (target, exp, 1)
7749 /* Make sure we don't have a hard reg (such as function's return
7750 value) live across basic blocks, if not optimizing. */
7751 || (!optimize && GET_CODE (target) == REG
7752 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7753 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7754
7755 if (target)
7756 emit_clr_insn (target);
7757
7758 op1 = gen_label_rtx ();
7759 jumpifnot (exp, op1);
7760
7761 if (target)
7762 emit_0_to_1_insn (target);
7763
7764 emit_label (op1);
7765 return ignore ? const0_rtx : target;
7766
7767 case TRUTH_NOT_EXPR:
7768 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7769 /* The parser is careful to generate TRUTH_NOT_EXPR
7770 only with operands that are always zero or one. */
7771 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7772 target, 1, OPTAB_LIB_WIDEN);
7773 if (temp == 0)
7774 abort ();
7775 return temp;
7776
7777 case COMPOUND_EXPR:
7778 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7779 emit_queue ();
7780 return expand_expr (TREE_OPERAND (exp, 1),
7781 (ignore ? const0_rtx : target),
7782 VOIDmode, 0);
7783
7784 case COND_EXPR:
7785 /* If we would have a "singleton" (see below) were it not for a
7786 conversion in each arm, bring that conversion back out. */
7787 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7788 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7789 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7790 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7791 {
7792 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7793 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7794
7795 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7796 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7797 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7798 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7799 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7800 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7801 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7802 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7803 return expand_expr (build1 (NOP_EXPR, type,
7804 build (COND_EXPR, TREE_TYPE (true),
7805 TREE_OPERAND (exp, 0),
7806 true, false)),
7807 target, tmode, modifier);
7808 }
7809
7810 {
7811 /* Note that COND_EXPRs whose type is a structure or union
7812 are required to be constructed to contain assignments of
7813 a temporary variable, so that we can evaluate them here
7814 for side effect only. If type is void, we must do likewise. */
7815
7816 /* If an arm of the branch requires a cleanup,
7817 only that cleanup is performed. */
7818
7819 tree singleton = 0;
7820 tree binary_op = 0, unary_op = 0;
7821
7822 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7823 convert it to our mode, if necessary. */
7824 if (integer_onep (TREE_OPERAND (exp, 1))
7825 && integer_zerop (TREE_OPERAND (exp, 2))
7826 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7827 {
7828 if (ignore)
7829 {
7830 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7831 ro_modifier);
7832 return const0_rtx;
7833 }
7834
7835 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7836 if (GET_MODE (op0) == mode)
7837 return op0;
7838
7839 if (target == 0)
7840 target = gen_reg_rtx (mode);
7841 convert_move (target, op0, unsignedp);
7842 return target;
7843 }
7844
7845 /* Check for X ? A + B : A. If we have this, we can copy A to the
7846 output and conditionally add B. Similarly for unary operations.
7847 Don't do this if X has side-effects because those side effects
7848 might affect A or B and the "?" operation is a sequence point in
7849 ANSI. (operand_equal_p tests for side effects.) */
7850
7851 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7852 && operand_equal_p (TREE_OPERAND (exp, 2),
7853 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7854 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7855 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7856 && operand_equal_p (TREE_OPERAND (exp, 1),
7857 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7858 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7859 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7860 && operand_equal_p (TREE_OPERAND (exp, 2),
7861 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7862 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7863 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7864 && operand_equal_p (TREE_OPERAND (exp, 1),
7865 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7866 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7867
7868 /* If we are not to produce a result, we have no target. Otherwise,
7869 if a target was specified use it; it will not be used as an
7870 intermediate target unless it is safe. If no target, use a
7871 temporary. */
7872
7873 if (ignore)
7874 temp = 0;
7875 else if (original_target
7876 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7877 || (singleton && GET_CODE (original_target) == REG
7878 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7879 && original_target == var_rtx (singleton)))
7880 && GET_MODE (original_target) == mode
7881 #ifdef HAVE_conditional_move
7882 && (! can_conditionally_move_p (mode)
7883 || GET_CODE (original_target) == REG
7884 || TREE_ADDRESSABLE (type))
7885 #endif
7886 && ! (GET_CODE (original_target) == MEM
7887 && MEM_VOLATILE_P (original_target)))
7888 temp = original_target;
7889 else if (TREE_ADDRESSABLE (type))
7890 abort ();
7891 else
7892 temp = assign_temp (type, 0, 0, 1);
7893
7894 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7895 do the test of X as a store-flag operation, do this as
7896 A + ((X != 0) << log C). Similarly for other simple binary
7897 operators. Only do for C == 1 if BRANCH_COST is low. */
7898 if (temp && singleton && binary_op
7899 && (TREE_CODE (binary_op) == PLUS_EXPR
7900 || TREE_CODE (binary_op) == MINUS_EXPR
7901 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7902 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7903 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7904 : integer_onep (TREE_OPERAND (binary_op, 1)))
7905 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7906 {
7907 rtx result;
7908 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7909 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7910 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7911 : xor_optab);
7912
7913 /* If we had X ? A : A + 1, do this as A + (X == 0).
7914
7915 We have to invert the truth value here and then put it
7916 back later if do_store_flag fails. We cannot simply copy
7917 TREE_OPERAND (exp, 0) to another variable and modify that
7918 because invert_truthvalue can modify the tree pointed to
7919 by its argument. */
7920 if (singleton == TREE_OPERAND (exp, 1))
7921 TREE_OPERAND (exp, 0)
7922 = invert_truthvalue (TREE_OPERAND (exp, 0));
7923
7924 result = do_store_flag (TREE_OPERAND (exp, 0),
7925 (safe_from_p (temp, singleton, 1)
7926 ? temp : NULL_RTX),
7927 mode, BRANCH_COST <= 1);
7928
7929 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7930 result = expand_shift (LSHIFT_EXPR, mode, result,
7931 build_int_2 (tree_log2
7932 (TREE_OPERAND
7933 (binary_op, 1)),
7934 0),
7935 (safe_from_p (temp, singleton, 1)
7936 ? temp : NULL_RTX), 0);
7937
7938 if (result)
7939 {
7940 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7941 return expand_binop (mode, boptab, op1, result, temp,
7942 unsignedp, OPTAB_LIB_WIDEN);
7943 }
7944 else if (singleton == TREE_OPERAND (exp, 1))
7945 TREE_OPERAND (exp, 0)
7946 = invert_truthvalue (TREE_OPERAND (exp, 0));
7947 }
7948
7949 do_pending_stack_adjust ();
7950 NO_DEFER_POP;
7951 op0 = gen_label_rtx ();
7952
7953 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7954 {
7955 if (temp != 0)
7956 {
7957 /* If the target conflicts with the other operand of the
7958 binary op, we can't use it. Also, we can't use the target
7959 if it is a hard register, because evaluating the condition
7960 might clobber it. */
7961 if ((binary_op
7962 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7963 || (GET_CODE (temp) == REG
7964 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7965 temp = gen_reg_rtx (mode);
7966 store_expr (singleton, temp, 0);
7967 }
7968 else
7969 expand_expr (singleton,
7970 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7971 if (singleton == TREE_OPERAND (exp, 1))
7972 jumpif (TREE_OPERAND (exp, 0), op0);
7973 else
7974 jumpifnot (TREE_OPERAND (exp, 0), op0);
7975
7976 start_cleanup_deferral ();
7977 if (binary_op && temp == 0)
7978 /* Just touch the other operand. */
7979 expand_expr (TREE_OPERAND (binary_op, 1),
7980 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7981 else if (binary_op)
7982 store_expr (build (TREE_CODE (binary_op), type,
7983 make_tree (type, temp),
7984 TREE_OPERAND (binary_op, 1)),
7985 temp, 0);
7986 else
7987 store_expr (build1 (TREE_CODE (unary_op), type,
7988 make_tree (type, temp)),
7989 temp, 0);
7990 op1 = op0;
7991 }
7992 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7993 comparison operator. If we have one of these cases, set the
7994 output to A, branch on A (cse will merge these two references),
7995 then set the output to FOO. */
7996 else if (temp
7997 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7998 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7999 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8000 TREE_OPERAND (exp, 1), 0)
8001 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8002 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8003 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8004 {
8005 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8006 temp = gen_reg_rtx (mode);
8007 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8008 jumpif (TREE_OPERAND (exp, 0), op0);
8009
8010 start_cleanup_deferral ();
8011 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8012 op1 = op0;
8013 }
8014 else if (temp
8015 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8016 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8017 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8018 TREE_OPERAND (exp, 2), 0)
8019 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8020 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8021 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8022 {
8023 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8024 temp = gen_reg_rtx (mode);
8025 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8026 jumpifnot (TREE_OPERAND (exp, 0), op0);
8027
8028 start_cleanup_deferral ();
8029 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8030 op1 = op0;
8031 }
8032 else
8033 {
8034 op1 = gen_label_rtx ();
8035 jumpifnot (TREE_OPERAND (exp, 0), op0);
8036
8037 start_cleanup_deferral ();
8038
8039 /* One branch of the cond can be void, if it never returns. For
8040 example, A ? throw : E. */
8041 if (temp != 0
8042 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8043 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8044 else
8045 expand_expr (TREE_OPERAND (exp, 1),
8046 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8047 end_cleanup_deferral ();
8048 emit_queue ();
8049 emit_jump_insn (gen_jump (op1));
8050 emit_barrier ();
8051 emit_label (op0);
8052 start_cleanup_deferral ();
8053 if (temp != 0
8054 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8055 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8056 else
8057 expand_expr (TREE_OPERAND (exp, 2),
8058 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8059 }
8060
8061 end_cleanup_deferral ();
8062
8063 emit_queue ();
8064 emit_label (op1);
8065 OK_DEFER_POP;
8066
8067 return temp;
8068 }
8069
8070 case TARGET_EXPR:
8071 {
8072 /* Something needs to be initialized, but we didn't know
8073 where that thing was when building the tree. For example,
8074 it could be the return value of a function, or a parameter
8075 to a function which is laid out on the stack, or a temporary
8076 variable which must be passed by reference.
8077
8078 We guarantee that the expression will either be constructed
8079 or copied into our original target. */
8080
8081 tree slot = TREE_OPERAND (exp, 0);
8082 tree cleanups = NULL_TREE;
8083 tree exp1;
8084
8085 if (TREE_CODE (slot) != VAR_DECL)
8086 abort ();
8087
8088 if (! ignore)
8089 target = original_target;
8090
8091 /* Set this here so that if we get a target that refers to a
8092 register variable that's already been used, put_reg_into_stack
8093 knows that it should fix up those uses. */
8094 TREE_USED (slot) = 1;
8095
8096 if (target == 0)
8097 {
8098 if (DECL_RTL (slot) != 0)
8099 {
8100 target = DECL_RTL (slot);
8101 /* If we have already expanded the slot, don't do
8102 it again. (mrs) */
8103 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8104 return target;
8105 }
8106 else
8107 {
8108 target = assign_temp (type, 2, 0, 1);
8109 /* All temp slots at this level must not conflict. */
8110 preserve_temp_slots (target);
8111 DECL_RTL (slot) = target;
8112 if (TREE_ADDRESSABLE (slot))
8113 {
8114 TREE_ADDRESSABLE (slot) = 0;
8115 mark_addressable (slot);
8116 }
8117
8118 /* Since SLOT is not known to the called function
8119 to belong to its stack frame, we must build an explicit
8120 cleanup. This case occurs when we must build up a reference
8121 to pass as an argument. In this case,
8122 it is very likely that such a reference need not be
8123 built here. */
8124
8125 if (TREE_OPERAND (exp, 2) == 0)
8126 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8127 cleanups = TREE_OPERAND (exp, 2);
8128 }
8129 }
8130 else
8131 {
8132 /* This case does occur, when expanding a parameter which
8133 needs to be constructed on the stack. The target
8134 is the actual stack address that we want to initialize.
8135 The function we call will perform the cleanup in this case. */
8136
8137 /* If we have already assigned it space, use that space,
8138 not the target that we were passed in, as our target
8139 parameter is only a hint. */
8140 if (DECL_RTL (slot) != 0)
8141 {
8142 target = DECL_RTL (slot);
8143 /* If we have already expanded the slot, don't do
8144 it again. (mrs) */
8145 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8146 return target;
8147 }
8148 else
8149 {
8150 DECL_RTL (slot) = target;
8151 /* If we must have an addressable slot, then make sure that
8152 the RTL that we just stored in slot is OK. */
8153 if (TREE_ADDRESSABLE (slot))
8154 {
8155 TREE_ADDRESSABLE (slot) = 0;
8156 mark_addressable (slot);
8157 }
8158 }
8159 }
8160
8161 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8162 /* Mark it as expanded. */
8163 TREE_OPERAND (exp, 1) = NULL_TREE;
8164
8165 store_expr (exp1, target, 0);
8166
8167 expand_decl_cleanup (NULL_TREE, cleanups);
8168
8169 return target;
8170 }
8171
8172 case INIT_EXPR:
8173 {
8174 tree lhs = TREE_OPERAND (exp, 0);
8175 tree rhs = TREE_OPERAND (exp, 1);
8176 tree noncopied_parts = 0;
8177 tree lhs_type = TREE_TYPE (lhs);
8178
8179 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8180 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8181 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8182 TYPE_NONCOPIED_PARTS (lhs_type));
8183 while (noncopied_parts != 0)
8184 {
8185 expand_assignment (TREE_VALUE (noncopied_parts),
8186 TREE_PURPOSE (noncopied_parts), 0, 0);
8187 noncopied_parts = TREE_CHAIN (noncopied_parts);
8188 }
8189 return temp;
8190 }
8191
8192 case MODIFY_EXPR:
8193 {
8194 /* If lhs is complex, expand calls in rhs before computing it.
8195 That's so we don't compute a pointer and save it over a call.
8196 If lhs is simple, compute it first so we can give it as a
8197 target if the rhs is just a call. This avoids an extra temp and copy
8198 and that prevents a partial-subsumption which makes bad code.
8199 Actually we could treat component_ref's of vars like vars. */
8200
8201 tree lhs = TREE_OPERAND (exp, 0);
8202 tree rhs = TREE_OPERAND (exp, 1);
8203 tree noncopied_parts = 0;
8204 tree lhs_type = TREE_TYPE (lhs);
8205
8206 temp = 0;
8207
8208 if (TREE_CODE (lhs) != VAR_DECL
8209 && TREE_CODE (lhs) != RESULT_DECL
8210 && TREE_CODE (lhs) != PARM_DECL
8211 && ! (TREE_CODE (lhs) == INDIRECT_REF
8212 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8213 preexpand_calls (exp);
8214
8215 /* Check for |= or &= of a bitfield of size one into another bitfield
8216 of size 1. In this case, (unless we need the result of the
8217 assignment) we can do this more efficiently with a
8218 test followed by an assignment, if necessary.
8219
8220 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8221 things change so we do, this code should be enhanced to
8222 support it. */
8223 if (ignore
8224 && TREE_CODE (lhs) == COMPONENT_REF
8225 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8226 || TREE_CODE (rhs) == BIT_AND_EXPR)
8227 && TREE_OPERAND (rhs, 0) == lhs
8228 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8229 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8230 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8231 {
8232 rtx label = gen_label_rtx ();
8233
8234 do_jump (TREE_OPERAND (rhs, 1),
8235 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8236 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8237 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8238 (TREE_CODE (rhs) == BIT_IOR_EXPR
8239 ? integer_one_node
8240 : integer_zero_node)),
8241 0, 0);
8242 do_pending_stack_adjust ();
8243 emit_label (label);
8244 return const0_rtx;
8245 }
8246
8247 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8248 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8249 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8250 TYPE_NONCOPIED_PARTS (lhs_type));
8251
8252 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8253 while (noncopied_parts != 0)
8254 {
8255 expand_assignment (TREE_PURPOSE (noncopied_parts),
8256 TREE_VALUE (noncopied_parts), 0, 0);
8257 noncopied_parts = TREE_CHAIN (noncopied_parts);
8258 }
8259 return temp;
8260 }
8261
8262 case RETURN_EXPR:
8263 if (!TREE_OPERAND (exp, 0))
8264 expand_null_return ();
8265 else
8266 expand_return (TREE_OPERAND (exp, 0));
8267 return const0_rtx;
8268
8269 case PREINCREMENT_EXPR:
8270 case PREDECREMENT_EXPR:
8271 return expand_increment (exp, 0, ignore);
8272
8273 case POSTINCREMENT_EXPR:
8274 case POSTDECREMENT_EXPR:
8275 /* Faster to treat as pre-increment if result is not used. */
8276 return expand_increment (exp, ! ignore, ignore);
8277
8278 case ADDR_EXPR:
8279 /* If nonzero, TEMP will be set to the address of something that might
8280 be a MEM corresponding to a stack slot. */
8281 temp = 0;
8282
8283 /* Are we taking the address of a nested function? */
8284 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8285 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8286 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8287 && ! TREE_STATIC (exp))
8288 {
8289 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8290 op0 = force_operand (op0, target);
8291 }
8292 /* If we are taking the address of something erroneous, just
8293 return a zero. */
8294 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8295 return const0_rtx;
8296 else
8297 {
8298 /* We make sure to pass const0_rtx down if we came in with
8299 ignore set, to avoid doing the cleanups twice for something. */
8300 op0 = expand_expr (TREE_OPERAND (exp, 0),
8301 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8302 (modifier == EXPAND_INITIALIZER
8303 ? modifier : EXPAND_CONST_ADDRESS));
8304
8305 /* If we are going to ignore the result, OP0 will have been set
8306 to const0_rtx, so just return it. Don't get confused and
8307 think we are taking the address of the constant. */
8308 if (ignore)
8309 return op0;
8310
8311 op0 = protect_from_queue (op0, 0);
8312
8313 /* We would like the object in memory. If it is a constant, we can
8314 have it be statically allocated into memory. For a non-constant,
8315 we need to allocate some memory and store the value into it. */
8316
8317 if (CONSTANT_P (op0))
8318 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8319 op0);
8320 else if (GET_CODE (op0) == MEM)
8321 {
8322 mark_temp_addr_taken (op0);
8323 temp = XEXP (op0, 0);
8324 }
8325
8326 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8327 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8328 {
8329 /* If this object is in a register, it must not
8330 be BLKmode. */
8331 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8332 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8333
8334 mark_temp_addr_taken (memloc);
8335 emit_move_insn (memloc, op0);
8336 op0 = memloc;
8337 }
8338
8339 if (GET_CODE (op0) != MEM)
8340 abort ();
8341
8342 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8343 {
8344 temp = XEXP (op0, 0);
8345 #ifdef POINTERS_EXTEND_UNSIGNED
8346 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8347 && mode == ptr_mode)
8348 temp = convert_memory_address (ptr_mode, temp);
8349 #endif
8350 return temp;
8351 }
8352
8353 op0 = force_operand (XEXP (op0, 0), target);
8354 }
8355
8356 if (flag_force_addr && GET_CODE (op0) != REG)
8357 op0 = force_reg (Pmode, op0);
8358
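/* OP0 now holds an address; if it ended up in a non-user register, record that it is a pointer, aligned like the pointed-to type (TYPE is the pointer type here). */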
8359 if (GET_CODE (op0) == REG
8360 && ! REG_USERVAR_P (op0))
8361 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8362
8363 /* If we might have had a temp slot, add an equivalent address
8364 for it. */
8365 if (temp != 0)
8366 update_temp_slot_address (temp, op0);
8367
8368 #ifdef POINTERS_EXTEND_UNSIGNED
8369 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8370 && mode == ptr_mode)
8371 op0 = convert_memory_address (ptr_mode, op0);
8372 #endif
8373
8374 return op0;
8375
8376 case ENTRY_VALUE_EXPR:
8377 abort ();
8378
8379 /* COMPLEX type for Extended Pascal & Fortran */
8380 case COMPLEX_EXPR:
8381 {
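/* Note: this MODE is the mode of each part (the element type), shadowing the outer MODE of the whole complex value. */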
8382 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8383 rtx insns;
8384
8385 /* Get the rtx code of the operands. */
8386 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8387 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8388
8389 if (! target)
8390 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8391
8392 start_sequence ();
8393
8394 /* Move the real (op0) and imaginary (op1) parts to their location. */
8395 emit_move_insn (gen_realpart (mode, target), op0);
8396 emit_move_insn (gen_imagpart (mode, target), op1);
8397
8398 insns = get_insns ();
8399 end_sequence ();
8400
8401 /* Complex construction should appear as a single unit. */
8402 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8403 each with a separate pseudo as destination.
8404 It's not correct for flow to treat them as a unit. */
8405 if (GET_CODE (target) != CONCAT)
8406 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8407 else
8408 emit_insns (insns);
8409
8410 return target;
8411 }
8412
8413 case REALPART_EXPR:
8414 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8415 return gen_realpart (mode, op0);
8416
8417 case IMAGPART_EXPR:
8418 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8419 return gen_imagpart (mode, op0);
8420
8421 case CONJ_EXPR:
8422 {
8423 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8424 rtx imag_t;
8425 rtx insns;
8426
8427 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8428
8429 if (! target)
8430 target = gen_reg_rtx (mode);
8431
8432 start_sequence ();
8433
8434 /* Store the realpart and the negated imagpart to target. */
8435 emit_move_insn (gen_realpart (partmode, target),
8436 gen_realpart (partmode, op0));
8437
8438 imag_t = gen_imagpart (partmode, target);
8439 temp = expand_unop (partmode, neg_optab,
8440 gen_imagpart (partmode, op0), imag_t, 0);
8441 if (temp != imag_t)
8442 emit_move_insn (imag_t, temp);
8443
8444 insns = get_insns ();
8445 end_sequence ();
8446
8447 /* Conjugate should appear as a single unit.
8448 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8449 each with a separate pseudo as destination.
8450 It's not correct for flow to treat them as a unit. */
8451 if (GET_CODE (target) != CONCAT)
8452 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8453 else
8454 emit_insns (insns);
8455
8456 return target;
8457 }
8458
8459 case TRY_CATCH_EXPR:
8460 {
8461 tree handler = TREE_OPERAND (exp, 1);
8462
8463 expand_eh_region_start ();
8464
8465 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8466
8467 expand_eh_region_end (handler);
8468
8469 return op0;
8470 }
8471
8472 case TRY_FINALLY_EXPR:
8473 {
8474 tree try_block = TREE_OPERAND (exp, 0);
8475 tree finally_block = TREE_OPERAND (exp, 1);
8476 rtx finally_label = gen_label_rtx ();
8477 rtx done_label = gen_label_rtx ();
8478 rtx return_link = gen_reg_rtx (Pmode);
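/* The finally block will be emitted once, out of line, at FINALLY_LABEL; RETURN_LINK holds the address to resume at afterward. The cleanup below reaches it via GOTO_SUBROUTINE_EXPR, so it also runs on any cleanup path out of the try block. */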
8479 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8480 (tree) finally_label, (tree) return_link);
8481 TREE_SIDE_EFFECTS (cleanup) = 1;
8482
8483 /* Start a new binding layer that will keep track of all cleanup
8484 actions to be performed. */
8485 expand_start_bindings (2);
8486
8487 target_temp_slot_level = temp_slot_level;
8488
8489 expand_decl_cleanup (NULL_TREE, cleanup);
8490 op0 = expand_expr (try_block, target, tmode, modifier);
8491
8492 preserve_temp_slots (op0);
8493 expand_end_bindings (NULL_TREE, 0, 0);
8494 emit_jump (done_label);
8495 emit_label (finally_label);
8496 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8497 emit_indirect_jump (return_link);
8498 emit_label (done_label);
8499 return op0;
8500 }
8501
8502 case GOTO_SUBROUTINE_EXPR:
8503 {
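/* The operands were stored as rtx values cast to tree when this node was built (see the TRY_FINALLY_EXPR case above); cast them back here. */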
8504 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8505 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8506 rtx return_address = gen_label_rtx ();
8507 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8508 emit_jump (subr);
8509 emit_label (return_address);
8510 return const0_rtx;
8511 }
8512
8513 case POPDCC_EXPR:
8514 {
8515 rtx dcc = get_dynamic_cleanup_chain ();
8516 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8517 return const0_rtx;
8518 }
8519
8520 case POPDHC_EXPR:
8521 {
8522 rtx dhc = get_dynamic_handler_chain ();
8523 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8524 return const0_rtx;
8525 }
8526
8527 case VA_ARG_EXPR:
8528 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8529
8530 default:
8531 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8532 }
8533
8534 /* Here to do an ordinary binary operator, generating an instruction
8535 from the optab already placed in `this_optab'. */
8536 binop:
8537 preexpand_calls (exp);
8538 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8539 subtarget = 0;
8540 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8541 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8542 binop2:
8543 temp = expand_binop (mode, this_optab, op0, op1, target,
8544 unsignedp, OPTAB_LIB_WIDEN);
8545 if (temp == 0)
8546 abort ();
8547 return temp;
8548 }
8549 \f
8550 /* Similar to expand_expr, except that we don't specify a target, target
8551 mode, or modifier and we return the alignment of the inner type. This is
8552 used in cases where it is not necessary to align the result to the
8553 alignment of its type as long as we know the alignment of the result, for
8554 example for comparisons of BLKmode values. */
8555
8556 static rtx
8557 expand_expr_unaligned (exp, palign)
8558 register tree exp;
8559 unsigned int *palign;
8560 {
8561 register rtx op0;
8562 tree type = TREE_TYPE (exp);
8563 register enum machine_mode mode = TYPE_MODE (type);
8564
8565 /* Default the alignment we return to that of the type. */
8566 *palign = TYPE_ALIGN (type);
8567
8568 /* The only case in which we do anything special is when the resulting
8569 mode is BLKmode. */
8570 if (mode != BLKmode)
8571 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8572
8573 switch (TREE_CODE (exp))
8574 {
8575 case CONVERT_EXPR:
8576 case NOP_EXPR:
8577 case NON_LVALUE_EXPR:
8578 /* Conversions between BLKmode values don't change the underlying
8579 alignment or value. */
8580 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8581 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8582 break;
8583
8584 case ARRAY_REF:
8585 /* Much of the code for this case is copied directly from expand_expr.
8586 We need to duplicate it here because we will do something different
8587 in the fall-through case, so we need to handle the same exceptions
8588 it does. */
8589 {
8590 tree array = TREE_OPERAND (exp, 0);
8591 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8592 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8593 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8594 HOST_WIDE_INT i;
8595
8596 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8597 abort ();
8598
8599 /* Optimize the special-case of a zero lower bound.
8600
8601 We convert the low_bound to sizetype to avoid some problems
8602 with constant folding. (E.g. suppose the lower bound is 1,
8603 and its mode is QI. Without the conversion, (ARRAY
8604 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8605 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8606
8607 if (! integer_zerop (low_bound))
8608 index = size_diffop (index, convert (sizetype, low_bound));
8609
8610 /* If this is a constant index into a constant array,
8611 just get the value from the array. Handle both the cases when
8612 we have an explicit constructor and when our operand is a variable
8613 that was declared const. */
8614
8615 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8616 && 0 > compare_tree_int (index,
8617 list_length (CONSTRUCTOR_ELTS
8618 (TREE_OPERAND (exp, 0)))))
8619 {
8620 tree elem;
8621
8622 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8623 i = TREE_INT_CST_LOW (index);
8624 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8625 ;
8626
8627 if (elem)
8628 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8629 }
8630
8631 else if (optimize >= 1
8632 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8633 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8634 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8635 {
8636 if (TREE_CODE (index) == INTEGER_CST)
8637 {
8638 tree init = DECL_INITIAL (array);
8639
8640 if (TREE_CODE (init) == CONSTRUCTOR)
8641 {
8642 tree elem;
8643
8644 for (elem = CONSTRUCTOR_ELTS (init);
8645 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8646 elem = TREE_CHAIN (elem))
8647 ;
8648
8649 if (elem)
8650 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8651 palign);
8652 }
8653 }
8654 }
8655 }
8656
8657 /* ... fall through ... */
8658
8659 case COMPONENT_REF:
8660 case BIT_FIELD_REF:
8661 /* If the operand is a CONSTRUCTOR, we can just extract the
8662 appropriate field if it is present. Don't do this if we have
8663 already written the data since we want to refer to that copy
8664 and varasm.c assumes that's what we'll do. */
8665 if (TREE_CODE (exp) != ARRAY_REF
8666 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8667 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8668 {
8669 tree elt;
8670
8671 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8672 elt = TREE_CHAIN (elt))
8673 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8674 /* Note that unlike the case in expand_expr, we know this is
8675 BLKmode and hence not an integer. */
8676 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8677 }
8678
8679 {
8680 enum machine_mode mode1;
8681 HOST_WIDE_INT bitsize, bitpos;
8682 tree offset;
8683 int volatilep = 0;
8684 unsigned int alignment;
8685 int unsignedp;
8686 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8687 &mode1, &unsignedp, &volatilep,
8688 &alignment);
8689
8690 /* If we got back the original object, something is wrong. Perhaps
8691 we are evaluating an expression too early. In any event, don't
8692 infinitely recurse. */
8693 if (tem == exp)
8694 abort ();
8695
8696 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8697
8698 /* If this is a constant, put it into a register if it is a
8699 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8700 if (CONSTANT_P (op0))
8701 {
8702 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8703
8704 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8705 && offset == 0)
8706 op0 = force_reg (inner_mode, op0);
8707 else
8708 op0 = validize_mem (force_const_mem (inner_mode, op0));
8709 }
8710
8711 if (offset != 0)
8712 {
8713 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8714
8715 /* If this object is in a register, put it into memory.
8716 This case can't occur in C, but can in Ada if we have
8717 unchecked conversion of an expression from a scalar type to
8718 an array or record type. */
8719 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8720 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8721 {
8722 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8723
8724 mark_temp_addr_taken (memloc);
8725 emit_move_insn (memloc, op0);
8726 op0 = memloc;
8727 }
8728
8729 if (GET_CODE (op0) != MEM)
8730 abort ();
8731
8732 if (GET_MODE (offset_rtx) != ptr_mode)
8733 {
8734 #ifdef POINTERS_EXTEND_UNSIGNED
8735 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8736 #else
8737 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8738 #endif
8739 }
8740
8741 op0 = change_address (op0, VOIDmode,
8742 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8743 force_reg (ptr_mode,
8744 offset_rtx)));
8745 }
8746
8747 /* Don't forget about volatility even if this is a bitfield. */
8748 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8749 {
8750 op0 = copy_rtx (op0);
8751 MEM_VOLATILE_P (op0) = 1;
8752 }
8753
8754 /* Check the access. */
8755 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8756 {
8757 rtx to;
8758 int size;
8759
8760 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8761 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8762
8763 /* Check the access right of the pointer. */
8764 in_check_memory_usage = 1;
8765 if (size > BITS_PER_UNIT)
8766 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8767 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8768 TYPE_MODE (sizetype),
8769 GEN_INT (MEMORY_USE_RO),
8770 TYPE_MODE (integer_type_node));
8771 in_check_memory_usage = 0;
8772 }
8773
8774 /* In cases where an aligned union has an unaligned object
8775 as a field, we might be extracting a BLKmode value from
8776 an integer-mode (e.g., SImode) object. Handle this case
8777 by doing the extract into an object as wide as the field
8778 (which we know to be the width of a basic mode), then
8779 storing into memory, and changing the mode to BLKmode.
8780 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8781 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8782 if (mode1 == VOIDmode
8783 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8784 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8785 && (TYPE_ALIGN (type) > alignment
8786 || bitpos % TYPE_ALIGN (type) != 0)))
8787 {
8788 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8789
8790 if (ext_mode == BLKmode)
8791 {
8792 /* In this case, BITPOS must start at a byte boundary. */
8793 if (GET_CODE (op0) != MEM
8794 || bitpos % BITS_PER_UNIT != 0)
8795 abort ();
8796
8797 op0 = change_address (op0, VOIDmode,
8798 plus_constant (XEXP (op0, 0),
8799 bitpos / BITS_PER_UNIT));
8800 }
8801 else
8802 {
8803 rtx new = assign_stack_temp (ext_mode,
8804 bitsize / BITS_PER_UNIT, 0);
8805
8806 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8807 unsignedp, NULL_RTX, ext_mode,
8808 ext_mode, alignment,
8809 int_size_in_bytes (TREE_TYPE (tem)));
8810
8811 /* If the result is a record type and BITSIZE is narrower than
8812 the mode of OP0, an integral mode, and this is a big endian
8813 machine, we must put the field into the high-order bits. */
8814 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8815 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8816 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8817 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8818 size_int (GET_MODE_BITSIZE
8819 (GET_MODE (op0))
8820 - bitsize),
8821 op0, 1);
8822
8823
8824 emit_move_insn (new, op0);
8825 op0 = copy_rtx (new);
8826 PUT_MODE (op0, BLKmode);
8827 }
8828 }
8829 else
8830 /* Get a reference to just this component. */
8831 op0 = change_address (op0, mode1,
8832 plus_constant (XEXP (op0, 0),
8833 (bitpos / BITS_PER_UNIT)));
8834
8835 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8836
8837 /* Adjust the alignment in case the bit position is not
8838 a multiple of the alignment of the inner object. */
8839 while (bitpos % alignment != 0)
8840 alignment >>= 1;
8841
8842 if (GET_CODE (XEXP (op0, 0)) == REG)
8843 mark_reg_pointer (XEXP (op0, 0), alignment);
8844
8845 MEM_IN_STRUCT_P (op0) = 1;
8846 MEM_VOLATILE_P (op0) |= volatilep;
8847
8848 *palign = alignment;
8849 return op0;
8850 }
8851
8852 default:
8853 break;
8854
8855 }
8856
8857 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8858 }
8859 \f
8860 /* Return the tree node if ARG corresponds to a string constant, or zero
8861 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8862 in bytes within the string that ARG is accessing. The type of the
8863 offset will be `sizetype'. */
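/* For example, given a tree of the form (PLUS_EXPR (ADDR_EXPR of a
   STRING_CST) offset), as built for something like `"hello" + 2', we
   return the STRING_CST and set *PTR_OFFSET to (sizetype) 2.  */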
8864
8865 tree
8866 string_constant (arg, ptr_offset)
8867 tree arg;
8868 tree *ptr_offset;
8869 {
8870 STRIP_NOPS (arg);
8871
8872 if (TREE_CODE (arg) == ADDR_EXPR
8873 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8874 {
8875 *ptr_offset = size_zero_node;
8876 return TREE_OPERAND (arg, 0);
8877 }
8878 else if (TREE_CODE (arg) == PLUS_EXPR)
8879 {
8880 tree arg0 = TREE_OPERAND (arg, 0);
8881 tree arg1 = TREE_OPERAND (arg, 1);
8882
8883 STRIP_NOPS (arg0);
8884 STRIP_NOPS (arg1);
8885
8886 if (TREE_CODE (arg0) == ADDR_EXPR
8887 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8888 {
8889 *ptr_offset = convert (sizetype, arg1);
8890 return TREE_OPERAND (arg0, 0);
8891 }
8892 else if (TREE_CODE (arg1) == ADDR_EXPR
8893 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8894 {
8895 *ptr_offset = convert (sizetype, arg0);
8896 return TREE_OPERAND (arg1, 0);
8897 }
8898 }
8899
8900 return 0;
8901 }
8902 \f
8903 /* Expand code for a post- or pre-increment or decrement
8904 and return the RTX for the result.
8905 POST is 1 for postincrement/postdecrement and 0 for preincrement/predecrement. */
8906
8907 static rtx
8908 expand_increment (exp, post, ignore)
8909 register tree exp;
8910 int post, ignore;
8911 {
8912 register rtx op0, op1;
8913 register rtx temp, value;
8914 register tree incremented = TREE_OPERAND (exp, 0);
8915 optab this_optab = add_optab;
8916 int icode;
8917 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8918 int op0_is_copy = 0;
8919 int single_insn = 0;
8920 /* 1 means we can't store into OP0 directly,
8921 because it is a subreg narrower than a word,
8922 and we don't dare clobber the rest of the word. */
8923 int bad_subreg = 0;
8924
8925 /* Stabilize any component ref that might need to be
8926 evaluated more than once below. */
8927 if (!post
8928 || TREE_CODE (incremented) == BIT_FIELD_REF
8929 || (TREE_CODE (incremented) == COMPONENT_REF
8930 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8931 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8932 incremented = stabilize_reference (incremented);
8933 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8934 ones into save exprs so that they don't accidentally get evaluated
8935 more than once by the code below. */
8936 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8937 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8938 incremented = save_expr (incremented);
8939
8940 /* Compute the operands as RTX.
8941 Note whether OP0 is the actual lvalue or a copy of it:
8942 I believe it is a copy iff it is a register or subreg
8943 and insns were generated in computing it. */
8944
8945 temp = get_last_insn ();
8946 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8947
8948 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8949 in place but instead must do sign- or zero-extension during assignment,
8950 so we copy it into a new register and let the code below use it as
8951 a copy.
8952
8953 Note that we can safely modify this SUBREG since it is known not to be
8954 shared (it was made by the expand_expr call above). */
8955
8956 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8957 {
8958 if (post)
8959 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8960 else
8961 bad_subreg = 1;
8962 }
8963 else if (GET_CODE (op0) == SUBREG
8964 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8965 {
8966 /* We cannot increment this SUBREG in place. If we are
8967 post-incrementing, get a copy of the old value. Otherwise,
8968 just mark that we cannot increment in place. */
8969 if (post)
8970 op0 = copy_to_reg (op0);
8971 else
8972 bad_subreg = 1;
8973 }
8974
8975 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8976 && temp != get_last_insn ());
8977 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8978 EXPAND_MEMORY_USE_BAD);
8979
8980 /* Decide whether incrementing or decrementing. */
8981 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8982 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8983 this_optab = sub_optab;
8984
8985 /* Convert decrement by a constant into a negative increment. */
8986 if (this_optab == sub_optab
8987 && GET_CODE (op1) == CONST_INT)
8988 {
8989 op1 = GEN_INT (- INTVAL (op1));
8990 this_optab = add_optab;
8991 }
8992
8993 /* For a preincrement, see if we can do this with a single instruction. */
8994 if (!post)
8995 {
8996 icode = (int) this_optab->handlers[(int) mode].insn_code;
8997 if (icode != (int) CODE_FOR_nothing
8998 /* Make sure that OP0 is valid for operands 0 and 1
8999 of the insn we want to queue. */
9000 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9001 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9002 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9003 single_insn = 1;
9004 }
9005
9006 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9007 then we cannot just increment OP0. We must therefore contrive to
9008 increment the original value. Then, for postincrement, we can return
9009 OP0 since it is a copy of the old value. For preincrement, expand here
9010 unless we can do it with a single insn.
9011
9012 Likewise if storing directly into OP0 would clobber high bits
9013 we need to preserve (bad_subreg). */
9014 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9015 {
9016 /* This is the easiest way to increment the value wherever it is.
9017 Problems with multiple evaluation of INCREMENTED are prevented
9018 because either (1) it is a component_ref or preincrement,
9019 in which case it was stabilized above, or (2) it is an array_ref
9020 with constant index in an array in a register, which is
9021 safe to reevaluate. */
9022 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9023 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9024 ? MINUS_EXPR : PLUS_EXPR),
9025 TREE_TYPE (exp),
9026 incremented,
9027 TREE_OPERAND (exp, 1));
9028
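/* If INCREMENTED is wrapped in conversions, strip them, converting
   NEWEXP through each layer, so that the assignment below is done on
   the underlying object.  */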
9029 while (TREE_CODE (incremented) == NOP_EXPR
9030 || TREE_CODE (incremented) == CONVERT_EXPR)
9031 {
9032 newexp = convert (TREE_TYPE (incremented), newexp);
9033 incremented = TREE_OPERAND (incremented, 0);
9034 }
9035
9036 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9037 return post ? op0 : temp;
9038 }
9039
9040 if (post)
9041 {
9042 /* We have a true reference to the value in OP0.
9043 If there is an insn to add or subtract in this mode, queue it.
9044 Queueing the increment insn avoids the register shuffling
9045 that often results if we must increment now and first save
9046 the old value for subsequent use. */
9047
9048 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9049 op0 = stabilize (op0);
9050 #endif
9051
9052 icode = (int) this_optab->handlers[(int) mode].insn_code;
9053 if (icode != (int) CODE_FOR_nothing
9054 /* Make sure that OP0 is valid for operands 0 and 1
9055 of the insn we want to queue. */
9056 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9057 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9058 {
9059 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9060 op1 = force_reg (mode, op1);
9061
9062 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9063 }
9064 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9065 {
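/* Copy the address into a register so that OP0 names a stable
   location for both of the insns queued below.  */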
9066 rtx addr = (general_operand (XEXP (op0, 0), mode)
9067 ? force_reg (Pmode, XEXP (op0, 0))
9068 : copy_to_reg (XEXP (op0, 0)));
9069 rtx temp, result;
9070
9071 op0 = change_address (op0, VOIDmode, addr);
9072 temp = force_reg (GET_MODE (op0), op0);
9073 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9074 op1 = force_reg (mode, op1);
9075
9076 /* The increment queue is LIFO, thus we have to `queue'
9077 the instructions in reverse order. */
9078 enqueue_insn (op0, gen_move_insn (op0, temp));
9079 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9080 return result;
9081 }
9082 }
9083
9084 /* Preincrement, or we can't increment with one simple insn. */
9085 if (post)
9086 /* Save a copy of the value before inc or dec, to return it later. */
9087 temp = value = copy_to_reg (op0);
9088 else
9089 /* Arrange to return the incremented value. */
9090 /* Copy the rtx because expand_binop will protect from the queue,
9091 and the results of that would be invalid for us to return
9092 if our caller does emit_queue before using our result. */
9093 temp = copy_rtx (value = op0);
9094
9095 /* Increment however we can. */
9096 op1 = expand_binop (mode, this_optab, value, op1,
9097 current_function_check_memory_usage ? NULL_RTX : op0,
9098 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9099 /* Make sure the value is stored into OP0. */
9100 if (op1 != op0)
9101 emit_move_insn (op0, op1);
9102
9103 return temp;
9104 }
9105 \f
9106 /* Expand all function calls contained within EXP, innermost ones first.
9107 But don't look within expressions that have sequence points.
9108 For each CALL_EXPR, record the rtx for its value
9109 in the CALL_EXPR_RTL field. */
9110
9111 static void
9112 preexpand_calls (exp)
9113 tree exp;
9114 {
9115 register int nops, i;
9116 int class = TREE_CODE_CLASS (TREE_CODE (exp));
9117
9118 if (! do_preexpand_calls)
9119 return;
9120
9121 /* Only expressions and references can contain calls. */
9122
9123 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9124 return;
9125
9126 switch (TREE_CODE (exp))
9127 {
9128 case CALL_EXPR:
9129 /* Do nothing if already expanded. */
9130 if (CALL_EXPR_RTL (exp) != 0
9131 /* Do nothing if the call returns a variable-sized object. */
9132 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9133 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9134 /* Do nothing to built-in functions. */
9135 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9136 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9137 == FUNCTION_DECL)
9138 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9139 return;
9140
9141 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9142 return;
9143
9144 case COMPOUND_EXPR:
9145 case COND_EXPR:
9146 case TRUTH_ANDIF_EXPR:
9147 case TRUTH_ORIF_EXPR:
9148 /* If we find one of these, then we can be sure
9149 the adjust will be done for it (since it makes jumps).
9150 Do it now, so that if this is inside an argument
9151 of a function, we don't get the stack adjustment
9152 after some other args have already been pushed. */
9153 do_pending_stack_adjust ();
9154 return;
9155
9156 case BLOCK:
9157 case RTL_EXPR:
9158 case WITH_CLEANUP_EXPR:
9159 case CLEANUP_POINT_EXPR:
9160 case TRY_CATCH_EXPR:
9161 return;
9162
9163 case SAVE_EXPR:
9164 if (SAVE_EXPR_RTL (exp) != 0)
9165 return;
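/* Otherwise fall through so the operand of the SAVE_EXPR is
   scanned for calls below, as in the default case.  */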
9166
9167 default:
9168 break;
9169 }
9170
9171 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
9172 for (i = 0; i < nops; i++)
9173 if (TREE_OPERAND (exp, i) != 0)
9174 {
9175 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9176 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9177 It doesn't happen before the call is made. */
9178 ;
9179 else
9180 {
9181 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9182 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9183 preexpand_calls (TREE_OPERAND (exp, i));
9184 }
9185 }
9186 }
9187 \f
9188 /* At the start of a function, record that we have no previously-pushed
9189 arguments waiting to be popped. */
9190
9191 void
9192 init_pending_stack_adjust ()
9193 {
9194 pending_stack_adjust = 0;
9195 }
9196
9197 /* When exiting from function, if safe, clear out any pending stack adjust
9198 so the adjustment won't get done.
9199
9200 Note, if the current function calls alloca, then it must have a
9201 frame pointer regardless of the value of flag_omit_frame_pointer. */
9202
9203 void
9204 clear_pending_stack_adjust ()
9205 {
9206 #ifdef EXIT_IGNORE_STACK
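/* Don't clear the adjustment if this function may be expanded inline;
   the adjustment is still needed when the saved body is substituted
   into a caller.  */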
9207 if (optimize > 0
9208 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9209 && EXIT_IGNORE_STACK
9210 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9211 && ! flag_inline_functions)
9212 {
9213 stack_pointer_delta -= pending_stack_adjust;
9214 pending_stack_adjust = 0;
9215 }
9216 #endif
9217 }
9218
9219 /* Pop any previously-pushed arguments that have not been popped yet. */
9220
9221 void
9222 do_pending_stack_adjust ()
9223 {
9224 if (inhibit_defer_pop == 0)
9225 {
9226 if (pending_stack_adjust != 0)
9227 adjust_stack (GEN_INT (pending_stack_adjust));
9228 pending_stack_adjust = 0;
9229 }
9230 }
9231 \f
9232 /* Expand conditional expressions. */
9233
9234 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9235 LABEL is an rtx of code CODE_LABEL, in this function and all the
9236 functions here. */
9237
9238 void
9239 jumpifnot (exp, label)
9240 tree exp;
9241 rtx label;
9242 {
9243 do_jump (exp, label, NULL_RTX);
9244 }
9245
9246 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9247
9248 void
9249 jumpif (exp, label)
9250 tree exp;
9251 rtx label;
9252 {
9253 do_jump (exp, NULL_RTX, label);
9254 }
9255
9256 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9257 the result is zero, or IF_TRUE_LABEL if the result is one.
9258 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9259 meaning fall through in that case.
9260
9261 do_jump always does any pending stack adjust except when it does not
9262 actually perform a jump. An example where there is no jump
9263 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9264
9265 This function is responsible for optimizing cases such as
9266 &&, || and comparison operators in EXP. */
9267
9268 void
9269 do_jump (exp, if_false_label, if_true_label)
9270 tree exp;
9271 rtx if_false_label, if_true_label;
9272 {
9273 register enum tree_code code = TREE_CODE (exp);
9274 /* Some cases need to create a label to jump to
9275 in order to properly fall through.
9276 These cases set DROP_THROUGH_LABEL nonzero. */
9277 rtx drop_through_label = 0;
9278 rtx temp;
9279 int i;
9280 tree type;
9281 enum machine_mode mode;
9282
9283 #ifdef MAX_INTEGER_COMPUTATION_MODE
9284 check_max_integer_computation_mode (exp);
9285 #endif
9286
9287 emit_queue ();
9288
9289 switch (code)
9290 {
9291 case ERROR_MARK:
9292 break;
9293
9294 case INTEGER_CST:
9295 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9296 if (temp)
9297 emit_jump (temp);
9298 break;
9299
9300 #if 0
9301 /* This is not true with #pragma weak */
9302 case ADDR_EXPR:
9303 /* The address of something can never be zero. */
9304 if (if_true_label)
9305 emit_jump (if_true_label);
9306 break;
9307 #endif
9308
9309 case NOP_EXPR:
9310 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9311 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9312 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9313 goto normal;
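/* ... fall through ... */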
9314 case CONVERT_EXPR:
9315 /* If we are narrowing the operand, we have to do the compare in the
9316 narrower mode. */
9317 if ((TYPE_PRECISION (TREE_TYPE (exp))
9318 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9319 goto normal;
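/* ... fall through ... */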
9320 case NON_LVALUE_EXPR:
9321 case REFERENCE_EXPR:
9322 case ABS_EXPR:
9323 case NEGATE_EXPR:
9324 case LROTATE_EXPR:
9325 case RROTATE_EXPR:
9326 /* These cannot change zero->non-zero or vice versa. */
9327 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9328 break;
9329
9330 case WITH_RECORD_EXPR:
9331 /* Put the object on the placeholder list, recurse through our first
9332 operand, and pop the list. */
9333 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9334 placeholder_list);
9335 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9336 placeholder_list = TREE_CHAIN (placeholder_list);
9337 break;
9338
9339 #if 0
9340 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9341 a test and can be longer if the test is eliminated. */
9342 case PLUS_EXPR:
9343 /* Reduce to minus. */
9344 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9345 TREE_OPERAND (exp, 0),
9346 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9347 TREE_OPERAND (exp, 1))));
9348 /* Process as MINUS. */
9349 #endif
9350
9351 case MINUS_EXPR:
9352 /* Non-zero iff operands of minus differ. */
9353 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9354 TREE_OPERAND (exp, 0),
9355 TREE_OPERAND (exp, 1)),
9356 NE, NE, if_false_label, if_true_label);
9357 break;
9358
9359 case BIT_AND_EXPR:
9360 /* If we are AND'ing with a small constant, do this comparison in the
9361 smallest type that fits. If the machine doesn't have comparisons
9362 that small, it will be converted back to the wider comparison.
9363 This helps if we are testing the sign bit of a narrower object.
9364 combine can't do this for us because it can't know whether a
9365 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9366
9367 if (! SLOW_BYTE_ACCESS
9368 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9369 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9370 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9371 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9372 && (type = type_for_mode (mode, 1)) != 0
9373 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9374 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9375 != CODE_FOR_nothing))
9376 {
9377 do_jump (convert (type, exp), if_false_label, if_true_label);
9378 break;
9379 }
9380 goto normal;
9381
9382 case TRUTH_NOT_EXPR:
9383 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9384 break;
9385
9386 case TRUTH_ANDIF_EXPR:
9387 if (if_false_label == 0)
9388 if_false_label = drop_through_label = gen_label_rtx ();
9389 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9390 start_cleanup_deferral ();
9391 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9392 end_cleanup_deferral ();
9393 break;
9394
9395 case TRUTH_ORIF_EXPR:
9396 if (if_true_label == 0)
9397 if_true_label = drop_through_label = gen_label_rtx ();
9398 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9399 start_cleanup_deferral ();
9400 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9401 end_cleanup_deferral ();
9402 break;
9403
9404 case COMPOUND_EXPR:
9405 push_temp_slots ();
9406 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9407 preserve_temp_slots (NULL_RTX);
9408 free_temp_slots ();
9409 pop_temp_slots ();
9410 emit_queue ();
9411 do_pending_stack_adjust ();
9412 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9413 break;
9414
9415 case COMPONENT_REF:
9416 case BIT_FIELD_REF:
9417 case ARRAY_REF:
9418 {
9419 HOST_WIDE_INT bitsize, bitpos;
9420 int unsignedp;
9421 enum machine_mode mode;
9422 tree type;
9423 tree offset;
9424 int volatilep = 0;
9425 unsigned int alignment;
9426
9427 /* Get description of this reference. We don't actually care
9428 about the underlying object here. */
9429 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9430 &unsignedp, &volatilep, &alignment);
9431
9432 type = type_for_size (bitsize, unsignedp);
9433 if (! SLOW_BYTE_ACCESS
9434 && type != 0 && bitsize >= 0
9435 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9436 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9437 != CODE_FOR_nothing))
9438 {
9439 do_jump (convert (type, exp), if_false_label, if_true_label);
9440 break;
9441 }
9442 goto normal;
9443 }
9444
9445 case COND_EXPR:
9446 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9447 if (integer_onep (TREE_OPERAND (exp, 1))
9448 && integer_zerop (TREE_OPERAND (exp, 2)))
9449 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9450
9451 else if (integer_zerop (TREE_OPERAND (exp, 1))
9452 && integer_onep (TREE_OPERAND (exp, 2)))
9453 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9454
9455 else
9456 {
9457 register rtx label1 = gen_label_rtx ();
9458 drop_through_label = gen_label_rtx ();
9459
9460 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9461
9462 start_cleanup_deferral ();
9463 /* Now the THEN-expression. */
9464 do_jump (TREE_OPERAND (exp, 1),
9465 if_false_label ? if_false_label : drop_through_label,
9466 if_true_label ? if_true_label : drop_through_label);
9467 /* In case the do_jump just above never jumps. */
9468 do_pending_stack_adjust ();
9469 emit_label (label1);
9470
9471 /* Now the ELSE-expression. */
9472 do_jump (TREE_OPERAND (exp, 2),
9473 if_false_label ? if_false_label : drop_through_label,
9474 if_true_label ? if_true_label : drop_through_label);
9475 end_cleanup_deferral ();
9476 }
9477 break;
9478
9479 case EQ_EXPR:
9480 {
9481 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9482
9483 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9484 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9485 {
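/* Two complex values are equal iff both the real and the imaginary
   parts are equal, so build that conjunction and jump on it.  */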
9486 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9487 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9488 do_jump
9489 (fold
9490 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9491 fold (build (EQ_EXPR, TREE_TYPE (exp),
9492 fold (build1 (REALPART_EXPR,
9493 TREE_TYPE (inner_type),
9494 exp0)),
9495 fold (build1 (REALPART_EXPR,
9496 TREE_TYPE (inner_type),
9497 exp1)))),
9498 fold (build (EQ_EXPR, TREE_TYPE (exp),
9499 fold (build1 (IMAGPART_EXPR,
9500 TREE_TYPE (inner_type),
9501 exp0)),
9502 fold (build1 (IMAGPART_EXPR,
9503 TREE_TYPE (inner_type),
9504 exp1)))))),
9505 if_false_label, if_true_label);
9506 }
9507
9508 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9509 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9510
9511 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9512 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9513 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9514 else
9515 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9516 break;
9517 }
9518
9519 case NE_EXPR:
9520 {
9521 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9522
9523 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9524 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9525 {
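/* Two complex values differ iff either the real or the imaginary
   parts differ, so build that disjunction and jump on it.  */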
9526 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9527 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9528 do_jump
9529 (fold
9530 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9531 fold (build (NE_EXPR, TREE_TYPE (exp),
9532 fold (build1 (REALPART_EXPR,
9533 TREE_TYPE (inner_type),
9534 exp0)),
9535 fold (build1 (REALPART_EXPR,
9536 TREE_TYPE (inner_type),
9537 exp1)))),
9538 fold (build (NE_EXPR, TREE_TYPE (exp),
9539 fold (build1 (IMAGPART_EXPR,
9540 TREE_TYPE (inner_type),
9541 exp0)),
9542 fold (build1 (IMAGPART_EXPR,
9543 TREE_TYPE (inner_type),
9544 exp1)))))),
9545 if_false_label, if_true_label);
9546 }
9547
9548 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9549 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9550
9551 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9552 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9553 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9554 else
9555 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9556 break;
9557 }
9558
9559 case LT_EXPR:
9560 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9561 if (GET_MODE_CLASS (mode) == MODE_INT
9562 && ! can_compare_p (LT, mode, ccp_jump))
9563 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9564 else
9565 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9566 break;
9567
9568 case LE_EXPR:
9569 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9570 if (GET_MODE_CLASS (mode) == MODE_INT
9571 && ! can_compare_p (LE, mode, ccp_jump))
9572 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9573 else
9574 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9575 break;
9576
9577 case GT_EXPR:
9578 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9579 if (GET_MODE_CLASS (mode) == MODE_INT
9580 && ! can_compare_p (GT, mode, ccp_jump))
9581 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9582 else
9583 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9584 break;
9585
9586 case GE_EXPR:
9587 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9588 if (GET_MODE_CLASS (mode) == MODE_INT
9589 && ! can_compare_p (GE, mode, ccp_jump))
9590 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9591 else
9592 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9593 break;
9594
9595 case UNORDERED_EXPR:
9596 case ORDERED_EXPR:
9597 {
9598 enum rtx_code cmp, rcmp;
9599 int do_rev;
9600
9601 if (code == UNORDERED_EXPR)
9602 cmp = UNORDERED, rcmp = ORDERED;
9603 else
9604 cmp = ORDERED, rcmp = UNORDERED;
9605 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9606
9607 do_rev = 0;
9608 if (! can_compare_p (cmp, mode, ccp_jump)
9609 && (can_compare_p (rcmp, mode, ccp_jump)
9610 /* If the target doesn't provide either UNORDERED or ORDERED
9611 comparisons, canonicalize on UNORDERED for the library. */
9612 || rcmp == UNORDERED))
9613 do_rev = 1;
9614
9615 if (! do_rev)
9616 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9617 else
9618 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9619 }
9620 break;
9621
9622 {
9623 enum rtx_code rcode1;
9624 enum tree_code tcode2;
9625
9626 case UNLT_EXPR:
9627 rcode1 = UNLT;
9628 tcode2 = LT_EXPR;
9629 goto unordered_bcc;
9630 case UNLE_EXPR:
9631 rcode1 = UNLE;
9632 tcode2 = LE_EXPR;
9633 goto unordered_bcc;
9634 case UNGT_EXPR:
9635 rcode1 = UNGT;
9636 tcode2 = GT_EXPR;
9637 goto unordered_bcc;
9638 case UNGE_EXPR:
9639 rcode1 = UNGE;
9640 tcode2 = GE_EXPR;
9641 goto unordered_bcc;
9642 case UNEQ_EXPR:
9643 rcode1 = UNEQ;
9644 tcode2 = EQ_EXPR;
9645 goto unordered_bcc;
9646
9647 unordered_bcc:
9648 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9649 if (can_compare_p (rcode1, mode, ccp_jump))
9650 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9651 if_true_label);
9652 else
9653 {
9654 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9655 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9656 tree cmp0, cmp1;
9657
9658 /* If the target doesn't support combined unordered
9659 compares, decompose into UNORDERED + comparison. */
9660 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9661 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9662 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9663 do_jump (exp, if_false_label, if_true_label);
9664 }
9665 }
9666 break;
9667
9668 default:
9669 normal:
9670 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9671 #if 0
9672 /* This is not needed any more and causes poor code since it causes
9673 comparisons and tests from non-SI objects to have different code
9674 sequences. */
9675 /* Copy to register to avoid generating bad insns by cse
9676 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9677 if (!cse_not_expected && GET_CODE (temp) == MEM)
9678 temp = copy_to_reg (temp);
9679 #endif
9680 do_pending_stack_adjust ();
9681 /* Do any postincrements in the expression that was tested. */
9682 emit_queue ();
9683
9684 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9685 {
9686 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9687 if (target)
9688 emit_jump (target);
9689 }
9690 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9691 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9692 /* Note swapping the labels gives us not-equal. */
9693 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9694 else if (GET_MODE (temp) != VOIDmode)
9695 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9696 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9697 GET_MODE (temp), NULL_RTX, 0,
9698 if_false_label, if_true_label);
9699 else
9700 abort ();
9701 }
9702
9703 if (drop_through_label)
9704 {
9705 /* If do_jump produces code that might be jumped around,
9706 do any stack adjusts from that code, before the place
9707 where control merges in. */
9708 do_pending_stack_adjust ();
9709 emit_label (drop_through_label);
9710 }
9711 }
9712 \f
9713 /* Given a comparison expression EXP for values too wide to be compared
9714 with one insn, test the comparison and jump to the appropriate label.
9715 The code of EXP is ignored; we always test GT if SWAP is 0,
9716 and LT if SWAP is 1. */
9717
9718 static void
9719 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9720 tree exp;
9721 int swap;
9722 rtx if_false_label, if_true_label;
9723 {
9724 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9725 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9726 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9727 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9728
9729 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9730 }
9731
9732 /* Compare OP0 with OP1, word at a time, in mode MODE.
9733 UNSIGNEDP says to do unsigned comparison.
9734 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
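/* For example, comparing DImode values on a 32-bit target compares
   the high-order words first; the low-order words are examined only
   if the high-order words are equal.  */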
9735
9736 void
9737 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9738 enum machine_mode mode;
9739 int unsignedp;
9740 rtx op0, op1;
9741 rtx if_false_label, if_true_label;
9742 {
9743 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9744 rtx drop_through_label = 0;
9745 int i;
9746
9747 if (! if_true_label || ! if_false_label)
9748 drop_through_label = gen_label_rtx ();
9749 if (! if_true_label)
9750 if_true_label = drop_through_label;
9751 if (! if_false_label)
9752 if_false_label = drop_through_label;
9753
9754 /* Compare a word at a time, high order first. */
9755 for (i = 0; i < nwords; i++)
9756 {
9757 rtx op0_word, op1_word;
9758
9759 if (WORDS_BIG_ENDIAN)
9760 {
9761 op0_word = operand_subword_force (op0, i, mode);
9762 op1_word = operand_subword_force (op1, i, mode);
9763 }
9764 else
9765 {
9766 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9767 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9768 }
9769
9770 /* All but the high-order word must be compared as unsigned. */
9771 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9772 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9773 NULL_RTX, if_true_label);
9774
9775 /* Consider lower words only if these are equal. */
9776 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9777 NULL_RTX, 0, NULL_RTX, if_false_label);
9778 }
9779
9780 if (if_false_label)
9781 emit_jump (if_false_label);
9782 if (drop_through_label)
9783 emit_label (drop_through_label);
9784 }
9785
9786 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9787 with one insn, test the comparison and jump to the appropriate label. */
9788
9789 static void
9790 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9791 tree exp;
9792 rtx if_false_label, if_true_label;
9793 {
9794 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9795 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9796 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9797 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9798 int i;
9799 rtx drop_through_label = 0;
9800
9801 if (! if_false_label)
9802 drop_through_label = if_false_label = gen_label_rtx ();
9803
9804 for (i = 0; i < nwords; i++)
9805 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9806 operand_subword_force (op1, i, mode),
9807 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9808 word_mode, NULL_RTX, 0, if_false_label,
9809 NULL_RTX);
9810
9811 if (if_true_label)
9812 emit_jump (if_true_label);
9813 if (drop_through_label)
9814 emit_label (drop_through_label);
9815 }
9816 \f
9817 /* Jump according to whether OP0 is 0.
9818 We assume that OP0 has an integer mode that is too wide
9819 for the available compare insns. */
9820
9821 void
9822 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9823 rtx op0;
9824 rtx if_false_label, if_true_label;
9825 {
9826 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9827 rtx part;
9828 int i;
9829 rtx drop_through_label = 0;
9830
9831 /* The fastest way of doing this comparison on almost any machine is to
9832 "or" all the words and compare the result. If all have to be loaded
9833 from memory and this is a very wide item, it's possible this may
9834 be slower, but that's highly unlikely. */
9835
9836 part = gen_reg_rtx (word_mode);
9837 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9838 for (i = 1; i < nwords && part != 0; i++)
9839 part = expand_binop (word_mode, ior_optab, part,
9840 operand_subword_force (op0, i, GET_MODE (op0)),
9841 part, 1, OPTAB_WIDEN);
9842
9843 if (part != 0)
9844 {
9845 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9846 NULL_RTX, 0, if_false_label, if_true_label);
9847
9848 return;
9849 }
9850
9851 /* If we couldn't do the "or" simply, do this with a series of compares. */
9852 if (! if_false_label)
9853 drop_through_label = if_false_label = gen_label_rtx ();
9854
9855 for (i = 0; i < nwords; i++)
9856 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9857 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9858 if_false_label, NULL_RTX);
9859
9860 if (if_true_label)
9861 emit_jump (if_true_label);
9862
9863 if (drop_through_label)
9864 emit_label (drop_through_label);
9865 }
9866 \f
9867 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9868 (including code to compute the values to be compared)
9869 and set (CC0) according to the result.
9870 The decision as to signed or unsigned comparison must be made by the caller.
9871
9872 We force a stack adjustment unless there are currently
9873 things pushed on the stack that aren't yet used.
9874
9875 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9876 compared.
9877
9878 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9879 size of MODE should be used. */
9880
9881 rtx
9882 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9883 register rtx op0, op1;
9884 enum rtx_code code;
9885 int unsignedp;
9886 enum machine_mode mode;
9887 rtx size;
9888 unsigned int align;
9889 {
9890 rtx tem;
9891
9892 /* If one operand is constant, make it the second one. Only do this
9893 if the other operand is not constant as well. */
9894
9895 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9896 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9897 {
9898 tem = op0;
9899 op0 = op1;
9900 op1 = tem;
9901 code = swap_condition (code);
9902 }
9903
9904 if (flag_force_mem)
9905 {
9906 op0 = force_not_mem (op0);
9907 op1 = force_not_mem (op1);
9908 }
9909
9910 do_pending_stack_adjust ();
9911
9912 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9913 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9914 return tem;
9915
9916 #if 0
9917 /* There's no need to do this now that combine.c can eliminate lots of
9918 sign extensions. This can be less efficient in certain cases on other
9919 machines. */
9920
9921 /* If this is a signed equality comparison, we can do it as an
9922 unsigned comparison since zero-extension is cheaper than sign
9923 extension and comparisons with zero are done as unsigned. This is
9924 the case even on machines that can do fast sign extension, since
9925 zero-extension is easier to combine with other operations than
9926 sign-extension is. If we are comparing against a constant, we must
9927 convert it to what it would look like unsigned. */
9928 if ((code == EQ || code == NE) && ! unsignedp
9929 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9930 {
9931 if (GET_CODE (op1) == CONST_INT
9932 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9933 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9934 unsignedp = 1;
9935 }
9936 #endif
9937
9938 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9939
9940 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9941 }
9942
9943 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9944 The decision as to signed or unsigned comparison must be made by the caller.
9945
9946 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9947 compared.
9948
9949 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9950 size of MODE should be used. */
9951
9952 void
9953 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9954 if_false_label, if_true_label)
9955 register rtx op0, op1;
9956 enum rtx_code code;
9957 int unsignedp;
9958 enum machine_mode mode;
9959 rtx size;
9960 unsigned int align;
9961 rtx if_false_label, if_true_label;
9962 {
9963 rtx tem;
9964 int dummy_true_label = 0;
9965
9966 /* Reverse the comparison if that is safe and we want to jump if it is
9967 false. */
9968 if (! if_true_label && ! FLOAT_MODE_P (mode))
9969 {
9970 if_true_label = if_false_label;
9971 if_false_label = 0;
9972 code = reverse_condition (code);
9973 }
9974
9975 /* If one operand is constant, make it the second one. Only do this
9976 if the other operand is not constant as well. */
9977
9978 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9979 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9980 {
9981 tem = op0;
9982 op0 = op1;
9983 op1 = tem;
9984 code = swap_condition (code);
9985 }
9986
9987 if (flag_force_mem)
9988 {
9989 op0 = force_not_mem (op0);
9990 op1 = force_not_mem (op1);
9991 }
9992
9993 do_pending_stack_adjust ();
9994
9995 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9996 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9997 {
9998 if (tem == const_true_rtx)
9999 {
10000 if (if_true_label)
10001 emit_jump (if_true_label);
10002 }
10003 else
10004 {
10005 if (if_false_label)
10006 emit_jump (if_false_label);
10007 }
10008 return;
10009 }
10010
10011 #if 0
10012 /* There's no need to do this now that combine.c can eliminate lots of
10013 sign extensions. This can be less efficient in certain cases on other
10014 machines. */
10015
10016 /* If this is a signed equality comparison, we can do it as an
10017 unsigned comparison since zero-extension is cheaper than sign
10018 extension and comparisons with zero are done as unsigned. This is
10019 the case even on machines that can do fast sign extension, since
10020 zero-extension is easier to combine with other operations than
10021 sign-extension is. If we are comparing against a constant, we must
10022 convert it to what it would look like unsigned. */
10023 if ((code == EQ || code == NE) && ! unsignedp
10024 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10025 {
10026 if (GET_CODE (op1) == CONST_INT
10027 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10028 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10029 unsignedp = 1;
10030 }
10031 #endif
10032
10033 if (! if_true_label)
10034 {
10035 dummy_true_label = 1;
10036 if_true_label = gen_label_rtx ();
10037 }
10038
10039 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10040 if_true_label);
10041
10042 if (if_false_label)
10043 emit_jump (if_false_label);
10044 if (dummy_true_label)
10045 emit_label (if_true_label);
10046 }
10047
10048 /* Generate code for a comparison expression EXP (including code to compute
10049 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10050 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10051 generated code will drop through.
10052 SIGNED_CODE should be the rtx operation for this comparison for
10053 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10054
10055 We force a stack adjustment unless there are currently
10056 things pushed on the stack that aren't yet used. */
10057
10058 static void
10059 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10060 if_true_label)
10061 register tree exp;
10062 enum rtx_code signed_code, unsigned_code;
10063 rtx if_false_label, if_true_label;
10064 {
10065 unsigned int align0, align1;
10066 register rtx op0, op1;
10067 register tree type;
10068 register enum machine_mode mode;
10069 int unsignedp;
10070 enum rtx_code code;
10071
10072 /* Don't crash if the comparison was erroneous. */
10073 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10074 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10075 return;
10076
10077 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10078 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10079 mode = TYPE_MODE (type);
10080 unsignedp = TREE_UNSIGNED (type);
10081 code = unsignedp ? unsigned_code : signed_code;
10082
10083 #ifdef HAVE_canonicalize_funcptr_for_compare
10084 /* If function pointers need to be "canonicalized" before they can
10085 be reliably compared, then canonicalize them. */
10086 if (HAVE_canonicalize_funcptr_for_compare
10087 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10088 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10089 == FUNCTION_TYPE))
10090 {
10091 rtx new_op0 = gen_reg_rtx (mode);
10092
10093 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10094 op0 = new_op0;
10095 }
10096
10097 if (HAVE_canonicalize_funcptr_for_compare
10098 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10099 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10100 == FUNCTION_TYPE))
10101 {
10102 rtx new_op1 = gen_reg_rtx (mode);
10103
10104 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10105 op1 = new_op1;
10106 }
10107 #endif
10108
10109 /* Do any postincrements in the expression that was tested. */
10110 emit_queue ();
10111
10112 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10113 ((mode == BLKmode)
10114 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10115 MIN (align0, align1),
10116 if_false_label, if_true_label);
10117 }
10118 \f
10119 /* Generate code to calculate EXP using a store-flag instruction
10120 and return an rtx for the result. EXP is either a comparison
10121 or a TRUTH_NOT_EXPR whose operand is a comparison.
10122
10123 If TARGET is nonzero, store the result there if convenient.
10124
10125 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10126 cheap.
10127
10128 Return zero if there is no suitable set-flag instruction
10129 available on this machine.
10130
10131 Once expand_expr has been called on the arguments of the comparison,
10132 we are committed to doing the store flag, since it is not safe to
10133 re-evaluate the expression. We emit the store-flag insn by calling
10134 emit_store_flag, but only expand the arguments if we have a reason
10135 to believe that emit_store_flag will be successful. If we think that
10136 it will, but it isn't, we have to simulate the store-flag with a
10137 set/jump/set sequence. */
10138
10139 static rtx
10140 do_store_flag (exp, target, mode, only_cheap)
10141 tree exp;
10142 rtx target;
10143 enum machine_mode mode;
10144 int only_cheap;
10145 {
10146 enum rtx_code code;
10147 tree arg0, arg1, type;
10148 tree tem;
10149 enum machine_mode operand_mode;
10150 int invert = 0;
10151 int unsignedp;
10152 rtx op0, op1;
10153 enum insn_code icode;
10154 rtx subtarget = target;
10155 rtx result, label;
10156
10157 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10158 result at the end. We can't simply invert the test since it would
10159 have already been inverted if it were valid. This case occurs for
10160 some floating-point comparisons. */
10161
10162 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10163 invert = 1, exp = TREE_OPERAND (exp, 0);
10164
10165 arg0 = TREE_OPERAND (exp, 0);
10166 arg1 = TREE_OPERAND (exp, 1);
10167 type = TREE_TYPE (arg0);
10168 operand_mode = TYPE_MODE (type);
10169 unsignedp = TREE_UNSIGNED (type);
10170
10171 /* We won't bother with BLKmode store-flag operations because it would mean
10172 passing a lot of information to emit_store_flag. */
10173 if (operand_mode == BLKmode)
10174 return 0;
10175
10176 /* We won't bother with store-flag operations involving function pointers
10177 when function pointers must be canonicalized before comparisons. */
10178 #ifdef HAVE_canonicalize_funcptr_for_compare
10179 if (HAVE_canonicalize_funcptr_for_compare
10180 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10181 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10182 == FUNCTION_TYPE))
10183 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10184 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10185 == FUNCTION_TYPE))))
10186 return 0;
10187 #endif
10188
10189 STRIP_NOPS (arg0);
10190 STRIP_NOPS (arg1);
10191
10192 /* Get the rtx comparison code to use. We know that EXP is a comparison
10193 operation of some type. Some comparisons against 1 and -1 can be
10194 converted to comparisons with zero. Do so here so that the tests
10195 below will be aware that we have a comparison with zero. These
10196 tests will not catch constants in the first operand, but constants
10197 are rarely passed as the first operand. */
10198
10199 switch (TREE_CODE (exp))
10200 {
10201 case EQ_EXPR:
10202 code = EQ;
10203 break;
10204 case NE_EXPR:
10205 code = NE;
10206 break;
10207 case LT_EXPR:
10208 if (integer_onep (arg1))
10209 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10210 else
10211 code = unsignedp ? LTU : LT;
10212 break;
10213 case LE_EXPR:
10214 if (! unsignedp && integer_all_onesp (arg1))
10215 arg1 = integer_zero_node, code = LT;
10216 else
10217 code = unsignedp ? LEU : LE;
10218 break;
10219 case GT_EXPR:
10220 if (! unsignedp && integer_all_onesp (arg1))
10221 arg1 = integer_zero_node, code = GE;
10222 else
10223 code = unsignedp ? GTU : GT;
10224 break;
10225 case GE_EXPR:
10226 if (integer_onep (arg1))
10227 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10228 else
10229 code = unsignedp ? GEU : GE;
10230 break;
10231
10232 case UNORDERED_EXPR:
10233 code = UNORDERED;
10234 break;
10235 case ORDERED_EXPR:
10236 code = ORDERED;
10237 break;
10238 case UNLT_EXPR:
10239 code = UNLT;
10240 break;
10241 case UNLE_EXPR:
10242 code = UNLE;
10243 break;
10244 case UNGT_EXPR:
10245 code = UNGT;
10246 break;
10247 case UNGE_EXPR:
10248 code = UNGE;
10249 break;
10250 case UNEQ_EXPR:
10251 code = UNEQ;
10252 break;
10253
10254 default:
10255 abort ();
10256 }
10257
10258 /* Put a constant second. */
10259 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10260 {
10261 tem = arg0; arg0 = arg1; arg1 = tem;
10262 code = swap_condition (code);
10263 }
10264
10265 /* If this is an equality or inequality test of a single bit, we can
10266 do this by shifting the bit being tested to the low-order bit and
10267 masking the result with the constant 1. If the condition was EQ,
10268 we xor it with 1. This does not require an scc insn and is faster
10269 than an scc insn even if we have it. */
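/* For instance, `(x & 8) != 0' is computed as `(x >> 3) & 1', and
   `(x & 8) == 0' as `((x >> 3) ^ 1) & 1'.  */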
10270
10271 if ((code == NE || code == EQ)
10272 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10273 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10274 {
10275 tree inner = TREE_OPERAND (arg0, 0);
10276 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10277 int ops_unsignedp;
10278
10279 /* If INNER is a right shift of a constant and it plus BITNUM does
10280 not overflow, adjust BITNUM and INNER. */
10281
10282 if (TREE_CODE (inner) == RSHIFT_EXPR
10283 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10284 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10285 && bitnum < TYPE_PRECISION (type)
10286 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10287 bitnum - TYPE_PRECISION (type)))
10288 {
10289 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10290 inner = TREE_OPERAND (inner, 0);
10291 }
10292
10293 /* If we are going to be able to omit the AND below, we must do our
10294 operations as unsigned. If we must use the AND, we have a choice.
10295 Normally unsigned is faster, but for some machines signed is. */
10296 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10297 #ifdef LOAD_EXTEND_OP
10298 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10299 #else
10300 : 1
10301 #endif
10302 );
10303
10304 if (! get_subtarget (subtarget)
10305 || GET_MODE (subtarget) != operand_mode
10306 || ! safe_from_p (subtarget, inner, 1))
10307 subtarget = 0;
10308
10309 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10310
10311 if (bitnum != 0)
10312 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10313 size_int (bitnum), subtarget, ops_unsignedp);
10314
10315 if (GET_MODE (op0) != mode)
10316 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10317
10318 if ((code == EQ && ! invert) || (code == NE && invert))
10319 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10320 ops_unsignedp, OPTAB_LIB_WIDEN);
10321
10322 /* Put the AND last so it can combine with more things. */
10323 if (bitnum != TYPE_PRECISION (type) - 1)
10324 op0 = expand_and (op0, const1_rtx, subtarget);
10325
10326 return op0;
10327 }
10328
10329 /* Now see if we are likely to be able to do this. Return if not. */
10330 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10331 return 0;
10332
10333 icode = setcc_gen_code[(int) code];
10334 if (icode == CODE_FOR_nothing
10335 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10336 {
10337 /* We can only do this if it is one of the special cases that
10338 can be handled without an scc insn. */
10339 if ((code == LT && integer_zerop (arg1))
10340 || (! only_cheap && code == GE && integer_zerop (arg1)))
10341 ;
10342 else if (BRANCH_COST >= 0
10343 && ! only_cheap && (code == NE || code == EQ)
10344 && TREE_CODE (type) != REAL_TYPE
10345 && ((abs_optab->handlers[(int) operand_mode].insn_code
10346 != CODE_FOR_nothing)
10347 || (ffs_optab->handlers[(int) operand_mode].insn_code
10348 != CODE_FOR_nothing)))
10349 ;
10350 else
10351 return 0;
10352 }
10353
10354 preexpand_calls (exp);
10355 if (! get_subtarget (target)
10356 || GET_MODE (subtarget) != operand_mode
10357 || ! safe_from_p (subtarget, arg1, 1))
10358 subtarget = 0;
10359
10360 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10361 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10362
10363 if (target == 0)
10364 target = gen_reg_rtx (mode);
10365
10366 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10367 because, if emit_store_flag does anything at all, it will succeed and
10368 OP0 and OP1 will not be used subsequently. */
10369
10370 result = emit_store_flag (target, code,
10371 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10372 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10373 operand_mode, unsignedp, 1);
10374
10375 if (result)
10376 {
10377 if (invert)
10378 result = expand_binop (mode, xor_optab, result, const1_rtx,
10379 result, 0, OPTAB_LIB_WIDEN);
10380 return result;
10381 }
10382
10383 /* If this failed, we have to do this with set/compare/jump/set code. */
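/* The fallback emitted below is, in illustrative pseudo-code,

       target = invert ? 0 : 1;
       if (op0 <code> op1) goto label;
       target = invert ? 1 : 0;
     label:

   i.e. preload the result for the comparison holding and branch over the
   store of the opposite value when it does hold.  */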
10384 if (GET_CODE (target) != REG
10385 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10386 target = gen_reg_rtx (GET_MODE (target));
10387
10388 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10389 result = compare_from_rtx (op0, op1, code, unsignedp,
10390 operand_mode, NULL_RTX, 0);
10391 if (GET_CODE (result) == CONST_INT)
10392 return (((result == const0_rtx && ! invert)
10393 || (result != const0_rtx && invert))
10394 ? const0_rtx : const1_rtx);
10395
10396 label = gen_label_rtx ();
10397 if (bcc_gen_fctn[(int) code] == 0)
10398 abort ();
10399
10400 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10401 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10402 emit_label (label);
10403
10404 return target;
10405 }
10406 \f
10407 /* Generate a tablejump instruction (used for switch statements). */
10408
10409 #ifdef HAVE_tablejump
10410
10411 /* INDEX is the value being switched on, with the lowest value
10412 in the table already subtracted.
10413 MODE is its expected mode (needed if INDEX is constant).
10414 RANGE is the length of the jump table.
10415 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10416
10417 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10418 index value is out of range. */
10419
10420 void
10421 do_tablejump (index, mode, range, table_label, default_label)
10422 rtx index, range, table_label, default_label;
10423 enum machine_mode mode;
10424 {
10425 register rtx temp, vector;
10426
10427 /* Do an unsigned comparison (in the proper mode) between the index
10428 expression and the value which represents the length of the range.
10429 Since we just finished subtracting the lower bound of the range
10430 from the index expression, this comparison allows us to simultaneously
10431 check that the original index expression value is both greater than
10432 or equal to the minimum value of the range and less than or equal to
10433 the maximum value of the range. */
10434
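/* A worked illustration (with made-up case values): for a switch whose case
   labels run from 3 to 7, the caller passes INDEX already reduced to x - 3,
   so the single unsigned test below acts like

       if ((unsigned) (x - 3) > range) goto default_label;

   which rejects both x < 3 (the subtraction wraps around to a huge unsigned
   value) and any x past the last case label.  */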
10435 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10436 0, default_label);
10437
10438 /* If index is in range, it must fit in Pmode.
10439 Convert to Pmode so we can index with it. */
10440 if (mode != Pmode)
10441 index = convert_to_mode (Pmode, index, 1);
10442
10443 /* Don't let a MEM slip through, because then the INDEX that comes
10444 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10445 and break_out_memory_refs will go to work on it and mess it up. */
10446 #ifdef PIC_CASE_VECTOR_ADDRESS
10447 if (flag_pic && GET_CODE (index) != REG)
10448 index = copy_to_mode_reg (Pmode, index);
10449 #endif
10450
10451 /* If flag_force_addr were to affect this address,
10452 it could interfere with the tricky assumptions made
10453 about addresses that contain label-refs,
10454 which may be valid only very near the tablejump itself. */
10455 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10456 GET_MODE_SIZE, because this indicates how large insns are. The other
10457 uses should all be Pmode, because they are addresses. This code
10458 could fail if addresses and insns are not the same size. */
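/* Illustrative sketch of the address computed here: with 4-byte table
   entries this is

       entry_addr = table_base + index * 4;

   i.e. the table label plus a scaled index, from which the target address
   is then loaded below.  */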
10459 index = gen_rtx_PLUS (Pmode,
10460 gen_rtx_MULT (Pmode, index,
10461 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10462 gen_rtx_LABEL_REF (Pmode, table_label));
10463 #ifdef PIC_CASE_VECTOR_ADDRESS
10464 if (flag_pic)
10465 index = PIC_CASE_VECTOR_ADDRESS (index);
10466 else
10467 #endif
10468 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10469 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10470 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10471 RTX_UNCHANGING_P (vector) = 1;
10472 convert_move (temp, vector, 0);
10473
10474 emit_jump_insn (gen_tablejump (temp, table_label));
10475
10476 /* If we are generating PIC code or if the table is PC-relative, the
10477 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10478 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10479 emit_barrier ();
10480 }
10481
10482 #endif /* HAVE_tablejump */