expr.c (expand_expr, [...]): Still check for missing CONSTRUCTOR element.
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "tm_p.h"
46
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
49
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
52
53 #ifdef PUSH_ROUNDING
54
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first */
57 #endif
58
59 #endif
60
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
68
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
73
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80 int cse_not_expected;
81
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
86
87 /* Don't check memory usage, since code is being emitted to check memory
88 usage. Used when current_function_check_memory_usage is true, to avoid
89 infinite recursion. */
90 static int in_check_memory_usage;
91
92 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
93 static tree placeholder_list = 0;
94
95 /* This structure is used by move_by_pieces to describe the move to
96 be performed. */
97 struct move_by_pieces
98 {
99 rtx to;
100 rtx to_addr;
101 int autinc_to;
102 int explicit_inc_to;
103 int to_struct;
104 int to_readonly;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 int from_struct;
110 int from_readonly;
111 int len;
112 int offset;
113 int reverse;
114 };
115
116 /* This structure is used by clear_by_pieces to describe the clear to
117 be performed. */
118
119 struct clear_by_pieces
120 {
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 int to_struct;
126 int len;
127 int offset;
128 int reverse;
129 };
130
131 extern struct obstack permanent_obstack;
132
133 static rtx get_push_address PARAMS ((int));
134
135 static rtx enqueue_insn PARAMS ((rtx, rtx));
136 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
140 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
141 enum machine_mode,
142 struct clear_by_pieces *));
143 static int is_zeros_p PARAMS ((tree));
144 static int mostly_zeros_p PARAMS ((tree));
145 static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode,
146 tree, tree, unsigned int, int));
147 static void store_constructor PARAMS ((tree, rtx, unsigned int, int, int));
148 static rtx store_field PARAMS ((rtx, int, int, enum machine_mode,
149 tree, enum machine_mode, int,
150 unsigned int, int, int));
151 static enum memory_use_mode
152 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
153 static tree save_noncopied_parts PARAMS ((tree, tree));
154 static tree init_noncopied_parts PARAMS ((tree, tree));
155 static int safe_from_p PARAMS ((rtx, tree, int));
156 static int fixed_type_p PARAMS ((tree));
157 static rtx var_rtx PARAMS ((tree));
158 static int readonly_fields_p PARAMS ((tree));
159 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
160 static rtx expand_increment PARAMS ((tree, int, int));
161 static void preexpand_calls PARAMS ((tree));
162 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
163 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
164 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, rtx, rtx));
165 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
166
167 /* Record for each mode whether we can move a register directly to or
168 from an object of that mode in memory. If we can't, we won't try
169 to use that mode directly when accessing a field of that mode. */
170
171 static char direct_load[NUM_MACHINE_MODES];
172 static char direct_store[NUM_MACHINE_MODES];
173
174 /* If a memory-to-memory move would take MOVE_RATIO or more simple
175 move-instruction sequences, we will do a movstr or libcall instead. */
176
177 #ifndef MOVE_RATIO
178 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
179 #define MOVE_RATIO 2
180 #else
181 /* If we are optimizing for space (-Os), cut down the default move ratio */
182 #define MOVE_RATIO (optimize_size ? 3 : 15)
183 #endif
184 #endif
185
186 /* This macro is used to determine whether move_by_pieces should be called
187 to perform a structure copy. */
188 #ifndef MOVE_BY_PIECES_P
189 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
190 (SIZE, ALIGN) < MOVE_RATIO)
191 #endif
192
193 /* This array records the insn_code of insns to perform block moves. */
194 enum insn_code movstr_optab[NUM_MACHINE_MODES];
195
196 /* This array records the insn_code of insns to perform block clears. */
197 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
198
199 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
200
201 #ifndef SLOW_UNALIGNED_ACCESS
202 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
203 #endif
204 \f
205 /* This is run once per compilation to set up which modes can be used
206 directly in memory and to initialize the block move optab. */
207
208 void
209 init_expr_once ()
210 {
211 rtx insn, pat;
212 enum machine_mode mode;
213 int num_clobbers;
214 rtx mem, mem1;
215 char *free_point;
216
217 start_sequence ();
218
219 /* Since we are on the permanent obstack, we must be sure we save this
220 spot AFTER we call start_sequence, since it will reuse the rtl it
221 makes. */
222 free_point = (char *) oballoc (0);
223
224 /* Try indexing by frame ptr and try by stack ptr.
225 It is known that on the Convex the stack ptr isn't a valid index.
226 With luck, one or the other is valid on any machine. */
227 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
228 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
229
230 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
231 pat = PATTERN (insn);
232
233 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
234 mode = (enum machine_mode) ((int) mode + 1))
235 {
236 int regno;
237 rtx reg;
238
239 direct_load[(int) mode] = direct_store[(int) mode] = 0;
240 PUT_MODE (mem, mode);
241 PUT_MODE (mem1, mode);
242
243 /* See if there is some register that can be used in this mode and
244 directly loaded or stored from memory. */
245
246 if (mode != VOIDmode && mode != BLKmode)
247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
248 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
249 regno++)
250 {
251 if (! HARD_REGNO_MODE_OK (regno, mode))
252 continue;
253
254 reg = gen_rtx_REG (mode, regno);
255
256 SET_SRC (pat) = mem;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
260
261 SET_SRC (pat) = mem1;
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
265
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
270
271 SET_SRC (pat) = reg;
272 SET_DEST (pat) = mem1;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
275 }
276 }
277
278 end_sequence ();
279 obfree (free_point);
280 }
281
282 /* This is run at the start of compiling a function. */
283
284 void
285 init_expr ()
286 {
287 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
288
289 pending_chain = 0;
290 pending_stack_adjust = 0;
291 arg_space_so_far = 0;
292 inhibit_defer_pop = 0;
293 saveregs_value = 0;
294 apply_args_value = 0;
295 forced_labels = 0;
296 }
297
298 void
299 mark_expr_status (p)
300 struct expr_status *p;
301 {
302 if (p == NULL)
303 return;
304
305 ggc_mark_rtx (p->x_saveregs_value);
306 ggc_mark_rtx (p->x_apply_args_value);
307 ggc_mark_rtx (p->x_forced_labels);
308 }
309
310 void
311 free_expr_status (f)
312 struct function *f;
313 {
314 free (f->expr);
315 f->expr = NULL;
316 }
317
318 /* Small sanity check that the queue is empty at the end of a function. */
319 void
320 finish_expr_for_function ()
321 {
322 if (pending_chain)
323 abort ();
324 }
325 \f
326 /* Manage the queue of increment instructions to be output
327 for POSTINCREMENT_EXPR expressions, etc. */
328
329 /* Queue up to increment (or change) VAR later. BODY says how:
330 BODY should be the same thing you would pass to emit_insn
331 to increment right away. It will go to emit_insn later on.
332
333 The value is a QUEUED expression to be used in place of VAR
334 where you want to guarantee the pre-incrementation value of VAR. */
335
336 static rtx
337 enqueue_insn (var, body)
338 rtx var, body;
339 {
340 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
341 body, pending_chain);
342 return pending_chain;
343 }
344
345 /* Use protect_from_queue to convert a QUEUED expression
346 into something that you can put immediately into an instruction.
347 If the queued incrementation has not happened yet,
348 protect_from_queue returns the variable itself.
349 If the incrementation has happened, protect_from_queue returns a temp
350 that contains a copy of the old value of the variable.
351
352 Any time an rtx which might possibly be a QUEUED is to be put
353 into an instruction, it must be passed through protect_from_queue first.
354 QUEUED expressions are not meaningful in instructions.
355
356 Do not pass a value through protect_from_queue and then hold
357 on to it for a while before putting it in an instruction!
358 If the queue is flushed in between, incorrect code will result. */
359
360 rtx
361 protect_from_queue (x, modify)
362 register rtx x;
363 int modify;
364 {
365 register RTX_CODE code = GET_CODE (x);
366
367 #if 0 /* A QUEUED can hang around after the queue is forced out. */
368 /* Shortcut for most common case. */
369 if (pending_chain == 0)
370 return x;
371 #endif
372
373 if (code != QUEUED)
374 {
375 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
376 use of autoincrement. Make a copy of the contents of the memory
377 location rather than a copy of the address, but not if the value is
378 of mode BLKmode. Don't modify X in place since it might be
379 shared. */
380 if (code == MEM && GET_MODE (x) != BLKmode
381 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
382 {
383 register rtx y = XEXP (x, 0);
384 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
385
386 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
387 MEM_COPY_ATTRIBUTES (new, x);
388 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
389
390 if (QUEUED_INSN (y))
391 {
392 register rtx temp = gen_reg_rtx (GET_MODE (new));
393 emit_insn_before (gen_move_insn (temp, new),
394 QUEUED_INSN (y));
395 return temp;
396 }
397 return new;
398 }
399 /* Otherwise, recursively protect the subexpressions of all
400 the kinds of rtx's that can contain a QUEUED. */
401 if (code == MEM)
402 {
403 rtx tem = protect_from_queue (XEXP (x, 0), 0);
404 if (tem != XEXP (x, 0))
405 {
406 x = copy_rtx (x);
407 XEXP (x, 0) = tem;
408 }
409 }
410 else if (code == PLUS || code == MULT)
411 {
412 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
413 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
414 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
415 {
416 x = copy_rtx (x);
417 XEXP (x, 0) = new0;
418 XEXP (x, 1) = new1;
419 }
420 }
421 return x;
422 }
423 /* If the increment has not happened, use the variable itself. */
424 if (QUEUED_INSN (x) == 0)
425 return QUEUED_VAR (x);
426 /* If the increment has happened and a pre-increment copy exists,
427 use that copy. */
428 if (QUEUED_COPY (x) != 0)
429 return QUEUED_COPY (x);
430 /* The increment has happened but we haven't set up a pre-increment copy.
431 Set one up now, and use it. */
432 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
433 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
434 QUEUED_INSN (x));
435 return QUEUED_COPY (x);
436 }
437
438 /* Return nonzero if X contains a QUEUED expression:
439 if it contains anything that will be altered by a queued increment.
440 We handle only combinations of MEM, PLUS, MINUS and MULT operators
441 since memory addresses generally contain only those. */
442
443 int
444 queued_subexp_p (x)
445 rtx x;
446 {
447 register enum rtx_code code = GET_CODE (x);
448 switch (code)
449 {
450 case QUEUED:
451 return 1;
452 case MEM:
453 return queued_subexp_p (XEXP (x, 0));
454 case MULT:
455 case PLUS:
456 case MINUS:
457 return (queued_subexp_p (XEXP (x, 0))
458 || queued_subexp_p (XEXP (x, 1)));
459 default:
460 return 0;
461 }
462 }
463
464 /* Perform all the pending incrementations. */
465
466 void
467 emit_queue ()
468 {
469 register rtx p;
470 while ((p = pending_chain))
471 {
472 rtx body = QUEUED_BODY (p);
473
474 if (GET_CODE (body) == SEQUENCE)
475 {
476 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
477 emit_insn (QUEUED_BODY (p));
478 }
479 else
480 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
481 pending_chain = QUEUED_NEXT (p);
482 }
483 }
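/* A minimal usage sketch of the queue protocol described above, in the
   style of expand_increment later in this file; the operands and exact
   call sites are illustrative assumptions, not code from these functions:

     rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     ...
     rtx pre_value = protect_from_queue (q, 0);    old value of VAR
     ...
     emit_queue ();                                flush pending increments  */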
484 \f
485 /* Copy data from FROM to TO, where the machine modes are not the same.
486 Both modes may be integer, or both may be floating.
487 UNSIGNEDP should be nonzero if FROM is an unsigned type.
488 This causes zero-extension instead of sign-extension. */
489
490 void
491 convert_move (to, from, unsignedp)
492 register rtx to, from;
493 int unsignedp;
494 {
495 enum machine_mode to_mode = GET_MODE (to);
496 enum machine_mode from_mode = GET_MODE (from);
497 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
498 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
499 enum insn_code code;
500 rtx libcall;
501
502 /* rtx code for making an equivalent value. */
503 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
504
505 to = protect_from_queue (to, 1);
506 from = protect_from_queue (from, 0);
507
508 if (to_real != from_real)
509 abort ();
510
511 /* If FROM is a SUBREG that indicates that we have already done at least
512 the required extension, strip it. We don't handle such SUBREGs as
513 TO here. */
514
515 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
516 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
517 >= GET_MODE_SIZE (to_mode))
518 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
519 from = gen_lowpart (to_mode, from), from_mode = to_mode;
520
521 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
522 abort ();
523
524 if (to_mode == from_mode
525 || (from_mode == VOIDmode && CONSTANT_P (from)))
526 {
527 emit_move_insn (to, from);
528 return;
529 }
530
531 if (to_real)
532 {
533 rtx value;
534
535 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
536 {
537 /* Try converting directly if the insn is supported. */
538 if ((code = can_extend_p (to_mode, from_mode, 0))
539 != CODE_FOR_nothing)
540 {
541 emit_unop_insn (code, to, from, UNKNOWN);
542 return;
543 }
544 }
545
546 #ifdef HAVE_trunchfqf2
547 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
548 {
549 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
550 return;
551 }
552 #endif
553 #ifdef HAVE_trunctqfqf2
554 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
555 {
556 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
557 return;
558 }
559 #endif
560 #ifdef HAVE_truncsfqf2
561 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
562 {
563 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
564 return;
565 }
566 #endif
567 #ifdef HAVE_truncdfqf2
568 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
569 {
570 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
571 return;
572 }
573 #endif
574 #ifdef HAVE_truncxfqf2
575 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
576 {
577 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
578 return;
579 }
580 #endif
581 #ifdef HAVE_trunctfqf2
582 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
583 {
584 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
585 return;
586 }
587 #endif
588
589 #ifdef HAVE_trunctqfhf2
590 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
591 {
592 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
593 return;
594 }
595 #endif
596 #ifdef HAVE_truncsfhf2
597 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
598 {
599 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
600 return;
601 }
602 #endif
603 #ifdef HAVE_truncdfhf2
604 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
605 {
606 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
607 return;
608 }
609 #endif
610 #ifdef HAVE_truncxfhf2
611 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
612 {
613 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
614 return;
615 }
616 #endif
617 #ifdef HAVE_trunctfhf2
618 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
619 {
620 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
621 return;
622 }
623 #endif
624
625 #ifdef HAVE_truncsftqf2
626 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
627 {
628 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
629 return;
630 }
631 #endif
632 #ifdef HAVE_truncdftqf2
633 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
634 {
635 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
636 return;
637 }
638 #endif
639 #ifdef HAVE_truncxftqf2
640 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
641 {
642 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
643 return;
644 }
645 #endif
646 #ifdef HAVE_trunctftqf2
647 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
648 {
649 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
650 return;
651 }
652 #endif
653
654 #ifdef HAVE_truncdfsf2
655 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
656 {
657 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661 #ifdef HAVE_truncxfsf2
662 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
663 {
664 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
665 return;
666 }
667 #endif
668 #ifdef HAVE_trunctfsf2
669 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
670 {
671 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
672 return;
673 }
674 #endif
675 #ifdef HAVE_truncxfdf2
676 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
677 {
678 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
679 return;
680 }
681 #endif
682 #ifdef HAVE_trunctfdf2
683 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
684 {
685 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
686 return;
687 }
688 #endif
689
690 libcall = (rtx) 0;
691 switch (from_mode)
692 {
693 case SFmode:
694 switch (to_mode)
695 {
696 case DFmode:
697 libcall = extendsfdf2_libfunc;
698 break;
699
700 case XFmode:
701 libcall = extendsfxf2_libfunc;
702 break;
703
704 case TFmode:
705 libcall = extendsftf2_libfunc;
706 break;
707
708 default:
709 break;
710 }
711 break;
712
713 case DFmode:
714 switch (to_mode)
715 {
716 case SFmode:
717 libcall = truncdfsf2_libfunc;
718 break;
719
720 case XFmode:
721 libcall = extenddfxf2_libfunc;
722 break;
723
724 case TFmode:
725 libcall = extenddftf2_libfunc;
726 break;
727
728 default:
729 break;
730 }
731 break;
732
733 case XFmode:
734 switch (to_mode)
735 {
736 case SFmode:
737 libcall = truncxfsf2_libfunc;
738 break;
739
740 case DFmode:
741 libcall = truncxfdf2_libfunc;
742 break;
743
744 default:
745 break;
746 }
747 break;
748
749 case TFmode:
750 switch (to_mode)
751 {
752 case SFmode:
753 libcall = trunctfsf2_libfunc;
754 break;
755
756 case DFmode:
757 libcall = trunctfdf2_libfunc;
758 break;
759
760 default:
761 break;
762 }
763 break;
764
765 default:
766 break;
767 }
768
769 if (libcall == (rtx) 0)
770 /* This conversion is not implemented yet. */
771 abort ();
772
773 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
774 1, from, from_mode);
775 emit_move_insn (to, value);
776 return;
777 }
778
779 /* Now both modes are integers. */
780
781 /* Handle expanding beyond a word. */
782 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
783 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
784 {
785 rtx insns;
786 rtx lowpart;
787 rtx fill_value;
788 rtx lowfrom;
789 int i;
790 enum machine_mode lowpart_mode;
791 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
792
793 /* Try converting directly if the insn is supported. */
794 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
795 != CODE_FOR_nothing)
796 {
797 /* If FROM is a SUBREG, put it into a register. Do this
798 so that we always generate the same set of insns for
799 better cse'ing; if an intermediate assignment occurred,
800 we won't be doing the operation directly on the SUBREG. */
801 if (optimize > 0 && GET_CODE (from) == SUBREG)
802 from = force_reg (from_mode, from);
803 emit_unop_insn (code, to, from, equiv_code);
804 return;
805 }
806 /* Next, try converting via full word. */
807 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
808 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
809 != CODE_FOR_nothing))
810 {
811 if (GET_CODE (to) == REG)
812 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
813 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
814 emit_unop_insn (code, to,
815 gen_lowpart (word_mode, to), equiv_code);
816 return;
817 }
818
819 /* No special multiword conversion insn; do it by hand. */
820 start_sequence ();
821
822 /* Since we will turn this into a no conflict block, we must ensure
823 that the source does not overlap the target. */
824
825 if (reg_overlap_mentioned_p (to, from))
826 from = force_reg (from_mode, from);
827
828 /* Get a copy of FROM widened to a word, if necessary. */
829 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
830 lowpart_mode = word_mode;
831 else
832 lowpart_mode = from_mode;
833
834 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
835
836 lowpart = gen_lowpart (lowpart_mode, to);
837 emit_move_insn (lowpart, lowfrom);
838
839 /* Compute the value to put in each remaining word. */
840 if (unsignedp)
841 fill_value = const0_rtx;
842 else
843 {
844 #ifdef HAVE_slt
845 if (HAVE_slt
846 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
847 && STORE_FLAG_VALUE == -1)
848 {
849 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
850 lowpart_mode, 0, 0);
851 fill_value = gen_reg_rtx (word_mode);
852 emit_insn (gen_slt (fill_value));
853 }
854 else
855 #endif
856 {
857 fill_value
858 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
859 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
860 NULL_RTX, 0);
861 fill_value = convert_to_mode (word_mode, fill_value, 1);
862 }
863 }
864
865 /* Fill the remaining words. */
866 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
867 {
868 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
869 rtx subword = operand_subword (to, index, 1, to_mode);
870
871 if (subword == 0)
872 abort ();
873
874 if (fill_value != subword)
875 emit_move_insn (subword, fill_value);
876 }
877
878 insns = get_insns ();
879 end_sequence ();
880
881 emit_no_conflict_block (insns, to, from, NULL_RTX,
882 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
883 return;
884 }
885
886 /* Truncating multi-word to a word or less. */
887 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
888 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
889 {
890 if (!((GET_CODE (from) == MEM
891 && ! MEM_VOLATILE_P (from)
892 && direct_load[(int) to_mode]
893 && ! mode_dependent_address_p (XEXP (from, 0)))
894 || GET_CODE (from) == REG
895 || GET_CODE (from) == SUBREG))
896 from = force_reg (from_mode, from);
897 convert_move (to, gen_lowpart (word_mode, from), 0);
898 return;
899 }
900
901 /* Handle pointer conversion */ /* SPEE 900220 */
902 if (to_mode == PQImode)
903 {
904 if (from_mode != QImode)
905 from = convert_to_mode (QImode, from, unsignedp);
906
907 #ifdef HAVE_truncqipqi2
908 if (HAVE_truncqipqi2)
909 {
910 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
911 return;
912 }
913 #endif /* HAVE_truncqipqi2 */
914 abort ();
915 }
916
917 if (from_mode == PQImode)
918 {
919 if (to_mode != QImode)
920 {
921 from = convert_to_mode (QImode, from, unsignedp);
922 from_mode = QImode;
923 }
924 else
925 {
926 #ifdef HAVE_extendpqiqi2
927 if (HAVE_extendpqiqi2)
928 {
929 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
930 return;
931 }
932 #endif /* HAVE_extendpqiqi2 */
933 abort ();
934 }
935 }
936
937 if (to_mode == PSImode)
938 {
939 if (from_mode != SImode)
940 from = convert_to_mode (SImode, from, unsignedp);
941
942 #ifdef HAVE_truncsipsi2
943 if (HAVE_truncsipsi2)
944 {
945 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
946 return;
947 }
948 #endif /* HAVE_truncsipsi2 */
949 abort ();
950 }
951
952 if (from_mode == PSImode)
953 {
954 if (to_mode != SImode)
955 {
956 from = convert_to_mode (SImode, from, unsignedp);
957 from_mode = SImode;
958 }
959 else
960 {
961 #ifdef HAVE_extendpsisi2
962 if (HAVE_extendpsisi2)
963 {
964 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
965 return;
966 }
967 #endif /* HAVE_extendpsisi2 */
968 abort ();
969 }
970 }
971
972 if (to_mode == PDImode)
973 {
974 if (from_mode != DImode)
975 from = convert_to_mode (DImode, from, unsignedp);
976
977 #ifdef HAVE_truncdipdi2
978 if (HAVE_truncdipdi2)
979 {
980 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
981 return;
982 }
983 #endif /* HAVE_truncdipdi2 */
984 abort ();
985 }
986
987 if (from_mode == PDImode)
988 {
989 if (to_mode != DImode)
990 {
991 from = convert_to_mode (DImode, from, unsignedp);
992 from_mode = DImode;
993 }
994 else
995 {
996 #ifdef HAVE_extendpdidi2
997 if (HAVE_extendpdidi2)
998 {
999 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_extendpdidi2 */
1003 abort ();
1004 }
1005 }
1006
1007 /* Now follow all the conversions between integers
1008 no more than a word long. */
1009
1010 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1011 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1012 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1013 GET_MODE_BITSIZE (from_mode)))
1014 {
1015 if (!((GET_CODE (from) == MEM
1016 && ! MEM_VOLATILE_P (from)
1017 && direct_load[(int) to_mode]
1018 && ! mode_dependent_address_p (XEXP (from, 0)))
1019 || GET_CODE (from) == REG
1020 || GET_CODE (from) == SUBREG))
1021 from = force_reg (from_mode, from);
1022 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1023 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1024 from = copy_to_reg (from);
1025 emit_move_insn (to, gen_lowpart (to_mode, from));
1026 return;
1027 }
1028
1029 /* Handle extension. */
1030 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1031 {
1032 /* Convert directly if that works. */
1033 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1034 != CODE_FOR_nothing)
1035 {
1036 emit_unop_insn (code, to, from, equiv_code);
1037 return;
1038 }
1039 else
1040 {
1041 enum machine_mode intermediate;
1042 rtx tmp;
1043 tree shift_amount;
1044
1045 /* Search for a mode to convert via. */
1046 for (intermediate = from_mode; intermediate != VOIDmode;
1047 intermediate = GET_MODE_WIDER_MODE (intermediate))
1048 if (((can_extend_p (to_mode, intermediate, unsignedp)
1049 != CODE_FOR_nothing)
1050 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1051 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1052 GET_MODE_BITSIZE (intermediate))))
1053 && (can_extend_p (intermediate, from_mode, unsignedp)
1054 != CODE_FOR_nothing))
1055 {
1056 convert_move (to, convert_to_mode (intermediate, from,
1057 unsignedp), unsignedp);
1058 return;
1059 }
1060
1061 /* No suitable intermediate mode.
1062 Generate what we need with shifts. */
1063 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1064 - GET_MODE_BITSIZE (from_mode), 0);
1065 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1066 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1067 to, unsignedp);
1068 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1069 to, unsignedp);
1070 if (tmp != to)
1071 emit_move_insn (to, tmp);
1072 return;
1073 }
1074 }
1075
1076 /* Support special truncate insns for certain modes. */
1077
1078 if (from_mode == DImode && to_mode == SImode)
1079 {
1080 #ifdef HAVE_truncdisi2
1081 if (HAVE_truncdisi2)
1082 {
1083 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1084 return;
1085 }
1086 #endif
1087 convert_move (to, force_reg (from_mode, from), unsignedp);
1088 return;
1089 }
1090
1091 if (from_mode == DImode && to_mode == HImode)
1092 {
1093 #ifdef HAVE_truncdihi2
1094 if (HAVE_truncdihi2)
1095 {
1096 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1097 return;
1098 }
1099 #endif
1100 convert_move (to, force_reg (from_mode, from), unsignedp);
1101 return;
1102 }
1103
1104 if (from_mode == DImode && to_mode == QImode)
1105 {
1106 #ifdef HAVE_truncdiqi2
1107 if (HAVE_truncdiqi2)
1108 {
1109 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1110 return;
1111 }
1112 #endif
1113 convert_move (to, force_reg (from_mode, from), unsignedp);
1114 return;
1115 }
1116
1117 if (from_mode == SImode && to_mode == HImode)
1118 {
1119 #ifdef HAVE_truncsihi2
1120 if (HAVE_truncsihi2)
1121 {
1122 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1123 return;
1124 }
1125 #endif
1126 convert_move (to, force_reg (from_mode, from), unsignedp);
1127 return;
1128 }
1129
1130 if (from_mode == SImode && to_mode == QImode)
1131 {
1132 #ifdef HAVE_truncsiqi2
1133 if (HAVE_truncsiqi2)
1134 {
1135 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1136 return;
1137 }
1138 #endif
1139 convert_move (to, force_reg (from_mode, from), unsignedp);
1140 return;
1141 }
1142
1143 if (from_mode == HImode && to_mode == QImode)
1144 {
1145 #ifdef HAVE_trunchiqi2
1146 if (HAVE_trunchiqi2)
1147 {
1148 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1149 return;
1150 }
1151 #endif
1152 convert_move (to, force_reg (from_mode, from), unsignedp);
1153 return;
1154 }
1155
1156 if (from_mode == TImode && to_mode == DImode)
1157 {
1158 #ifdef HAVE_trunctidi2
1159 if (HAVE_trunctidi2)
1160 {
1161 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1162 return;
1163 }
1164 #endif
1165 convert_move (to, force_reg (from_mode, from), unsignedp);
1166 return;
1167 }
1168
1169 if (from_mode == TImode && to_mode == SImode)
1170 {
1171 #ifdef HAVE_trunctisi2
1172 if (HAVE_trunctisi2)
1173 {
1174 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1175 return;
1176 }
1177 #endif
1178 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 return;
1180 }
1181
1182 if (from_mode == TImode && to_mode == HImode)
1183 {
1184 #ifdef HAVE_trunctihi2
1185 if (HAVE_trunctihi2)
1186 {
1187 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1188 return;
1189 }
1190 #endif
1191 convert_move (to, force_reg (from_mode, from), unsignedp);
1192 return;
1193 }
1194
1195 if (from_mode == TImode && to_mode == QImode)
1196 {
1197 #ifdef HAVE_trunctiqi2
1198 if (HAVE_trunctiqi2)
1199 {
1200 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1201 return;
1202 }
1203 #endif
1204 convert_move (to, force_reg (from_mode, from), unsignedp);
1205 return;
1206 }
1207
1208 /* Handle truncation of volatile memrefs, and so on;
1209 the things that couldn't be truncated directly,
1210 and for which there was no special instruction. */
1211 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1212 {
1213 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1214 emit_move_insn (to, temp);
1215 return;
1216 }
1217
1218 /* Mode combination is not recognized. */
1219 abort ();
1220 }
1221
1222 /* Return an rtx for a value that would result
1223 from converting X to mode MODE.
1224 Both X and MODE may be floating, or both integer.
1225 UNSIGNEDP is nonzero if X is an unsigned value.
1226 This can be done by referring to a part of X in place
1227 or by copying to a new temporary with conversion.
1228
1229 This function *must not* call protect_from_queue
1230 except when putting X into an insn (in which case convert_move does it). */
1231
1232 rtx
1233 convert_to_mode (mode, x, unsignedp)
1234 enum machine_mode mode;
1235 rtx x;
1236 int unsignedp;
1237 {
1238 return convert_modes (mode, VOIDmode, x, unsignedp);
1239 }
1240
1241 /* Return an rtx for a value that would result
1242 from converting X from mode OLDMODE to mode MODE.
1243 Both modes may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245
1246 This can be done by referring to a part of X in place
1247 or by copying to a new temporary with conversion.
1248
1249 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1250
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
1253
1254 rtx
1255 convert_modes (mode, oldmode, x, unsignedp)
1256 enum machine_mode mode, oldmode;
1257 rtx x;
1258 int unsignedp;
1259 {
1260 register rtx temp;
1261
1262 /* If FROM is a SUBREG that indicates that we have already done at least
1263 the required extension, strip it. */
1264
1265 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1266 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1267 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1268 x = gen_lowpart (mode, x);
1269
1270 if (GET_MODE (x) != VOIDmode)
1271 oldmode = GET_MODE (x);
1272
1273 if (mode == oldmode)
1274 return x;
1275
1276 /* There is one case that we must handle specially: If we are converting
1277 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1278 we are to interpret the constant as unsigned, gen_lowpart will do
1279 the wrong thing if the constant appears negative. What we want to do is
1280 make the high-order word of the constant zero, not all ones. */
1281
1282 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1283 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1284 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1285 {
1286 HOST_WIDE_INT val = INTVAL (x);
1287
1288 if (oldmode != VOIDmode
1289 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1290 {
1291 int width = GET_MODE_BITSIZE (oldmode);
1292
1293 /* We need to zero extend VAL. */
1294 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1295 }
1296
1297 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1298 }
1299
1300 /* We can do this with a gen_lowpart if both desired and current modes
1301 are integer, and this is either a constant integer, a register, or a
1302 non-volatile MEM. Except for the constant case where MODE is no
1303 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1304
1305 if ((GET_CODE (x) == CONST_INT
1306 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1307 || (GET_MODE_CLASS (mode) == MODE_INT
1308 && GET_MODE_CLASS (oldmode) == MODE_INT
1309 && (GET_CODE (x) == CONST_DOUBLE
1310 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1311 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1312 && direct_load[(int) mode])
1313 || (GET_CODE (x) == REG
1314 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1315 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1316 {
1317 /* ?? If we don't know OLDMODE, we have to assume here that
1318 X does not need sign- or zero-extension. This may not be
1319 the case, but it's the best we can do. */
1320 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1321 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1322 {
1323 HOST_WIDE_INT val = INTVAL (x);
1324 int width = GET_MODE_BITSIZE (oldmode);
1325
1326 /* We must sign or zero-extend in this case. Start by
1327 zero-extending, then sign extend if we need to. */
1328 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1329 if (! unsignedp
1330 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1331 val |= (HOST_WIDE_INT) (-1) << width;
1332
1333 return GEN_INT (val);
1334 }
1335
1336 return gen_lowpart (mode, x);
1337 }
1338
1339 temp = gen_reg_rtx (mode);
1340 convert_move (temp, x, unsignedp);
1341 return temp;
1342 }
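/* Minimal usage sketch for the two conversion entry points above, with
   hypothetical operands: widening an unsigned QImode value to SImode
   either reuses the operand in place or goes through convert_move:

     rtx byte = gen_reg_rtx (QImode);
     rtx word = convert_to_mode (SImode, byte, 1);    zero-extends  */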
1343 \f
1344
1345 /* This macro is used to determine the largest unit size that
1346 move_by_pieces can use. */
1347
1348 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1349 move efficiently, as opposed to MOVE_MAX which is the maximum
1350 number of bytes we can move with a single instruction. */
1351
1352 #ifndef MOVE_MAX_PIECES
1353 #define MOVE_MAX_PIECES MOVE_MAX
1354 #endif
1355
1356 /* Generate several move instructions to copy LEN bytes
1357 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1358 The caller must pass FROM and TO
1359 through protect_from_queue before calling.
1360 ALIGN (in bytes) is the maximum alignment we can assume. */
1361
1362 void
1363 move_by_pieces (to, from, len, align)
1364 rtx to, from;
1365 int len;
1366 unsigned int align;
1367 {
1368 struct move_by_pieces data;
1369 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1370 int max_size = MOVE_MAX_PIECES + 1;
1371 enum machine_mode mode = VOIDmode, tmode;
1372 enum insn_code icode;
1373
1374 data.offset = 0;
1375 data.to_addr = to_addr;
1376 data.from_addr = from_addr;
1377 data.to = to;
1378 data.from = from;
1379 data.autinc_to
1380 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1381 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1382 data.autinc_from
1383 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1384 || GET_CODE (from_addr) == POST_INC
1385 || GET_CODE (from_addr) == POST_DEC);
1386
1387 data.explicit_inc_from = 0;
1388 data.explicit_inc_to = 0;
1389 data.reverse
1390 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1391 if (data.reverse) data.offset = len;
1392 data.len = len;
1393
1394 data.to_struct = MEM_IN_STRUCT_P (to);
1395 data.from_struct = MEM_IN_STRUCT_P (from);
1396 data.to_readonly = RTX_UNCHANGING_P (to);
1397 data.from_readonly = RTX_UNCHANGING_P (from);
1398
1399 /* If copying requires more than two move insns,
1400 copy addresses to registers (to make displacements shorter)
1401 and use post-increment if available. */
1402 if (!(data.autinc_from && data.autinc_to)
1403 && move_by_pieces_ninsns (len, align) > 2)
1404 {
1405 /* Find the mode of the largest move... */
1406 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1407 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1408 if (GET_MODE_SIZE (tmode) < max_size)
1409 mode = tmode;
1410
1411 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1412 {
1413 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1414 data.autinc_from = 1;
1415 data.explicit_inc_from = -1;
1416 }
1417 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1418 {
1419 data.from_addr = copy_addr_to_reg (from_addr);
1420 data.autinc_from = 1;
1421 data.explicit_inc_from = 1;
1422 }
1423 if (!data.autinc_from && CONSTANT_P (from_addr))
1424 data.from_addr = copy_addr_to_reg (from_addr);
1425 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1426 {
1427 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1428 data.autinc_to = 1;
1429 data.explicit_inc_to = -1;
1430 }
1431 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1432 {
1433 data.to_addr = copy_addr_to_reg (to_addr);
1434 data.autinc_to = 1;
1435 data.explicit_inc_to = 1;
1436 }
1437 if (!data.autinc_to && CONSTANT_P (to_addr))
1438 data.to_addr = copy_addr_to_reg (to_addr);
1439 }
1440
1441 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1442 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1443 align = MOVE_MAX;
1444
1445 /* First move what we can in the largest integer mode, then go to
1446 successively smaller modes. */
1447
1448 while (max_size > 1)
1449 {
1450 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1451 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1452 if (GET_MODE_SIZE (tmode) < max_size)
1453 mode = tmode;
1454
1455 if (mode == VOIDmode)
1456 break;
1457
1458 icode = mov_optab->handlers[(int) mode].insn_code;
1459 if (icode != CODE_FOR_nothing
1460 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1461 (unsigned int) GET_MODE_SIZE (mode)))
1462 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1463
1464 max_size = GET_MODE_SIZE (mode);
1465 }
1466
1467 /* The code above should have handled everything. */
1468 if (data.len > 0)
1469 abort ();
1470 }
1471
1472 /* Return number of insns required to move L bytes by pieces.
1473 ALIGN (in bytes) is maximum alignment we can assume. */
1474
1475 static int
1476 move_by_pieces_ninsns (l, align)
1477 unsigned int l;
1478 unsigned int align;
1479 {
1480 register int n_insns = 0;
1481 int max_size = MOVE_MAX + 1;
1482
1483 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1484 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1485 align = MOVE_MAX;
1486
1487 while (max_size > 1)
1488 {
1489 enum machine_mode mode = VOIDmode, tmode;
1490 enum insn_code icode;
1491
1492 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1493 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1494 if (GET_MODE_SIZE (tmode) < max_size)
1495 mode = tmode;
1496
1497 if (mode == VOIDmode)
1498 break;
1499
1500 icode = mov_optab->handlers[(int) mode].insn_code;
1501 if (icode != CODE_FOR_nothing
1502 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1503 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1504
1505 max_size = GET_MODE_SIZE (mode);
1506 }
1507
1508 return n_insns;
1509 }
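/* Worked example of the greedy mode selection above, under assumed target
   parameters (MOVE_MAX == 4, SImode of size 4, word-aligned operands):
   for L == 10 the loop charges 10/4 = 2 SImode moves leaving 2 bytes,
   then 2/2 = 1 HImode move, so move_by_pieces_ninsns returns 3, which
   MOVE_BY_PIECES_P then compares against MOVE_RATIO.  */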
1510
1511 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1512 with move instructions for mode MODE. GENFUN is the gen_... function
1513 to make a move insn for that mode. DATA has all the other info. */
1514
1515 static void
1516 move_by_pieces_1 (genfun, mode, data)
1517 rtx (*genfun) PARAMS ((rtx, ...));
1518 enum machine_mode mode;
1519 struct move_by_pieces *data;
1520 {
1521 register int size = GET_MODE_SIZE (mode);
1522 register rtx to1, from1;
1523
1524 while (data->len >= size)
1525 {
1526 if (data->reverse) data->offset -= size;
1527
1528 to1 = (data->autinc_to
1529 ? gen_rtx_MEM (mode, data->to_addr)
1530 : copy_rtx (change_address (data->to, mode,
1531 plus_constant (data->to_addr,
1532 data->offset))));
1533 MEM_IN_STRUCT_P (to1) = data->to_struct;
1534 RTX_UNCHANGING_P (to1) = data->to_readonly;
1535
1536 from1
1537 = (data->autinc_from
1538 ? gen_rtx_MEM (mode, data->from_addr)
1539 : copy_rtx (change_address (data->from, mode,
1540 plus_constant (data->from_addr,
1541 data->offset))));
1542 MEM_IN_STRUCT_P (from1) = data->from_struct;
1543 RTX_UNCHANGING_P (from1) = data->from_readonly;
1544
1545 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1546 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1547 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1548 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1549
1550 emit_insn ((*genfun) (to1, from1));
1551 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1552 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1553 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1554 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1555
1556 if (! data->reverse) data->offset += size;
1557
1558 data->len -= size;
1559 }
1560 }
1561 \f
1562 /* Emit code to move a block Y to a block X.
1563 This may be done with string-move instructions,
1564 with multiple scalar move instructions, or with a library call.
1565
1566 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1567 with mode BLKmode.
1568 SIZE is an rtx that says how long they are.
1569 ALIGN is the maximum alignment we can assume they have,
1570 measured in bytes.
1571
1572 Return the address of the new block, if memcpy is called and returns it,
1573 0 otherwise. */
1574
1575 rtx
1576 emit_block_move (x, y, size, align)
1577 rtx x, y;
1578 rtx size;
1579 unsigned int align;
1580 {
1581 rtx retval = 0;
1582 #ifdef TARGET_MEM_FUNCTIONS
1583 static tree fn;
1584 tree call_expr, arg_list;
1585 #endif
1586
1587 if (GET_MODE (x) != BLKmode)
1588 abort ();
1589
1590 if (GET_MODE (y) != BLKmode)
1591 abort ();
1592
1593 x = protect_from_queue (x, 1);
1594 y = protect_from_queue (y, 0);
1595 size = protect_from_queue (size, 0);
1596
1597 if (GET_CODE (x) != MEM)
1598 abort ();
1599 if (GET_CODE (y) != MEM)
1600 abort ();
1601 if (size == 0)
1602 abort ();
1603
1604 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1605 move_by_pieces (x, y, INTVAL (size), align);
1606 else
1607 {
1608 /* Try the most limited insn first, because there's no point
1609 including more than one in the machine description unless
1610 the more limited one has some advantage. */
1611
1612 rtx opalign = GEN_INT (align);
1613 enum machine_mode mode;
1614
1615 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1616 mode = GET_MODE_WIDER_MODE (mode))
1617 {
1618 enum insn_code code = movstr_optab[(int) mode];
1619 insn_operand_predicate_fn pred;
1620
1621 if (code != CODE_FOR_nothing
1622 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1623 here because if SIZE is less than the mode mask, as it is
1624 returned by the macro, it will definitely be less than the
1625 actual mode mask. */
1626 && ((GET_CODE (size) == CONST_INT
1627 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1628 <= (GET_MODE_MASK (mode) >> 1)))
1629 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1630 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1631 || (*pred) (x, BLKmode))
1632 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1633 || (*pred) (y, BLKmode))
1634 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1635 || (*pred) (opalign, VOIDmode)))
1636 {
1637 rtx op2;
1638 rtx last = get_last_insn ();
1639 rtx pat;
1640
1641 op2 = convert_to_mode (mode, size, 1);
1642 pred = insn_data[(int) code].operand[2].predicate;
1643 if (pred != 0 && ! (*pred) (op2, mode))
1644 op2 = copy_to_mode_reg (mode, op2);
1645
1646 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1647 if (pat)
1648 {
1649 emit_insn (pat);
1650 return 0;
1651 }
1652 else
1653 delete_insns_since (last);
1654 }
1655 }
1656
1657 /* X, Y, or SIZE may have been passed through protect_from_queue.
1658
1659 It is unsafe to save the value generated by protect_from_queue
1660 and reuse it later. Consider what happens if emit_queue is
1661 called before the return value from protect_from_queue is used.
1662
1663 Expansion of the CALL_EXPR below will call emit_queue before
1664 we are finished emitting RTL for argument setup. So if we are
1665 not careful we could get the wrong value for an argument.
1666
1667 To avoid this problem we go ahead and emit code to copy X, Y &
1668 SIZE into new pseudos. We can then place those new pseudos
1669 into an RTL_EXPR and use them later, even after a call to
1670 emit_queue.
1671
1672 Note this is not strictly needed for library calls since they
1673 do not call emit_queue before loading their arguments. However,
1674 we may need to have library calls call emit_queue in the future
1675 since failing to do so could cause problems for targets which
1676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1677 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1678 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1679
1680 #ifdef TARGET_MEM_FUNCTIONS
1681 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1682 #else
1683 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1684 TREE_UNSIGNED (integer_type_node));
1685 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1686 #endif
1687
1688 #ifdef TARGET_MEM_FUNCTIONS
1689 /* It is incorrect to use the libcall calling conventions to call
1690 memcpy in this context.
1691
1692 This could be a user call to memcpy and the user may wish to
1693 examine the return value from memcpy.
1694
1695 For targets where libcalls and normal calls have different conventions
1696 for returning pointers, we could end up generating incorrect code.
1697
1698 So instead of using a libcall sequence we build up a suitable
1699 CALL_EXPR and expand the call in the normal fashion. */
1700 if (fn == NULL_TREE)
1701 {
1702 tree fntype;
1703
1704 /* This was copied from except.c; I don't know if all this is
1705 necessary in this context or not. */
1706 fn = get_identifier ("memcpy");
1707 push_obstacks_nochange ();
1708 end_temporary_allocation ();
1709 fntype = build_pointer_type (void_type_node);
1710 fntype = build_function_type (fntype, NULL_TREE);
1711 fn = build_decl (FUNCTION_DECL, fn, fntype);
1712 ggc_add_tree_root (&fn, 1);
1713 DECL_EXTERNAL (fn) = 1;
1714 TREE_PUBLIC (fn) = 1;
1715 DECL_ARTIFICIAL (fn) = 1;
1716 make_decl_rtl (fn, NULL_PTR, 1);
1717 assemble_external (fn);
1718 pop_obstacks ();
1719 }
1720
1721 /* We need to make an argument list for the function call.
1722
1723 memcpy has three arguments, the first two are void * addresses and
1724 the last is a size_t byte count for the copy. */
1725 arg_list
1726 = build_tree_list (NULL_TREE,
1727 make_tree (build_pointer_type (void_type_node), x));
1728 TREE_CHAIN (arg_list)
1729 = build_tree_list (NULL_TREE,
1730 make_tree (build_pointer_type (void_type_node), y));
1731 TREE_CHAIN (TREE_CHAIN (arg_list))
1732 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1733 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1734
1735 /* Now we have to build up the CALL_EXPR itself. */
1736 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1737 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1738 call_expr, arg_list, NULL_TREE);
1739 TREE_SIDE_EFFECTS (call_expr) = 1;
1740
1741 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1742 #else
1743 emit_library_call (bcopy_libfunc, 0,
1744 VOIDmode, 3, y, Pmode, x, Pmode,
1745 convert_to_mode (TYPE_MODE (integer_type_node), size,
1746 TREE_UNSIGNED (integer_type_node)),
1747 TYPE_MODE (integer_type_node));
1748 #endif
1749 }
1750
1751 return retval;
1752 }
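/* Minimal usage sketch with hypothetical MEM operands: copying 32 bytes
   of a 4-byte-aligned aggregate could be requested as

     emit_block_move (dest_mem, src_mem, GEN_INT (32), 4);

   which uses move_by_pieces when MOVE_BY_PIECES_P (32, 4) holds and
   otherwise falls back to a movstr pattern or the memcpy/bcopy call
   emitted above.  */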
1753 \f
1754 /* Copy all or part of a value X into registers starting at REGNO.
1755 The number of registers to be filled is NREGS. */
1756
1757 void
1758 move_block_to_reg (regno, x, nregs, mode)
1759 int regno;
1760 rtx x;
1761 int nregs;
1762 enum machine_mode mode;
1763 {
1764 int i;
1765 #ifdef HAVE_load_multiple
1766 rtx pat;
1767 rtx last;
1768 #endif
1769
1770 if (nregs == 0)
1771 return;
1772
1773 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1774 x = validize_mem (force_const_mem (mode, x));
1775
1776 /* See if the machine can do this with a load multiple insn. */
1777 #ifdef HAVE_load_multiple
1778 if (HAVE_load_multiple)
1779 {
1780 last = get_last_insn ();
1781 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1782 GEN_INT (nregs));
1783 if (pat)
1784 {
1785 emit_insn (pat);
1786 return;
1787 }
1788 else
1789 delete_insns_since (last);
1790 }
1791 #endif
1792
1793 for (i = 0; i < nregs; i++)
1794 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1795 operand_subword_force (x, i, mode));
1796 }
1797
1798 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1799 The number of registers to be filled is NREGS. SIZE indicates the number
1800 of bytes in the object X. */
1801
1802
1803 void
1804 move_block_from_reg (regno, x, nregs, size)
1805 int regno;
1806 rtx x;
1807 int nregs;
1808 int size;
1809 {
1810 int i;
1811 #ifdef HAVE_store_multiple
1812 rtx pat;
1813 rtx last;
1814 #endif
1815 enum machine_mode mode;
1816
1817 /* If SIZE is that of a mode no bigger than a word, just use that
1818 mode's store operation. */
1819 if (size <= UNITS_PER_WORD
1820 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1821 {
1822 emit_move_insn (change_address (x, mode, NULL),
1823 gen_rtx_REG (mode, regno));
1824 return;
1825 }
1826
1827 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1828 to the left before storing to memory. Note that the previous test
1829 doesn't handle all cases (e.g. SIZE == 3). */
1830 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1831 {
1832 rtx tem = operand_subword (x, 0, 1, BLKmode);
1833 rtx shift;
1834
1835 if (tem == 0)
1836 abort ();
1837
1838 shift = expand_shift (LSHIFT_EXPR, word_mode,
1839 gen_rtx_REG (word_mode, regno),
1840 build_int_2 ((UNITS_PER_WORD - size)
1841 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1842 emit_move_insn (tem, shift);
1843 return;
1844 }
1845
1846 /* See if the machine can do this with a store multiple insn. */
1847 #ifdef HAVE_store_multiple
1848 if (HAVE_store_multiple)
1849 {
1850 last = get_last_insn ();
1851 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1852 GEN_INT (nregs));
1853 if (pat)
1854 {
1855 emit_insn (pat);
1856 return;
1857 }
1858 else
1859 delete_insns_since (last);
1860 }
1861 #endif
1862
1863 for (i = 0; i < nregs; i++)
1864 {
1865 rtx tem = operand_subword (x, i, 1, BLKmode);
1866
1867 if (tem == 0)
1868 abort ();
1869
1870 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1871 }
1872 }
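/* Worked example of the big-endian adjustment in move_block_from_reg,
   assuming UNITS_PER_WORD == 4: for SIZE == 3 the register value is
   shifted left by (4 - 3) * BITS_PER_UNIT = 8 bits so that the three
   meaningful bytes end up at the low-addressed (most significant) end
   of the word stored into X.  */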
1873
1874 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1875 registers represented by a PARALLEL. SSIZE represents the total size of
1876 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1877 SRC in bits. */
1878 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1879 the balance will be in what would be the low-order memory addresses, i.e.
1880 left justified for big endian, right justified for little endian. This
1881 happens to be true for the targets currently using this support. If this
1882 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1883 would be needed. */
1884
1885 void
1886 emit_group_load (dst, orig_src, ssize, align)
1887 rtx dst, orig_src;
1888 unsigned int align;
1889 int ssize;
1890 {
1891 rtx *tmps, src;
1892 int start, i;
1893
1894 if (GET_CODE (dst) != PARALLEL)
1895 abort ();
1896
1897 /* Check for a NULL entry, used to indicate that the parameter goes
1898 both on the stack and in registers. */
1899 if (XEXP (XVECEXP (dst, 0, 0), 0))
1900 start = 0;
1901 else
1902 start = 1;
1903
1904 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1905
1906 /* If we won't be loading directly from memory, protect the real source
1907 from strange tricks we might play. */
1908 src = orig_src;
1909 if (GET_CODE (src) != MEM)
1910 {
1911 if (GET_MODE (src) == VOIDmode)
1912 src = gen_reg_rtx (GET_MODE (dst));
1913 else
1914 src = gen_reg_rtx (GET_MODE (orig_src));
1915 emit_move_insn (src, orig_src);
1916 }
1917
1918 /* Process the pieces. */
1919 for (i = start; i < XVECLEN (dst, 0); i++)
1920 {
1921 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1922 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1923 int bytelen = GET_MODE_SIZE (mode);
1924 int shift = 0;
1925
1926 /* Handle trailing fragments that run over the size of the struct. */
1927 if (ssize >= 0 && bytepos + bytelen > ssize)
1928 {
1929 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1930 bytelen = ssize - bytepos;
1931 if (bytelen <= 0)
1932 abort ();
1933 }
1934
1935 /* Optimize the access just a bit. */
1936 if (GET_CODE (src) == MEM
1937 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1938 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1939 && bytelen == GET_MODE_SIZE (mode))
1940 {
1941 tmps[i] = gen_reg_rtx (mode);
1942 emit_move_insn (tmps[i],
1943 change_address (src, mode,
1944 plus_constant (XEXP (src, 0),
1945 bytepos)));
1946 }
1947 else if (GET_CODE (src) == CONCAT)
1948 {
1949 if (bytepos == 0
1950 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1951 tmps[i] = XEXP (src, 0);
1952 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1953 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1954 tmps[i] = XEXP (src, 1);
1955 else
1956 abort ();
1957 }
1958 else
1959 {
1960 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1961 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1962 mode, mode, align, ssize);
1963 }
1964
1965 if (BYTES_BIG_ENDIAN && shift)
1966 {
1967 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1968 tmps[i], 0, OPTAB_WIDEN);
1969 }
1970 }
1971 emit_queue();
1972
1973 /* Copy the extracted pieces into the proper (probable) hard regs. */
1974 for (i = start; i < XVECLEN (dst, 0); i++)
1975 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1976 }
1977
1978 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1979 registers represented by a PARALLEL. SSIZE represents the total size of
1980 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1981
1982 void
1983 emit_group_store (orig_dst, src, ssize, align)
1984 rtx orig_dst, src;
1985 int ssize;
1986 unsigned int align;
1987 {
1988 rtx *tmps, dst;
1989 int start, i;
1990
1991 if (GET_CODE (src) != PARALLEL)
1992 abort ();
1993
1994 /* Check for a NULL entry, used to indicate that the parameter goes
1995 both on the stack and in registers. */
1996 if (XEXP (XVECEXP (src, 0, 0), 0))
1997 start = 0;
1998 else
1999 start = 1;
2000
2001 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2002
2003 /* Copy the (probable) hard regs into pseudos. */
2004 for (i = start; i < XVECLEN (src, 0); i++)
2005 {
2006 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2007 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2008 emit_move_insn (tmps[i], reg);
2009 }
2010 emit_queue();
2011
2012 /* If we won't be storing directly into memory, protect the real destination
2013 from strange tricks we might play. */
2014 dst = orig_dst;
2015 if (GET_CODE (dst) == PARALLEL)
2016 {
2017 rtx temp;
2018
2019 /* We can get a PARALLEL dst if there is a conditional expression in
2020 a return statement. In that case, the dst and src are the same,
2021 so no action is necessary. */
2022 if (rtx_equal_p (dst, src))
2023 return;
2024
2025 /* It is unclear if we can ever reach here, but we may as well handle
2026 it. Allocate a temporary, and split this into a store/load to/from
2027 the temporary. */
2028
2029 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2030 emit_group_store (temp, src, ssize, align);
2031 emit_group_load (dst, temp, ssize, align);
2032 return;
2033 }
2034 else if (GET_CODE (dst) != MEM)
2035 {
2036 dst = gen_reg_rtx (GET_MODE (orig_dst));
2037 /* Make life a bit easier for combine. */
2038 emit_move_insn (dst, const0_rtx);
2039 }
2040 else if (! MEM_IN_STRUCT_P (dst))
2041 {
2042 /* store_bit_field requires that memory operations have
2043 mem_in_struct_p set; we might not. */
2044
2045 dst = copy_rtx (orig_dst);
2046 MEM_SET_IN_STRUCT_P (dst, 1);
2047 }
2048
2049 /* Process the pieces. */
2050 for (i = start; i < XVECLEN (src, 0); i++)
2051 {
2052 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2053 enum machine_mode mode = GET_MODE (tmps[i]);
2054 int bytelen = GET_MODE_SIZE (mode);
2055
2056 /* Handle trailing fragments that run over the size of the struct. */
2057 if (ssize >= 0 && bytepos + bytelen > ssize)
2058 {
2059 if (BYTES_BIG_ENDIAN)
2060 {
2061 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2062 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2063 tmps[i], 0, OPTAB_WIDEN);
2064 }
2065 bytelen = ssize - bytepos;
2066 }
2067
2068 /* Optimize the access just a bit. */
2069 if (GET_CODE (dst) == MEM
2070 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2071 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2072 && bytelen == GET_MODE_SIZE (mode))
2073 emit_move_insn (change_address (dst, mode,
2074 plus_constant (XEXP (dst, 0),
2075 bytepos)),
2076 tmps[i]);
2077 else
2078 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2079 mode, tmps[i], align, ssize);
2080 }
2081
2082 emit_queue();
2083
2084 /* Copy from the pseudo into the (probable) hard reg. */
2085 if (GET_CODE (dst) == REG)
2086 emit_move_insn (orig_dst, dst);
2087 }
2088
2089 /* Generate code to copy a BLKmode object of TYPE out of a
2090 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2091 is null, a stack temporary is created. TGTBLK is returned.
2092
2093 The primary purpose of this routine is to handle functions
2094 that return BLKmode structures in registers. Some machines
2095 (the PA for example) want to return all small structures
2096 in registers regardless of the structure's alignment. */
2097
2098 rtx
2099 copy_blkmode_from_reg (tgtblk, srcreg, type)
2100 rtx tgtblk;
2101 rtx srcreg;
2102 tree type;
2103 {
2104 int bytes = int_size_in_bytes (type);
2105 rtx src = NULL, dst = NULL;
2106 int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2107 int bitpos, xbitpos, big_endian_correction = 0;
2108
2109 if (tgtblk == 0)
2110 {
2111 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2112 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2113 preserve_temp_slots (tgtblk);
2114 }
2115
2116 /* This code assumes srcreg is at least a full word. If it isn't,
2117 copy it into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg,
2121 TREE_UNSIGNED (type));
2122
2123 /* Structures whose size is not a multiple of a word are aligned
2124 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2125 machine, this means we must skip the empty high order bytes when
2126 calculating the bit offset. */
2127 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2128 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2129 * BITS_PER_UNIT));
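/* Worked example of the correction above (numbers illustrative): for a
   3-byte structure and 32-bit words, bytes % UNITS_PER_WORD == 3, so
   big_endian_correction = 32 - 3 * 8 = 8 bits -- the empty high-order
   byte that must be skipped when extracting from SRCREG.  */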
2130
2131 /* Copy the structure BITSIZE bits at a time.
2132
2133 We could probably emit more efficient code for machines
2134 which do not use strict alignment, but it doesn't seem
2135 worth the effort at the current time. */
2136 for (bitpos = 0, xbitpos = big_endian_correction;
2137 bitpos < bytes * BITS_PER_UNIT;
2138 bitpos += bitsize, xbitpos += bitsize)
2139 {
2140
2141 /* We need a new source operand each time xbitpos is on a
2142 word boundary and when xbitpos == big_endian_correction
2143 (the first time through). */
2144 if (xbitpos % BITS_PER_WORD == 0
2145 || xbitpos == big_endian_correction)
2146 src = operand_subword_force (srcreg,
2147 xbitpos / BITS_PER_WORD,
2148 BLKmode);
2149
2150 /* We need a new destination operand each time bitpos is on
2151 a word boundary. */
2152 if (bitpos % BITS_PER_WORD == 0)
2153 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2154
2155 /* Use xbitpos for the source extraction (right justified) and
2156 bitpos for the destination store (left justified).
2157 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2158 extract_bit_field (src, bitsize,
2159 xbitpos % BITS_PER_WORD, 1,
2160 NULL_RTX, word_mode,
2161 word_mode,
2162 bitsize / BITS_PER_UNIT,
2163 BITS_PER_WORD),
2164 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2165 }
2166 return tgtblk;
2167 }
2168
2169
2170 /* Add a USE expression for REG to the (possibly empty) list pointed
2171 to by CALL_FUSAGE. REG must denote a hard register. */
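/* A sketch of a typical call (the register number is illustrative; it
   must name a hard register on the target in question):

       use_reg (&call_fusage, gen_rtx_REG (SImode, 0));  */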
2172
2173 void
2174 use_reg (call_fusage, reg)
2175 rtx *call_fusage, reg;
2176 {
2177 if (GET_CODE (reg) != REG
2178 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2179 abort();
2180
2181 *call_fusage
2182 = gen_rtx_EXPR_LIST (VOIDmode,
2183 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2184 }
2185
2186 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2187 starting at REGNO. All of these registers must be hard registers. */
2188
2189 void
2190 use_regs (call_fusage, regno, nregs)
2191 rtx *call_fusage;
2192 int regno;
2193 int nregs;
2194 {
2195 int i;
2196
2197 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2198 abort ();
2199
2200 for (i = 0; i < nregs; i++)
2201 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2202 }
2203
2204 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2205 PARALLEL REGS. This is for calls that pass values in multiple
2206 non-contiguous locations. The Irix 6 ABI has examples of this. */
2207
2208 void
2209 use_group_regs (call_fusage, regs)
2210 rtx *call_fusage;
2211 rtx regs;
2212 {
2213 int i;
2214
2215 for (i = 0; i < XVECLEN (regs, 0); i++)
2216 {
2217 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2218
2219 /* A NULL entry means the parameter goes both on the stack and in
2220 registers. This can also be a MEM for targets that pass values
2221 partially on the stack and partially in registers. */
2222 if (reg != 0 && GET_CODE (reg) == REG)
2223 use_reg (call_fusage, reg);
2224 }
2225 }
2226 \f
2227 /* Generate several move instructions to clear LEN bytes of block TO.
2228 (A MEM rtx with BLKmode). The caller must pass TO through
2229 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2230 we can assume. */
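/* Sketch of the decomposition performed here (modes and byte counts are
   illustrative, and assume sufficient alignment): clearing 7 bytes on a
   32-bit target with SImode, HImode and QImode move patterns emits one
   SImode store of zero, then one HImode store, then one QImode store,
   each through clear_by_pieces_1 below.  */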
2231
2232 static void
2233 clear_by_pieces (to, len, align)
2234 rtx to;
2235 int len;
2236 unsigned int align;
2237 {
2238 struct clear_by_pieces data;
2239 rtx to_addr = XEXP (to, 0);
2240 int max_size = MOVE_MAX_PIECES + 1;
2241 enum machine_mode mode = VOIDmode, tmode;
2242 enum insn_code icode;
2243
2244 data.offset = 0;
2245 data.to_addr = to_addr;
2246 data.to = to;
2247 data.autinc_to
2248 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2249 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2250
2251 data.explicit_inc_to = 0;
2252 data.reverse
2253 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2254 if (data.reverse) data.offset = len;
2255 data.len = len;
2256
2257 data.to_struct = MEM_IN_STRUCT_P (to);
2258
2259 /* If clearing requires more than two move insns,
2260 copy addresses to registers (to make displacements shorter)
2261 and use post-increment if available. */
2262 if (!data.autinc_to
2263 && move_by_pieces_ninsns (len, align) > 2)
2264 {
2265 /* Determine the main mode we'll be using */
2266 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2267 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2268 if (GET_MODE_SIZE (tmode) < max_size)
2269 mode = tmode;
2270
2271 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2272 {
2273 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2274 data.autinc_to = 1;
2275 data.explicit_inc_to = -1;
2276 }
2277 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2278 {
2279 data.to_addr = copy_addr_to_reg (to_addr);
2280 data.autinc_to = 1;
2281 data.explicit_inc_to = 1;
2282 }
2283 if (!data.autinc_to && CONSTANT_P (to_addr))
2284 data.to_addr = copy_addr_to_reg (to_addr);
2285 }
2286
2287 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2288 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2289 align = MOVE_MAX;
2290
2291 /* First move what we can in the largest integer mode, then go to
2292 successively smaller modes. */
2293
2294 while (max_size > 1)
2295 {
2296 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2297 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2298 if (GET_MODE_SIZE (tmode) < max_size)
2299 mode = tmode;
2300
2301 if (mode == VOIDmode)
2302 break;
2303
2304 icode = mov_optab->handlers[(int) mode].insn_code;
2305 if (icode != CODE_FOR_nothing
2306 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2307 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2308
2309 max_size = GET_MODE_SIZE (mode);
2310 }
2311
2312 /* The code above should have handled everything. */
2313 if (data.len != 0)
2314 abort ();
2315 }
2316
2317 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2318 with move instructions for mode MODE. GENFUN is the gen_... function
2319 to make a move insn for that mode. DATA has all the other info. */
2320
2321 static void
2322 clear_by_pieces_1 (genfun, mode, data)
2323 rtx (*genfun) PARAMS ((rtx, ...));
2324 enum machine_mode mode;
2325 struct clear_by_pieces *data;
2326 {
2327 register int size = GET_MODE_SIZE (mode);
2328 register rtx to1;
2329
2330 while (data->len >= size)
2331 {
2332 if (data->reverse) data->offset -= size;
2333
2334 to1 = (data->autinc_to
2335 ? gen_rtx_MEM (mode, data->to_addr)
2336 : copy_rtx (change_address (data->to, mode,
2337 plus_constant (data->to_addr,
2338 data->offset))));
2339 MEM_IN_STRUCT_P (to1) = data->to_struct;
2340
2341 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2342 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2343
2344 emit_insn ((*genfun) (to1, const0_rtx));
2345 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2346 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2347
2348 if (! data->reverse) data->offset += size;
2349
2350 data->len -= size;
2351 }
2352 }
2353 \f
2354 /* Write zeros through the storage of OBJECT.
2355 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2356 the maximum alignment we can assume it has, measured in bytes.
2357
2358 If we call a function that returns the length of the block, return it. */
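/* A typical call, assuming OBJ is a BLKmode MEM whose size and byte
   alignment are known (a sketch, not taken from an actual caller):

       clear_storage (obj, GEN_INT (obj_size), obj_align);  */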
2359
2360 rtx
2361 clear_storage (object, size, align)
2362 rtx object;
2363 rtx size;
2364 unsigned int align;
2365 {
2366 #ifdef TARGET_MEM_FUNCTIONS
2367 static tree fn;
2368 tree call_expr, arg_list;
2369 #endif
2370 rtx retval = 0;
2371
2372 if (GET_MODE (object) == BLKmode)
2373 {
2374 object = protect_from_queue (object, 1);
2375 size = protect_from_queue (size, 0);
2376
2377 if (GET_CODE (size) == CONST_INT
2378 && MOVE_BY_PIECES_P (INTVAL (size), align))
2379 clear_by_pieces (object, INTVAL (size), align);
2380
2381 else
2382 {
2383 /* Try the most limited insn first, because there's no point
2384 including more than one in the machine description unless
2385 the more limited one has some advantage. */
2386
2387 rtx opalign = GEN_INT (align);
2388 enum machine_mode mode;
2389
2390 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2391 mode = GET_MODE_WIDER_MODE (mode))
2392 {
2393 enum insn_code code = clrstr_optab[(int) mode];
2394 insn_operand_predicate_fn pred;
2395
2396 if (code != CODE_FOR_nothing
2397 /* We don't need MODE to be narrower than
2398 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2399 the mode mask, as it is returned by the macro, it will
2400 definitely be less than the actual mode mask. */
2401 && ((GET_CODE (size) == CONST_INT
2402 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2403 <= (GET_MODE_MASK (mode) >> 1)))
2404 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2405 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2406 || (*pred) (object, BLKmode))
2407 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2408 || (*pred) (opalign, VOIDmode)))
2409 {
2410 rtx op1;
2411 rtx last = get_last_insn ();
2412 rtx pat;
2413
2414 op1 = convert_to_mode (mode, size, 1);
2415 pred = insn_data[(int) code].operand[1].predicate;
2416 if (pred != 0 && ! (*pred) (op1, mode))
2417 op1 = copy_to_mode_reg (mode, op1);
2418
2419 pat = GEN_FCN ((int) code) (object, op1, opalign);
2420 if (pat)
2421 {
2422 emit_insn (pat);
2423 return 0;
2424 }
2425 else
2426 delete_insns_since (last);
2427 }
2428 }
2429
2430 /* OBJECT or SIZE may have been passed through protect_from_queue.
2431
2432 It is unsafe to save the value generated by protect_from_queue
2433 and reuse it later. Consider what happens if emit_queue is
2434 called before the return value from protect_from_queue is used.
2435
2436 Expansion of the CALL_EXPR below will call emit_queue before
2437 we are finished emitting RTL for argument setup. So if we are
2438 not careful we could get the wrong value for an argument.
2439
2440 To avoid this problem we go ahead and emit code to copy OBJECT
2441 and SIZE into new pseudos. We can then place those new pseudos
2442 into an RTL_EXPR and use them later, even after a call to
2443 emit_queue.
2444
2445 Note this is not strictly needed for library calls since they
2446 do not call emit_queue before loading their arguments. However,
2447 we may need to have library calls call emit_queue in the future
2448 since failing to do so could cause problems for targets which
2449 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2450 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2451
2452 #ifdef TARGET_MEM_FUNCTIONS
2453 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2454 #else
2455 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2456 TREE_UNSIGNED (integer_type_node));
2457 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2458 #endif
2459
2460
2461 #ifdef TARGET_MEM_FUNCTIONS
2462 /* It is incorrect to use the libcall calling conventions to call
2463 memset in this context.
2464
2465 This could be a user call to memset and the user may wish to
2466 examine the return value from memset.
2467
2468 For targets where libcalls and normal calls have different
2469 conventions for returning pointers, we could end up generating
2470 incorrect code.
2471
2472 So instead of using a libcall sequence we build up a suitable
2473 CALL_EXPR and expand the call in the normal fashion. */
2474 if (fn == NULL_TREE)
2475 {
2476 tree fntype;
2477
2478 /* This was copied from except.c; I don't know if all of this is
2479 necessary in this context or not. */
2480 fn = get_identifier ("memset");
2481 push_obstacks_nochange ();
2482 end_temporary_allocation ();
2483 fntype = build_pointer_type (void_type_node);
2484 fntype = build_function_type (fntype, NULL_TREE);
2485 fn = build_decl (FUNCTION_DECL, fn, fntype);
2486 ggc_add_tree_root (&fn, 1);
2487 DECL_EXTERNAL (fn) = 1;
2488 TREE_PUBLIC (fn) = 1;
2489 DECL_ARTIFICIAL (fn) = 1;
2490 make_decl_rtl (fn, NULL_PTR, 1);
2491 assemble_external (fn);
2492 pop_obstacks ();
2493 }
2494
2495 /* We need to make an argument list for the function call.
2496
2497 memset has three arguments: the first is a void * address, the
2498 second an integer with the initialization value, and the last is a
2499 size_t byte count. */
2500 arg_list
2501 = build_tree_list (NULL_TREE,
2502 make_tree (build_pointer_type (void_type_node),
2503 object));
2504 TREE_CHAIN (arg_list)
2505 = build_tree_list (NULL_TREE,
2506 make_tree (integer_type_node, const0_rtx));
2507 TREE_CHAIN (TREE_CHAIN (arg_list))
2508 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2509 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2510
2511 /* Now we have to build up the CALL_EXPR itself. */
2512 call_expr = build1 (ADDR_EXPR,
2513 build_pointer_type (TREE_TYPE (fn)), fn);
2514 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2515 call_expr, arg_list, NULL_TREE);
2516 TREE_SIDE_EFFECTS (call_expr) = 1;
2517
2518 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2519 #else
2520 emit_library_call (bzero_libfunc, 0,
2521 VOIDmode, 2, object, Pmode, size,
2522 TYPE_MODE (integer_type_node));
2523 #endif
2524 }
2525 }
2526 else
2527 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2528
2529 return retval;
2530 }
2531
2532 /* Generate code to copy Y into X.
2533 Both Y and X must have the same mode, except that
2534 Y can be a constant with VOIDmode.
2535 This mode cannot be BLKmode; use emit_block_move for that.
2536
2537 Return the last instruction emitted. */
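/* For example (the mode and constant are illustrative), loading an
   immediate into a fresh pseudo:

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, GEN_INT (42));  */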
2538
2539 rtx
2540 emit_move_insn (x, y)
2541 rtx x, y;
2542 {
2543 enum machine_mode mode = GET_MODE (x);
2544
2545 x = protect_from_queue (x, 1);
2546 y = protect_from_queue (y, 0);
2547
2548 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2549 abort ();
2550
2551 /* Never force constant_p_rtx to memory. */
2552 if (GET_CODE (y) == CONSTANT_P_RTX)
2553 ;
2554 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2555 y = force_const_mem (mode, y);
2556
2557 /* If X or Y are memory references, verify that their addresses are valid
2558 for the machine. */
2559 if (GET_CODE (x) == MEM
2560 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2561 && ! push_operand (x, GET_MODE (x)))
2562 || (flag_force_addr
2563 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2564 x = change_address (x, VOIDmode, XEXP (x, 0));
2565
2566 if (GET_CODE (y) == MEM
2567 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2568 || (flag_force_addr
2569 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2570 y = change_address (y, VOIDmode, XEXP (y, 0));
2571
2572 if (mode == BLKmode)
2573 abort ();
2574
2575 return emit_move_insn_1 (x, y);
2576 }
2577
2578 /* Low level part of emit_move_insn.
2579 Called just like emit_move_insn, but assumes X and Y
2580 are basically valid. */
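/* For instance, a DCmode move on a target that has no dcmode move
   pattern but does have DFmode moves is split below into two DFmode
   moves of the real and imaginary parts via gen_realpart and
   gen_imagpart (a sketch of the MODE_COMPLEX_FLOAT path).  */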
2581
2582 rtx
2583 emit_move_insn_1 (x, y)
2584 rtx x, y;
2585 {
2586 enum machine_mode mode = GET_MODE (x);
2587 enum machine_mode submode;
2588 enum mode_class class = GET_MODE_CLASS (mode);
2589 int i;
2590
2591 if (mode >= MAX_MACHINE_MODE)
2592 abort ();
2593
2594 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2595 return
2596 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2597
2598 /* Expand complex moves by moving real part and imag part, if possible. */
2599 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2600 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2601 * BITS_PER_UNIT),
2602 (class == MODE_COMPLEX_INT
2603 ? MODE_INT : MODE_FLOAT),
2604 0))
2605 && (mov_optab->handlers[(int) submode].insn_code
2606 != CODE_FOR_nothing))
2607 {
2608 /* Don't split destination if it is a stack push. */
2609 int stack = push_operand (x, GET_MODE (x));
2610
2611 /* If this is a stack push, push the highpart first, so it
2612 will be in the argument order.
2613
2614 In that case, change_address is used only to convert
2615 the mode, not to change the address. */
2616 if (stack)
2617 {
2618 /* Note that the real part always precedes the imag part in memory
2619 regardless of machine's endianness. */
2620 #ifdef STACK_GROWS_DOWNWARD
2621 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2622 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2623 gen_imagpart (submode, y)));
2624 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2625 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2626 gen_realpart (submode, y)));
2627 #else
2628 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2629 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2630 gen_realpart (submode, y)));
2631 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2632 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2633 gen_imagpart (submode, y)));
2634 #endif
2635 }
2636 else
2637 {
2638 rtx realpart_x, realpart_y;
2639 rtx imagpart_x, imagpart_y;
2640
2641 /* If this is a complex value with each part being smaller than a
2642 word, the usual calling sequence will likely pack the pieces into
2643 a single register. Unfortunately, SUBREG of hard registers only
2644 deals in terms of words, so we have a problem converting input
2645 arguments to the CONCAT of two registers that is used elsewhere
2646 for complex values. If this is before reload, we can copy it into
2647 memory and reload. FIXME, we should see about using extract and
2648 insert on integer registers, but complex short and complex char
2649 variables should be rarely used. */
2650 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2651 && (reload_in_progress | reload_completed) == 0)
2652 {
2653 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2654 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2655
2656 if (packed_dest_p || packed_src_p)
2657 {
2658 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2659 ? MODE_FLOAT : MODE_INT);
2660
2661 enum machine_mode reg_mode =
2662 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2663
2664 if (reg_mode != BLKmode)
2665 {
2666 rtx mem = assign_stack_temp (reg_mode,
2667 GET_MODE_SIZE (mode), 0);
2668
2669 rtx cmem = change_address (mem, mode, NULL_RTX);
2670
2671 cfun->cannot_inline = "function uses short complex types";
2672
2673 if (packed_dest_p)
2674 {
2675 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2676 emit_move_insn_1 (cmem, y);
2677 return emit_move_insn_1 (sreg, mem);
2678 }
2679 else
2680 {
2681 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2682 emit_move_insn_1 (mem, sreg);
2683 return emit_move_insn_1 (x, cmem);
2684 }
2685 }
2686 }
2687 }
2688
2689 realpart_x = gen_realpart (submode, x);
2690 realpart_y = gen_realpart (submode, y);
2691 imagpart_x = gen_imagpart (submode, x);
2692 imagpart_y = gen_imagpart (submode, y);
2693
2694 /* Show the output dies here. This is necessary for SUBREGs
2695 of pseudos since we cannot track their lifetimes correctly;
2696 hard regs shouldn't appear here except as return values.
2697 We never want to emit such a clobber after reload. */
2698 if (x != y
2699 && ! (reload_in_progress || reload_completed)
2700 && (GET_CODE (realpart_x) == SUBREG
2701 || GET_CODE (imagpart_x) == SUBREG))
2702 {
2703 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2704 }
2705
2706 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2707 (realpart_x, realpart_y));
2708 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2709 (imagpart_x, imagpart_y));
2710 }
2711
2712 return get_last_insn ();
2713 }
2714
2715 /* This will handle any multi-word mode that lacks a move_insn pattern.
2716 However, you will get better code if you define such patterns,
2717 even if they must turn into multiple assembler instructions. */
2718 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2719 {
2720 rtx last_insn = 0;
2721 rtx seq;
2722 int need_clobber;
2723
2724 #ifdef PUSH_ROUNDING
2725
2726 /* If X is a push on the stack, do the push now and replace
2727 X with a reference to the stack pointer. */
2728 if (push_operand (x, GET_MODE (x)))
2729 {
2730 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2731 x = change_address (x, VOIDmode, stack_pointer_rtx);
2732 }
2733 #endif
2734
2735 start_sequence ();
2736
2737 need_clobber = 0;
2738 for (i = 0;
2739 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2740 i++)
2741 {
2742 rtx xpart = operand_subword (x, i, 1, mode);
2743 rtx ypart = operand_subword (y, i, 1, mode);
2744
2745 /* If we can't get a part of Y, put Y into memory if it is a
2746 constant. Otherwise, force it into a register. If we still
2747 can't get a part of Y, abort. */
2748 if (ypart == 0 && CONSTANT_P (y))
2749 {
2750 y = force_const_mem (mode, y);
2751 ypart = operand_subword (y, i, 1, mode);
2752 }
2753 else if (ypart == 0)
2754 ypart = operand_subword_force (y, i, mode);
2755
2756 if (xpart == 0 || ypart == 0)
2757 abort ();
2758
2759 need_clobber |= (GET_CODE (xpart) == SUBREG);
2760
2761 last_insn = emit_move_insn (xpart, ypart);
2762 }
2763
2764 seq = gen_sequence ();
2765 end_sequence ();
2766
2767 /* Show the output dies here. This is necessary for SUBREGs
2768 of pseudos since we cannot track their lifetimes correctly;
2769 hard regs shouldn't appear here except as return values.
2770 We never want to emit such a clobber after reload. */
2771 if (x != y
2772 && ! (reload_in_progress || reload_completed)
2773 && need_clobber != 0)
2774 {
2775 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2776 }
2777
2778 emit_insn (seq);
2779
2780 return last_insn;
2781 }
2782 else
2783 abort ();
2784 }
2785 \f
2786 /* Pushing data onto the stack. */
2787
2788 /* Push a block of length SIZE (perhaps variable)
2789 and return an rtx to address the beginning of the block.
2790 Note that it is not possible for the value returned to be a QUEUED.
2791 The value may be virtual_outgoing_args_rtx.
2792
2793 EXTRA is the number of bytes of padding to push in addition to SIZE.
2794 BELOW nonzero means this padding comes at low addresses;
2795 otherwise, the padding comes at high addresses. */
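/* A minimal sketch of a call (the byte count is illustrative):

       rtx block_addr = push_block (GEN_INT (16), 0, 0);

   adjusts the stack by 16 bytes and yields an address for the start of
   that block.  */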
2796
2797 rtx
2798 push_block (size, extra, below)
2799 rtx size;
2800 int extra, below;
2801 {
2802 register rtx temp;
2803
2804 size = convert_modes (Pmode, ptr_mode, size, 1);
2805 if (CONSTANT_P (size))
2806 anti_adjust_stack (plus_constant (size, extra));
2807 else if (GET_CODE (size) == REG && extra == 0)
2808 anti_adjust_stack (size);
2809 else
2810 {
2811 rtx temp = copy_to_mode_reg (Pmode, size);
2812 if (extra != 0)
2813 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2814 temp, 0, OPTAB_LIB_WIDEN);
2815 anti_adjust_stack (temp);
2816 }
2817
2818 #if defined (STACK_GROWS_DOWNWARD) \
2819 || (defined (ARGS_GROW_DOWNWARD) \
2820 && !defined (ACCUMULATE_OUTGOING_ARGS))
2821
2822 /* Return the lowest stack address when STACK or ARGS grow downward and
2823 we are not accumulating outgoing arguments (the c4x port uses such
2824 conventions). */
2825 temp = virtual_outgoing_args_rtx;
2826 if (extra != 0 && below)
2827 temp = plus_constant (temp, extra);
2828 #else
2829 if (GET_CODE (size) == CONST_INT)
2830 temp = plus_constant (virtual_outgoing_args_rtx,
2831 - INTVAL (size) - (below ? 0 : extra));
2832 else if (extra != 0 && !below)
2833 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2834 negate_rtx (Pmode, plus_constant (size, extra)));
2835 else
2836 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2837 negate_rtx (Pmode, size));
2838 #endif
2839
2840 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2841 }
2842
2843 rtx
2844 gen_push_operand ()
2845 {
2846 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2847 }
2848
2849 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2850 block of SIZE bytes. */
2851
2852 static rtx
2853 get_push_address (size)
2854 int size;
2855 {
2856 register rtx temp;
2857
2858 if (STACK_PUSH_CODE == POST_DEC)
2859 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2860 else if (STACK_PUSH_CODE == POST_INC)
2861 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2862 else
2863 temp = stack_pointer_rtx;
2864
2865 return copy_to_reg (temp);
2866 }
2867
2868 /* Generate code to push X onto the stack, assuming it has mode MODE and
2869 type TYPE.
2870 MODE is redundant except when X is a CONST_INT (since they don't
2871 carry mode info).
2872 SIZE is an rtx for the size of data to be copied (in bytes),
2873 needed only if X is BLKmode.
2874
2875 ALIGN (in bytes) is the maximum alignment we can assume.
2876
2877 If PARTIAL and REG are both nonzero, then copy that many of the first
2878 words of X into registers starting with REG, and push the rest of X.
2879 The amount of space pushed is decreased by PARTIAL words,
2880 rounded *down* to a multiple of PARM_BOUNDARY.
2881 REG must be a hard register in this case.
2882 If REG is zero but PARTIAL is not, take all other actions for an
2883 argument partially in registers, but do not actually load any
2884 registers.
2885
2886 EXTRA is the amount in bytes of extra space to leave next to this arg.
2887 This is ignored if an argument block has already been allocated.
2888
2889 On a machine that lacks real push insns, ARGS_ADDR is the address of
2890 the bottom of the argument block for this call. We use indexing off there
2891 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2892 argument block has not been preallocated.
2893
2894 ARGS_SO_FAR is the size of args previously pushed for this call.
2895
2896 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2897 for arguments passed in registers. If nonzero, it will be the number
2898 of bytes required. */
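/* A sketch of a simple use (all values illustrative): pushing a SImode
   scalar with no partial-register portion and no preallocated argument
   block might look like

       emit_push_insn (val, SImode, NULL_TREE, NULL_RTX, align, 0,
                       NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);

   following the parameter order of the definition below.  */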
2899
2900 void
2901 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2902 args_addr, args_so_far, reg_parm_stack_space,
2903 alignment_pad)
2904 register rtx x;
2905 enum machine_mode mode;
2906 tree type;
2907 rtx size;
2908 unsigned int align;
2909 int partial;
2910 rtx reg;
2911 int extra;
2912 rtx args_addr;
2913 rtx args_so_far;
2914 int reg_parm_stack_space;
2915 rtx alignment_pad;
2916 {
2917 rtx xinner;
2918 enum direction stack_direction
2919 #ifdef STACK_GROWS_DOWNWARD
2920 = downward;
2921 #else
2922 = upward;
2923 #endif
2924
2925 /* Decide where to pad the argument: `downward' for below,
2926 `upward' for above, or `none' for don't pad it.
2927 Default is below for small data on big-endian machines; else above. */
2928 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2929
2930 /* Invert direction if stack is post-update. */
2931 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2932 if (where_pad != none)
2933 where_pad = (where_pad == downward ? upward : downward);
2934
2935 xinner = x = protect_from_queue (x, 0);
2936
2937 if (mode == BLKmode)
2938 {
2939 /* Copy a block into the stack, entirely or partially. */
2940
2941 register rtx temp;
2942 int used = partial * UNITS_PER_WORD;
2943 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2944 int skip;
2945
2946 if (size == 0)
2947 abort ();
2948
2949 used -= offset;
2950
2951 /* USED is now the # of bytes we need not copy to the stack
2952 because registers will take care of them. */
2953
2954 if (partial != 0)
2955 xinner = change_address (xinner, BLKmode,
2956 plus_constant (XEXP (xinner, 0), used));
2957
2958 /* If the partial register-part of the arg counts in its stack size,
2959 skip the part of stack space corresponding to the registers.
2960 Otherwise, start copying to the beginning of the stack space,
2961 by setting SKIP to 0. */
2962 skip = (reg_parm_stack_space == 0) ? 0 : used;
2963
2964 #ifdef PUSH_ROUNDING
2965 /* Do it with several push insns if that doesn't take lots of insns
2966 and if there is no difficulty with push insns that skip bytes
2967 on the stack for alignment purposes. */
2968 if (args_addr == 0
2969 && GET_CODE (size) == CONST_INT
2970 && skip == 0
2971 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2972 /* Here we avoid the case of a structure whose weak alignment
2973 forces many pushes of a small amount of data,
2974 and such small pushes do rounding that causes trouble. */
2975 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
2976 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2977 || PUSH_ROUNDING (align) == align)
2978 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2979 {
2980 /* Push padding now if padding above and stack grows down,
2981 or if padding below and stack grows up.
2982 But if space already allocated, this has already been done. */
2983 if (extra && args_addr == 0
2984 && where_pad != none && where_pad != stack_direction)
2985 anti_adjust_stack (GEN_INT (extra));
2986
2987 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2988 INTVAL (size) - used, align);
2989
2990 if (current_function_check_memory_usage && ! in_check_memory_usage)
2991 {
2992 rtx temp;
2993
2994 in_check_memory_usage = 1;
2995 temp = get_push_address (INTVAL(size) - used);
2996 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2997 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2998 temp, Pmode,
2999 XEXP (xinner, 0), Pmode,
3000 GEN_INT (INTVAL(size) - used),
3001 TYPE_MODE (sizetype));
3002 else
3003 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3004 temp, Pmode,
3005 GEN_INT (INTVAL(size) - used),
3006 TYPE_MODE (sizetype),
3007 GEN_INT (MEMORY_USE_RW),
3008 TYPE_MODE (integer_type_node));
3009 in_check_memory_usage = 0;
3010 }
3011 }
3012 else
3013 #endif /* PUSH_ROUNDING */
3014 {
3015 /* Otherwise make space on the stack and copy the data
3016 to the address of that space. */
3017
3018 /* Deduct words put into registers from the size we must copy. */
3019 if (partial != 0)
3020 {
3021 if (GET_CODE (size) == CONST_INT)
3022 size = GEN_INT (INTVAL (size) - used);
3023 else
3024 size = expand_binop (GET_MODE (size), sub_optab, size,
3025 GEN_INT (used), NULL_RTX, 0,
3026 OPTAB_LIB_WIDEN);
3027 }
3028
3029 /* Get the address of the stack space.
3030 In this case, we do not deal with EXTRA separately.
3031 A single stack adjust will do. */
3032 if (! args_addr)
3033 {
3034 temp = push_block (size, extra, where_pad == downward);
3035 extra = 0;
3036 }
3037 else if (GET_CODE (args_so_far) == CONST_INT)
3038 temp = memory_address (BLKmode,
3039 plus_constant (args_addr,
3040 skip + INTVAL (args_so_far)));
3041 else
3042 temp = memory_address (BLKmode,
3043 plus_constant (gen_rtx_PLUS (Pmode,
3044 args_addr,
3045 args_so_far),
3046 skip));
3047 if (current_function_check_memory_usage && ! in_check_memory_usage)
3048 {
3049 rtx target;
3050
3051 in_check_memory_usage = 1;
3052 target = copy_to_reg (temp);
3053 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3054 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3055 target, Pmode,
3056 XEXP (xinner, 0), Pmode,
3057 size, TYPE_MODE (sizetype));
3058 else
3059 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3060 target, Pmode,
3061 size, TYPE_MODE (sizetype),
3062 GEN_INT (MEMORY_USE_RW),
3063 TYPE_MODE (integer_type_node));
3064 in_check_memory_usage = 0;
3065 }
3066
3067 /* TEMP is the address of the block. Copy the data there. */
3068 if (GET_CODE (size) == CONST_INT
3069 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3070 {
3071 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3072 INTVAL (size), align);
3073 goto ret;
3074 }
3075 else
3076 {
3077 rtx opalign = GEN_INT (align);
3078 enum machine_mode mode;
3079 rtx target = gen_rtx_MEM (BLKmode, temp);
3080
3081 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3082 mode != VOIDmode;
3083 mode = GET_MODE_WIDER_MODE (mode))
3084 {
3085 enum insn_code code = movstr_optab[(int) mode];
3086 insn_operand_predicate_fn pred;
3087
3088 if (code != CODE_FOR_nothing
3089 && ((GET_CODE (size) == CONST_INT
3090 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3091 <= (GET_MODE_MASK (mode) >> 1)))
3092 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3093 && (!(pred = insn_data[(int) code].operand[0].predicate)
3094 || ((*pred) (target, BLKmode)))
3095 && (!(pred = insn_data[(int) code].operand[1].predicate)
3096 || ((*pred) (xinner, BLKmode)))
3097 && (!(pred = insn_data[(int) code].operand[3].predicate)
3098 || ((*pred) (opalign, VOIDmode))))
3099 {
3100 rtx op2 = convert_to_mode (mode, size, 1);
3101 rtx last = get_last_insn ();
3102 rtx pat;
3103
3104 pred = insn_data[(int) code].operand[2].predicate;
3105 if (pred != 0 && ! (*pred) (op2, mode))
3106 op2 = copy_to_mode_reg (mode, op2);
3107
3108 pat = GEN_FCN ((int) code) (target, xinner,
3109 op2, opalign);
3110 if (pat)
3111 {
3112 emit_insn (pat);
3113 goto ret;
3114 }
3115 else
3116 delete_insns_since (last);
3117 }
3118 }
3119 }
3120
3121 #ifndef ACCUMULATE_OUTGOING_ARGS
3122 /* If the source is referenced relative to the stack pointer,
3123 copy it to another register to stabilize it. We do not need
3124 to do this if we know that we won't be changing sp. */
3125
3126 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3127 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3128 temp = copy_to_reg (temp);
3129 #endif
3130
3131 /* Make inhibit_defer_pop nonzero around the library call
3132 to force it to pop the bcopy-arguments right away. */
3133 NO_DEFER_POP;
3134 #ifdef TARGET_MEM_FUNCTIONS
3135 emit_library_call (memcpy_libfunc, 0,
3136 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3137 convert_to_mode (TYPE_MODE (sizetype),
3138 size, TREE_UNSIGNED (sizetype)),
3139 TYPE_MODE (sizetype));
3140 #else
3141 emit_library_call (bcopy_libfunc, 0,
3142 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3143 convert_to_mode (TYPE_MODE (integer_type_node),
3144 size,
3145 TREE_UNSIGNED (integer_type_node)),
3146 TYPE_MODE (integer_type_node));
3147 #endif
3148 OK_DEFER_POP;
3149 }
3150 }
3151 else if (partial > 0)
3152 {
3153 /* Scalar partly in registers. */
3154
3155 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3156 int i;
3157 int not_stack;
3158 /* # words of start of argument
3159 that we must make space for but need not store. */
3160 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3161 int args_offset = INTVAL (args_so_far);
3162 int skip;
3163
3164 /* Push padding now if padding above and stack grows down,
3165 or if padding below and stack grows up.
3166 But if space already allocated, this has already been done. */
3167 if (extra && args_addr == 0
3168 && where_pad != none && where_pad != stack_direction)
3169 anti_adjust_stack (GEN_INT (extra));
3170
3171 /* If we make space by pushing it, we might as well push
3172 the real data. Otherwise, we can leave OFFSET nonzero
3173 and leave the space uninitialized. */
3174 if (args_addr == 0)
3175 offset = 0;
3176
3177 /* Now NOT_STACK gets the number of words that we don't need to
3178 allocate on the stack. */
3179 not_stack = partial - offset;
3180
3181 /* If the partial register-part of the arg counts in its stack size,
3182 skip the part of stack space corresponding to the registers.
3183 Otherwise, start copying to the beginning of the stack space,
3184 by setting SKIP to 0. */
3185 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3186
3187 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3188 x = validize_mem (force_const_mem (mode, x));
3189
3190 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3191 SUBREGs of such registers are not allowed. */
3192 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3193 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3194 x = copy_to_reg (x);
3195
3196 /* Loop over all the words allocated on the stack for this arg. */
3197 /* We can do it by words, because any scalar bigger than a word
3198 has a size that is a multiple of a word. */
3199 #ifndef PUSH_ARGS_REVERSED
3200 for (i = not_stack; i < size; i++)
3201 #else
3202 for (i = size - 1; i >= not_stack; i--)
3203 #endif
3204 if (i >= not_stack + offset)
3205 emit_push_insn (operand_subword_force (x, i, mode),
3206 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3207 0, args_addr,
3208 GEN_INT (args_offset + ((i - not_stack + skip)
3209 * UNITS_PER_WORD)),
3210 reg_parm_stack_space, alignment_pad);
3211 }
3212 else
3213 {
3214 rtx addr;
3215 rtx target = NULL_RTX;
3216
3217 /* Push padding now if padding above and stack grows down,
3218 or if padding below and stack grows up.
3219 But if space already allocated, this has already been done. */
3220 if (extra && args_addr == 0
3221 && where_pad != none && where_pad != stack_direction)
3222 anti_adjust_stack (GEN_INT (extra));
3223
3224 #ifdef PUSH_ROUNDING
3225 if (args_addr == 0)
3226 addr = gen_push_operand ();
3227 else
3228 #endif
3229 {
3230 if (GET_CODE (args_so_far) == CONST_INT)
3231 addr
3232 = memory_address (mode,
3233 plus_constant (args_addr,
3234 INTVAL (args_so_far)));
3235 else
3236 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3237 args_so_far));
3238 target = addr;
3239 }
3240
3241 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3242
3243 if (current_function_check_memory_usage && ! in_check_memory_usage)
3244 {
3245 in_check_memory_usage = 1;
3246 if (target == 0)
3247 target = get_push_address (GET_MODE_SIZE (mode));
3248
3249 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3250 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3251 target, Pmode,
3252 XEXP (x, 0), Pmode,
3253 GEN_INT (GET_MODE_SIZE (mode)),
3254 TYPE_MODE (sizetype));
3255 else
3256 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3257 target, Pmode,
3258 GEN_INT (GET_MODE_SIZE (mode)),
3259 TYPE_MODE (sizetype),
3260 GEN_INT (MEMORY_USE_RW),
3261 TYPE_MODE (integer_type_node));
3262 in_check_memory_usage = 0;
3263 }
3264 }
3265
3266 ret:
3267 /* If part should go in registers, copy that part
3268 into the appropriate registers. Do this now, at the end,
3269 since mem-to-mem copies above may do function calls. */
3270 if (partial > 0 && reg != 0)
3271 {
3272 /* Handle calls that pass values in multiple non-contiguous locations.
3273 The Irix 6 ABI has examples of this. */
3274 if (GET_CODE (reg) == PARALLEL)
3275 emit_group_load (reg, x, -1, align); /* ??? size? */
3276 else
3277 move_block_to_reg (REGNO (reg), x, partial, mode);
3278 }
3279
3280 if (extra && args_addr == 0 && where_pad == stack_direction)
3281 anti_adjust_stack (GEN_INT (extra));
3282
3283 if (alignment_pad)
3284 anti_adjust_stack (alignment_pad);
3285 }
3286 \f
3287 /* Expand an assignment that stores the value of FROM into TO.
3288 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3289 (This may contain a QUEUED rtx;
3290 if the value is constant, this rtx is a constant.)
3291 Otherwise, the returned value is NULL_RTX.
3292
3293 SUGGEST_REG is no longer actually used.
3294 It used to mean, copy the value through a register
3295 and return that register, if that is possible.
3296 We now use WANT_VALUE to decide whether to do this. */
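/* For example, a C assignment such as

       s.f = g ();

   arrives here with TO a COMPONENT_REF and FROM a CALL_EXPR; the
   component-reference path below locates the field with
   get_inner_reference and stores the value through store_field.  */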
3297
3298 rtx
3299 expand_assignment (to, from, want_value, suggest_reg)
3300 tree to, from;
3301 int want_value;
3302 int suggest_reg ATTRIBUTE_UNUSED;
3303 {
3304 register rtx to_rtx = 0;
3305 rtx result;
3306
3307 /* Don't crash if the lhs of the assignment was erroneous. */
3308
3309 if (TREE_CODE (to) == ERROR_MARK)
3310 {
3311 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3312 return want_value ? result : NULL_RTX;
3313 }
3314
3315 /* Assignment of a structure component needs special treatment
3316 if the structure component's rtx is not simply a MEM.
3317 Assignment of an array element at a constant index, and assignment of
3318 an array element in an unaligned packed structure field, has the same
3319 problem. */
3320
3321 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3322 || TREE_CODE (to) == ARRAY_REF)
3323 {
3324 enum machine_mode mode1;
3325 int bitsize;
3326 int bitpos;
3327 tree offset;
3328 int unsignedp;
3329 int volatilep = 0;
3330 tree tem;
3331 unsigned int alignment;
3332
3333 push_temp_slots ();
3334 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3335 &unsignedp, &volatilep, &alignment);
3336
3337 /* If we are going to use store_bit_field and extract_bit_field,
3338 make sure to_rtx will be safe for multiple use. */
3339
3340 if (mode1 == VOIDmode && want_value)
3341 tem = stabilize_reference (tem);
3342
3343 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3344 if (offset != 0)
3345 {
3346 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3347
3348 if (GET_CODE (to_rtx) != MEM)
3349 abort ();
3350
3351 if (GET_MODE (offset_rtx) != ptr_mode)
3352 {
3353 #ifdef POINTERS_EXTEND_UNSIGNED
3354 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3355 #else
3356 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3357 #endif
3358 }
3359
3360 /* A constant address in TO_RTX can have VOIDmode; we must not try
3361 to call force_reg for that case. Avoid that case. */
3362 if (GET_CODE (to_rtx) == MEM
3363 && GET_MODE (to_rtx) == BLKmode
3364 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3365 && bitsize
3366 && (bitpos % bitsize) == 0
3367 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3368 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3369 {
3370 rtx temp = change_address (to_rtx, mode1,
3371 plus_constant (XEXP (to_rtx, 0),
3372 (bitpos /
3373 BITS_PER_UNIT)));
3374 if (GET_CODE (XEXP (temp, 0)) == REG)
3375 to_rtx = temp;
3376 else
3377 to_rtx = change_address (to_rtx, mode1,
3378 force_reg (GET_MODE (XEXP (temp, 0)),
3379 XEXP (temp, 0)));
3380 bitpos = 0;
3381 }
3382
3383 to_rtx = change_address (to_rtx, VOIDmode,
3384 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3385 force_reg (ptr_mode,
3386 offset_rtx)));
3387 }
3388
3389 if (volatilep)
3390 {
3391 if (GET_CODE (to_rtx) == MEM)
3392 {
3393 /* When the offset is zero, to_rtx is the address of the
3394 structure we are storing into, and hence may be shared.
3395 We must make a new MEM before setting the volatile bit. */
3396 if (offset == 0)
3397 to_rtx = copy_rtx (to_rtx);
3398
3399 MEM_VOLATILE_P (to_rtx) = 1;
3400 }
3401 #if 0 /* This was turned off because, when a field is volatile
3402 in an object which is not volatile, the object may be in a register,
3403 and then we would abort over here. */
3404 else
3405 abort ();
3406 #endif
3407 }
3408
3409 if (TREE_CODE (to) == COMPONENT_REF
3410 && TREE_READONLY (TREE_OPERAND (to, 1)))
3411 {
3412 if (offset == 0)
3413 to_rtx = copy_rtx (to_rtx);
3414
3415 RTX_UNCHANGING_P (to_rtx) = 1;
3416 }
3417
3418 /* Check the access. */
3419 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3420 {
3421 rtx to_addr;
3422 int size;
3423 int best_mode_size;
3424 enum machine_mode best_mode;
3425
3426 best_mode = get_best_mode (bitsize, bitpos,
3427 TYPE_ALIGN (TREE_TYPE (tem)),
3428 mode1, volatilep);
3429 if (best_mode == VOIDmode)
3430 best_mode = QImode;
3431
3432 best_mode_size = GET_MODE_BITSIZE (best_mode);
3433 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3434 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3435 size *= GET_MODE_SIZE (best_mode);
3436
3437 /* Check the access right of the pointer. */
3438 if (size)
3439 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3440 to_addr, Pmode,
3441 GEN_INT (size), TYPE_MODE (sizetype),
3442 GEN_INT (MEMORY_USE_WO),
3443 TYPE_MODE (integer_type_node));
3444 }
3445
3446 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3447 (want_value
3448 /* Spurious cast makes HPUX compiler happy. */
3449 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3450 : VOIDmode),
3451 unsignedp,
3452 /* Required alignment of containing datum. */
3453 alignment,
3454 int_size_in_bytes (TREE_TYPE (tem)),
3455 get_alias_set (to));
3456 preserve_temp_slots (result);
3457 free_temp_slots ();
3458 pop_temp_slots ();
3459
3460 /* If the value is meaningful, convert RESULT to the proper mode.
3461 Otherwise, return nothing. */
3462 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3463 TYPE_MODE (TREE_TYPE (from)),
3464 result,
3465 TREE_UNSIGNED (TREE_TYPE (to)))
3466 : NULL_RTX);
3467 }
3468
3469 /* If the rhs is a function call and its value is not an aggregate,
3470 call the function before we start to compute the lhs.
3471 This is needed for correct code for cases such as
3472 val = setjmp (buf) on machines where reference to val
3473 requires loading up part of an address in a separate insn.
3474
3475 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3476 a promoted variable where the zero- or sign-extension needs to be done.
3477 Handling this in the normal way is safe because no computation is done
3478 before the call. */
3479 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3480 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3481 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3482 {
3483 rtx value;
3484
3485 push_temp_slots ();
3486 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3487 if (to_rtx == 0)
3488 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3489
3490 /* Handle calls that return values in multiple non-contiguous locations.
3491 The Irix 6 ABI has examples of this. */
3492 if (GET_CODE (to_rtx) == PARALLEL)
3493 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3494 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3495 else if (GET_MODE (to_rtx) == BLKmode)
3496 emit_block_move (to_rtx, value, expr_size (from),
3497 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3498 else
3499 {
3500 #ifdef POINTERS_EXTEND_UNSIGNED
3501 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3502 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3503 value = convert_memory_address (GET_MODE (to_rtx), value);
3504 #endif
3505 emit_move_insn (to_rtx, value);
3506 }
3507 preserve_temp_slots (to_rtx);
3508 free_temp_slots ();
3509 pop_temp_slots ();
3510 return want_value ? to_rtx : NULL_RTX;
3511 }
3512
3513 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3514 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3515
3516 if (to_rtx == 0)
3517 {
3518 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3519 if (GET_CODE (to_rtx) == MEM)
3520 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3521 }
3522
3523 /* Don't move directly into a return register. */
3524 if (TREE_CODE (to) == RESULT_DECL
3525 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3526 {
3527 rtx temp;
3528
3529 push_temp_slots ();
3530 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3531
3532 if (GET_CODE (to_rtx) == PARALLEL)
3533 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3534 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3535 else
3536 emit_move_insn (to_rtx, temp);
3537
3538 preserve_temp_slots (to_rtx);
3539 free_temp_slots ();
3540 pop_temp_slots ();
3541 return want_value ? to_rtx : NULL_RTX;
3542 }
3543
3544 /* In case we are returning the contents of an object which overlaps
3545 the place the value is being stored, use a safe function when copying
3546 a value through a pointer into a structure value return block. */
3547 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3548 && current_function_returns_struct
3549 && !current_function_returns_pcc_struct)
3550 {
3551 rtx from_rtx, size;
3552
3553 push_temp_slots ();
3554 size = expr_size (from);
3555 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3556 EXPAND_MEMORY_USE_DONT);
3557
3558 /* Copy the rights of the bitmap. */
3559 if (current_function_check_memory_usage)
3560 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3561 XEXP (to_rtx, 0), Pmode,
3562 XEXP (from_rtx, 0), Pmode,
3563 convert_to_mode (TYPE_MODE (sizetype),
3564 size, TREE_UNSIGNED (sizetype)),
3565 TYPE_MODE (sizetype));
3566
3567 #ifdef TARGET_MEM_FUNCTIONS
3568 emit_library_call (memcpy_libfunc, 0,
3569 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3570 XEXP (from_rtx, 0), Pmode,
3571 convert_to_mode (TYPE_MODE (sizetype),
3572 size, TREE_UNSIGNED (sizetype)),
3573 TYPE_MODE (sizetype));
3574 #else
3575 emit_library_call (bcopy_libfunc, 0,
3576 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3577 XEXP (to_rtx, 0), Pmode,
3578 convert_to_mode (TYPE_MODE (integer_type_node),
3579 size, TREE_UNSIGNED (integer_type_node)),
3580 TYPE_MODE (integer_type_node));
3581 #endif
3582
3583 preserve_temp_slots (to_rtx);
3584 free_temp_slots ();
3585 pop_temp_slots ();
3586 return want_value ? to_rtx : NULL_RTX;
3587 }
3588
3589 /* Compute FROM and store the value in the rtx we got. */
3590
3591 push_temp_slots ();
3592 result = store_expr (from, to_rtx, want_value);
3593 preserve_temp_slots (result);
3594 free_temp_slots ();
3595 pop_temp_slots ();
3596 return want_value ? result : NULL_RTX;
3597 }
3598
3599 /* Generate code for computing expression EXP,
3600 and storing the value into TARGET.
3601 TARGET may contain a QUEUED rtx.
3602
3603 If WANT_VALUE is nonzero, return a copy of the value
3604 not in TARGET, so that we can be sure to use the proper
3605 value in a containing expression even if TARGET has something
3606 else stored in it. If possible, we copy the value through a pseudo
3607 and return that pseudo. Or, if the value is constant, we try to
3608 return the constant. In some cases, we return a pseudo
3609 copied *from* TARGET.
3610
3611 If the mode is BLKmode then we may return TARGET itself.
3612 It turns out that in BLKmode it doesn't cause a problem,
3613 because C has no operators that could combine two different
3614 assignments into the same BLKmode object with different values
3615 with no sequence point. Will other languages need this to
3616 be more thorough?
3617
3618 If WANT_VALUE is 0, we return NULL, to make sure
3619 to catch quickly any cases where the caller uses the value
3620 and fails to set WANT_VALUE. */
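/* As a rough illustration (a hypothetical example, not dictated by the
   surrounding code): for a nested C assignment such as

	int a, b, c;
	a = (b = c);

   the inner assignment is expanded with WANT_VALUE nonzero, so store_expr
   both stores C into B and hands back an rtx (normally a pseudo) holding
   the stored value for the enclosing assignment to use.  */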
3621
3622 rtx
3623 store_expr (exp, target, want_value)
3624 register tree exp;
3625 register rtx target;
3626 int want_value;
3627 {
3628 register rtx temp;
3629 int dont_return_target = 0;
3630
3631 if (TREE_CODE (exp) == COMPOUND_EXPR)
3632 {
3633 /* Perform first part of compound expression, then assign from second
3634 part. */
3635 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3636 emit_queue ();
3637 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3638 }
3639 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3640 {
3641 /* For conditional expression, get safe form of the target. Then
3642 test the condition, doing the appropriate assignment on either
3643 side. This avoids the creation of unnecessary temporaries.
3644 For non-BLKmode, it is more efficient not to do this. */
3645
3646 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3647
3648 emit_queue ();
3649 target = protect_from_queue (target, 1);
3650
3651 do_pending_stack_adjust ();
3652 NO_DEFER_POP;
3653 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3654 start_cleanup_deferral ();
3655 store_expr (TREE_OPERAND (exp, 1), target, 0);
3656 end_cleanup_deferral ();
3657 emit_queue ();
3658 emit_jump_insn (gen_jump (lab2));
3659 emit_barrier ();
3660 emit_label (lab1);
3661 start_cleanup_deferral ();
3662 store_expr (TREE_OPERAND (exp, 2), target, 0);
3663 end_cleanup_deferral ();
3664 emit_queue ();
3665 emit_label (lab2);
3666 OK_DEFER_POP;
3667
3668 return want_value ? target : NULL_RTX;
3669 }
3670 else if (queued_subexp_p (target))
3671 /* If target contains a postincrement, let's not risk
3672 using it as the place to generate the rhs. */
3673 {
3674 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3675 {
3676 /* Expand EXP into a new pseudo. */
3677 temp = gen_reg_rtx (GET_MODE (target));
3678 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3679 }
3680 else
3681 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3682
3683 /* If target is volatile, ANSI requires accessing the value
3684 *from* the target, if it is accessed. So make that happen.
3685 In no case return the target itself. */
3686 if (! MEM_VOLATILE_P (target) && want_value)
3687 dont_return_target = 1;
3688 }
3689 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3690 && GET_MODE (target) != BLKmode)
3691 /* If target is in memory and caller wants value in a register instead,
3692 arrange that. Pass TARGET as target for expand_expr so that,
3693 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3694 We know expand_expr will not use the target in that case.
3695 Don't do this if TARGET is volatile because we are supposed
3696 to write it and then read it. */
3697 {
3698 temp = expand_expr (exp, target, GET_MODE (target), 0);
3699 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3700 temp = copy_to_reg (temp);
3701 dont_return_target = 1;
3702 }
3703 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3704 /* If this is a scalar in a register that is stored in a wider mode
3705 than the declared mode, compute the result into its declared mode
3706 and then convert to the wider mode. Our value is the computed
3707 expression. */
3708 {
3709 /* If we don't want a value, we can do the conversion inside EXP,
3710 which will often result in some optimizations. Do the conversion
3711 in two steps: first change the signedness, if needed, then
3712 the extend. But don't do this if the type of EXP is a subtype
3713 of something else since then the conversion might involve
3714 more than just converting modes. */
3715 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3716 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3717 {
3718 if (TREE_UNSIGNED (TREE_TYPE (exp))
3719 != SUBREG_PROMOTED_UNSIGNED_P (target))
3720 exp
3721 = convert
3722 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3723 TREE_TYPE (exp)),
3724 exp);
3725
3726 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3727 SUBREG_PROMOTED_UNSIGNED_P (target)),
3728 exp);
3729 }
3730
3731 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3732
3733 /* If TEMP is a volatile MEM and we want a result value, make
3734 the access now so it gets done only once. Likewise if
3735 it contains TARGET. */
3736 if (GET_CODE (temp) == MEM && want_value
3737 && (MEM_VOLATILE_P (temp)
3738 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3739 temp = copy_to_reg (temp);
3740
3741 /* If TEMP is a VOIDmode constant, use convert_modes to make
3742 sure that we properly convert it. */
3743 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3744 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3745 TYPE_MODE (TREE_TYPE (exp)), temp,
3746 SUBREG_PROMOTED_UNSIGNED_P (target));
3747
3748 convert_move (SUBREG_REG (target), temp,
3749 SUBREG_PROMOTED_UNSIGNED_P (target));
3750
3751 /* If we promoted a constant, change the mode back down to match
3752 target. Otherwise, the caller might get confused by a result whose
3753 mode is larger than expected. */
3754
3755 if (want_value && GET_MODE (temp) != GET_MODE (target)
3756 && GET_MODE (temp) != VOIDmode)
3757 {
3758 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3759 SUBREG_PROMOTED_VAR_P (temp) = 1;
3760 SUBREG_PROMOTED_UNSIGNED_P (temp)
3761 = SUBREG_PROMOTED_UNSIGNED_P (target);
3762 }
3763
3764 return want_value ? temp : NULL_RTX;
3765 }
3766 else
3767 {
3768 temp = expand_expr (exp, target, GET_MODE (target), 0);
3769 /* Return TARGET if it's a specified hardware register.
3770 If TARGET is a volatile mem ref, either return TARGET
3771 or return a reg copied *from* TARGET; ANSI requires this.
3772
3773 Otherwise, if TEMP is not TARGET, return TEMP
3774 if it is constant (for efficiency),
3775 or if we really want the correct value. */
3776 if (!(target && GET_CODE (target) == REG
3777 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3778 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3779 && ! rtx_equal_p (temp, target)
3780 && (CONSTANT_P (temp) || want_value))
3781 dont_return_target = 1;
3782 }
3783
3784 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3785 the same as that of TARGET, adjust the constant. This is needed, for
3786 example, in case it is a CONST_DOUBLE and we want only a word-sized
3787 value. */
3788 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3789 && TREE_CODE (exp) != ERROR_MARK
3790 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3791 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3792 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3793
3794 if (current_function_check_memory_usage
3795 && GET_CODE (target) == MEM
3796 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3797 {
3798 if (GET_CODE (temp) == MEM)
3799 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3800 XEXP (target, 0), Pmode,
3801 XEXP (temp, 0), Pmode,
3802 expr_size (exp), TYPE_MODE (sizetype));
3803 else
3804 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3805 XEXP (target, 0), Pmode,
3806 expr_size (exp), TYPE_MODE (sizetype),
3807 GEN_INT (MEMORY_USE_WO),
3808 TYPE_MODE (integer_type_node));
3809 }
3810
3811 /* If value was not generated in the target, store it there.
3812 Convert the value to TARGET's type first if necessary. */
3813 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3814 one or both of them are volatile memory refs, we have to distinguish
3815 two cases:
3816 - expand_expr has used TARGET. In this case, we must not generate
3817 another copy. This can be detected because TEMP and TARGET compare
3818 equal according to ==.
3819 - expand_expr has not used TARGET; that means that the source just
3820 happens to have the same RTX form. Since TEMP will have been created
3821 by expand_expr, it will compare unequal according to ==.
3822 We must generate a copy in this case, to reach the correct number
3823 of volatile memory references. */
3824
3825 if ((! rtx_equal_p (temp, target)
3826 || (temp != target && (side_effects_p (temp)
3827 || side_effects_p (target))))
3828 && TREE_CODE (exp) != ERROR_MARK)
3829 {
3830 target = protect_from_queue (target, 1);
3831 if (GET_MODE (temp) != GET_MODE (target)
3832 && GET_MODE (temp) != VOIDmode)
3833 {
3834 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3835 if (dont_return_target)
3836 {
3837 /* In this case, we will return TEMP,
3838 so make sure it has the proper mode.
3839 But don't forget to store the value into TARGET. */
3840 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3841 emit_move_insn (target, temp);
3842 }
3843 else
3844 convert_move (target, temp, unsignedp);
3845 }
3846
3847 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3848 {
3849 /* Handle copying a string constant into an array.
3850 The string constant may be shorter than the array.
3851 So copy just the string's actual length, and clear the rest. */
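	      /* A concrete illustration (hypothetical source, assuming the
		 usual C semantics): for
			char buf[8] = "hi";
		 TREE_STRING_LENGTH is 3 ("hi" plus the terminating null)
		 while expr_size gives 8, so only the first 3 bytes are
		 block-copied and the remaining 5 are cleared by the code
		 below.  */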
3852 rtx size;
3853 rtx addr;
3854
3855 /* Get the size of the data type of the string,
3856 which is actually the size of the target. */
3857 size = expr_size (exp);
3858 if (GET_CODE (size) == CONST_INT
3859 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3860 emit_block_move (target, temp, size,
3861 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3862 else
3863 {
3864 /* Compute the size of the data to copy from the string. */
3865 tree copy_size
3866 = size_binop (MIN_EXPR,
3867 make_tree (sizetype, size),
3868 size_int (TREE_STRING_LENGTH (exp)));
3869 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3870 VOIDmode, 0);
3871 rtx label = 0;
3872
3873 /* Copy that much. */
3874 emit_block_move (target, temp, copy_size_rtx,
3875 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3876
3877 /* Figure out how much is left in TARGET that we have to clear.
3878 Do all calculations in ptr_mode. */
3879
3880 addr = XEXP (target, 0);
3881 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3882
3883 if (GET_CODE (copy_size_rtx) == CONST_INT)
3884 {
3885 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3886 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3887 }
3888 else
3889 {
3890 addr = force_reg (ptr_mode, addr);
3891 addr = expand_binop (ptr_mode, add_optab, addr,
3892 copy_size_rtx, NULL_RTX, 0,
3893 OPTAB_LIB_WIDEN);
3894
3895 size = expand_binop (ptr_mode, sub_optab, size,
3896 copy_size_rtx, NULL_RTX, 0,
3897 OPTAB_LIB_WIDEN);
3898
3899 label = gen_label_rtx ();
3900 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3901 GET_MODE (size), 0, 0, label);
3902 }
3903
3904 if (size != const0_rtx)
3905 {
3906 /* Be sure we can write on ADDR. */
3907 if (current_function_check_memory_usage)
3908 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3909 addr, Pmode,
3910 size, TYPE_MODE (sizetype),
3911 GEN_INT (MEMORY_USE_WO),
3912 TYPE_MODE (integer_type_node));
3913 #ifdef TARGET_MEM_FUNCTIONS
3914 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3915 addr, ptr_mode,
3916 const0_rtx, TYPE_MODE (integer_type_node),
3917 convert_to_mode (TYPE_MODE (sizetype),
3918 size,
3919 TREE_UNSIGNED (sizetype)),
3920 TYPE_MODE (sizetype));
3921 #else
3922 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3923 addr, ptr_mode,
3924 convert_to_mode (TYPE_MODE (integer_type_node),
3925 size,
3926 TREE_UNSIGNED (integer_type_node)),
3927 TYPE_MODE (integer_type_node));
3928 #endif
3929 }
3930
3931 if (label)
3932 emit_label (label);
3933 }
3934 }
3935 /* Handle calls that return values in multiple non-contiguous locations.
3936 The Irix 6 ABI has examples of this. */
3937 else if (GET_CODE (target) == PARALLEL)
3938 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3939 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3940 else if (GET_MODE (temp) == BLKmode)
3941 emit_block_move (target, temp, expr_size (exp),
3942 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3943 else
3944 emit_move_insn (target, temp);
3945 }
3946
3947 /* If we don't want a value, return NULL_RTX. */
3948 if (! want_value)
3949 return NULL_RTX;
3950
3951 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3952 ??? The latter test doesn't seem to make sense. */
3953 else if (dont_return_target && GET_CODE (temp) != MEM)
3954 return temp;
3955
3956 /* Return TARGET itself if it is a hard register. */
3957 else if (want_value && GET_MODE (target) != BLKmode
3958 && ! (GET_CODE (target) == REG
3959 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3960 return copy_to_reg (target);
3961
3962 else
3963 return target;
3964 }
3965 \f
3966 /* Return 1 if EXP just contains zeros. */
3967
3968 static int
3969 is_zeros_p (exp)
3970 tree exp;
3971 {
3972 tree elt;
3973
3974 switch (TREE_CODE (exp))
3975 {
3976 case CONVERT_EXPR:
3977 case NOP_EXPR:
3978 case NON_LVALUE_EXPR:
3979 return is_zeros_p (TREE_OPERAND (exp, 0));
3980
3981 case INTEGER_CST:
3982 return integer_zerop (exp);
3983
3984 case COMPLEX_CST:
3985 return
3986 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3987
3988 case REAL_CST:
3989 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3990
3991 case CONSTRUCTOR:
3992 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3993 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3994 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3995 if (! is_zeros_p (TREE_VALUE (elt)))
3996 return 0;
3997
3998 return 1;
3999
4000 default:
4001 return 0;
4002 }
4003 }
4004
4005 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
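/* For instance (an illustrative case): the initializer
	int v[8] = { 0, 5, 0, 0, 0, 0, 0, 0 };
   produces a CONSTRUCTOR with one nonzero element out of eight, so
   mostly_zeros_p returns 1 and store_constructor will prefer to clear the
   whole object and then store only the nonzero element.  */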
4006
4007 static int
4008 mostly_zeros_p (exp)
4009 tree exp;
4010 {
4011 if (TREE_CODE (exp) == CONSTRUCTOR)
4012 {
4013 int elts = 0, zeros = 0;
4014 tree elt = CONSTRUCTOR_ELTS (exp);
4015 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4016 {
4017 /* If there are no ranges of true bits, it is all zero. */
4018 return elt == NULL_TREE;
4019 }
4020 for (; elt; elt = TREE_CHAIN (elt))
4021 {
4022 /* We do not handle the case where the index is a RANGE_EXPR,
4023 so the statistic will be somewhat inaccurate.
4024 We do make a more accurate count in store_constructor itself,
4025 and since this function is only used for nested array elements,
4026 this should be close enough. */
4027 if (mostly_zeros_p (TREE_VALUE (elt)))
4028 zeros++;
4029 elts++;
4030 }
4031
4032 return 4 * zeros >= 3 * elts;
4033 }
4034
4035 return is_zeros_p (exp);
4036 }
4037 \f
4038 /* Helper function for store_constructor.
4039 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4040 TYPE is the type of the CONSTRUCTOR, not the element type.
4041 ALIGN and CLEARED are as for store_constructor.
4042
4043 This provides a recursive shortcut back to store_constructor when it isn't
4044 necessary to go through store_field. This is so that we can pass through
4045 the cleared field to let store_constructor know that we may not have to
4046 clear a substructure if the outer structure has already been cleared. */
4047
4048 static void
4049 store_constructor_field (target, bitsize, bitpos,
4050 mode, exp, type, align, cleared)
4051 rtx target;
4052 int bitsize, bitpos;
4053 enum machine_mode mode;
4054 tree exp, type;
4055 unsigned int align;
4056 int cleared;
4057 {
4058 if (TREE_CODE (exp) == CONSTRUCTOR
4059 && bitpos % BITS_PER_UNIT == 0
4060 /* If we have a non-zero bitpos for a register target, then we just
4061 let store_field do the bitfield handling. This is unlikely to
4062 generate unnecessary clear instructions anyway. */
4063 && (bitpos == 0 || GET_CODE (target) == MEM))
4064 {
4065 if (bitpos != 0)
4066 target
4067 = change_address (target,
4068 GET_MODE (target) == BLKmode
4069 || 0 != (bitpos
4070 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4071 ? BLKmode : VOIDmode,
4072 plus_constant (XEXP (target, 0),
4073 bitpos / BITS_PER_UNIT));
4074 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4075 }
4076 else
4077 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4078 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4079 int_size_in_bytes (type), 0);
4080 }
4081
4082 /* Store the value of constructor EXP into the rtx TARGET.
4083 TARGET is either a REG or a MEM.
4084 ALIGN is the maximum known alignment for TARGET, in bits.
4085 CLEARED is true if TARGET is known to have been zero'd.
4086 SIZE is the number of bytes of TARGET we are allowed to modify: this
4087 may not be the same as the size of EXP if we are assigning to a field
4088 which has been packed to exclude padding bits. */
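/* A rough example (hypothetical source): for
	struct { int a, b, c; } x = { 1 };
   the constructor names fewer fields than the structure has, so the code
   below clears all of X first and then stores only field A; the missing
   fields B and C are thereby left zero.  */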
4089
4090 static void
4091 store_constructor (exp, target, align, cleared, size)
4092 tree exp;
4093 rtx target;
4094 unsigned int align;
4095 int cleared;
4096 int size;
4097 {
4098 tree type = TREE_TYPE (exp);
4099 #ifdef WORD_REGISTER_OPERATIONS
4100 rtx exp_size = expr_size (exp);
4101 #endif
4102
4103 /* We know our target cannot conflict, since safe_from_p has been called. */
4104 #if 0
4105 /* Don't try copying piece by piece into a hard register
4106 since that is vulnerable to being clobbered by EXP.
4107 Instead, construct in a pseudo register and then copy it all. */
4108 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4109 {
4110 rtx temp = gen_reg_rtx (GET_MODE (target));
4111 store_constructor (exp, temp, align, cleared, size);
4112 emit_move_insn (target, temp);
4113 return;
4114 }
4115 #endif
4116
4117 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4118 || TREE_CODE (type) == QUAL_UNION_TYPE)
4119 {
4120 register tree elt;
4121
4122 /* Inform later passes that the whole union value is dead. */
4123 if ((TREE_CODE (type) == UNION_TYPE
4124 || TREE_CODE (type) == QUAL_UNION_TYPE)
4125 && ! cleared)
4126 {
4127 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4128
4129 /* If the constructor is empty, clear the union. */
4130 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4131 clear_storage (target, expr_size (exp),
4132 TYPE_ALIGN (type) / BITS_PER_UNIT);
4133 }
4134
4135 /* If we are building a static constructor into a register,
4136 set the initial value as zero so we can fold the value into
4137 a constant. But if more than one register is involved,
4138 this probably loses. */
4139 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4140 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4141 {
4142 if (! cleared)
4143 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4144
4145 cleared = 1;
4146 }
4147
4148 /* If the constructor has fewer fields than the structure
4149 or if we are initializing the structure to mostly zeros,
4150 clear the whole structure first. */
4151 else if (size > 0
4152 && ((list_length (CONSTRUCTOR_ELTS (exp))
4153 != list_length (TYPE_FIELDS (type)))
4154 || mostly_zeros_p (exp)))
4155 {
4156 if (! cleared)
4157 clear_storage (target, GEN_INT (size),
4158 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4159
4160 cleared = 1;
4161 }
4162 else if (! cleared)
4163 /* Inform later passes that the old value is dead. */
4164 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4165
4166 /* Store each element of the constructor into
4167 the corresponding field of TARGET. */
4168
4169 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4170 {
4171 register tree field = TREE_PURPOSE (elt);
4172 #ifdef WORD_REGISTER_OPERATIONS
4173 tree value = TREE_VALUE (elt);
4174 #endif
4175 register enum machine_mode mode;
4176 int bitsize;
4177 int bitpos = 0;
4178 int unsignedp;
4179 tree pos, constant = 0, offset = 0;
4180 rtx to_rtx = target;
4181
4182 /* Just ignore missing fields.
4183 We cleared the whole structure, above,
4184 if any fields are missing. */
4185 if (field == 0)
4186 continue;
4187
4188 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4189 continue;
4190
4191 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4192 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4193 else
4194 bitsize = -1;
4195
4196 unsignedp = TREE_UNSIGNED (field);
4197 mode = DECL_MODE (field);
4198 if (DECL_BIT_FIELD (field))
4199 mode = VOIDmode;
4200
4201 pos = DECL_FIELD_BITPOS (field);
4202 if (TREE_CODE (pos) == INTEGER_CST)
4203 constant = pos;
4204 else if (TREE_CODE (pos) == PLUS_EXPR
4205 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4206 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4207 else
4208 offset = pos;
4209
4210 if (constant)
4211 bitpos = TREE_INT_CST_LOW (constant);
4212
4213 if (offset)
4214 {
4215 rtx offset_rtx;
4216
4217 if (contains_placeholder_p (offset))
4218 offset = build (WITH_RECORD_EXPR, bitsizetype,
4219 offset, make_tree (TREE_TYPE (exp), target));
4220
4221 offset = size_binop (EXACT_DIV_EXPR, offset,
4222 bitsize_int (BITS_PER_UNIT));
4223 offset = convert (sizetype, offset);
4224
4225 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4226 if (GET_CODE (to_rtx) != MEM)
4227 abort ();
4228
4229 if (GET_MODE (offset_rtx) != ptr_mode)
4230 {
4231 #ifdef POINTERS_EXTEND_UNSIGNED
4232 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4233 #else
4234 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4235 #endif
4236 }
4237
4238 to_rtx
4239 = change_address (to_rtx, VOIDmode,
4240 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4241 force_reg (ptr_mode,
4242 offset_rtx)));
4243 }
4244
4245 if (TREE_READONLY (field))
4246 {
4247 if (GET_CODE (to_rtx) == MEM)
4248 to_rtx = copy_rtx (to_rtx);
4249
4250 RTX_UNCHANGING_P (to_rtx) = 1;
4251 }
4252
4253 #ifdef WORD_REGISTER_OPERATIONS
4254 /* If this initializes a field that is smaller than a word, at the
4255 start of a word, try to widen it to a full word.
4256 This special case allows us to output C++ member function
4257 initializations in a form that the optimizers can understand. */
4258 if (constant
4259 && GET_CODE (target) == REG
4260 && bitsize < BITS_PER_WORD
4261 && bitpos % BITS_PER_WORD == 0
4262 && GET_MODE_CLASS (mode) == MODE_INT
4263 && TREE_CODE (value) == INTEGER_CST
4264 && GET_CODE (exp_size) == CONST_INT
4265 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4266 {
4267 tree type = TREE_TYPE (value);
4268 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4269 {
4270 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4271 value = convert (type, value);
4272 }
4273 if (BYTES_BIG_ENDIAN)
4274 value
4275 = fold (build (LSHIFT_EXPR, type, value,
4276 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4277 bitsize = BITS_PER_WORD;
4278 mode = word_mode;
4279 }
4280 #endif
4281 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4282 TREE_VALUE (elt), type,
4283 MIN (align,
4284 DECL_ALIGN (TREE_PURPOSE (elt))),
4285 cleared);
4286 }
4287 }
4288 else if (TREE_CODE (type) == ARRAY_TYPE)
4289 {
4290 register tree elt;
4291 register int i;
4292 int need_to_clear;
4293 tree domain = TYPE_DOMAIN (type);
4294 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4295 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4296 tree elttype = TREE_TYPE (type);
4297
4298 /* If the constructor has fewer elements than the array,
4299 clear the whole array first. Similarly if this is a
4300 static constructor of a non-BLKmode object.
4301 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4302 need_to_clear = 1;
4303 else
4304 {
4305 HOST_WIDE_INT count = 0, zero_count = 0;
4306 need_to_clear = 0;
4307 /* This loop is a more accurate version of the loop in
4308 mostly_zeros_p (it handles RANGE_EXPR in an index).
4309 It is also needed to check for missing elements. */
4310 for (elt = CONSTRUCTOR_ELTS (exp);
4311 elt != NULL_TREE;
4312 elt = TREE_CHAIN (elt))
4313 {
4314 tree index = TREE_PURPOSE (elt);
4315 HOST_WIDE_INT this_node_count;
4316 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4317 {
4318 tree lo_index = TREE_OPERAND (index, 0);
4319 tree hi_index = TREE_OPERAND (index, 1);
4320
4321 if (TREE_CODE (lo_index) != INTEGER_CST
4322 || TREE_CODE (hi_index) != INTEGER_CST)
4323 {
4324 need_to_clear = 1;
4325 break;
4326 }
4327 this_node_count = (TREE_INT_CST_LOW (hi_index)
4328 - TREE_INT_CST_LOW (lo_index) + 1);
4329 }
4330 else
4331 this_node_count = 1;
4332 count += this_node_count;
4333 if (mostly_zeros_p (TREE_VALUE (elt)))
4334 zero_count += this_node_count;
4335 }
4336 /* Clear the entire array first if there are any missing elements,
4337 or if the incidence of zero elements is >= 75%. */
4338 if (count < maxelt - minelt + 1
4339 || 4 * zero_count >= 3 * count)
4340 need_to_clear = 1;
4341 }
4342 if (need_to_clear && size > 0)
4343 {
4344 if (! cleared)
4345 clear_storage (target, GEN_INT (size),
4346 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4347 cleared = 1;
4348 }
4349 else
4350 /* Inform later passes that the old value is dead. */
4351 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4352
4353 /* Store each element of the constructor into
4354 the corresponding element of TARGET, determined
4355 by counting the elements. */
4356 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4357 elt;
4358 elt = TREE_CHAIN (elt), i++)
4359 {
4360 register enum machine_mode mode;
4361 int bitsize;
4362 int bitpos;
4363 int unsignedp;
4364 tree value = TREE_VALUE (elt);
4365 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4366 tree index = TREE_PURPOSE (elt);
4367 rtx xtarget = target;
4368
4369 if (cleared && is_zeros_p (value))
4370 continue;
4371
4372 unsignedp = TREE_UNSIGNED (elttype);
4373 mode = TYPE_MODE (elttype);
4374 if (mode == BLKmode)
4375 {
4376 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4377 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4378 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4379 else
4380 bitsize = -1;
4381 }
4382 else
4383 bitsize = GET_MODE_BITSIZE (mode);
4384
4385 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4386 {
4387 tree lo_index = TREE_OPERAND (index, 0);
4388 tree hi_index = TREE_OPERAND (index, 1);
4389 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4390 struct nesting *loop;
4391 HOST_WIDE_INT lo, hi, count;
4392 tree position;
4393
4394 /* If the range is constant and "small", unroll the loop. */
4395 if (TREE_CODE (lo_index) == INTEGER_CST
4396 && TREE_CODE (hi_index) == INTEGER_CST
4397 && (lo = TREE_INT_CST_LOW (lo_index),
4398 hi = TREE_INT_CST_LOW (hi_index),
4399 count = hi - lo + 1,
4400 (GET_CODE (target) != MEM
4401 || count <= 2
4402 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4403 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4404 <= 40 * 8))))
4405 {
4406 lo -= minelt; hi -= minelt;
4407 for (; lo <= hi; lo++)
4408 {
4409 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4410 store_constructor_field (target, bitsize, bitpos, mode,
4411 value, type, align, cleared);
4412 }
4413 }
4414 else
4415 {
4416 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4417 loop_top = gen_label_rtx ();
4418 loop_end = gen_label_rtx ();
4419
4420 unsignedp = TREE_UNSIGNED (domain);
4421
4422 index = build_decl (VAR_DECL, NULL_TREE, domain);
4423
4424 DECL_RTL (index) = index_r
4425 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4426 &unsignedp, 0));
4427
4428 if (TREE_CODE (value) == SAVE_EXPR
4429 && SAVE_EXPR_RTL (value) == 0)
4430 {
4431 /* Make sure value gets expanded once before the
4432 loop. */
4433 expand_expr (value, const0_rtx, VOIDmode, 0);
4434 emit_queue ();
4435 }
4436 store_expr (lo_index, index_r, 0);
4437 loop = expand_start_loop (0);
4438
4439 /* Assign value to element index. */
4440 position
4441 = convert (ssizetype,
4442 fold (build (MINUS_EXPR, TREE_TYPE (index),
4443 index, TYPE_MIN_VALUE (domain))));
4444 position = size_binop (MULT_EXPR, position,
4445 convert (ssizetype,
4446 TYPE_SIZE_UNIT (elttype)));
4447
4448 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4449 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4450 xtarget = change_address (target, mode, addr);
4451 if (TREE_CODE (value) == CONSTRUCTOR)
4452 store_constructor (value, xtarget, align, cleared,
4453 bitsize / BITS_PER_UNIT);
4454 else
4455 store_expr (value, xtarget, 0);
4456
4457 expand_exit_loop_if_false (loop,
4458 build (LT_EXPR, integer_type_node,
4459 index, hi_index));
4460
4461 expand_increment (build (PREINCREMENT_EXPR,
4462 TREE_TYPE (index),
4463 index, integer_one_node), 0, 0);
4464 expand_end_loop ();
4465 emit_label (loop_end);
4466 }
4467 }
4468 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4469 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4470 {
4471 rtx pos_rtx, addr;
4472 tree position;
4473
4474 if (index == 0)
4475 index = ssize_int (i);
4476
4477 if (minelt)
4478 index = convert (ssizetype,
4479 fold (build (MINUS_EXPR, TREE_TYPE (index),
4480 index, TYPE_MIN_VALUE (domain))));
4481 position = size_binop (MULT_EXPR, index,
4482 convert (ssizetype,
4483 TYPE_SIZE_UNIT (elttype)));
4484 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4485 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4486 xtarget = change_address (target, mode, addr);
4487 store_expr (value, xtarget, 0);
4488 }
4489 else
4490 {
4491 if (index != 0)
4492 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4493 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4494 else
4495 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4496 store_constructor_field (target, bitsize, bitpos, mode, value,
4497 type, align, cleared);
4498 }
4499 }
4500 }
4501 /* set constructor assignments */
4502 else if (TREE_CODE (type) == SET_TYPE)
4503 {
4504 tree elt = CONSTRUCTOR_ELTS (exp);
4505 int nbytes = int_size_in_bytes (type), nbits;
4506 tree domain = TYPE_DOMAIN (type);
4507 tree domain_min, domain_max, bitlength;
4508
4509 /* The default implementation strategy is to extract the constant
4510 parts of the constructor, use that to initialize the target,
4511 and then "or" in whatever non-constant ranges we need in addition.
4512
4513 If a large set is all zero or all ones, it is
4514 probably better to set it using memset (if available) or bzero.
4515 Also, if a large set has just a single range, it may also be
4516 better to first clear the whole set (using bzero/memset) and
4517 then set the bits we want. */
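      /* A small worked example (illustrative, for the !BYTES_BIG_ENDIAN
	 case): if the set's only members are 1 and 3,
	 get_set_constructor_bits fills bit_buffer with 0,1,0,1,0,..., and
	 the loop below accumulates word = (1 << 1) | (1 << 3) == 0xa before
	 moving that word into the target.  */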
4518
4519 /* Check for all zeros. */
4520 if (elt == NULL_TREE && size > 0)
4521 {
4522 if (!cleared)
4523 clear_storage (target, GEN_INT (size),
4524 TYPE_ALIGN (type) / BITS_PER_UNIT);
4525 return;
4526 }
4527
4528 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4529 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4530 bitlength = size_binop (PLUS_EXPR,
4531 size_diffop (domain_max, domain_min),
4532 ssize_int (1));
4533
4534 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4535 abort ();
4536 nbits = TREE_INT_CST_LOW (bitlength);
4537
4538 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4539 are "complicated" (more than one range), initialize (the
4540 constant parts) by copying from a constant. */
4541 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4542 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4543 {
4544 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4545 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4546 char *bit_buffer = (char *) alloca (nbits);
4547 HOST_WIDE_INT word = 0;
4548 int bit_pos = 0;
4549 int ibit = 0;
4550 int offset = 0; /* In bytes from beginning of set. */
4551 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4552 for (;;)
4553 {
4554 if (bit_buffer[ibit])
4555 {
4556 if (BYTES_BIG_ENDIAN)
4557 word |= (1 << (set_word_size - 1 - bit_pos));
4558 else
4559 word |= 1 << bit_pos;
4560 }
4561 bit_pos++; ibit++;
4562 if (bit_pos >= set_word_size || ibit == nbits)
4563 {
4564 if (word != 0 || ! cleared)
4565 {
4566 rtx datum = GEN_INT (word);
4567 rtx to_rtx;
4568 /* The assumption here is that it is safe to use
4569 XEXP if the set is multi-word, but not if
4570 it's single-word. */
4571 if (GET_CODE (target) == MEM)
4572 {
4573 to_rtx = plus_constant (XEXP (target, 0), offset);
4574 to_rtx = change_address (target, mode, to_rtx);
4575 }
4576 else if (offset == 0)
4577 to_rtx = target;
4578 else
4579 abort ();
4580 emit_move_insn (to_rtx, datum);
4581 }
4582 if (ibit == nbits)
4583 break;
4584 word = 0;
4585 bit_pos = 0;
4586 offset += set_word_size / BITS_PER_UNIT;
4587 }
4588 }
4589 }
4590 else if (!cleared)
4591 {
4592 /* Don't bother clearing storage if the set is all ones. */
4593 if (TREE_CHAIN (elt) != NULL_TREE
4594 || (TREE_PURPOSE (elt) == NULL_TREE
4595 ? nbits != 1
4596 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4597 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4598 || ((HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_VALUE (elt))
4599 - (HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4600 != nbits))))
4601 clear_storage (target, expr_size (exp),
4602 TYPE_ALIGN (type) / BITS_PER_UNIT);
4603 }
4604
4605 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4606 {
4607 /* start of range of element or NULL */
4608 tree startbit = TREE_PURPOSE (elt);
4609 /* end of range of element, or element value */
4610 tree endbit = TREE_VALUE (elt);
4611 #ifdef TARGET_MEM_FUNCTIONS
4612 HOST_WIDE_INT startb, endb;
4613 #endif
4614 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4615
4616 bitlength_rtx = expand_expr (bitlength,
4617 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4618
4619 /* handle non-range tuple element like [ expr ] */
4620 if (startbit == NULL_TREE)
4621 {
4622 startbit = save_expr (endbit);
4623 endbit = startbit;
4624 }
4625 startbit = convert (sizetype, startbit);
4626 endbit = convert (sizetype, endbit);
4627 if (! integer_zerop (domain_min))
4628 {
4629 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4630 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4631 }
4632 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4633 EXPAND_CONST_ADDRESS);
4634 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4635 EXPAND_CONST_ADDRESS);
4636
4637 if (REG_P (target))
4638 {
4639 targetx = assign_stack_temp (GET_MODE (target),
4640 GET_MODE_SIZE (GET_MODE (target)),
4641 0);
4642 emit_move_insn (targetx, target);
4643 }
4644 else if (GET_CODE (target) == MEM)
4645 targetx = target;
4646 else
4647 abort ();
4648
4649 #ifdef TARGET_MEM_FUNCTIONS
4650 /* Optimization: If startbit and endbit are
4651 constants divisible by BITS_PER_UNIT,
4652 call memset instead. */
4653 if (TREE_CODE (startbit) == INTEGER_CST
4654 && TREE_CODE (endbit) == INTEGER_CST
4655 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4656 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4657 {
4658 emit_library_call (memset_libfunc, 0,
4659 VOIDmode, 3,
4660 plus_constant (XEXP (targetx, 0),
4661 startb / BITS_PER_UNIT),
4662 Pmode,
4663 constm1_rtx, TYPE_MODE (integer_type_node),
4664 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4665 TYPE_MODE (sizetype));
4666 }
4667 else
4668 #endif
4669 {
4670 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4671 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4672 bitlength_rtx, TYPE_MODE (sizetype),
4673 startbit_rtx, TYPE_MODE (sizetype),
4674 endbit_rtx, TYPE_MODE (sizetype));
4675 }
4676 if (REG_P (target))
4677 emit_move_insn (target, targetx);
4678 }
4679 }
4680
4681 else
4682 abort ();
4683 }
4684
4685 /* Store the value of EXP (an expression tree)
4686 into a subfield of TARGET which has mode MODE and occupies
4687 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4688 If MODE is VOIDmode, it means that we are storing into a bit-field.
4689
4690 If VALUE_MODE is VOIDmode, return nothing in particular.
4691 UNSIGNEDP is not used in this case.
4692
4693 Otherwise, return an rtx for the value stored. This rtx
4694 has mode VALUE_MODE if that is convenient to do.
4695 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4696
4697 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4698 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4699
4700 ALIAS_SET is the alias set for the destination. This value will
4701 (in general) be different from that for TARGET, since TARGET is a
4702 reference to the containing structure. */
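/* For illustration (hypothetical source): assigning to the bit-field in
	struct { unsigned int f : 3; } s;
   reaches store_field with MODE == VOIDmode and BITSIZE == 3, so the value
   is inserted with store_bit_field rather than through an ordinary memory
   reference.  */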
4703
4704 static rtx
4705 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4706 unsignedp, align, total_size, alias_set)
4707 rtx target;
4708 int bitsize, bitpos;
4709 enum machine_mode mode;
4710 tree exp;
4711 enum machine_mode value_mode;
4712 int unsignedp;
4713 unsigned int align;
4714 int total_size;
4715 int alias_set;
4716 {
4717 HOST_WIDE_INT width_mask = 0;
4718
4719 if (TREE_CODE (exp) == ERROR_MARK)
4720 return const0_rtx;
4721
4722 if (bitsize < HOST_BITS_PER_WIDE_INT)
4723 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4724
4725 /* If we are storing into an unaligned field of an aligned union that is
4726 in a register, we may have the mode of TARGET being an integer mode but
4727 MODE == BLKmode. In that case, get an aligned object whose size and
4728 alignment are the same as TARGET and store TARGET into it (we can avoid
4729 the store if the field being stored is the entire width of TARGET). Then
4730 call ourselves recursively to store the field into a BLKmode version of
4731 that object. Finally, load from the object into TARGET. This is not
4732 very efficient in general, but should only be slightly more expensive
4733 than the otherwise-required unaligned accesses. Perhaps this can be
4734 cleaned up later. */
4735
4736 if (mode == BLKmode
4737 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4738 {
4739 rtx object = assign_stack_temp (GET_MODE (target),
4740 GET_MODE_SIZE (GET_MODE (target)), 0);
4741 rtx blk_object = copy_rtx (object);
4742
4743 MEM_SET_IN_STRUCT_P (object, 1);
4744 MEM_SET_IN_STRUCT_P (blk_object, 1);
4745 PUT_MODE (blk_object, BLKmode);
4746
4747 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4748 emit_move_insn (object, target);
4749
4750 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4751 align, total_size, alias_set);
4752
4753 /* Even though we aren't returning target, we need to
4754 give it the updated value. */
4755 emit_move_insn (target, object);
4756
4757 return blk_object;
4758 }
4759
4760 /* If the structure is in a register or if the component
4761 is a bit field, we cannot use addressing to access it.
4762 Use bit-field techniques or SUBREG to store in it. */
4763
4764 if (mode == VOIDmode
4765 || (mode != BLKmode && ! direct_store[(int) mode]
4766 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4767 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4768 || GET_CODE (target) == REG
4769 || GET_CODE (target) == SUBREG
4770 /* If the field isn't aligned enough to store as an ordinary memref,
4771 store it as a bit field. */
4772 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4773 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4774 || bitpos % GET_MODE_ALIGNMENT (mode)))
4775 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4776 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4777 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4778 /* If the RHS and field are a constant size and the size of the
4779 RHS isn't the same size as the bitfield, we must use bitfield
4780 operations. */
4781 || (bitsize >= 0
4782 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4783 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4784 {
4785 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4786
4787 /* If BITSIZE is narrower than the size of the type of EXP
4788 we will be narrowing TEMP. Normally, what's wanted are the
4789 low-order bits. However, if EXP's type is a record and this is
4790 big-endian machine, we want the upper BITSIZE bits. */
4791 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4792 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4793 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4794 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4795 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4796 - bitsize),
4797 temp, 1);
4798
4799 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4800 MODE. */
4801 if (mode != VOIDmode && mode != BLKmode
4802 && mode != TYPE_MODE (TREE_TYPE (exp)))
4803 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4804
4805 /* If the modes of TARGET and TEMP are both BLKmode, both
4806 must be in memory and BITPOS must be aligned on a byte
4807 boundary. If so, we simply do a block copy. */
4808 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4809 {
4810 unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
4811
4812 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4813 || bitpos % BITS_PER_UNIT != 0)
4814 abort ();
4815
4816 target = change_address (target, VOIDmode,
4817 plus_constant (XEXP (target, 0),
4818 bitpos / BITS_PER_UNIT));
4819
4820 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4821 align = MIN (exp_align, align);
4822
4823 /* Find an alignment that is consistent with the bit position. */
4824 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4825 align >>= 1;
4826
4827 emit_block_move (target, temp,
4828 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4829 / BITS_PER_UNIT),
4830 align);
4831
4832 return value_mode == VOIDmode ? const0_rtx : target;
4833 }
4834
4835 /* Store the value in the bitfield. */
4836 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4837 if (value_mode != VOIDmode)
4838 {
4839 /* The caller wants an rtx for the value. */
4840 /* If possible, avoid refetching from the bitfield itself. */
4841 if (width_mask != 0
4842 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4843 {
4844 tree count;
4845 enum machine_mode tmode;
4846
4847 if (unsignedp)
4848 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4849 tmode = GET_MODE (temp);
4850 if (tmode == VOIDmode)
4851 tmode = value_mode;
4852 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4853 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4854 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4855 }
4856 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4857 NULL_RTX, value_mode, 0, align,
4858 total_size);
4859 }
4860 return const0_rtx;
4861 }
4862 else
4863 {
4864 rtx addr = XEXP (target, 0);
4865 rtx to_rtx;
4866
4867 /* If a value is wanted, it must be the lhs;
4868 so make the address stable for multiple use. */
4869
4870 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4871 && ! CONSTANT_ADDRESS_P (addr)
4872 /* A frame-pointer reference is already stable. */
4873 && ! (GET_CODE (addr) == PLUS
4874 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4875 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4876 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4877 addr = copy_to_reg (addr);
4878
4879 /* Now build a reference to just the desired component. */
4880
4881 to_rtx = copy_rtx (change_address (target, mode,
4882 plus_constant (addr,
4883 (bitpos
4884 / BITS_PER_UNIT))));
4885 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4886 MEM_ALIAS_SET (to_rtx) = alias_set;
4887
4888 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4889 }
4890 }
4891 \f
4892 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4893 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4894 ARRAY_REFs and find the ultimate containing object, which we return.
4895
4896 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4897 bit position, and *PUNSIGNEDP to the signedness of the field.
4898 If the position of the field is variable, we store a tree
4899 giving the variable offset (in units) in *POFFSET.
4900 This offset is in addition to the bit position.
4901 If the position is not variable, we store 0 in *POFFSET.
4902 We set *PALIGNMENT to the alignment in bytes of the address that will be
4903 computed. This is the alignment of the thing we return if *POFFSET
4904 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4905
4906 If any of the extraction expressions is volatile,
4907 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4908
4909 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4910 is a mode that can be used to access the field. In that case, *PBITSIZE
4911 is redundant.
4912
4913 If the field describes a variable-sized object, *PMODE is set to
4914 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4915 this case, but the address of the object can be found. */
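/* A rough sketch of the interface (hypothetical example): for a reference
   such as S.A[I].B, where B is a field at a constant position, the value
   returned is the innermost object S, *PBITSIZE is the size of B, the
   constant part of the position is accumulated into *PBITPOS, and the part
   that depends on I is left (in units) in *POFFSET.  */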
4916
4917 tree
4918 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4919 punsignedp, pvolatilep, palignment)
4920 tree exp;
4921 int *pbitsize;
4922 int *pbitpos;
4923 tree *poffset;
4924 enum machine_mode *pmode;
4925 int *punsignedp;
4926 int *pvolatilep;
4927 unsigned int *palignment;
4928 {
4929 tree orig_exp = exp;
4930 tree size_tree = 0;
4931 enum machine_mode mode = VOIDmode;
4932 tree offset = size_zero_node;
4933 unsigned int alignment = BIGGEST_ALIGNMENT;
4934
4935 if (TREE_CODE (exp) == COMPONENT_REF)
4936 {
4937 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4938 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4939 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4940 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4941 }
4942 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4943 {
4944 size_tree = TREE_OPERAND (exp, 1);
4945 *punsignedp = TREE_UNSIGNED (exp);
4946 }
4947 else
4948 {
4949 mode = TYPE_MODE (TREE_TYPE (exp));
4950 if (mode == BLKmode)
4951 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4952
4953 *pbitsize = GET_MODE_BITSIZE (mode);
4954 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4955 }
4956
4957 if (size_tree)
4958 {
4959 if (TREE_CODE (size_tree) != INTEGER_CST)
4960 mode = BLKmode, *pbitsize = -1;
4961 else
4962 *pbitsize = TREE_INT_CST_LOW (size_tree);
4963 }
4964
4965 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4966 and find the ultimate containing object. */
4967
4968 *pbitpos = 0;
4969
4970 while (1)
4971 {
4972 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4973 {
4974 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4975 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4976 : TREE_OPERAND (exp, 2));
4977 tree constant = bitsize_int (0), var = pos;
4978
4979 /* If this field hasn't been filled in yet, don't go
4980 past it. This should only happen when folding expressions
4981 made during type construction. */
4982 if (pos == 0)
4983 break;
4984
4985 /* Assume here that the offset is a multiple of a unit.
4986 If not, there should be an explicitly added constant. */
4987 if (TREE_CODE (pos) == PLUS_EXPR
4988 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4989 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4990 else if (TREE_CODE (pos) == INTEGER_CST)
4991 constant = pos, var = bitsize_int (0);
4992
4993 *pbitpos += TREE_INT_CST_LOW (constant);
4994 offset
4995 = size_binop (PLUS_EXPR, offset,
4996 convert (sizetype,
4997 size_binop (EXACT_DIV_EXPR, var,
4998 bitsize_int (BITS_PER_UNIT))));
4999 }
5000
5001 else if (TREE_CODE (exp) == ARRAY_REF)
5002 {
5003 /* This code is based on the code in case ARRAY_REF in expand_expr
5004 below. We assume here that the size of an array element is
5005 always an integral multiple of BITS_PER_UNIT. */
5006
5007 tree index = TREE_OPERAND (exp, 1);
5008 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5009 tree low_bound
5010 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5011 tree index_type = TREE_TYPE (index);
5012 tree xindex;
5013
5014 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5015 {
5016 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5017 index);
5018 index_type = TREE_TYPE (index);
5019 }
5020
5021 /* Optimize the special case of a zero lower bound.
5022
5023 We convert the low_bound to sizetype to avoid some problems
5024 with constant folding. (E.g. suppose the lower bound is 1,
5025 and its mode is QI. Without the conversion, (ARRAY
5026 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5027 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5028
5029 But sizetype isn't quite right either (especially if
5030 the lowbound is negative). FIXME */
5031
5032 if (! integer_zerop (low_bound))
5033 index = fold (build (MINUS_EXPR, index_type, index,
5034 convert (sizetype, low_bound)));
5035
5036 if (TREE_CODE (index) == INTEGER_CST)
5037 {
5038 index = convert (sbitsizetype, index);
5039 index_type = TREE_TYPE (index);
5040 }
5041
5042 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5043 convert (sbitsizetype,
5044 TYPE_SIZE (TREE_TYPE (exp)))));
5045
5046 if (TREE_CODE (xindex) == INTEGER_CST
5047 && TREE_INT_CST_HIGH (xindex) == 0)
5048 *pbitpos += TREE_INT_CST_LOW (xindex);
5049 else
5050 {
5051 /* Either the bit offset calculated above is not constant, or
5052 it overflowed. In either case, redo the multiplication
5053 against the size in units. This is especially important
5054 in the non-constant case to avoid a division at runtime. */
5055 xindex
5056 = fold (build (MULT_EXPR, ssizetype, index,
5057 convert (ssizetype,
5058 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5059
5060 if (contains_placeholder_p (xindex))
5061 xindex = build (WITH_RECORD_EXPR, ssizetype, xindex, exp);
5062
5063 offset
5064 = size_binop (PLUS_EXPR, offset, convert (sizetype, xindex));
5065 }
5066 }
5067 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5068 && ! ((TREE_CODE (exp) == NOP_EXPR
5069 || TREE_CODE (exp) == CONVERT_EXPR)
5070 && (TYPE_MODE (TREE_TYPE (exp))
5071 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5072 break;
5073
5074 /* If any reference in the chain is volatile, the effect is volatile. */
5075 if (TREE_THIS_VOLATILE (exp))
5076 *pvolatilep = 1;
5077
5078 /* If the offset is non-constant already, then we can't assume any
5079 alignment more than the alignment here. */
5080 if (! integer_zerop (offset))
5081 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5082
5083 exp = TREE_OPERAND (exp, 0);
5084 }
5085
5086 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5087 alignment = MIN (alignment, DECL_ALIGN (exp));
5088 else if (TREE_TYPE (exp) != 0)
5089 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5090
5091 if (integer_zerop (offset))
5092 offset = 0;
5093
5094 if (offset != 0 && contains_placeholder_p (offset))
5095 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5096
5097 *pmode = mode;
5098 *poffset = offset;
5099 *palignment = alignment / BITS_PER_UNIT;
5100 return exp;
5101 }
5102
5103 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5104 static enum memory_use_mode
5105 get_memory_usage_from_modifier (modifier)
5106 enum expand_modifier modifier;
5107 {
5108 switch (modifier)
5109 {
5110 case EXPAND_NORMAL:
5111 case EXPAND_SUM:
5112 return MEMORY_USE_RO;
5113 break;
5114 case EXPAND_MEMORY_USE_WO:
5115 return MEMORY_USE_WO;
5116 break;
5117 case EXPAND_MEMORY_USE_RW:
5118 return MEMORY_USE_RW;
5119 break;
5120 case EXPAND_MEMORY_USE_DONT:
5121 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5122 MEMORY_USE_DONT, because they are modifiers to a call of
5123 expand_expr in the ADDR_EXPR case of expand_expr. */
5124 case EXPAND_CONST_ADDRESS:
5125 case EXPAND_INITIALIZER:
5126 return MEMORY_USE_DONT;
5127 case EXPAND_MEMORY_USE_BAD:
5128 default:
5129 abort ();
5130 }
5131 }
5132 \f
5133 /* Given an rtx VALUE that may contain additions and multiplications,
5134 return an equivalent value that just refers to a register or memory.
5135 This is done by generating instructions to perform the arithmetic
5136 and returning a pseudo-register containing the value.
5137
5138 The returned value may be a REG, SUBREG, MEM or constant. */
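/* A minimal illustration (hypothetical rtl): given
	value = (plus:SI (reg:SI 100) (const_int 8))
   force_operand emits the addition via expand_binop and returns a pseudo
   register holding the sum, so the caller ends up with a plain REG rather
   than an arithmetic expression.  */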
5139
5140 rtx
5141 force_operand (value, target)
5142 rtx value, target;
5143 {
5144 register optab binoptab = 0;
5145 /* Use a temporary to force order of execution of calls to
5146 `force_operand'. */
5147 rtx tmp;
5148 register rtx op2;
5149 /* Use subtarget as the target for operand 0 of a binary operation. */
5150 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5151
5152 /* Check for a PIC address load. */
5153 if (flag_pic
5154 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5155 && XEXP (value, 0) == pic_offset_table_rtx
5156 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5157 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5158 || GET_CODE (XEXP (value, 1)) == CONST))
5159 {
5160 if (!subtarget)
5161 subtarget = gen_reg_rtx (GET_MODE (value));
5162 emit_move_insn (subtarget, value);
5163 return subtarget;
5164 }
5165
5166 if (GET_CODE (value) == PLUS)
5167 binoptab = add_optab;
5168 else if (GET_CODE (value) == MINUS)
5169 binoptab = sub_optab;
5170 else if (GET_CODE (value) == MULT)
5171 {
5172 op2 = XEXP (value, 1);
5173 if (!CONSTANT_P (op2)
5174 && !(GET_CODE (op2) == REG && op2 != subtarget))
5175 subtarget = 0;
5176 tmp = force_operand (XEXP (value, 0), subtarget);
5177 return expand_mult (GET_MODE (value), tmp,
5178 force_operand (op2, NULL_RTX),
5179 target, 0);
5180 }
5181
5182 if (binoptab)
5183 {
5184 op2 = XEXP (value, 1);
5185 if (!CONSTANT_P (op2)
5186 && !(GET_CODE (op2) == REG && op2 != subtarget))
5187 subtarget = 0;
5188 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5189 {
5190 binoptab = add_optab;
5191 op2 = negate_rtx (GET_MODE (value), op2);
5192 }
5193
5194 /* Check for an addition with OP2 a constant integer and our first
5195 operand a PLUS of a virtual register and something else. In that
5196 case, we want to emit the sum of the virtual register and the
5197 constant first and then add the other value. This allows virtual
5198 register instantiation to simply modify the constant rather than
5199 creating another one around this addition. */
5200 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5201 && GET_CODE (XEXP (value, 0)) == PLUS
5202 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5203 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5204 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5205 {
5206 rtx temp = expand_binop (GET_MODE (value), binoptab,
5207 XEXP (XEXP (value, 0), 0), op2,
5208 subtarget, 0, OPTAB_LIB_WIDEN);
5209 return expand_binop (GET_MODE (value), binoptab, temp,
5210 force_operand (XEXP (XEXP (value, 0), 1), 0),
5211 target, 0, OPTAB_LIB_WIDEN);
5212 }
5213
5214 tmp = force_operand (XEXP (value, 0), subtarget);
5215 return expand_binop (GET_MODE (value), binoptab, tmp,
5216 force_operand (op2, NULL_RTX),
5217 target, 0, OPTAB_LIB_WIDEN);
5218 /* We give UNSIGNEDP = 0 to expand_binop
5219 because the only operations we are expanding here are signed ones. */
5220 }
5221 return value;
5222 }
5223 \f
5224 /* Subroutine of expand_expr:
5225 save the non-copied parts (LIST) of an expr (LHS), and return a list
5226 which can restore these values to their previous values,
5227 should something modify their storage. */
5228
5229 static tree
5230 save_noncopied_parts (lhs, list)
5231 tree lhs;
5232 tree list;
5233 {
5234 tree tail;
5235 tree parts = 0;
5236
5237 for (tail = list; tail; tail = TREE_CHAIN (tail))
5238 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5239 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5240 else
5241 {
5242 tree part = TREE_VALUE (tail);
5243 tree part_type = TREE_TYPE (part);
5244 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5245 rtx target = assign_temp (part_type, 0, 1, 1);
5246 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5247 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5248 parts = tree_cons (to_be_saved,
5249 build (RTL_EXPR, part_type, NULL_TREE,
5250 (tree) target),
5251 parts);
5252 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5253 }
5254 return parts;
5255 }
5256
5257 /* Subroutine of expand_expr:
5258 record the non-copied parts (LIST) of an expr (LHS), and return a list
5259 which specifies the initial values of these parts. */
5260
5261 static tree
5262 init_noncopied_parts (lhs, list)
5263 tree lhs;
5264 tree list;
5265 {
5266 tree tail;
5267 tree parts = 0;
5268
5269 for (tail = list; tail; tail = TREE_CHAIN (tail))
5270 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5271 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5272 else if (TREE_PURPOSE (tail))
5273 {
5274 tree part = TREE_VALUE (tail);
5275 tree part_type = TREE_TYPE (part);
5276 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5277 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5278 }
5279 return parts;
5280 }
5281
5282 /* Subroutine of expand_expr: return nonzero iff there is no way that
5283 EXP can reference X, which is being modified. TOP_P is nonzero if this
5284 call is going to be used to determine whether we need a temporary
5285 for EXP, as opposed to a recursive call to this function.
5286
5287 It is always safe for this routine to return zero since it merely
5288 searches for optimization opportunities. */
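   /* For example, if X is a pseudo register, a constant EXP is trivially
      safe (the `c' case below), whereas an INDIRECT_REF is treated as
      unsafe whenever X is a MEM, since the pointer might alias X.  */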
5289
5290 static int
5291 safe_from_p (x, exp, top_p)
5292 rtx x;
5293 tree exp;
5294 int top_p;
5295 {
5296 rtx exp_rtl = 0;
5297 int i, nops;
5298 static int save_expr_count;
5299 static int save_expr_size = 0;
5300 static tree *save_expr_rewritten;
5301 static tree save_expr_trees[256];
5302
5303 if (x == 0
5304 /* If EXP has varying size, we MUST use a target since we currently
5305 have no way of allocating temporaries of variable size
5306 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5307 So we assume here that something at a higher level has prevented a
5308 clash. This is somewhat bogus, but the best we can do. Only
5309 do this when X is BLKmode and when we are at the top level. */
5310 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5311 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5312 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5313 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5314 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5315 != INTEGER_CST)
5316 && GET_MODE (x) == BLKmode))
5317 return 1;
5318
5319 if (top_p && save_expr_size == 0)
5320 {
5321 int rtn;
5322
5323 save_expr_count = 0;
5324 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5325 save_expr_rewritten = &save_expr_trees[0];
5326
5327 rtn = safe_from_p (x, exp, 1);
5328
5329 for (i = 0; i < save_expr_count; ++i)
5330 {
5331 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5332 abort ();
5333 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5334 }
5335
5336 save_expr_size = 0;
5337
5338 return rtn;
5339 }
5340
5341 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5342 find the underlying pseudo. */
5343 if (GET_CODE (x) == SUBREG)
5344 {
5345 x = SUBREG_REG (x);
5346 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5347 return 0;
5348 }
5349
5350 /* If X is a location in the outgoing argument area, it is always safe. */
5351 if (GET_CODE (x) == MEM
5352 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5353 || (GET_CODE (XEXP (x, 0)) == PLUS
5354 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5355 return 1;
5356
5357 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5358 {
5359 case 'd':
5360 exp_rtl = DECL_RTL (exp);
5361 break;
5362
5363 case 'c':
5364 return 1;
5365
5366 case 'x':
5367 if (TREE_CODE (exp) == TREE_LIST)
5368 return ((TREE_VALUE (exp) == 0
5369 || safe_from_p (x, TREE_VALUE (exp), 0))
5370 && (TREE_CHAIN (exp) == 0
5371 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5372 else if (TREE_CODE (exp) == ERROR_MARK)
5373 return 1; /* An already-visited SAVE_EXPR? */
5374 else
5375 return 0;
5376
5377 case '1':
5378 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5379
5380 case '2':
5381 case '<':
5382 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5383 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5384
5385 case 'e':
5386 case 'r':
5387 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5388 the expression. If it is set, we conflict iff we are that rtx or
5389 both are in memory. Otherwise, we check all operands of the
5390 expression recursively. */
5391
5392 switch (TREE_CODE (exp))
5393 {
5394 case ADDR_EXPR:
5395 return (staticp (TREE_OPERAND (exp, 0))
5396 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5397 || TREE_STATIC (exp));
5398
5399 case INDIRECT_REF:
5400 if (GET_CODE (x) == MEM)
5401 return 0;
5402 break;
5403
5404 case CALL_EXPR:
5405 exp_rtl = CALL_EXPR_RTL (exp);
5406 if (exp_rtl == 0)
5407 {
5408 /* Assume that the call will clobber all hard registers and
5409 all of memory. */
5410 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5411 || GET_CODE (x) == MEM)
5412 return 0;
5413 }
5414
5415 break;
5416
5417 case RTL_EXPR:
5418 /* If a sequence exists, we would have to scan every instruction
5419 in the sequence to see if it was safe. This is probably not
5420 worthwhile. */
5421 if (RTL_EXPR_SEQUENCE (exp))
5422 return 0;
5423
5424 exp_rtl = RTL_EXPR_RTL (exp);
5425 break;
5426
5427 case WITH_CLEANUP_EXPR:
5428 exp_rtl = RTL_EXPR_RTL (exp);
5429 break;
5430
5431 case CLEANUP_POINT_EXPR:
5432 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5433
5434 case SAVE_EXPR:
5435 exp_rtl = SAVE_EXPR_RTL (exp);
5436 if (exp_rtl)
5437 break;
5438
5439 /* This SAVE_EXPR might appear many times in the top-level
5440 safe_from_p() expression, and if it has a complex
5441 subexpression, examining it multiple times could result
5442 in a combinatorial explosion. E.g. on an Alpha
5443 running at least 200MHz, a Fortran test case compiled with
5444 optimization took about 28 minutes to compile -- even though
5445 it was only a few lines long, and the complicated line causing
5446 so much time to be spent in the earlier version of safe_from_p()
5447 had only 293 or so unique nodes.
5448
5449 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5450 where it is so we can turn it back in the top-level safe_from_p()
5451 when we're done. */
5452
5453 /* For now, don't bother re-sizing the array. */
5454 if (save_expr_count >= save_expr_size)
5455 return 0;
5456 save_expr_rewritten[save_expr_count++] = exp;
5457
5458 nops = tree_code_length[(int) SAVE_EXPR];
5459 for (i = 0; i < nops; i++)
5460 {
5461 tree operand = TREE_OPERAND (exp, i);
5462 if (operand == NULL_TREE)
5463 continue;
5464 TREE_SET_CODE (exp, ERROR_MARK);
5465 if (!safe_from_p (x, operand, 0))
5466 return 0;
5467 TREE_SET_CODE (exp, SAVE_EXPR);
5468 }
5469 TREE_SET_CODE (exp, ERROR_MARK);
5470 return 1;
5471
5472 case BIND_EXPR:
5473 /* The only operand we look at is operand 1. The rest aren't
5474 part of the expression. */
5475 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5476
5477 case METHOD_CALL_EXPR:
5478 /* This takes an rtx argument, but shouldn't appear here. */
5479 abort ();
5480
5481 default:
5482 break;
5483 }
5484
5485 /* If we have an rtx, we do not need to scan our operands. */
5486 if (exp_rtl)
5487 break;
5488
5489 nops = tree_code_length[(int) TREE_CODE (exp)];
5490 for (i = 0; i < nops; i++)
5491 if (TREE_OPERAND (exp, i) != 0
5492 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5493 return 0;
5494 }
5495
5496 /* If we have an rtx, find any enclosed object. Then see if we conflict
5497 with it. */
5498 if (exp_rtl)
5499 {
5500 if (GET_CODE (exp_rtl) == SUBREG)
5501 {
5502 exp_rtl = SUBREG_REG (exp_rtl);
5503 if (GET_CODE (exp_rtl) == REG
5504 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5505 return 0;
5506 }
5507
5508 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5509 are memory and EXP is not readonly. */
5510 return ! (rtx_equal_p (x, exp_rtl)
5511 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5512 && ! TREE_READONLY (exp)));
5513 }
5514
5515 /* If we reach here, it is safe. */
5516 return 1;
5517 }
5518
5519 /* Subroutine of expand_expr: return nonzero iff EXP is an
5520 expression whose type is statically determinable. */
5521
5522 static int
5523 fixed_type_p (exp)
5524 tree exp;
5525 {
5526 if (TREE_CODE (exp) == PARM_DECL
5527 || TREE_CODE (exp) == VAR_DECL
5528 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5529 || TREE_CODE (exp) == COMPONENT_REF
5530 || TREE_CODE (exp) == ARRAY_REF)
5531 return 1;
5532 return 0;
5533 }
5534
5535 /* Subroutine of expand_expr: return rtx if EXP is a
5536 variable or parameter; else return 0. */
5537
5538 static rtx
5539 var_rtx (exp)
5540 tree exp;
5541 {
5542 STRIP_NOPS (exp);
5543 switch (TREE_CODE (exp))
5544 {
5545 case PARM_DECL:
5546 case VAR_DECL:
5547 return DECL_RTL (exp);
5548 default:
5549 return 0;
5550 }
5551 }
5552
5553 #ifdef MAX_INTEGER_COMPUTATION_MODE
5554 void
5555 check_max_integer_computation_mode (exp)
5556 tree exp;
5557 {
5558 enum tree_code code;
5559 enum machine_mode mode;
5560
5561 /* Strip any NOPs that don't change the mode. */
5562 STRIP_NOPS (exp);
5563 code = TREE_CODE (exp);
5564
5565 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5566 if (code == NOP_EXPR
5567 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5568 return;
5569
5570 /* First check the type of the overall operation. We need only look at
5571 unary, binary and relational operations. */
5572 if (TREE_CODE_CLASS (code) == '1'
5573 || TREE_CODE_CLASS (code) == '2'
5574 || TREE_CODE_CLASS (code) == '<')
5575 {
5576 mode = TYPE_MODE (TREE_TYPE (exp));
5577 if (GET_MODE_CLASS (mode) == MODE_INT
5578 && mode > MAX_INTEGER_COMPUTATION_MODE)
5579 fatal ("unsupported wide integer operation");
5580 }
5581
5582 /* Check operand of a unary op. */
5583 if (TREE_CODE_CLASS (code) == '1')
5584 {
5585 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5586 if (GET_MODE_CLASS (mode) == MODE_INT
5587 && mode > MAX_INTEGER_COMPUTATION_MODE)
5588 fatal ("unsupported wide integer operation");
5589 }
5590
5591 /* Check operands of a binary/comparison op. */
5592 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5593 {
5594 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5595 if (GET_MODE_CLASS (mode) == MODE_INT
5596 && mode > MAX_INTEGER_COMPUTATION_MODE)
5597 fatal ("unsupported wide integer operation");
5598
5599 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5600 if (GET_MODE_CLASS (mode) == MODE_INT
5601 && mode > MAX_INTEGER_COMPUTATION_MODE)
5602 fatal ("unsupported wide integer operation");
5603 }
5604 }
5605 #endif
5606
5607 \f
5608 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5609 has any readonly fields. If any of the fields have types that
5610 contain readonly fields, return true as well. */
5611
5612 static int
5613 readonly_fields_p (type)
5614 tree type;
5615 {
5616 tree field;
5617
5618 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5619 if (TREE_CODE (field) == FIELD_DECL
5620 && (TREE_READONLY (field)
5621 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5622 && readonly_fields_p (TREE_TYPE (field)))))
5623 return 1;
5624
5625 return 0;
5626 }
5627 \f
5628 /* expand_expr: generate code for computing expression EXP.
5629 An rtx for the computed value is returned. The value is never null.
5630 In the case of a void EXP, const0_rtx is returned.
5631
5632 The value may be stored in TARGET if TARGET is nonzero.
5633 TARGET is just a suggestion; callers must assume that
5634 the rtx returned may not be the same as TARGET.
5635
5636 If TARGET is CONST0_RTX, it means that the value will be ignored.
5637
5638 If TMODE is not VOIDmode, it suggests generating the
5639 result in mode TMODE. But this is done only when convenient.
5640 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5641 TMODE is just a suggestion; callers must assume that
5642 the rtx returned may not have mode TMODE.
5643
5644 Note that TARGET may have neither TMODE nor MODE. In that case, it
5645 probably will not be used.
5646
5647 If MODIFIER is EXPAND_SUM then when EXP is an addition
5648 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5649 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5650 products as above, or REG or MEM, or constant.
5651 Ordinarily in such cases we would output mul or add instructions
5652 and then return a pseudo reg containing the sum.
5653
5654 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5655 it also marks a label as absolutely required (it can't be dead).
5656 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5657 This is used for outputting expressions used in initializers.
5658
5659 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5660 with a constant address even if that address is not normally legitimate.
5661 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
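   /* For example, with EXPAND_SUM the expression a + b*4 may come back as
      (plus (reg A) (mult (reg B) (const_int 4))) rather than as a pseudo
      holding the computed sum, so a caller that is building an address can
      fold it into an addressing mode (register names here are illustrative).  */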
5662
5663 rtx
5664 expand_expr (exp, target, tmode, modifier)
5665 register tree exp;
5666 rtx target;
5667 enum machine_mode tmode;
5668 enum expand_modifier modifier;
5669 {
5670 register rtx op0, op1, temp;
5671 tree type = TREE_TYPE (exp);
5672 int unsignedp = TREE_UNSIGNED (type);
5673 register enum machine_mode mode;
5674 register enum tree_code code = TREE_CODE (exp);
5675 optab this_optab;
5676 rtx subtarget, original_target;
5677 int ignore;
5678 tree context;
5679 /* Used by check-memory-usage to make modifier read only. */
5680 enum expand_modifier ro_modifier;
5681
5682 /* Handle ERROR_MARK before anybody tries to access its type. */
5683 if (TREE_CODE (exp) == ERROR_MARK)
5684 {
5685 op0 = CONST0_RTX (tmode);
5686 if (op0 != 0)
5687 return op0;
5688 return const0_rtx;
5689 }
5690
5691 mode = TYPE_MODE (type);
5692 /* Use subtarget as the target for operand 0 of a binary operation. */
5693 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5694 original_target = target;
5695 ignore = (target == const0_rtx
5696 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5697 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5698 || code == COND_EXPR)
5699 && TREE_CODE (type) == VOID_TYPE));
5700
5701 /* Make a read-only version of the modifier. */
5702 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5703 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5704 ro_modifier = modifier;
5705 else
5706 ro_modifier = EXPAND_NORMAL;
5707
5708 /* Don't use hard regs as subtargets, because the combiner
5709 can only handle pseudo regs. */
5710 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5711 subtarget = 0;
5712 /* Avoid subtargets inside loops,
5713 since they hide some invariant expressions. */
5714 if (preserve_subexpressions_p ())
5715 subtarget = 0;
5716
5717 /* If we are going to ignore this result, we need only do something
5718 if there is a side-effect somewhere in the expression. If there
5719 is, short-circuit the most common cases here. Note that we must
5720 not call expand_expr with anything but const0_rtx in case this
5721 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5722
5723 if (ignore)
5724 {
5725 if (! TREE_SIDE_EFFECTS (exp))
5726 return const0_rtx;
5727
5728 /* Ensure we reference a volatile object even if value is ignored, but
5729 don't do this if all we are doing is taking its address. */
5730 if (TREE_THIS_VOLATILE (exp)
5731 && TREE_CODE (exp) != FUNCTION_DECL
5732 && mode != VOIDmode && mode != BLKmode
5733 && modifier != EXPAND_CONST_ADDRESS)
5734 {
5735 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5736 if (GET_CODE (temp) == MEM)
5737 temp = copy_to_reg (temp);
5738 return const0_rtx;
5739 }
5740
5741 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5742 || code == INDIRECT_REF || code == BUFFER_REF)
5743 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5744 VOIDmode, ro_modifier);
5745 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5746 || code == ARRAY_REF)
5747 {
5748 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5749 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5750 return const0_rtx;
5751 }
5752 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5753 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5754 /* If the second operand has no side effects, just evaluate
5755 the first. */
5756 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5757 VOIDmode, ro_modifier);
5758 else if (code == BIT_FIELD_REF)
5759 {
5760 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5761 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5762 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5763 return const0_rtx;
5764 }
5766 target = 0;
5767 }
5768
5769 #ifdef MAX_INTEGER_COMPUTATION_MODE
5770 /* Only check stuff here if the mode we want is different from the mode
5771 of the expression; if it's the same, check_max_integer_computation_mode
5772 will handle it. Do we really need to check this stuff at all? */
5773
5774 if (target
5775 && GET_MODE (target) != mode
5776 && TREE_CODE (exp) != INTEGER_CST
5777 && TREE_CODE (exp) != PARM_DECL
5778 && TREE_CODE (exp) != ARRAY_REF
5779 && TREE_CODE (exp) != COMPONENT_REF
5780 && TREE_CODE (exp) != BIT_FIELD_REF
5781 && TREE_CODE (exp) != INDIRECT_REF
5782 && TREE_CODE (exp) != CALL_EXPR
5783 && TREE_CODE (exp) != VAR_DECL
5784 && TREE_CODE (exp) != RTL_EXPR)
5785 {
5786 enum machine_mode mode = GET_MODE (target);
5787
5788 if (GET_MODE_CLASS (mode) == MODE_INT
5789 && mode > MAX_INTEGER_COMPUTATION_MODE)
5790 fatal ("unsupported wide integer operation");
5791 }
5792
5793 if (tmode != mode
5794 && TREE_CODE (exp) != INTEGER_CST
5795 && TREE_CODE (exp) != PARM_DECL
5796 && TREE_CODE (exp) != ARRAY_REF
5797 && TREE_CODE (exp) != COMPONENT_REF
5798 && TREE_CODE (exp) != BIT_FIELD_REF
5799 && TREE_CODE (exp) != INDIRECT_REF
5800 && TREE_CODE (exp) != VAR_DECL
5801 && TREE_CODE (exp) != CALL_EXPR
5802 && TREE_CODE (exp) != RTL_EXPR
5803 && GET_MODE_CLASS (tmode) == MODE_INT
5804 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5805 fatal ("unsupported wide integer operation");
5806
5807 check_max_integer_computation_mode (exp);
5808 #endif
5809
5810 /* If will do cse, generate all results into pseudo registers
5811 since 1) that allows cse to find more things
5812 and 2) otherwise cse could produce an insn the machine
5813 cannot support. */
5814
5815 if (! cse_not_expected && mode != BLKmode && target
5816 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5817 target = subtarget;
5818
5819 switch (code)
5820 {
5821 case LABEL_DECL:
5822 {
5823 tree function = decl_function_context (exp);
5824 /* Handle using a label in a containing function. */
5825 if (function != current_function_decl
5826 && function != inline_function_decl && function != 0)
5827 {
5828 struct function *p = find_function_data (function);
5829 /* Allocate in the memory associated with the function
5830 that the label is in. */
5831 push_obstacks (p->function_obstack,
5832 p->function_maybepermanent_obstack);
5833
5834 p->expr->x_forced_labels
5835 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5836 p->expr->x_forced_labels);
5837 pop_obstacks ();
5838 }
5839 else
5840 {
5841 if (modifier == EXPAND_INITIALIZER)
5842 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5843 label_rtx (exp),
5844 forced_labels);
5845 }
5846
5847 temp = gen_rtx_MEM (FUNCTION_MODE,
5848 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5849 if (function != current_function_decl
5850 && function != inline_function_decl && function != 0)
5851 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5852 return temp;
5853 }
5854
5855 case PARM_DECL:
5856 if (DECL_RTL (exp) == 0)
5857 {
5858 error_with_decl (exp, "prior parameter's size depends on `%s'");
5859 return CONST0_RTX (mode);
5860 }
5861
5862 /* ... fall through ... */
5863
5864 case VAR_DECL:
5865 /* If a static var's type was incomplete when the decl was written,
5866 but the type is complete now, lay out the decl now. */
5867 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5868 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5869 {
5870 push_obstacks_nochange ();
5871 end_temporary_allocation ();
5872 layout_decl (exp, 0);
5873 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5874 pop_obstacks ();
5875 }
5876
5877 /* Although static-storage variables start off initialized, according to
5878 ANSI C, a memcpy could overwrite them with uninitialized values. So
5879 we check them too. This also lets us check for read-only variables
5880 accessed via a non-const declaration, in case it won't be detected
5881 any other way (e.g., in an embedded system or OS kernel without
5882 memory protection).
5883
5884 Aggregates are not checked here; they're handled elsewhere. */
5885 if (cfun && current_function_check_memory_usage
5886 && code == VAR_DECL
5887 && GET_CODE (DECL_RTL (exp)) == MEM
5888 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5889 {
5890 enum memory_use_mode memory_usage;
5891 memory_usage = get_memory_usage_from_modifier (modifier);
5892
5893 if (memory_usage != MEMORY_USE_DONT)
5894 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5895 XEXP (DECL_RTL (exp), 0), Pmode,
5896 GEN_INT (int_size_in_bytes (type)),
5897 TYPE_MODE (sizetype),
5898 GEN_INT (memory_usage),
5899 TYPE_MODE (integer_type_node));
5900 }
5901
5902 /* ... fall through ... */
5903
5904 case FUNCTION_DECL:
5905 case RESULT_DECL:
5906 if (DECL_RTL (exp) == 0)
5907 abort ();
5908
5909 /* Ensure the variable is marked as used even if it doesn't go through
5910 a parser. If it hasn't been used yet, write out an external
5911 definition. */
5912 if (! TREE_USED (exp))
5913 {
5914 assemble_external (exp);
5915 TREE_USED (exp) = 1;
5916 }
5917
5918 /* Show we haven't gotten RTL for this yet. */
5919 temp = 0;
5920
5921 /* Handle variables inherited from containing functions. */
5922 context = decl_function_context (exp);
5923
5924 /* We treat inline_function_decl as an alias for the current function
5925 because that is the inline function whose vars, types, etc.
5926 are being merged into the current function.
5927 See expand_inline_function. */
5928
5929 if (context != 0 && context != current_function_decl
5930 && context != inline_function_decl
5931 /* If var is static, we don't need a static chain to access it. */
5932 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5933 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5934 {
5935 rtx addr;
5936
5937 /* Mark as non-local and addressable. */
5938 DECL_NONLOCAL (exp) = 1;
5939 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5940 abort ();
5941 mark_addressable (exp);
5942 if (GET_CODE (DECL_RTL (exp)) != MEM)
5943 abort ();
5944 addr = XEXP (DECL_RTL (exp), 0);
5945 if (GET_CODE (addr) == MEM)
5946 addr = gen_rtx_MEM (Pmode,
5947 fix_lexical_addr (XEXP (addr, 0), exp));
5948 else
5949 addr = fix_lexical_addr (addr, exp);
5950 temp = change_address (DECL_RTL (exp), mode, addr);
5951 }
5952
5953 /* This is the case of an array whose size is to be determined
5954 from its initializer, while the initializer is still being parsed.
5955 See expand_decl. */
5956
5957 else if (GET_CODE (DECL_RTL (exp)) == MEM
5958 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5959 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5960 XEXP (DECL_RTL (exp), 0));
5961
5962 /* If DECL_RTL is memory, we are in the normal case: if the address
5963 is not valid, or it is not a register and -fforce-addr is
5964 specified, get the address into a register. */
5965
5966 else if (GET_CODE (DECL_RTL (exp)) == MEM
5967 && modifier != EXPAND_CONST_ADDRESS
5968 && modifier != EXPAND_SUM
5969 && modifier != EXPAND_INITIALIZER
5970 && (! memory_address_p (DECL_MODE (exp),
5971 XEXP (DECL_RTL (exp), 0))
5972 || (flag_force_addr
5973 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5974 temp = change_address (DECL_RTL (exp), VOIDmode,
5975 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5976
5977 /* If we got something, return it. But first, set the alignment
5978 if the address is a register. */
5979 if (temp != 0)
5980 {
5981 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5982 mark_reg_pointer (XEXP (temp, 0),
5983 DECL_ALIGN (exp) / BITS_PER_UNIT);
5984
5985 return temp;
5986 }
5987
5988 /* If the mode of DECL_RTL does not match that of the decl, it
5989 must be a promoted value. We return a SUBREG of the wanted mode,
5990 but mark it so that we know that it was already extended. */
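      /* For example, on a target whose PROMOTE_MODE widens HImode locals
         to SImode, DECL_RTL may be (reg:SI N) while the decl's mode is
         HImode; we then return (subreg:HI (reg:SI N) 0) with
         SUBREG_PROMOTED_VAR_P set.  (An illustrative case, not the only one.)  */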
5991
5992 if (GET_CODE (DECL_RTL (exp)) == REG
5993 && GET_MODE (DECL_RTL (exp)) != mode)
5994 {
5995 /* Get the signedness used for this variable. Ensure we get the
5996 same mode we got when the variable was declared. */
5997 if (GET_MODE (DECL_RTL (exp))
5998 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5999 abort ();
6000
6001 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6002 SUBREG_PROMOTED_VAR_P (temp) = 1;
6003 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6004 return temp;
6005 }
6006
6007 return DECL_RTL (exp);
6008
6009 case INTEGER_CST:
6010 return immed_double_const (TREE_INT_CST_LOW (exp),
6011 TREE_INT_CST_HIGH (exp), mode);
6012
6013 case CONST_DECL:
6014 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6015 EXPAND_MEMORY_USE_BAD);
6016
6017 case REAL_CST:
6018 /* If optimized, generate immediate CONST_DOUBLE
6019 which will be turned into memory by reload if necessary.
6020
6021 We used to force a register so that loop.c could see it. But
6022 this does not allow gen_* patterns to perform optimizations with
6023 the constants. It also produces two insns in cases like "x = 1.0;".
6024 On most machines, floating-point constants are not permitted in
6025 many insns, so we'd end up copying it to a register in any case.
6026
6027 Now, we do the copying in expand_binop, if appropriate. */
6028 return immed_real_const (exp);
6029
6030 case COMPLEX_CST:
6031 case STRING_CST:
6032 if (! TREE_CST_RTL (exp))
6033 output_constant_def (exp);
6034
6035 /* TREE_CST_RTL probably contains a constant address.
6036 On RISC machines where a constant address isn't valid,
6037 make some insns to get that address into a register. */
6038 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6039 && modifier != EXPAND_CONST_ADDRESS
6040 && modifier != EXPAND_INITIALIZER
6041 && modifier != EXPAND_SUM
6042 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6043 || (flag_force_addr
6044 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6045 return change_address (TREE_CST_RTL (exp), VOIDmode,
6046 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6047 return TREE_CST_RTL (exp);
6048
6049 case EXPR_WITH_FILE_LOCATION:
6050 {
6051 rtx to_return;
6052 char *saved_input_filename = input_filename;
6053 int saved_lineno = lineno;
6054 input_filename = EXPR_WFL_FILENAME (exp);
6055 lineno = EXPR_WFL_LINENO (exp);
6056 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6057 emit_line_note (input_filename, lineno);
6058 /* Possibly avoid switching back and forth here. */
6059 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6060 input_filename = saved_input_filename;
6061 lineno = saved_lineno;
6062 return to_return;
6063 }
6064
6065 case SAVE_EXPR:
6066 context = decl_function_context (exp);
6067
6068 /* If this SAVE_EXPR was at global context, assume we are an
6069 initialization function and move it into our context. */
6070 if (context == 0)
6071 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6072
6073 /* We treat inline_function_decl as an alias for the current function
6074 because that is the inline function whose vars, types, etc.
6075 are being merged into the current function.
6076 See expand_inline_function. */
6077 if (context == current_function_decl || context == inline_function_decl)
6078 context = 0;
6079
6080 /* If this is non-local, handle it. */
6081 if (context)
6082 {
6083 /* The following call just exists to abort if the context is
6084 not of a containing function. */
6085 find_function_data (context);
6086
6087 temp = SAVE_EXPR_RTL (exp);
6088 if (temp && GET_CODE (temp) == REG)
6089 {
6090 put_var_into_stack (exp);
6091 temp = SAVE_EXPR_RTL (exp);
6092 }
6093 if (temp == 0 || GET_CODE (temp) != MEM)
6094 abort ();
6095 return change_address (temp, mode,
6096 fix_lexical_addr (XEXP (temp, 0), exp));
6097 }
6098 if (SAVE_EXPR_RTL (exp) == 0)
6099 {
6100 if (mode == VOIDmode)
6101 temp = const0_rtx;
6102 else
6103 temp = assign_temp (type, 3, 0, 0);
6104
6105 SAVE_EXPR_RTL (exp) = temp;
6106 if (!optimize && GET_CODE (temp) == REG)
6107 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6108 save_expr_regs);
6109
6110 /* If the mode of TEMP does not match that of the expression, it
6111 must be a promoted value. We pass store_expr a SUBREG of the
6112 wanted mode but mark it so that we know that it was already
6113 extended. Note that `unsignedp' was modified above in
6114 this case. */
6115
6116 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6117 {
6118 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6119 SUBREG_PROMOTED_VAR_P (temp) = 1;
6120 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6121 }
6122
6123 if (temp == const0_rtx)
6124 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6125 EXPAND_MEMORY_USE_BAD);
6126 else
6127 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6128
6129 TREE_USED (exp) = 1;
6130 }
6131
6132 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6133 must be a promoted value. We return a SUBREG of the wanted mode,
6134 but mark it so that we know that it was already extended. */
6135
6136 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6137 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6138 {
6139 /* Compute the signedness and make the proper SUBREG. */
6140 promote_mode (type, mode, &unsignedp, 0);
6141 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6142 SUBREG_PROMOTED_VAR_P (temp) = 1;
6143 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6144 return temp;
6145 }
6146
6147 return SAVE_EXPR_RTL (exp);
6148
6149 case UNSAVE_EXPR:
6150 {
6151 rtx temp;
6152 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6153 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6154 return temp;
6155 }
6156
6157 case PLACEHOLDER_EXPR:
6158 {
6159 tree placeholder_expr;
6160
6161 /* If there is an object on the head of the placeholder list,
6162 see if some object in it is of type TYPE or a pointer to it. For
6163 further information, see tree.def. */
6164 for (placeholder_expr = placeholder_list;
6165 placeholder_expr != 0;
6166 placeholder_expr = TREE_CHAIN (placeholder_expr))
6167 {
6168 tree need_type = TYPE_MAIN_VARIANT (type);
6169 tree object = 0;
6170 tree old_list = placeholder_list;
6171 tree elt;
6172
6173 /* Find the outermost reference that is of the type we want.
6174 If none, see if any object has a type that is a pointer to
6175 the type we want. */
6176 for (elt = TREE_PURPOSE (placeholder_expr);
6177 elt != 0 && object == 0;
6178 elt
6179 = ((TREE_CODE (elt) == COMPOUND_EXPR
6180 || TREE_CODE (elt) == COND_EXPR)
6181 ? TREE_OPERAND (elt, 1)
6182 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6183 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6184 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6185 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6186 ? TREE_OPERAND (elt, 0) : 0))
6187 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6188 object = elt;
6189
6190 for (elt = TREE_PURPOSE (placeholder_expr);
6191 elt != 0 && object == 0;
6192 elt
6193 = ((TREE_CODE (elt) == COMPOUND_EXPR
6194 || TREE_CODE (elt) == COND_EXPR)
6195 ? TREE_OPERAND (elt, 1)
6196 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6197 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6198 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6199 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6200 ? TREE_OPERAND (elt, 0) : 0))
6201 if (POINTER_TYPE_P (TREE_TYPE (elt))
6202 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6203 == need_type))
6204 object = build1 (INDIRECT_REF, need_type, elt);
6205
6206 if (object != 0)
6207 {
6208 /* Expand this object skipping the list entries before
6209 it was found in case it is also a PLACEHOLDER_EXPR.
6210 In that case, we want to translate it using subsequent
6211 entries. */
6212 placeholder_list = TREE_CHAIN (placeholder_expr);
6213 temp = expand_expr (object, original_target, tmode,
6214 ro_modifier);
6215 placeholder_list = old_list;
6216 return temp;
6217 }
6218 }
6219 }
6220
6221 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6222 abort ();
6223
6224 case WITH_RECORD_EXPR:
6225 /* Put the object on the placeholder list, expand our first operand,
6226 and pop the list. */
6227 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6228 placeholder_list);
6229 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6230 tmode, ro_modifier);
6231 placeholder_list = TREE_CHAIN (placeholder_list);
6232 return target;
6233
6234 case GOTO_EXPR:
6235 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6236 expand_goto (TREE_OPERAND (exp, 0));
6237 else
6238 expand_computed_goto (TREE_OPERAND (exp, 0));
6239 return const0_rtx;
6240
6241 case EXIT_EXPR:
6242 expand_exit_loop_if_false (NULL_PTR,
6243 invert_truthvalue (TREE_OPERAND (exp, 0)));
6244 return const0_rtx;
6245
6246 case LABELED_BLOCK_EXPR:
6247 if (LABELED_BLOCK_BODY (exp))
6248 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6249 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6250 return const0_rtx;
6251
6252 case EXIT_BLOCK_EXPR:
6253 if (EXIT_BLOCK_RETURN (exp))
6254 sorry ("returned value in block_exit_expr");
6255 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6256 return const0_rtx;
6257
6258 case LOOP_EXPR:
6259 push_temp_slots ();
6260 expand_start_loop (1);
6261 expand_expr_stmt (TREE_OPERAND (exp, 0));
6262 expand_end_loop ();
6263 pop_temp_slots ();
6264
6265 return const0_rtx;
6266
6267 case BIND_EXPR:
6268 {
6269 tree vars = TREE_OPERAND (exp, 0);
6270 int vars_need_expansion = 0;
6271
6272 /* Need to open a binding contour here because
6273 if there are any cleanups they must be contained here. */
6274 expand_start_bindings (2);
6275
6276 /* Mark the corresponding BLOCK for output in its proper place. */
6277 if (TREE_OPERAND (exp, 2) != 0
6278 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6279 insert_block (TREE_OPERAND (exp, 2));
6280
6281 /* If VARS have not yet been expanded, expand them now. */
6282 while (vars)
6283 {
6284 if (DECL_RTL (vars) == 0)
6285 {
6286 vars_need_expansion = 1;
6287 expand_decl (vars);
6288 }
6289 expand_decl_init (vars);
6290 vars = TREE_CHAIN (vars);
6291 }
6292
6293 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6294
6295 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6296
6297 return temp;
6298 }
6299
6300 case RTL_EXPR:
6301 if (RTL_EXPR_SEQUENCE (exp))
6302 {
6303 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6304 abort ();
6305 emit_insns (RTL_EXPR_SEQUENCE (exp));
6306 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6307 }
6308 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6309 free_temps_for_rtl_expr (exp);
6310 return RTL_EXPR_RTL (exp);
6311
6312 case CONSTRUCTOR:
6313 /* If we don't need the result, just ensure we evaluate any
6314 subexpressions. */
6315 if (ignore)
6316 {
6317 tree elt;
6318 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6319 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6320 EXPAND_MEMORY_USE_BAD);
6321 return const0_rtx;
6322 }
6323
6324 /* All elts simple constants => refer to a constant in memory. But
6325 if this is a non-BLKmode mode, let it store a field at a time
6326 since that should make a CONST_INT or CONST_DOUBLE when we
6327 fold. Likewise, if we have a target we can use, it is best to
6328 store directly into the target unless the type is large enough
6329 that memcpy will be used. If we are making an initializer and
6330 all operands are constant, put it in memory as well. */
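      /* For instance, a large static aggregate initializer whose elements
         are all constants is emitted once as read-only data and referred to
         by its address, while a register-sized (non-BLKmode) constructor is
         built a field at a time below so the result can fold to a CONST_INT
         or CONST_DOUBLE.  */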
6331 else if ((TREE_STATIC (exp)
6332 && ((mode == BLKmode
6333 && ! (target != 0 && safe_from_p (target, exp, 1)))
6334 || TREE_ADDRESSABLE (exp)
6335 || (TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST
6336 && TREE_INT_CST_HIGH (TYPE_SIZE_UNIT (type)) == 0
6337 && (! MOVE_BY_PIECES_P
6338 (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type)),
6339 TYPE_ALIGN (type) / BITS_PER_UNIT))
6340 && ! mostly_zeros_p (exp))))
6341 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6342 {
6343 rtx constructor = output_constant_def (exp);
6344 if (modifier != EXPAND_CONST_ADDRESS
6345 && modifier != EXPAND_INITIALIZER
6346 && modifier != EXPAND_SUM
6347 && (! memory_address_p (GET_MODE (constructor),
6348 XEXP (constructor, 0))
6349 || (flag_force_addr
6350 && GET_CODE (XEXP (constructor, 0)) != REG)))
6351 constructor = change_address (constructor, VOIDmode,
6352 XEXP (constructor, 0));
6353 return constructor;
6354 }
6355
6356 else
6357 {
6358 /* Handle calls that pass values in multiple non-contiguous
6359 locations. The Irix 6 ABI has examples of this. */
6360 if (target == 0 || ! safe_from_p (target, exp, 1)
6361 || GET_CODE (target) == PARALLEL)
6362 {
6363 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6364 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6365 else
6366 target = assign_temp (type, 0, 1, 1);
6367 }
6368
6369 if (TREE_READONLY (exp))
6370 {
6371 if (GET_CODE (target) == MEM)
6372 target = copy_rtx (target);
6373
6374 RTX_UNCHANGING_P (target) = 1;
6375 }
6376
6377 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6378 int_size_in_bytes (TREE_TYPE (exp)));
6379 return target;
6380 }
6381
6382 case INDIRECT_REF:
6383 {
6384 tree exp1 = TREE_OPERAND (exp, 0);
6385 tree exp2;
6386 tree index;
6387 tree string = string_constant (exp1, &index);
6388
6389 /* Try to optimize reads from const strings. */
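        /* E.g. a dereference such as *("foo" + 2), where string_constant
           recovers the STRING_CST and the index, expands directly to the
           character constant instead of a memory load.  */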
6390 if (string
6391 && TREE_CODE (string) == STRING_CST
6392 && TREE_CODE (index) == INTEGER_CST
6393 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6394 && GET_MODE_CLASS (mode) == MODE_INT
6395 && GET_MODE_SIZE (mode) == 1
6396 && modifier != EXPAND_MEMORY_USE_WO)
6397 return
6398 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6399
6400 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6401 op0 = memory_address (mode, op0);
6402
6403 if (cfun && current_function_check_memory_usage
6404 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6405 {
6406 enum memory_use_mode memory_usage;
6407 memory_usage = get_memory_usage_from_modifier (modifier);
6408
6409 if (memory_usage != MEMORY_USE_DONT)
6410 {
6411 in_check_memory_usage = 1;
6412 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6413 op0, Pmode,
6414 GEN_INT (int_size_in_bytes (type)),
6415 TYPE_MODE (sizetype),
6416 GEN_INT (memory_usage),
6417 TYPE_MODE (integer_type_node));
6418 in_check_memory_usage = 0;
6419 }
6420 }
6421
6422 temp = gen_rtx_MEM (mode, op0);
6423 /* If address was computed by addition,
6424 mark this as an element of an aggregate. */
6425 if (TREE_CODE (exp1) == PLUS_EXPR
6426 || (TREE_CODE (exp1) == SAVE_EXPR
6427 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6428 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6429 || (TREE_CODE (exp1) == ADDR_EXPR
6430 && (exp2 = TREE_OPERAND (exp1, 0))
6431 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6432 MEM_SET_IN_STRUCT_P (temp, 1);
6433
6434 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6435 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6436
6437 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6438 here, because, in C and C++, the fact that a location is accessed
6439 through a pointer to const does not mean that the value there can
6440 never change. Languages where it can never change should
6441 also set TREE_STATIC. */
6442 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6443
6444 /* If we are writing to this object and its type is a record with
6445 readonly fields, we must mark it as readonly so it will
6446 conflict with readonly references to those fields. */
6447 if (modifier == EXPAND_MEMORY_USE_WO
6448 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6449 RTX_UNCHANGING_P (temp) = 1;
6450
6451 return temp;
6452 }
6453
6454 case ARRAY_REF:
6455 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6456 abort ();
6457
6458 {
6459 tree array = TREE_OPERAND (exp, 0);
6460 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6461 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6462 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6463 HOST_WIDE_INT i;
6464
6465 /* Optimize the special-case of a zero lower bound.
6466
6467 We convert the low_bound to sizetype to avoid some problems
6468 with constant folding. (E.g. suppose the lower bound is 1,
6469 and its mode is QI. Without the conversion, (ARRAY
6470 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6471 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6472
6473 if (! integer_zerop (low_bound))
6474 index = size_diffop (index, convert (sizetype, low_bound));
6475
6476 /* Fold an expression like: "foo"[2].
6477 This is not done in fold so it won't happen inside &.
6478 Don't fold if this is for wide characters since it's too
6479 difficult to do correctly and this is a very rare case. */
6480
6481 if (TREE_CODE (array) == STRING_CST
6482 && TREE_CODE (index) == INTEGER_CST
6483 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6484 && GET_MODE_CLASS (mode) == MODE_INT
6485 && GET_MODE_SIZE (mode) == 1)
6486 return
6487 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6488
6489 /* If this is a constant index into a constant array,
6490 just get the value from the array. Handle both the cases when
6491 we have an explicit constructor and when our operand is a variable
6492 that was declared const. */
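        /* For example, given "static const int t[3] = {1, 2, 3};", a use of
           t[1] can (with optimization, via DECL_INITIAL) be replaced by the
           constant 2 instead of a load from memory.  */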
6493
6494 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6495 && TREE_CODE (index) == INTEGER_CST
6496 && 0 > compare_tree_int (index,
6497 list_length (CONSTRUCTOR_ELTS
6498 (TREE_OPERAND (exp, 0)))))
6499 {
6500 tree elem;
6501
6502 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6503 i = TREE_INT_CST_LOW (index);
6504 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6505 ;
6506
6507 if (elem)
6508 return expand_expr (fold (TREE_VALUE (elem)), target,
6509 tmode, ro_modifier);
6510 }
6511
6512 else if (optimize >= 1
6513 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6514 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6515 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6516 {
6517 if (TREE_CODE (index) == INTEGER_CST)
6518 {
6519 tree init = DECL_INITIAL (array);
6520
6521 if (TREE_CODE (init) == CONSTRUCTOR)
6522 {
6523 tree elem;
6524
6525 for (elem = CONSTRUCTOR_ELTS (init);
6526 (elem
6527 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6528 elem = TREE_CHAIN (elem))
6529 ;
6530
6531 if (elem)
6532 return expand_expr (fold (TREE_VALUE (elem)), target,
6533 tmode, ro_modifier);
6534 }
6535 else if (TREE_CODE (init) == STRING_CST
6536 && 0 > compare_tree_int (index,
6537 TREE_STRING_LENGTH (init)))
6538 return (GEN_INT
6539 (TREE_STRING_POINTER
6540 (init)[TREE_INT_CST_LOW (index)]));
6541 }
6542 }
6543 }
6544
6545 /* ... fall through ... */
6546
6547 case COMPONENT_REF:
6548 case BIT_FIELD_REF:
6549 /* If the operand is a CONSTRUCTOR, we can just extract the
6550 appropriate field if it is present. Don't do this if we have
6551 already written the data since we want to refer to that copy
6552 and varasm.c assumes that's what we'll do. */
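      /* For instance, when the operand is itself a CONSTRUCTOR node, say
         { 5 } for a struct with an unsigned 3-bit field F, a reference to F
         is taken straight from the constructor's element list; because F is
         a bitfield, the value is masked with (1 << 3) - 1, or shifted left
         and back right to sign-extend had the field been signed.  */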
6553 if (code != ARRAY_REF
6554 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6555 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6556 {
6557 tree elt;
6558
6559 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6560 elt = TREE_CHAIN (elt))
6561 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6562 /* We can normally use the value of the field in the
6563 CONSTRUCTOR. However, if this is a bitfield in
6564 an integral mode that we can fit in a HOST_WIDE_INT,
6565 we must mask only the number of bits in the bitfield,
6566 since this is done implicitly by the constructor. If
6567 the bitfield does not meet either of those conditions,
6568 we can't do this optimization. */
6569 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6570 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6571 == MODE_INT)
6572 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6573 <= HOST_BITS_PER_WIDE_INT))))
6574 {
6575 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6576 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6577 {
6578 HOST_WIDE_INT bitsize
6579 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6580
6581 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6582 {
6583 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6584 op0 = expand_and (op0, op1, target);
6585 }
6586 else
6587 {
6588 enum machine_mode imode
6589 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6590 tree count
6591 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6592 0);
6593
6594 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6595 target, 0);
6596 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6597 target, 0);
6598 }
6599 }
6600
6601 return op0;
6602 }
6603 }
6604
6605 {
6606 enum machine_mode mode1;
6607 int bitsize;
6608 int bitpos;
6609 tree offset;
6610 int volatilep = 0;
6611 unsigned int alignment;
6612 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6613 &mode1, &unsignedp, &volatilep,
6614 &alignment);
6615
6616 /* If we got back the original object, something is wrong. Perhaps
6617 we are evaluating an expression too early. In any event, don't
6618 infinitely recurse. */
6619 if (tem == exp)
6620 abort ();
6621
6622 /* If TEM's type is a union of variable size, pass TARGET to the inner
6623 computation, since it will need a temporary and TARGET is known
6624 to suffice. This occurs in unchecked conversion in Ada. */
6625
6626 op0 = expand_expr (tem,
6627 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6628 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6629 != INTEGER_CST)
6630 ? target : NULL_RTX),
6631 VOIDmode,
6632 (modifier == EXPAND_INITIALIZER
6633 || modifier == EXPAND_CONST_ADDRESS)
6634 ? modifier : EXPAND_NORMAL);
6635
6636 /* If this is a constant, put it into a register if it is a
6637 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6638 if (CONSTANT_P (op0))
6639 {
6640 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6641 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6642 && offset == 0)
6643 op0 = force_reg (mode, op0);
6644 else
6645 op0 = validize_mem (force_const_mem (mode, op0));
6646 }
6647
6648 if (offset != 0)
6649 {
6650 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6651
6652 /* If this object is in memory, put it into a register.
6653 This case can't occur in C, but can in Ada if we have
6654 unchecked conversion of an expression from a scalar type to
6655 an array or record type. */
6656 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6657 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6658 {
6659 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6660
6661 mark_temp_addr_taken (memloc);
6662 emit_move_insn (memloc, op0);
6663 op0 = memloc;
6664 }
6665
6666 if (GET_CODE (op0) != MEM)
6667 abort ();
6668
6669 if (GET_MODE (offset_rtx) != ptr_mode)
6670 {
6671 #ifdef POINTERS_EXTEND_UNSIGNED
6672 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6673 #else
6674 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6675 #endif
6676 }
6677
6678 /* A constant address in OP0 can have VOIDmode; we must not try
6679 to call force_reg in that case, so avoid it here. */
6680 if (GET_CODE (op0) == MEM
6681 && GET_MODE (op0) == BLKmode
6682 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6683 && bitsize != 0
6684 && (bitpos % bitsize) == 0
6685 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6686 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6687 {
6688 rtx temp = change_address (op0, mode1,
6689 plus_constant (XEXP (op0, 0),
6690 (bitpos /
6691 BITS_PER_UNIT)));
6692 if (GET_CODE (XEXP (temp, 0)) == REG)
6693 op0 = temp;
6694 else
6695 op0 = change_address (op0, mode1,
6696 force_reg (GET_MODE (XEXP (temp, 0)),
6697 XEXP (temp, 0)));
6698 bitpos = 0;
6699 }
6700
6701
6702 op0 = change_address (op0, VOIDmode,
6703 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6704 force_reg (ptr_mode,
6705 offset_rtx)));
6706 }
6707
6708 /* Don't forget about volatility even if this is a bitfield. */
6709 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6710 {
6711 op0 = copy_rtx (op0);
6712 MEM_VOLATILE_P (op0) = 1;
6713 }
6714
6715 /* Check the access. */
6716 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6717 {
6718 enum memory_use_mode memory_usage;
6719 memory_usage = get_memory_usage_from_modifier (modifier);
6720
6721 if (memory_usage != MEMORY_USE_DONT)
6722 {
6723 rtx to;
6724 int size;
6725
6726 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6727 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6728
6729 /* Check the access right of the pointer. */
6730 if (size > BITS_PER_UNIT)
6731 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6732 to, Pmode,
6733 GEN_INT (size / BITS_PER_UNIT),
6734 TYPE_MODE (sizetype),
6735 GEN_INT (memory_usage),
6736 TYPE_MODE (integer_type_node));
6737 }
6738 }
6739
6740 /* In cases where an aligned union has an unaligned object
6741 as a field, we might be extracting a BLKmode value from
6742 an integer-mode (e.g., SImode) object. Handle this case
6743 by doing the extract into an object as wide as the field
6744 (which we know to be the width of a basic mode), then
6745 storing into memory, and changing the mode to BLKmode.
6746 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6747 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6748 if (mode1 == VOIDmode
6749 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6750 || (modifier != EXPAND_CONST_ADDRESS
6751 && modifier != EXPAND_INITIALIZER
6752 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6753 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6754 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6755 /* If the field isn't aligned enough to fetch as a memref,
6756 fetch it as a bit field. */
6757 || (mode1 != BLKmode
6758 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6759 && ((TYPE_ALIGN (TREE_TYPE (tem))
6760 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6761 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6762 /* If the type and the field are a constant size and the
6763 size of the type isn't the same size as the bitfield,
6764 we must use bitfield operations. */
6765 || ((bitsize >= 0
6766 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6767 == INTEGER_CST)
6768 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6769 bitsize)))))
6770 || (modifier != EXPAND_CONST_ADDRESS
6771 && modifier != EXPAND_INITIALIZER
6772 && mode == BLKmode
6773 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6774 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6775 || bitpos % TYPE_ALIGN (type) != 0)))
6776 {
6777 enum machine_mode ext_mode = mode;
6778
6779 if (ext_mode == BLKmode
6780 && ! (target != 0 && GET_CODE (op0) == MEM
6781 && GET_CODE (target) == MEM
6782 && bitpos % BITS_PER_UNIT == 0))
6783 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6784
6785 if (ext_mode == BLKmode)
6786 {
6787 /* In this case, BITPOS must start at a byte boundary and
6788 TARGET, if specified, must be a MEM. */
6789 if (GET_CODE (op0) != MEM
6790 || (target != 0 && GET_CODE (target) != MEM)
6791 || bitpos % BITS_PER_UNIT != 0)
6792 abort ();
6793
6794 op0 = change_address (op0, VOIDmode,
6795 plus_constant (XEXP (op0, 0),
6796 bitpos / BITS_PER_UNIT));
6797 if (target == 0)
6798 target = assign_temp (type, 0, 1, 1);
6799
6800 emit_block_move (target, op0,
6801 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6802 / BITS_PER_UNIT),
6803 1);
6804
6805 return target;
6806 }
6807
6808 op0 = validize_mem (op0);
6809
6810 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6811 mark_reg_pointer (XEXP (op0, 0), alignment);
6812
6813 op0 = extract_bit_field (op0, bitsize, bitpos,
6814 unsignedp, target, ext_mode, ext_mode,
6815 alignment,
6816 int_size_in_bytes (TREE_TYPE (tem)));
6817
6818 /* If the result is a record type and BITSIZE is narrower than
6819 the mode of OP0, an integral mode, and this is a big endian
6820 machine, we must put the field into the high-order bits. */
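          /* E.g. a 24-bit record extracted into an SImode register on a
             big-endian target is shifted left 8 bits so that its bytes sit
             in the high-order end of the register, matching how a record
             value is positioned in a register on such machines.  */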
6821 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6822 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6823 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6824 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6825 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6826 - bitsize),
6827 op0, 1);
6828
6829 if (mode == BLKmode)
6830 {
6831 rtx new = assign_stack_temp (ext_mode,
6832 bitsize / BITS_PER_UNIT, 0);
6833
6834 emit_move_insn (new, op0);
6835 op0 = copy_rtx (new);
6836 PUT_MODE (op0, BLKmode);
6837 MEM_SET_IN_STRUCT_P (op0, 1);
6838 }
6839
6840 return op0;
6841 }
6842
6843 /* If the result is BLKmode, use that to access the object
6844 now as well. */
6845 if (mode == BLKmode)
6846 mode1 = BLKmode;
6847
6848 /* Get a reference to just this component. */
6849 if (modifier == EXPAND_CONST_ADDRESS
6850 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6851 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6852 (bitpos / BITS_PER_UNIT)));
6853 else
6854 op0 = change_address (op0, mode1,
6855 plus_constant (XEXP (op0, 0),
6856 (bitpos / BITS_PER_UNIT)));
6857
6858 if (GET_CODE (op0) == MEM)
6859 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6860
6861 if (GET_CODE (XEXP (op0, 0)) == REG)
6862 mark_reg_pointer (XEXP (op0, 0), alignment);
6863
6864 MEM_SET_IN_STRUCT_P (op0, 1);
6865 MEM_VOLATILE_P (op0) |= volatilep;
6866 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6867 || modifier == EXPAND_CONST_ADDRESS
6868 || modifier == EXPAND_INITIALIZER)
6869 return op0;
6870 else if (target == 0)
6871 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6872
6873 convert_move (target, op0, unsignedp);
6874 return target;
6875 }
6876
6877 /* Intended for a reference to a buffer of a file-object in Pascal.
6878 But it's not certain that a special tree code will really be
6879 necessary for these. INDIRECT_REF might work for them. */
6880 case BUFFER_REF:
6881 abort ();
6882
6883 case IN_EXPR:
6884 {
6885 /* Pascal set IN expression.
6886
6887 Algorithm:
6888 rlo = set_low - (set_low%bits_per_word);
6889 the_word = set [ (index - rlo)/bits_per_word ];
6890 bit_index = index % bits_per_word;
6891 bitmask = 1 << bit_index;
6892 return !!(the_word & bitmask); */
6893
6894 tree set = TREE_OPERAND (exp, 0);
6895 tree index = TREE_OPERAND (exp, 1);
6896 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6897 tree set_type = TREE_TYPE (set);
6898 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6899 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6900 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6901 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6902 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6903 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6904 rtx setaddr = XEXP (setval, 0);
6905 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6906 rtx rlow;
6907 rtx diff, quo, rem, addr, bit, result;
6908
6909 preexpand_calls (exp);
6910
6911 /* If domain is empty, answer is no. Likewise if index is constant
6912 and out of bounds. */
6913 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6914 && TREE_CODE (set_low_bound) == INTEGER_CST
6915 && tree_int_cst_lt (set_high_bound, set_low_bound))
6916 || (TREE_CODE (index) == INTEGER_CST
6917 && TREE_CODE (set_low_bound) == INTEGER_CST
6918 && tree_int_cst_lt (index, set_low_bound))
6919 || (TREE_CODE (set_high_bound) == INTEGER_CST
6920 && TREE_CODE (index) == INTEGER_CST
6921 && tree_int_cst_lt (set_high_bound, index))))
6922 return const0_rtx;
6923
6924 if (target == 0)
6925 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6926
6927 /* If we get here, we have to generate the code for both cases
6928 (in range and out of range). */
6929
6930 op0 = gen_label_rtx ();
6931 op1 = gen_label_rtx ();
6932
6933 if (! (GET_CODE (index_val) == CONST_INT
6934 && GET_CODE (lo_r) == CONST_INT))
6935 {
6936 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6937 GET_MODE (index_val), iunsignedp, 0, op1);
6938 }
6939
6940 if (! (GET_CODE (index_val) == CONST_INT
6941 && GET_CODE (hi_r) == CONST_INT))
6942 {
6943 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6944 GET_MODE (index_val), iunsignedp, 0, op1);
6945 }
6946
6947 /* Calculate the element number of bit zero in the first word
6948 of the set. */
6949 if (GET_CODE (lo_r) == CONST_INT)
6950 rlow = GEN_INT (INTVAL (lo_r)
6951 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6952 else
6953 rlow = expand_binop (index_mode, and_optab, lo_r,
6954 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6955 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6956
6957 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6958 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6959
6960 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6961 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6962 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6963 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6964
6965 addr = memory_address (byte_mode,
6966 expand_binop (index_mode, add_optab, diff,
6967 setaddr, NULL_RTX, iunsignedp,
6968 OPTAB_LIB_WIDEN));
6969
6970 /* Extract the bit we want to examine. */
6971 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6972 gen_rtx_MEM (byte_mode, addr),
6973 make_tree (TREE_TYPE (index), rem),
6974 NULL_RTX, 1);
6975 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6976 GET_MODE (target) == byte_mode ? target : 0,
6977 1, OPTAB_LIB_WIDEN);
6978
6979 if (result != target)
6980 convert_move (target, result, 1);
6981
6982 /* Output the code to handle the out-of-range case. */
6983 emit_jump (op0);
6984 emit_label (op1);
6985 emit_move_insn (target, const0_rtx);
6986 emit_label (op0);
6987 return target;
6988 }
6989
6990 case WITH_CLEANUP_EXPR:
6991 if (RTL_EXPR_RTL (exp) == 0)
6992 {
6993 RTL_EXPR_RTL (exp)
6994 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6995 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6996
6997 /* That's it for this cleanup. */
6998 TREE_OPERAND (exp, 2) = 0;
6999 }
7000 return RTL_EXPR_RTL (exp);
7001
7002 case CLEANUP_POINT_EXPR:
7003 {
7004 /* Start a new binding layer that will keep track of all cleanup
7005 actions to be performed. */
7006 expand_start_bindings (2);
7007
7008 target_temp_slot_level = temp_slot_level;
7009
7010 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7011 /* If we're going to use this value, load it up now. */
7012 if (! ignore)
7013 op0 = force_not_mem (op0);
7014 preserve_temp_slots (op0);
7015 expand_end_bindings (NULL_TREE, 0, 0);
7016 }
7017 return op0;
7018
7019 case CALL_EXPR:
7020 /* Check for a built-in function. */
7021 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7022 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7023 == FUNCTION_DECL)
7024 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7025 return expand_builtin (exp, target, subtarget, tmode, ignore);
7026
7027 /* If this call was expanded already by preexpand_calls,
7028 just return the result we got. */
7029 if (CALL_EXPR_RTL (exp) != 0)
7030 return CALL_EXPR_RTL (exp);
7031
7032 return expand_call (exp, target, ignore);
7033
7034 case NON_LVALUE_EXPR:
7035 case NOP_EXPR:
7036 case CONVERT_EXPR:
7037 case REFERENCE_EXPR:
7038 if (TREE_CODE (type) == UNION_TYPE)
7039 {
7040 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7041
7042 /* If both input and output are BLKmode, this conversion
7043 isn't actually doing anything unless we need to make the
7044 alignment stricter. */
7045 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7046 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7047 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7048 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7049 modifier);
7050
7051 if (target == 0)
7052 {
7053 if (mode != BLKmode)
7054 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7055 else
7056 target = assign_temp (type, 0, 1, 1);
7057 }
7058
7059 if (GET_CODE (target) == MEM)
7060 /* Store data into beginning of memory target. */
7061 store_expr (TREE_OPERAND (exp, 0),
7062 change_address (target, TYPE_MODE (valtype), 0), 0);
7063
7064 else if (GET_CODE (target) == REG)
7065 /* Store this field into a union of the proper type. */
7066 store_field (target,
7067 MIN ((int_size_in_bytes (TREE_TYPE
7068 (TREE_OPERAND (exp, 0)))
7069 * BITS_PER_UNIT),
7070 GET_MODE_BITSIZE (mode)),
7071 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7072 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7073 else
7074 abort ();
7075
7076 /* Return the entire union. */
7077 return target;
7078 }
7079
7080 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7081 {
7082 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7083 ro_modifier);
7084
7085 /* If the signedness of the conversion differs and OP0 is
7086 a promoted SUBREG, clear that indication since we now
7087 have to do the proper extension. */
7088 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7089 && GET_CODE (op0) == SUBREG)
7090 SUBREG_PROMOTED_VAR_P (op0) = 0;
7091
7092 return op0;
7093 }
7094
7095 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7096 if (GET_MODE (op0) == mode)
7097 return op0;
7098
7099 /* If OP0 is a constant, just convert it into the proper mode. */
7100 if (CONSTANT_P (op0))
7101 return
7102 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7103 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7104
7105 if (modifier == EXPAND_INITIALIZER)
7106 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7107
7108 if (target == 0)
7109 return
7110 convert_to_mode (mode, op0,
7111 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7112 else
7113 convert_move (target, op0,
7114 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7115 return target;
7116
7117 case PLUS_EXPR:
7118 /* We come here from MINUS_EXPR when the second operand is a
7119 constant. */
7120 plus_expr:
7121 this_optab = add_optab;
7122
7123 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7124 something else, make sure we add the register to the constant and
7125 then to the other thing. This case can occur during strength
7126 reduction and doing it this way will produce better code if the
7127 frame pointer or argument pointer is eliminated.
7128
7129 fold-const.c will ensure that the constant is always in the inner
7130 PLUS_EXPR, so the only case we need to do anything about is if
7131 sp, ap, or fp is our second argument, in which case we must swap
7132 the innermost first argument and our second argument. */
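/* A purely illustrative instance: if the tree is (a + 4) + fp, the swap
   below turns it into (fp + 4) + a, so that fp + 4 can be simplified
   when the frame pointer is eliminated.  */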
7133
7134 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7135 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7136 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7137 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7138 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7139 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7140 {
7141 tree t = TREE_OPERAND (exp, 1);
7142
7143 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7144 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7145 }
7146
7147 /* If the result is to be ptr_mode and we are adding an integer to
7148 something, we might be forming a constant. So try to use
7149 plus_constant. If it produces a sum and we can't accept it,
7150 use force_operand. This allows P = &ARR[const] to generate
7151 efficient code on machines where a SYMBOL_REF is not a valid
7152 address.
7153
7154 If this is an EXPAND_SUM call, always return the sum. */
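/* A rough illustration (array name and sizes are hypothetical): with
   4-byte ints, &arr[3] for a static array is the sum of (symbol_ref arr)
   and 12, which plus_constant can fold into a single constant address
   usable directly in an initializer, instead of a run-time addition.  */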
7155 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7156 || mode == ptr_mode)
7157 {
7158 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7159 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7160 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7161 {
7162 rtx constant_part;
7163
7164 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7165 EXPAND_SUM);
7166 /* Use immed_double_const to ensure that the constant is
7167 truncated according to the mode of OP1, then sign extended
7168 to a HOST_WIDE_INT. Using the constant directly can result
7169 in non-canonical RTL in a 64x32 cross compile. */
7170 constant_part
7171 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7172 (HOST_WIDE_INT) 0,
7173 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7174 op1 = plus_constant (op1, INTVAL (constant_part));
7175 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7176 op1 = force_operand (op1, target);
7177 return op1;
7178 }
7179
7180 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7181 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7182 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7183 {
7184 rtx constant_part;
7185
7186 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7187 EXPAND_SUM);
7188 if (! CONSTANT_P (op0))
7189 {
7190 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7191 VOIDmode, modifier);
7192 /* Don't go to both_summands if modifier
7193 says it's not right to return a PLUS. */
7194 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7195 goto binop2;
7196 goto both_summands;
7197 }
7198 /* Use immed_double_const to ensure that the constant is
7199 truncated according to the mode of OP0, then sign extended
7200 to a HOST_WIDE_INT. Using the constant directly can result
7201 in non-canonical RTL in a 64x32 cross compile. */
7202 constant_part
7203 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7204 (HOST_WIDE_INT) 0,
7205 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7206 op0 = plus_constant (op0, INTVAL (constant_part));
7207 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7208 op0 = force_operand (op0, target);
7209 return op0;
7210 }
7211 }
7212
7213 /* No sense saving up arithmetic to be done
7214 if it's all in the wrong mode to form part of an address.
7215 And force_operand won't know whether to sign-extend or
7216 zero-extend. */
7217 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7218 || mode != ptr_mode)
7219 goto binop;
7220
7221 preexpand_calls (exp);
7222 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7223 subtarget = 0;
7224
7225 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7226 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7227
7228 both_summands:
7229 /* Make sure any term that's a sum with a constant comes last. */
7230 if (GET_CODE (op0) == PLUS
7231 && CONSTANT_P (XEXP (op0, 1)))
7232 {
7233 temp = op0;
7234 op0 = op1;
7235 op1 = temp;
7236 }
7237 /* If adding to a sum including a constant,
7238 associate it to put the constant outside. */
7239 if (GET_CODE (op1) == PLUS
7240 && CONSTANT_P (XEXP (op1, 1)))
7241 {
7242 rtx constant_term = const0_rtx;
7243
7244 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7245 if (temp != 0)
7246 op0 = temp;
7247 /* Ensure that MULT comes first if there is one. */
7248 else if (GET_CODE (op0) == MULT)
7249 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7250 else
7251 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7252
7253 /* Let's also eliminate constants from op0 if possible. */
7254 op0 = eliminate_constant_term (op0, &constant_term);
7255
7256 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7257 their sum should be a constant. Form it into OP1, since the
7258 result we want will then be OP0 + OP1. */
7259
7260 temp = simplify_binary_operation (PLUS, mode, constant_term,
7261 XEXP (op1, 1));
7262 if (temp != 0)
7263 op1 = temp;
7264 else
7265 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7266 }
7267
7268 /* Put a constant term last and put a multiplication first. */
7269 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7270 temp = op1, op1 = op0, op0 = temp;
7271
7272 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7273 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7274
7275 case MINUS_EXPR:
7276 /* For initializers, we are allowed to return a MINUS of two
7277 symbolic constants; here we handle all cases when both operands
7278 are constant. */
7281 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7282 && really_constant_p (TREE_OPERAND (exp, 0))
7283 && really_constant_p (TREE_OPERAND (exp, 1)))
7284 {
7285 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7286 VOIDmode, ro_modifier);
7287 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7288 VOIDmode, ro_modifier);
7289
7290 /* If the last operand is a CONST_INT, use plus_constant of
7291 the negated constant. Else make the MINUS. */
7292 if (GET_CODE (op1) == CONST_INT)
7293 return plus_constant (op0, - INTVAL (op1));
7294 else
7295 return gen_rtx_MINUS (mode, op0, op1);
7296 }
7297 /* Convert A - const to A + (-const). */
7298 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7299 {
7300 tree negated = fold (build1 (NEGATE_EXPR, type,
7301 TREE_OPERAND (exp, 1)));
7302
7303 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7304 /* If we can't negate the constant in TYPE, leave it alone and
7305 expand_binop will negate it for us. We used to try to do it
7306 here in the signed version of TYPE, but that doesn't work
7307 on POINTER_TYPEs. */;
7308 else
7309 {
7310 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7311 goto plus_expr;
7312 }
7313 }
7314 this_optab = sub_optab;
7315 goto binop;
7316
7317 case MULT_EXPR:
7318 preexpand_calls (exp);
7319 /* If first operand is constant, swap them.
7320 Thus the following special case checks need only
7321 check the second operand. */
7322 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7323 {
7324 register tree t1 = TREE_OPERAND (exp, 0);
7325 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7326 TREE_OPERAND (exp, 1) = t1;
7327 }
7328
7329 /* Attempt to return something suitable for generating an
7330 indexed address, for machines that support that. */
7331
7332 if (modifier == EXPAND_SUM && mode == ptr_mode
7333 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7334 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7335 {
7336 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7337 EXPAND_SUM);
7338
7339 /* Apply distributive law if OP0 is x+c. */
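/* That is, (x + c) * N becomes x*N + c*N, keeping the constant part
   available for folding into an address.  */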
7340 if (GET_CODE (op0) == PLUS
7341 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7342 return
7343 gen_rtx_PLUS
7344 (mode,
7345 gen_rtx_MULT
7346 (mode, XEXP (op0, 0),
7347 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7348 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7349 * INTVAL (XEXP (op0, 1))));
7350
7351 if (GET_CODE (op0) != REG)
7352 op0 = force_operand (op0, NULL_RTX);
7353 if (GET_CODE (op0) != REG)
7354 op0 = copy_to_mode_reg (mode, op0);
7355
7356 return
7357 gen_rtx_MULT (mode, op0,
7358 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7359 }
7360
7361 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7362 subtarget = 0;
7363
7364 /* Check for multiplying things that have been extended
7365 from a narrower type. If this machine supports multiplying
7366 in that narrower type with a result in the desired type,
7367 do it that way, and avoid the explicit type-conversion. */
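/* For example, multiplying two `short' values whose product is wanted
   as an `int' can use a widening HImode x HImode -> SImode multiply
   directly, on machines that provide one, rather than extending both
   operands to SImode first.  */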
7368 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7369 && TREE_CODE (type) == INTEGER_TYPE
7370 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7371 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7372 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7373 && int_fits_type_p (TREE_OPERAND (exp, 1),
7374 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7375 /* Don't use a widening multiply if a shift will do. */
7376 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7377 > HOST_BITS_PER_WIDE_INT)
7378 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7379 ||
7380 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7381 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7382 ==
7383 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7384 /* If both operands are extended, they must either both
7385 be zero-extended or both be sign-extended. */
7386 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7387 ==
7388 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7389 {
7390 enum machine_mode innermode
7391 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7392 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7393 ? smul_widen_optab : umul_widen_optab);
7394 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7395 ? umul_widen_optab : smul_widen_optab);
7396 if (mode == GET_MODE_WIDER_MODE (innermode))
7397 {
7398 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7399 {
7400 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7401 NULL_RTX, VOIDmode, 0);
7402 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7403 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7404 VOIDmode, 0);
7405 else
7406 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7407 NULL_RTX, VOIDmode, 0);
7408 goto binop2;
7409 }
7410 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7411 && innermode == word_mode)
7412 {
7413 rtx htem;
7414 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7415 NULL_RTX, VOIDmode, 0);
7416 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7417 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7418 VOIDmode, 0);
7419 else
7420 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7421 NULL_RTX, VOIDmode, 0);
7422 temp = expand_binop (mode, other_optab, op0, op1, target,
7423 unsignedp, OPTAB_LIB_WIDEN);
7424 htem = expand_mult_highpart_adjust (innermode,
7425 gen_highpart (innermode, temp),
7426 op0, op1,
7427 gen_highpart (innermode, temp),
7428 unsignedp);
7429 emit_move_insn (gen_highpart (innermode, temp), htem);
7430 return temp;
7431 }
7432 }
7433 }
7434 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7435 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7436 return expand_mult (mode, op0, op1, target, unsignedp);
7437
7438 case TRUNC_DIV_EXPR:
7439 case FLOOR_DIV_EXPR:
7440 case CEIL_DIV_EXPR:
7441 case ROUND_DIV_EXPR:
7442 case EXACT_DIV_EXPR:
7443 preexpand_calls (exp);
7444 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7445 subtarget = 0;
7446 /* Possible optimization: compute the dividend with EXPAND_SUM;
7447 then, if the divisor is constant, we could optimize the case
7448 where some terms of the dividend have coefficients divisible by it. */
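/* E.g. (only an illustration of the note above, not something done here):
   (x*8 + y) / 4 with unsigned operands could become x*2 + y/4.  */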
7449 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7450 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7451 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7452
7453 case RDIV_EXPR:
7454 this_optab = flodiv_optab;
7455 goto binop;
7456
7457 case TRUNC_MOD_EXPR:
7458 case FLOOR_MOD_EXPR:
7459 case CEIL_MOD_EXPR:
7460 case ROUND_MOD_EXPR:
7461 preexpand_calls (exp);
7462 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7463 subtarget = 0;
7464 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7465 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7466 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7467
7468 case FIX_ROUND_EXPR:
7469 case FIX_FLOOR_EXPR:
7470 case FIX_CEIL_EXPR:
7471 abort (); /* Not used for C. */
7472
7473 case FIX_TRUNC_EXPR:
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7475 if (target == 0)
7476 target = gen_reg_rtx (mode);
7477 expand_fix (target, op0, unsignedp);
7478 return target;
7479
7480 case FLOAT_EXPR:
7481 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7482 if (target == 0)
7483 target = gen_reg_rtx (mode);
7484 /* expand_float can't figure out what to do if FROM has VOIDmode.
7485 So give it the correct mode. With -O, cse will optimize this. */
7486 if (GET_MODE (op0) == VOIDmode)
7487 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7488 op0);
7489 expand_float (target, op0,
7490 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7491 return target;
7492
7493 case NEGATE_EXPR:
7494 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7495 temp = expand_unop (mode, neg_optab, op0, target, 0);
7496 if (temp == 0)
7497 abort ();
7498 return temp;
7499
7500 case ABS_EXPR:
7501 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7502
7503 /* Handle complex values specially. */
7504 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7505 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7506 return expand_complex_abs (mode, op0, target, unsignedp);
7507
7508 /* Unsigned abs is simply the operand. Testing here means we don't
7509 risk generating incorrect code below. */
7510 if (TREE_UNSIGNED (type))
7511 return op0;
7512
7513 return expand_abs (mode, op0, target,
7514 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7515
7516 case MAX_EXPR:
7517 case MIN_EXPR:
7518 target = original_target;
7519 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7520 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7521 || GET_MODE (target) != mode
7522 || (GET_CODE (target) == REG
7523 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7524 target = gen_reg_rtx (mode);
7525 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7526 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7527
7528 /* First try to do it with a special MIN or MAX instruction.
7529 If that does not win, use a conditional jump to select the proper
7530 value. */
7531 this_optab = (TREE_UNSIGNED (type)
7532 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7533 : (code == MIN_EXPR ? smin_optab : smax_optab));
7534
7535 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7536 OPTAB_WIDEN);
7537 if (temp != 0)
7538 return temp;
7539
7540 /* At this point, a MEM target is no longer useful; we will get better
7541 code without it. */
7542
7543 if (GET_CODE (target) == MEM)
7544 target = gen_reg_rtx (mode);
7545
7546 if (target != op0)
7547 emit_move_insn (target, op0);
7548
7549 op0 = gen_label_rtx ();
7550
7551 /* If this mode is an integer too wide to compare properly,
7552 compare word by word. Rely on cse to optimize constant cases. */
7553 if (GET_MODE_CLASS (mode) == MODE_INT
7554 && ! can_compare_p (GE, mode, ccp_jump))
7555 {
7556 if (code == MAX_EXPR)
7557 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7558 target, op1, NULL_RTX, op0);
7559 else
7560 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7561 op1, target, NULL_RTX, op0);
7562 }
7563 else
7564 {
7565 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7566 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7567 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7568 op0);
7569 }
7570 emit_move_insn (target, op1);
7571 emit_label (op0);
7572 return target;
7573
7574 case BIT_NOT_EXPR:
7575 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7576 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7577 if (temp == 0)
7578 abort ();
7579 return temp;
7580
7581 case FFS_EXPR:
7582 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7583 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7584 if (temp == 0)
7585 abort ();
7586 return temp;
7587
7588 /* ??? Can optimize bitwise operations with one arg constant.
7589 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7590 and (a bitwise1 b) bitwise2 b (etc)
7591 but that is probably not worthwhile. */
7592
7593 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7594 boolean values when we want in all cases to compute both of them. In
7595 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7596 as actual zero-or-1 values and then bitwise anding. In cases where
7597 there cannot be any side effects, better code would be made by
7598 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7599 how to recognize those cases. */
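/* For example, p && q as a TRUTH_AND_EXPR is compiled by reducing both
   p and q to 0-or-1 values and bitwise anding them; as a
   TRUTH_ANDIF_EXPR, q would not be evaluated at all when p is zero.  */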
7600
7601 case TRUTH_AND_EXPR:
7602 case BIT_AND_EXPR:
7603 this_optab = and_optab;
7604 goto binop;
7605
7606 case TRUTH_OR_EXPR:
7607 case BIT_IOR_EXPR:
7608 this_optab = ior_optab;
7609 goto binop;
7610
7611 case TRUTH_XOR_EXPR:
7612 case BIT_XOR_EXPR:
7613 this_optab = xor_optab;
7614 goto binop;
7615
7616 case LSHIFT_EXPR:
7617 case RSHIFT_EXPR:
7618 case LROTATE_EXPR:
7619 case RROTATE_EXPR:
7620 preexpand_calls (exp);
7621 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7622 subtarget = 0;
7623 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7624 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7625 unsignedp);
7626
7627 /* Could determine the answer when only additive constants differ. Also,
7628 the addition of one can be handled by changing the condition. */
7629 case LT_EXPR:
7630 case LE_EXPR:
7631 case GT_EXPR:
7632 case GE_EXPR:
7633 case EQ_EXPR:
7634 case NE_EXPR:
7635 case UNORDERED_EXPR:
7636 case ORDERED_EXPR:
7637 case UNLT_EXPR:
7638 case UNLE_EXPR:
7639 case UNGT_EXPR:
7640 case UNGE_EXPR:
7641 case UNEQ_EXPR:
7642 preexpand_calls (exp);
7643 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7644 if (temp != 0)
7645 return temp;
7646
7647 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7648 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7649 && original_target
7650 && GET_CODE (original_target) == REG
7651 && (GET_MODE (original_target)
7652 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7653 {
7654 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7655 VOIDmode, 0);
7656
7657 if (temp != original_target)
7658 temp = copy_to_reg (temp);
7659
7660 op1 = gen_label_rtx ();
7661 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7662 GET_MODE (temp), unsignedp, 0, op1);
7663 emit_move_insn (temp, const1_rtx);
7664 emit_label (op1);
7665 return temp;
7666 }
7667
7668 /* If no set-flag instruction, must generate a conditional
7669 store into a temporary variable. Drop through
7670 and handle this like && and ||. */
7671
7672 case TRUTH_ANDIF_EXPR:
7673 case TRUTH_ORIF_EXPR:
7674 if (! ignore
7675 && (target == 0 || ! safe_from_p (target, exp, 1)
7676 /* Make sure we don't have a hard reg (such as function's return
7677 value) live across basic blocks, if not optimizing. */
7678 || (!optimize && GET_CODE (target) == REG
7679 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7680 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7681
7682 if (target)
7683 emit_clr_insn (target);
7684
7685 op1 = gen_label_rtx ();
7686 jumpifnot (exp, op1);
7687
7688 if (target)
7689 emit_0_to_1_insn (target);
7690
7691 emit_label (op1);
7692 return ignore ? const0_rtx : target;
7693
7694 case TRUTH_NOT_EXPR:
7695 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7696 /* The parser is careful to generate TRUTH_NOT_EXPR
7697 only with operands that are always zero or one. */
7698 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7699 target, 1, OPTAB_LIB_WIDEN);
7700 if (temp == 0)
7701 abort ();
7702 return temp;
7703
7704 case COMPOUND_EXPR:
7705 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7706 emit_queue ();
7707 return expand_expr (TREE_OPERAND (exp, 1),
7708 (ignore ? const0_rtx : target),
7709 VOIDmode, 0);
7710
7711 case COND_EXPR:
7712 /* If we would have a "singleton" (see below) were it not for a
7713 conversion in each arm, bring that conversion back out. */
7714 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7715 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7716 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7717 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7718 {
7719 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7720 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7721
7722 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7723 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7724 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7725 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7726 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7727 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7728 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7729 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7730 return expand_expr (build1 (NOP_EXPR, type,
7731 build (COND_EXPR, TREE_TYPE (true),
7732 TREE_OPERAND (exp, 0),
7733 true, false)),
7734 target, tmode, modifier);
7735 }
7736
7737 {
7738 /* Note that COND_EXPRs whose type is a structure or union
7739 are required to be constructed to contain assignments of
7740 a temporary variable, so that we can evaluate them here
7741 for side effect only. If type is void, we must do likewise. */
7742
7743 /* If an arm of the branch requires a cleanup,
7744 only that cleanup is performed. */
7745
7746 tree singleton = 0;
7747 tree binary_op = 0, unary_op = 0;
7748
7749 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7750 convert it to our mode, if necessary. */
7751 if (integer_onep (TREE_OPERAND (exp, 1))
7752 && integer_zerop (TREE_OPERAND (exp, 2))
7753 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7754 {
7755 if (ignore)
7756 {
7757 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7758 ro_modifier);
7759 return const0_rtx;
7760 }
7761
7762 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7763 if (GET_MODE (op0) == mode)
7764 return op0;
7765
7766 if (target == 0)
7767 target = gen_reg_rtx (mode);
7768 convert_move (target, op0, unsignedp);
7769 return target;
7770 }
7771
7772 /* Check for X ? A + B : A. If we have this, we can copy A to the
7773 output and conditionally add B. Similarly for unary operations.
7774 Don't do this if X has side-effects because those side effects
7775 might affect A or B and the "?" operation is a sequence point in
7776 ANSI. (operand_equal_p tests for side effects.) */
7777
7778 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7779 && operand_equal_p (TREE_OPERAND (exp, 2),
7780 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7781 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7782 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7783 && operand_equal_p (TREE_OPERAND (exp, 1),
7784 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7785 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7786 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7787 && operand_equal_p (TREE_OPERAND (exp, 2),
7788 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7789 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7790 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7791 && operand_equal_p (TREE_OPERAND (exp, 1),
7792 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7793 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7794
7795 /* If we are not to produce a result, we have no target. Otherwise,
7796 if a target was specified use it; it will not be used as an
7797 intermediate target unless it is safe. If no target, use a
7798 temporary. */
7799
7800 if (ignore)
7801 temp = 0;
7802 else if (original_target
7803 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7804 || (singleton && GET_CODE (original_target) == REG
7805 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7806 && original_target == var_rtx (singleton)))
7807 && GET_MODE (original_target) == mode
7808 #ifdef HAVE_conditional_move
7809 && (! can_conditionally_move_p (mode)
7810 || GET_CODE (original_target) == REG
7811 || TREE_ADDRESSABLE (type))
7812 #endif
7813 && ! (GET_CODE (original_target) == MEM
7814 && MEM_VOLATILE_P (original_target)))
7815 temp = original_target;
7816 else if (TREE_ADDRESSABLE (type))
7817 abort ();
7818 else
7819 temp = assign_temp (type, 0, 0, 1);
7820
7821 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7822 do the test of X as a store-flag operation, do this as
7823 A + ((X != 0) << log C). Similarly for other simple binary
7824 operators. Only do for C == 1 if BRANCH_COST is low. */
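/* E.g. (illustrative), X ? A + 4 : A can be computed without a branch
   as A + ((X != 0) << 2).  */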
7825 if (temp && singleton && binary_op
7826 && (TREE_CODE (binary_op) == PLUS_EXPR
7827 || TREE_CODE (binary_op) == MINUS_EXPR
7828 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7829 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7830 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7831 : integer_onep (TREE_OPERAND (binary_op, 1)))
7832 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7833 {
7834 rtx result;
7835 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7836 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7837 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7838 : xor_optab);
7839
7840 /* If we had X ? A : A + 1, do this as A + (X == 0).
7841
7842 We have to invert the truth value here and then put it
7843 back later if do_store_flag fails. We cannot simply copy
7844 TREE_OPERAND (exp, 0) to another variable and modify that
7845 because invert_truthvalue can modify the tree pointed to
7846 by its argument. */
7847 if (singleton == TREE_OPERAND (exp, 1))
7848 TREE_OPERAND (exp, 0)
7849 = invert_truthvalue (TREE_OPERAND (exp, 0));
7850
7851 result = do_store_flag (TREE_OPERAND (exp, 0),
7852 (safe_from_p (temp, singleton, 1)
7853 ? temp : NULL_RTX),
7854 mode, BRANCH_COST <= 1);
7855
7856 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7857 result = expand_shift (LSHIFT_EXPR, mode, result,
7858 build_int_2 (tree_log2
7859 (TREE_OPERAND
7860 (binary_op, 1)),
7861 0),
7862 (safe_from_p (temp, singleton, 1)
7863 ? temp : NULL_RTX), 0);
7864
7865 if (result)
7866 {
7867 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7868 return expand_binop (mode, boptab, op1, result, temp,
7869 unsignedp, OPTAB_LIB_WIDEN);
7870 }
7871 else if (singleton == TREE_OPERAND (exp, 1))
7872 TREE_OPERAND (exp, 0)
7873 = invert_truthvalue (TREE_OPERAND (exp, 0));
7874 }
7875
7876 do_pending_stack_adjust ();
7877 NO_DEFER_POP;
7878 op0 = gen_label_rtx ();
7879
7880 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7881 {
7882 if (temp != 0)
7883 {
7884 /* If the target conflicts with the other operand of the
7885 binary op, we can't use it. Also, we can't use the target
7886 if it is a hard register, because evaluating the condition
7887 might clobber it. */
7888 if ((binary_op
7889 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7890 || (GET_CODE (temp) == REG
7891 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7892 temp = gen_reg_rtx (mode);
7893 store_expr (singleton, temp, 0);
7894 }
7895 else
7896 expand_expr (singleton,
7897 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7898 if (singleton == TREE_OPERAND (exp, 1))
7899 jumpif (TREE_OPERAND (exp, 0), op0);
7900 else
7901 jumpifnot (TREE_OPERAND (exp, 0), op0);
7902
7903 start_cleanup_deferral ();
7904 if (binary_op && temp == 0)
7905 /* Just touch the other operand. */
7906 expand_expr (TREE_OPERAND (binary_op, 1),
7907 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7908 else if (binary_op)
7909 store_expr (build (TREE_CODE (binary_op), type,
7910 make_tree (type, temp),
7911 TREE_OPERAND (binary_op, 1)),
7912 temp, 0);
7913 else
7914 store_expr (build1 (TREE_CODE (unary_op), type,
7915 make_tree (type, temp)),
7916 temp, 0);
7917 op1 = op0;
7918 }
7919 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7920 comparison operator. If we have one of these cases, set the
7921 output to A, branch on A (cse will merge these two references),
7922 then set the output to FOO. */
7923 else if (temp
7924 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7925 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7926 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7927 TREE_OPERAND (exp, 1), 0)
7928 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7929 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7930 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7931 {
7932 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7933 temp = gen_reg_rtx (mode);
7934 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7935 jumpif (TREE_OPERAND (exp, 0), op0);
7936
7937 start_cleanup_deferral ();
7938 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7939 op1 = op0;
7940 }
7941 else if (temp
7942 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7943 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7944 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7945 TREE_OPERAND (exp, 2), 0)
7946 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7947 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7948 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7949 {
7950 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7951 temp = gen_reg_rtx (mode);
7952 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7953 jumpifnot (TREE_OPERAND (exp, 0), op0);
7954
7955 start_cleanup_deferral ();
7956 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7957 op1 = op0;
7958 }
7959 else
7960 {
7961 op1 = gen_label_rtx ();
7962 jumpifnot (TREE_OPERAND (exp, 0), op0);
7963
7964 start_cleanup_deferral ();
7965
7966 /* One branch of the cond can be void, if it never returns. For
7967 example A ? throw : E.  */
7968 if (temp != 0
7969 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7970 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7971 else
7972 expand_expr (TREE_OPERAND (exp, 1),
7973 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7974 end_cleanup_deferral ();
7975 emit_queue ();
7976 emit_jump_insn (gen_jump (op1));
7977 emit_barrier ();
7978 emit_label (op0);
7979 start_cleanup_deferral ();
7980 if (temp != 0
7981 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7982 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7983 else
7984 expand_expr (TREE_OPERAND (exp, 2),
7985 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7986 }
7987
7988 end_cleanup_deferral ();
7989
7990 emit_queue ();
7991 emit_label (op1);
7992 OK_DEFER_POP;
7993
7994 return temp;
7995 }
7996
7997 case TARGET_EXPR:
7998 {
7999 /* Something needs to be initialized, but we didn't know
8000 where that thing was when building the tree. For example,
8001 it could be the return value of a function, or a parameter
8002 to a function which is constructed on the stack, or a temporary
8003 variable which must be passed by reference.
8004
8005 We guarantee that the expression will either be constructed
8006 or copied into our original target. */
8007
8008 tree slot = TREE_OPERAND (exp, 0);
8009 tree cleanups = NULL_TREE;
8010 tree exp1;
8011
8012 if (TREE_CODE (slot) != VAR_DECL)
8013 abort ();
8014
8015 if (! ignore)
8016 target = original_target;
8017
8018 /* Set this here so that if we get a target that refers to a
8019 register variable that's already been used, put_reg_into_stack
8020 knows that it should fix up those uses. */
8021 TREE_USED (slot) = 1;
8022
8023 if (target == 0)
8024 {
8025 if (DECL_RTL (slot) != 0)
8026 {
8027 target = DECL_RTL (slot);
8028 /* If we have already expanded the slot, don't do
8029 it again. (mrs) */
8030 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8031 return target;
8032 }
8033 else
8034 {
8035 target = assign_temp (type, 2, 0, 1);
8036 /* All temp slots at this level must not conflict. */
8037 preserve_temp_slots (target);
8038 DECL_RTL (slot) = target;
8039 if (TREE_ADDRESSABLE (slot))
8040 {
8041 TREE_ADDRESSABLE (slot) = 0;
8042 mark_addressable (slot);
8043 }
8044
8045 /* Since SLOT is not known to the called function
8046 to belong to its stack frame, we must build an explicit
8047 cleanup. This case occurs when we must build up a reference
8048 to pass as an argument. In this case,
8049 it is very likely that such a reference need not be
8050 built here. */
8051
8052 if (TREE_OPERAND (exp, 2) == 0)
8053 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8054 cleanups = TREE_OPERAND (exp, 2);
8055 }
8056 }
8057 else
8058 {
8059 /* This case does occur, when expanding a parameter which
8060 needs to be constructed on the stack. The target
8061 is the actual stack address that we want to initialize.
8062 The function we call will perform the cleanup in this case. */
8063
8064 /* If we have already assigned it space, use that space,
8065 not the target that we were passed in, as our target
8066 parameter is only a hint. */
8067 if (DECL_RTL (slot) != 0)
8068 {
8069 target = DECL_RTL (slot);
8070 /* If we have already expanded the slot, don't do
8071 it again. (mrs) */
8072 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8073 return target;
8074 }
8075 else
8076 {
8077 DECL_RTL (slot) = target;
8078 /* If we must have an addressable slot, then make sure that
8079 the RTL that we just stored in slot is OK. */
8080 if (TREE_ADDRESSABLE (slot))
8081 {
8082 TREE_ADDRESSABLE (slot) = 0;
8083 mark_addressable (slot);
8084 }
8085 }
8086 }
8087
8088 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8089 /* Mark it as expanded. */
8090 TREE_OPERAND (exp, 1) = NULL_TREE;
8091
8092 store_expr (exp1, target, 0);
8093
8094 expand_decl_cleanup (NULL_TREE, cleanups);
8095
8096 return target;
8097 }
8098
8099 case INIT_EXPR:
8100 {
8101 tree lhs = TREE_OPERAND (exp, 0);
8102 tree rhs = TREE_OPERAND (exp, 1);
8103 tree noncopied_parts = 0;
8104 tree lhs_type = TREE_TYPE (lhs);
8105
8106 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8107 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8108 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8109 TYPE_NONCOPIED_PARTS (lhs_type));
8110 while (noncopied_parts != 0)
8111 {
8112 expand_assignment (TREE_VALUE (noncopied_parts),
8113 TREE_PURPOSE (noncopied_parts), 0, 0);
8114 noncopied_parts = TREE_CHAIN (noncopied_parts);
8115 }
8116 return temp;
8117 }
8118
8119 case MODIFY_EXPR:
8120 {
8121 /* If lhs is complex, expand calls in rhs before computing it.
8122 That's so we don't compute a pointer and save it over a call.
8123 If lhs is simple, compute it first so we can give it as a
8124 target if the rhs is just a call. This avoids an extra temp and copy
8125 and prevents a partial subsumption which makes bad code.
8126 Actually we could treat component_ref's of vars like vars. */
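/* For instance, for a simple assignment such as x = f (), the variable
   x itself can be offered as the target when expanding the call,
   avoiding a separate temporary and copy.  */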
8127
8128 tree lhs = TREE_OPERAND (exp, 0);
8129 tree rhs = TREE_OPERAND (exp, 1);
8130 tree noncopied_parts = 0;
8131 tree lhs_type = TREE_TYPE (lhs);
8132
8133 temp = 0;
8134
8135 if (TREE_CODE (lhs) != VAR_DECL
8136 && TREE_CODE (lhs) != RESULT_DECL
8137 && TREE_CODE (lhs) != PARM_DECL
8138 && ! (TREE_CODE (lhs) == INDIRECT_REF
8139 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8140 preexpand_calls (exp);
8141
8142 /* Check for |= or &= of a bitfield of size one into another bitfield
8143 of size 1. In this case, (unless we need the result of the
8144 assignment) we can do this more efficiently with a
8145 test followed by an assignment, if necessary.
8146
8147 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8148 things change so we do, this code should be enhanced to
8149 support it. */
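/* For example (the field names are hypothetical): with 1-bit fields
   a and b, the statement s.a |= t.b; is expanded as if it were
   "if (t.b) s.a = 1;", and s.a &= t.b; as "if (! t.b) s.a = 0;".  */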
8150 if (ignore
8151 && TREE_CODE (lhs) == COMPONENT_REF
8152 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8153 || TREE_CODE (rhs) == BIT_AND_EXPR)
8154 && TREE_OPERAND (rhs, 0) == lhs
8155 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8156 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8157 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8158 {
8159 rtx label = gen_label_rtx ();
8160
8161 do_jump (TREE_OPERAND (rhs, 1),
8162 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8163 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8164 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8165 (TREE_CODE (rhs) == BIT_IOR_EXPR
8166 ? integer_one_node
8167 : integer_zero_node)),
8168 0, 0);
8169 do_pending_stack_adjust ();
8170 emit_label (label);
8171 return const0_rtx;
8172 }
8173
8174 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8175 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8176 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8177 TYPE_NONCOPIED_PARTS (lhs_type));
8178
8179 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8180 while (noncopied_parts != 0)
8181 {
8182 expand_assignment (TREE_PURPOSE (noncopied_parts),
8183 TREE_VALUE (noncopied_parts), 0, 0);
8184 noncopied_parts = TREE_CHAIN (noncopied_parts);
8185 }
8186 return temp;
8187 }
8188
8189 case RETURN_EXPR:
8190 if (!TREE_OPERAND (exp, 0))
8191 expand_null_return ();
8192 else
8193 expand_return (TREE_OPERAND (exp, 0));
8194 return const0_rtx;
8195
8196 case PREINCREMENT_EXPR:
8197 case PREDECREMENT_EXPR:
8198 return expand_increment (exp, 0, ignore);
8199
8200 case POSTINCREMENT_EXPR:
8201 case POSTDECREMENT_EXPR:
8202 /* Faster to treat as pre-increment if result is not used. */
8203 return expand_increment (exp, ! ignore, ignore);
8204
8205 case ADDR_EXPR:
8206 /* If nonzero, TEMP will be set to the address of something that might
8207 be a MEM corresponding to a stack slot. */
8208 temp = 0;
8209
8210 /* Are we taking the address of a nested function? */
8211 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8212 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8213 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8214 && ! TREE_STATIC (exp))
8215 {
8216 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8217 op0 = force_operand (op0, target);
8218 }
8219 /* If we are taking the address of something erroneous, just
8220 return a zero. */
8221 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8222 return const0_rtx;
8223 else
8224 {
8225 /* We make sure to pass const0_rtx down if we came in with
8226 ignore set, to avoid doing the cleanups twice for something. */
8227 op0 = expand_expr (TREE_OPERAND (exp, 0),
8228 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8229 (modifier == EXPAND_INITIALIZER
8230 ? modifier : EXPAND_CONST_ADDRESS));
8231
8232 /* If we are going to ignore the result, OP0 will have been set
8233 to const0_rtx, so just return it. Don't get confused and
8234 think we are taking the address of the constant. */
8235 if (ignore)
8236 return op0;
8237
8238 op0 = protect_from_queue (op0, 0);
8239
8240 /* We would like the object in memory. If it is a constant, we can
8241 have it be statically allocated into memory. For a non-constant,
8242 we need to allocate some memory and store the value into it. */
8243
8244 if (CONSTANT_P (op0))
8245 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8246 op0);
8247 else if (GET_CODE (op0) == MEM)
8248 {
8249 mark_temp_addr_taken (op0);
8250 temp = XEXP (op0, 0);
8251 }
8252
8253 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8254 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8255 {
8256 /* If this object is in a register, it must not
8257 be BLKmode. */
8258 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8259 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8260
8261 mark_temp_addr_taken (memloc);
8262 emit_move_insn (memloc, op0);
8263 op0 = memloc;
8264 }
8265
8266 if (GET_CODE (op0) != MEM)
8267 abort ();
8268
8269 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8270 {
8271 temp = XEXP (op0, 0);
8272 #ifdef POINTERS_EXTEND_UNSIGNED
8273 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8274 && mode == ptr_mode)
8275 temp = convert_memory_address (ptr_mode, temp);
8276 #endif
8277 return temp;
8278 }
8279
8280 op0 = force_operand (XEXP (op0, 0), target);
8281 }
8282
8283 if (flag_force_addr && GET_CODE (op0) != REG)
8284 op0 = force_reg (Pmode, op0);
8285
8286 if (GET_CODE (op0) == REG
8287 && ! REG_USERVAR_P (op0))
8288 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8289
8290 /* If we might have had a temp slot, add an equivalent address
8291 for it. */
8292 if (temp != 0)
8293 update_temp_slot_address (temp, op0);
8294
8295 #ifdef POINTERS_EXTEND_UNSIGNED
8296 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8297 && mode == ptr_mode)
8298 op0 = convert_memory_address (ptr_mode, op0);
8299 #endif
8300
8301 return op0;
8302
8303 case ENTRY_VALUE_EXPR:
8304 abort ();
8305
8306 /* COMPLEX type for Extended Pascal & Fortran */
8307 case COMPLEX_EXPR:
8308 {
8309 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8310 rtx insns;
8311
8312 /* Get the rtx for the operands. */
8313 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8314 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8315
8316 if (! target)
8317 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8318
8319 start_sequence ();
8320
8321 /* Move the real (op0) and imaginary (op1) parts to their location. */
8322 emit_move_insn (gen_realpart (mode, target), op0);
8323 emit_move_insn (gen_imagpart (mode, target), op1);
8324
8325 insns = get_insns ();
8326 end_sequence ();
8327
8328 /* Complex construction should appear as a single unit. */
8329 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8330 each with a separate pseudo as destination.
8331 It's not correct for flow to treat them as a unit. */
8332 if (GET_CODE (target) != CONCAT)
8333 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8334 else
8335 emit_insns (insns);
8336
8337 return target;
8338 }
8339
8340 case REALPART_EXPR:
8341 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8342 return gen_realpart (mode, op0);
8343
8344 case IMAGPART_EXPR:
8345 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8346 return gen_imagpart (mode, op0);
8347
8348 case CONJ_EXPR:
8349 {
8350 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8351 rtx imag_t;
8352 rtx insns;
8353
8354 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8355
8356 if (! target)
8357 target = gen_reg_rtx (mode);
8358
8359 start_sequence ();
8360
8361 /* Store the realpart and the negated imagpart to target. */
8362 emit_move_insn (gen_realpart (partmode, target),
8363 gen_realpart (partmode, op0));
8364
8365 imag_t = gen_imagpart (partmode, target);
8366 temp = expand_unop (partmode, neg_optab,
8367 gen_imagpart (partmode, op0), imag_t, 0);
8368 if (temp != imag_t)
8369 emit_move_insn (imag_t, temp);
8370
8371 insns = get_insns ();
8372 end_sequence ();
8373
8374 /* Conjugate should appear as a single unit.
8375 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8376 each with a separate pseudo as destination.
8377 It's not correct for flow to treat them as a unit. */
8378 if (GET_CODE (target) != CONCAT)
8379 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8380 else
8381 emit_insns (insns);
8382
8383 return target;
8384 }
8385
8386 case TRY_CATCH_EXPR:
8387 {
8388 tree handler = TREE_OPERAND (exp, 1);
8389
8390 expand_eh_region_start ();
8391
8392 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8393
8394 expand_eh_region_end (handler);
8395
8396 return op0;
8397 }
8398
8399 case TRY_FINALLY_EXPR:
8400 {
8401 tree try_block = TREE_OPERAND (exp, 0);
8402 tree finally_block = TREE_OPERAND (exp, 1);
8403 rtx finally_label = gen_label_rtx ();
8404 rtx done_label = gen_label_rtx ();
8405 rtx return_link = gen_reg_rtx (Pmode);
8406 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8407 (tree) finally_label, (tree) return_link);
8408 TREE_SIDE_EFFECTS (cleanup) = 1;
8409
8410 /* Start a new binding layer that will keep track of all cleanup
8411 actions to be performed. */
8412 expand_start_bindings (2);
8413
8414 target_temp_slot_level = temp_slot_level;
8415
8416 expand_decl_cleanup (NULL_TREE, cleanup);
8417 op0 = expand_expr (try_block, target, tmode, modifier);
8418
8419 preserve_temp_slots (op0);
8420 expand_end_bindings (NULL_TREE, 0, 0);
8421 emit_jump (done_label);
8422 emit_label (finally_label);
8423 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8424 emit_indirect_jump (return_link);
8425 emit_label (done_label);
8426 return op0;
8427 }
8428
8429 case GOTO_SUBROUTINE_EXPR:
8430 {
8431 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8432 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8433 rtx return_address = gen_label_rtx ();
8434 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8435 emit_jump (subr);
8436 emit_label (return_address);
8437 return const0_rtx;
8438 }
8439
8440 case POPDCC_EXPR:
8441 {
8442 rtx dcc = get_dynamic_cleanup_chain ();
8443 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8444 return const0_rtx;
8445 }
8446
8447 case POPDHC_EXPR:
8448 {
8449 rtx dhc = get_dynamic_handler_chain ();
8450 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8451 return const0_rtx;
8452 }
8453
8454 case VA_ARG_EXPR:
8455 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8456
8457 default:
8458 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8459 }
8460
8461 /* Here to do an ordinary binary operator, generating an instruction
8462 from the optab already placed in `this_optab'. */
8463 binop:
8464 preexpand_calls (exp);
8465 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8466 subtarget = 0;
8467 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8468 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8469 binop2:
8470 temp = expand_binop (mode, this_optab, op0, op1, target,
8471 unsignedp, OPTAB_LIB_WIDEN);
8472 if (temp == 0)
8473 abort ();
8474 return temp;
8475 }
8476 \f
8477 /* Similar to expand_expr, except that we don't specify a target, target
8478 mode, or modifier and we return the alignment of the inner type. This is
8479 used in cases where it is not necessary to align the result to the
8480 alignment of its type as long as we know the alignment of the result, for
8481 example for comparisons of BLKmode values. */
8482
8483 static rtx
8484 expand_expr_unaligned (exp, palign)
8485 register tree exp;
8486 unsigned int *palign;
8487 {
8488 register rtx op0;
8489 tree type = TREE_TYPE (exp);
8490 register enum machine_mode mode = TYPE_MODE (type);
8491
8492 /* Default the alignment we return to that of the type. */
8493 *palign = TYPE_ALIGN (type);
8494
8495 /* The only case in which we do anything special is if the resulting mode
8496 is BLKmode. */
8497 if (mode != BLKmode)
8498 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8499
8500 switch (TREE_CODE (exp))
8501 {
8502 case CONVERT_EXPR:
8503 case NOP_EXPR:
8504 case NON_LVALUE_EXPR:
8505 /* Conversions between BLKmode values don't change the underlying
8506 alignment or value. */
8507 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8508 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8509 break;
8510
8511 case ARRAY_REF:
8512 /* Much of the code for this case is copied directly from expand_expr.
8513 We need to duplicate it here because we will do something different
8514 in the fall-through case, so we need to handle the same exceptions
8515 it does. */
8516 {
8517 tree array = TREE_OPERAND (exp, 0);
8518 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8519 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8520 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8521 HOST_WIDE_INT i;
8522
8523 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8524 abort ();
8525
8526 /* Optimize the special-case of a zero lower bound.
8527
8528 We convert the low_bound to sizetype to avoid some problems
8529 with constant folding. (E.g. suppose the lower bound is 1,
8530 and its mode is QI. Without the conversion, (ARRAY
8531 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8532 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8533
8534 if (! integer_zerop (low_bound))
8535 index = size_diffop (index, convert (sizetype, low_bound));
8536
8537 /* If this is a constant index into a constant array,
8538 just get the value from the array. Handle both the cases when
8539 we have an explicit constructor and when our operand is a variable
8540 that was declared const. */
8541
8542 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8543 && 0 > compare_tree_int (index,
8544 list_length (CONSTRUCTOR_ELTS
8545 (TREE_OPERAND (exp, 0)))))
8546 {
8547 tree elem;
8548
8549 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8550 i = TREE_INT_CST_LOW (index);
8551 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8552 ;
8553
8554 if (elem)
8555 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8556 }
8557
8558 else if (optimize >= 1
8559 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8560 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8561 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8562 {
8563 if (TREE_CODE (index) == INTEGER_CST)
8564 {
8565 tree init = DECL_INITIAL (array);
8566
8567 if (TREE_CODE (init) == CONSTRUCTOR)
8568 {
8569 tree elem;
8570
8571 for (elem = CONSTRUCTOR_ELTS (init);
8572 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8573 elem = TREE_CHAIN (elem))
8574 ;
8575
8576 if (elem)
8577 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8578 palign);
8579 }
8580 }
8581 }
8582 }
8583
8584 /* ... fall through ... */
8585
8586 case COMPONENT_REF:
8587 case BIT_FIELD_REF:
8588 /* If the operand is a CONSTRUCTOR, we can just extract the
8589 appropriate field if it is present. Don't do this if we have
8590 already written the data since we want to refer to that copy
8591 and varasm.c assumes that's what we'll do. */
8592 if (TREE_CODE (exp) != ARRAY_REF
8593 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8594 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8595 {
8596 tree elt;
8597
8598 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8599 elt = TREE_CHAIN (elt))
8600 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8601 /* Note that unlike the case in expand_expr, we know this is
8602 BLKmode and hence not an integer. */
8603 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8604 }
8605
8606 {
8607 enum machine_mode mode1;
8608 int bitsize;
8609 int bitpos;
8610 tree offset;
8611 int volatilep = 0;
8612 unsigned int alignment;
8613 int unsignedp;
8614 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8615 &mode1, &unsignedp, &volatilep,
8616 &alignment);
8617
8618 /* If we got back the original object, something is wrong. Perhaps
8619 we are evaluating an expression too early. In any event, don't
8620 infinitely recurse. */
8621 if (tem == exp)
8622 abort ();
8623
8624 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8625
8626 /* If this is a constant, put it into a register if it is a
8627 legitimate constant and OFFSET is 0, or into memory if it isn't. */
8628 if (CONSTANT_P (op0))
8629 {
8630 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8631
8632 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8633 && offset == 0)
8634 op0 = force_reg (inner_mode, op0);
8635 else
8636 op0 = validize_mem (force_const_mem (inner_mode, op0));
8637 }
8638
8639 if (offset != 0)
8640 {
8641 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8642
8643 /* If this object is in a register, put it into memory.
8644 This case can't occur in C, but can in Ada if we have
8645 unchecked conversion of an expression from a scalar type to
8646 an array or record type. */
8647 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8648 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8649 {
8650 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8651
8652 mark_temp_addr_taken (memloc);
8653 emit_move_insn (memloc, op0);
8654 op0 = memloc;
8655 }
8656
8657 if (GET_CODE (op0) != MEM)
8658 abort ();
8659
8660 if (GET_MODE (offset_rtx) != ptr_mode)
8661 {
8662 #ifdef POINTERS_EXTEND_UNSIGNED
8663 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8664 #else
8665 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8666 #endif
8667 }
8668
8669 op0 = change_address (op0, VOIDmode,
8670 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8671 force_reg (ptr_mode,
8672 offset_rtx)));
8673 }
8674
8675 /* Don't forget about volatility even if this is a bitfield. */
8676 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8677 {
8678 op0 = copy_rtx (op0);
8679 MEM_VOLATILE_P (op0) = 1;
8680 }
8681
8682 /* Check the access. */
8683 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8684 {
8685 rtx to;
8686 int size;
8687
8688 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8689 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8690
8691 /* Check the access right of the pointer. */
8692 if (size > BITS_PER_UNIT)
8693 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8694 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8695 TYPE_MODE (sizetype),
8696 GEN_INT (MEMORY_USE_RO),
8697 TYPE_MODE (integer_type_node));
8698 }
8699
8700 /* In cases where an aligned union has an unaligned object
8701 as a field, we might be extracting a BLKmode value from
8702 an integer-mode (e.g., SImode) object. Handle this case
8703 by doing the extract into an object as wide as the field
8704 (which we know to be the width of a basic mode), then
8705 storing into memory, and changing the mode to BLKmode.
8706 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8707 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8708 if (mode1 == VOIDmode
8709 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8710 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8711 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8712 || bitpos % TYPE_ALIGN (type) != 0)))
8713 {
8714 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8715
8716 if (ext_mode == BLKmode)
8717 {
8718 /* In this case, BITPOS must start at a byte boundary. */
8719 if (GET_CODE (op0) != MEM
8720 || bitpos % BITS_PER_UNIT != 0)
8721 abort ();
8722
8723 op0 = change_address (op0, VOIDmode,
8724 plus_constant (XEXP (op0, 0),
8725 bitpos / BITS_PER_UNIT));
8726 }
8727 else
8728 {
8729 rtx new = assign_stack_temp (ext_mode,
8730 bitsize / BITS_PER_UNIT, 0);
8731
8732 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8733 unsignedp, NULL_RTX, ext_mode,
8734 ext_mode, alignment,
8735 int_size_in_bytes (TREE_TYPE (tem)));
8736
8737 /* If the result is a record type and BITSIZE is narrower than
8738 the mode of OP0, an integral mode, and this is a big endian
8739 machine, we must put the field into the high-order bits. */
8740 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8741 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8742 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8743 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8744 size_int (GET_MODE_BITSIZE
8745 (GET_MODE (op0))
8746 - bitsize),
8747 op0, 1);
8748
8749
8750 emit_move_insn (new, op0);
8751 op0 = copy_rtx (new);
8752 PUT_MODE (op0, BLKmode);
8753 }
8754 }
8755 else
8756 /* Get a reference to just this component. */
8757 op0 = change_address (op0, mode1,
8758 plus_constant (XEXP (op0, 0),
8759 (bitpos / BITS_PER_UNIT)));
8760
8761 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8762
8763 /* Adjust the alignment in case the bit position is not
8764 a multiple of the alignment of the inner object. */
8765 while (bitpos % alignment != 0)
8766 alignment >>= 1;
8767
8768 if (GET_CODE (XEXP (op0, 0)) == REG)
8769 mark_reg_pointer (XEXP (op0, 0), alignment);
8770
8771 MEM_IN_STRUCT_P (op0) = 1;
8772 MEM_VOLATILE_P (op0) |= volatilep;
8773
8774 *palign = alignment;
8775 return op0;
8776 }
8777
8778 default:
8779 break;
8780
8781 }
8782
8783 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8784 }
8785 \f
8786 /* Return the tree node if ARG corresponds to a string constant or zero
8787 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8788 in bytes within the string that ARG is accessing. The type of the
8789 offset will be `sizetype'. */
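
/* Purely illustrative sketch (not compiled): for C source such as

     const char *p = "hello" + 2;

   the argument reaching us is roughly
   (PLUS_EXPR (ADDR_EXPR (STRING_CST "hello")) (INTEGER_CST 2)),
   so we return the STRING_CST and set *PTR_OFFSET to 2, while a plain
   "hello" returns the STRING_CST with a zero offset.  The exact tree
   shape depends on the front end.  */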
8790
8791 tree
8792 string_constant (arg, ptr_offset)
8793 tree arg;
8794 tree *ptr_offset;
8795 {
8796 STRIP_NOPS (arg);
8797
8798 if (TREE_CODE (arg) == ADDR_EXPR
8799 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8800 {
8801 *ptr_offset = size_zero_node;
8802 return TREE_OPERAND (arg, 0);
8803 }
8804 else if (TREE_CODE (arg) == PLUS_EXPR)
8805 {
8806 tree arg0 = TREE_OPERAND (arg, 0);
8807 tree arg1 = TREE_OPERAND (arg, 1);
8808
8809 STRIP_NOPS (arg0);
8810 STRIP_NOPS (arg1);
8811
8812 if (TREE_CODE (arg0) == ADDR_EXPR
8813 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8814 {
8815 *ptr_offset = convert (sizetype, arg1);
8816 return TREE_OPERAND (arg0, 0);
8817 }
8818 else if (TREE_CODE (arg1) == ADDR_EXPR
8819 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8820 {
8821 *ptr_offset = convert (sizetype, arg0);
8822 return TREE_OPERAND (arg1, 0);
8823 }
8824 }
8825
8826 return 0;
8827 }
8828 \f
8829 /* Expand code for a post- or pre- increment or decrement
8830 and return the RTX for the result.
8831 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
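
/* Illustrative sketch only: for C code such as

     y = x++;

   the value of the expression is the OLD value of X, so below we either
   queue the add (so X is updated after its old value has been used) or
   copy the old value into a register first.  For y = ++x the result is
   the NEW value, so we try to add in place and return X itself.  */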
8832
8833 static rtx
8834 expand_increment (exp, post, ignore)
8835 register tree exp;
8836 int post, ignore;
8837 {
8838 register rtx op0, op1;
8839 register rtx temp, value;
8840 register tree incremented = TREE_OPERAND (exp, 0);
8841 optab this_optab = add_optab;
8842 int icode;
8843 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8844 int op0_is_copy = 0;
8845 int single_insn = 0;
8846 /* 1 means we can't store into OP0 directly,
8847 because it is a subreg narrower than a word,
8848 and we don't dare clobber the rest of the word. */
8849 int bad_subreg = 0;
8850
8851 /* Stabilize any component ref that might need to be
8852 evaluated more than once below. */
8853 if (!post
8854 || TREE_CODE (incremented) == BIT_FIELD_REF
8855 || (TREE_CODE (incremented) == COMPONENT_REF
8856 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8857 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8858 incremented = stabilize_reference (incremented);
8859 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8860 ones into save exprs so that they don't accidentally get evaluated
8861 more than once by the code below. */
8862 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8863 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8864 incremented = save_expr (incremented);
8865
8866 /* Compute the operands as RTX.
8867 Note whether OP0 is the actual lvalue or a copy of it:
8868 I believe it is a copy iff it is a register or subreg
8869 and insns were generated in computing it. */
8870
8871 temp = get_last_insn ();
8872 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8873
8874 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8875 in place but instead must do sign- or zero-extension during assignment,
8876 so we copy it into a new register and let the code below use it as
8877 a copy.
8878
8879 Note that we can safely modify this SUBREG since it is known not to be
8880 shared (it was made by the expand_expr call above). */
8881
8882 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8883 {
8884 if (post)
8885 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8886 else
8887 bad_subreg = 1;
8888 }
8889 else if (GET_CODE (op0) == SUBREG
8890 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8891 {
8892 /* We cannot increment this SUBREG in place. If we are
8893 post-incrementing, get a copy of the old value. Otherwise,
8894 just mark that we cannot increment in place. */
8895 if (post)
8896 op0 = copy_to_reg (op0);
8897 else
8898 bad_subreg = 1;
8899 }
8900
8901 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8902 && temp != get_last_insn ());
8903 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8904 EXPAND_MEMORY_USE_BAD);
8905
8906 /* Decide whether incrementing or decrementing. */
8907 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8908 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8909 this_optab = sub_optab;
8910
8911 /* Convert decrement by a constant into a negative increment. */
8912 if (this_optab == sub_optab
8913 && GET_CODE (op1) == CONST_INT)
8914 {
8915 op1 = GEN_INT (- INTVAL (op1));
8916 this_optab = add_optab;
8917 }
8918
8919 /* For a preincrement, see if we can do this with a single instruction. */
8920 if (!post)
8921 {
8922 icode = (int) this_optab->handlers[(int) mode].insn_code;
8923 if (icode != (int) CODE_FOR_nothing
8924 /* Make sure that OP0 is valid for operands 0 and 1
8925 of the insn we want to queue. */
8926 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8927 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8928 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8929 single_insn = 1;
8930 }
8931
8932 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8933 then we cannot just increment OP0. We must therefore contrive to
8934 increment the original value. Then, for postincrement, we can return
8935 OP0 since it is a copy of the old value. For preincrement, expand here
8936 unless we can do it with a single insn.
8937
8938 Likewise if storing directly into OP0 would clobber high bits
8939 we need to preserve (bad_subreg). */
8940 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8941 {
8942 /* This is the easiest way to increment the value wherever it is.
8943 Problems with multiple evaluation of INCREMENTED are prevented
8944 because either (1) it is a component_ref or preincrement,
8945 in which case it was stabilized above, or (2) it is an array_ref
8946 with constant index in an array in a register, which is
8947 safe to reevaluate. */
8948 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8949 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8950 ? MINUS_EXPR : PLUS_EXPR),
8951 TREE_TYPE (exp),
8952 incremented,
8953 TREE_OPERAND (exp, 1));
8954
8955 while (TREE_CODE (incremented) == NOP_EXPR
8956 || TREE_CODE (incremented) == CONVERT_EXPR)
8957 {
8958 newexp = convert (TREE_TYPE (incremented), newexp);
8959 incremented = TREE_OPERAND (incremented, 0);
8960 }
8961
8962 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8963 return post ? op0 : temp;
8964 }
8965
8966 if (post)
8967 {
8968 /* We have a true reference to the value in OP0.
8969 If there is an insn to add or subtract in this mode, queue it.
8970 Queueing the increment insn avoids the register shuffling
8971 that often results if we must increment now and first save
8972 the old value for subsequent use. */
8973
8974 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8975 op0 = stabilize (op0);
8976 #endif
8977
8978 icode = (int) this_optab->handlers[(int) mode].insn_code;
8979 if (icode != (int) CODE_FOR_nothing
8980 /* Make sure that OP0 is valid for operands 0 and 1
8981 of the insn we want to queue. */
8982 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8983 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8984 {
8985 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8986 op1 = force_reg (mode, op1);
8987
8988 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8989 }
8990 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8991 {
8992 rtx addr = (general_operand (XEXP (op0, 0), mode)
8993 ? force_reg (Pmode, XEXP (op0, 0))
8994 : copy_to_reg (XEXP (op0, 0)));
8995 rtx temp, result;
8996
8997 op0 = change_address (op0, VOIDmode, addr);
8998 temp = force_reg (GET_MODE (op0), op0);
8999 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9000 op1 = force_reg (mode, op1);
9001
9002 /* The increment queue is LIFO, thus we have to `queue'
9003 the instructions in reverse order. */
9004 enqueue_insn (op0, gen_move_insn (op0, temp));
9005 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9006 return result;
9007 }
9008 }
9009
9010 /* Preincrement, or we can't increment with one simple insn. */
9011 if (post)
9012 /* Save a copy of the value before inc or dec, to return it later. */
9013 temp = value = copy_to_reg (op0);
9014 else
9015 /* Arrange to return the incremented value. */
9016 /* Copy the rtx because expand_binop will protect from the queue,
9017 and the results of that would be invalid for us to return
9018 if our caller does emit_queue before using our result. */
9019 temp = copy_rtx (value = op0);
9020
9021 /* Increment however we can. */
9022 op1 = expand_binop (mode, this_optab, value, op1,
9023 current_function_check_memory_usage ? NULL_RTX : op0,
9024 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9025 /* Make sure the value is stored into OP0. */
9026 if (op1 != op0)
9027 emit_move_insn (op0, op1);
9028
9029 return temp;
9030 }
9031 \f
9032 /* Expand all function calls contained within EXP, innermost ones first.
9033 But don't look within expressions that have sequence points.
9034 For each CALL_EXPR, record the rtx for its value
9035 in the CALL_EXPR_RTL field. */
9036
9037 static void
9038 preexpand_calls (exp)
9039 tree exp;
9040 {
9041 register int nops, i;
9042 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9043
9044 if (! do_preexpand_calls)
9045 return;
9046
9047 /* Only expressions and references can contain calls. */
9048
9049 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9050 return;
9051
9052 switch (TREE_CODE (exp))
9053 {
9054 case CALL_EXPR:
9055 /* Do nothing if already expanded. */
9056 if (CALL_EXPR_RTL (exp) != 0
9057 /* Do nothing if the call returns a variable-sized object. */
9058 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9059 /* Do nothing to built-in functions. */
9060 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9061 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9062 == FUNCTION_DECL)
9063 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9064 return;
9065
9066 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9067 return;
9068
9069 case COMPOUND_EXPR:
9070 case COND_EXPR:
9071 case TRUTH_ANDIF_EXPR:
9072 case TRUTH_ORIF_EXPR:
9073 /* If we find one of these, then we can be sure
9074 the adjust will be done for it (since it makes jumps).
9075 Do it now, so that if this is inside an argument
9076 of a function, we don't get the stack adjustment
9077 after some other args have already been pushed. */
9078 do_pending_stack_adjust ();
9079 return;
9080
9081 case BLOCK:
9082 case RTL_EXPR:
9083 case WITH_CLEANUP_EXPR:
9084 case CLEANUP_POINT_EXPR:
9085 case TRY_CATCH_EXPR:
9086 return;
9087
9088 case SAVE_EXPR:
9089 if (SAVE_EXPR_RTL (exp) != 0)
9090 return;
9091
9092 default:
9093 break;
9094 }
9095
9096 nops = tree_code_length[(int) TREE_CODE (exp)];
9097 for (i = 0; i < nops; i++)
9098 if (TREE_OPERAND (exp, i) != 0)
9099 {
9100 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9101 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9102 It doesn't happen before the call is made. */
9103 ;
9104 else
9105 {
9106 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9107 if (type == 'e' || type == '<' || type == '1' || type == '2'
9108 || type == 'r')
9109 preexpand_calls (TREE_OPERAND (exp, i));
9110 }
9111 }
9112 }
9113 \f
9114 /* At the start of a function, record that we have no previously-pushed
9115 arguments waiting to be popped. */
9116
9117 void
9118 init_pending_stack_adjust ()
9119 {
9120 pending_stack_adjust = 0;
9121 }
9122
9123 /* When exiting from function, if safe, clear out any pending stack adjust
9124 so the adjustment won't get done.
9125
9126 Note, if the current function calls alloca, then it must have a
9127 frame pointer regardless of the value of flag_omit_frame_pointer. */
9128
9129 void
9130 clear_pending_stack_adjust ()
9131 {
9132 #ifdef EXIT_IGNORE_STACK
9133 if (optimize > 0
9134 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9135 && EXIT_IGNORE_STACK
9136 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9137 && ! flag_inline_functions)
9138 pending_stack_adjust = 0;
9139 #endif
9140 }
9141
9142 /* Pop any previously-pushed arguments that have not been popped yet. */
9143
9144 void
9145 do_pending_stack_adjust ()
9146 {
9147 if (inhibit_defer_pop == 0)
9148 {
9149 if (pending_stack_adjust != 0)
9150 adjust_stack (GEN_INT (pending_stack_adjust));
9151 pending_stack_adjust = 0;
9152 }
9153 }
9154 \f
9155 /* Expand conditional expressions. */
9156
9157 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9158 LABEL is an rtx of code CODE_LABEL, in this function and all the
9159 functions here. */
9160
9161 void
9162 jumpifnot (exp, label)
9163 tree exp;
9164 rtx label;
9165 {
9166 do_jump (exp, label, NULL_RTX);
9167 }
9168
9169 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9170
9171 void
9172 jumpif (exp, label)
9173 tree exp;
9174 rtx label;
9175 {
9176 do_jump (exp, NULL_RTX, label);
9177 }
9178
9179 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9180 the result is zero, or IF_TRUE_LABEL if the result is one.
9181 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9182 meaning fall through in that case.
9183
9184 do_jump always does any pending stack adjust except when it does not
9185 actually perform a jump. An example where there is no jump
9186 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9187
9188 This function is responsible for optimizing cases such as
9189 &&, || and comparison operators in EXP. */
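
/* Rough sketch of the idea (illustrative only): for a condition like

     if (a && b) body;

   we never compute the value of the && at all.  A call such as
   jumpifnot (cond, else_label), where COND is the tree for `a && b',
   emits approximately

     jump to else_label if a == 0
     jump to else_label if b == 0
     ... body ...
   else_label:

   which is what the TRUTH_ANDIF_EXPR case below produces by recursing
   with the same IF_FALSE_LABEL for both operands.  */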
9190
9191 void
9192 do_jump (exp, if_false_label, if_true_label)
9193 tree exp;
9194 rtx if_false_label, if_true_label;
9195 {
9196 register enum tree_code code = TREE_CODE (exp);
9197 /* Some cases need to create a label to jump to
9198 in order to properly fall through.
9199 These cases set DROP_THROUGH_LABEL nonzero. */
9200 rtx drop_through_label = 0;
9201 rtx temp;
9202 int i;
9203 tree type;
9204 enum machine_mode mode;
9205
9206 #ifdef MAX_INTEGER_COMPUTATION_MODE
9207 check_max_integer_computation_mode (exp);
9208 #endif
9209
9210 emit_queue ();
9211
9212 switch (code)
9213 {
9214 case ERROR_MARK:
9215 break;
9216
9217 case INTEGER_CST:
9218 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9219 if (temp)
9220 emit_jump (temp);
9221 break;
9222
9223 #if 0
9224 /* This is not true with #pragma weak */
9225 case ADDR_EXPR:
9226 /* The address of something can never be zero. */
9227 if (if_true_label)
9228 emit_jump (if_true_label);
9229 break;
9230 #endif
9231
9232 case NOP_EXPR:
9233 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9234 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9235 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9236 goto normal;
9237 case CONVERT_EXPR:
9238 /* If we are narrowing the operand, we have to do the compare in the
9239 narrower mode. */
9240 if ((TYPE_PRECISION (TREE_TYPE (exp))
9241 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9242 goto normal;
9243 case NON_LVALUE_EXPR:
9244 case REFERENCE_EXPR:
9245 case ABS_EXPR:
9246 case NEGATE_EXPR:
9247 case LROTATE_EXPR:
9248 case RROTATE_EXPR:
9249 /* These cannot change zero->non-zero or vice versa. */
9250 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9251 break;
9252
9253 case WITH_RECORD_EXPR:
9254 /* Put the object on the placeholder list, recurse through our first
9255 operand, and pop the list. */
9256 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9257 placeholder_list);
9258 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9259 placeholder_list = TREE_CHAIN (placeholder_list);
9260 break;
9261
9262 #if 0
9263 /* This is never less insns than evaluating the PLUS_EXPR followed by
9264 a test and can be longer if the test is eliminated. */
9265 case PLUS_EXPR:
9266 /* Reduce to minus. */
9267 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9268 TREE_OPERAND (exp, 0),
9269 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9270 TREE_OPERAND (exp, 1))));
9271 /* Process as MINUS. */
9272 #endif
9273
9274 case MINUS_EXPR:
9275 /* Non-zero iff operands of minus differ. */
9276 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9277 TREE_OPERAND (exp, 0),
9278 TREE_OPERAND (exp, 1)),
9279 NE, NE, if_false_label, if_true_label);
9280 break;
9281
9282 case BIT_AND_EXPR:
9283 /* If we are AND'ing with a small constant, do this comparison in the
9284 smallest type that fits. If the machine doesn't have comparisons
9285 that small, it will be converted back to the wider comparison.
9286 This helps if we are testing the sign bit of a narrower object.
9287 combine can't do this for us because it can't know whether a
9288 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
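
      /* For example (illustrative only): on a 32-bit target, testing

	   if (x & 0x80) ...

	 where X is an `int' would normally be an SImode compare.  Since
	 0x80 fits in a byte, we can instead convert the BIT_AND_EXPR to
	 `unsigned char' and let a QImode compare test the sign bit of
	 the low byte, when the machine has such a compare.  */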
9289
9290 if (! SLOW_BYTE_ACCESS
9291 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9292 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9293 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9294 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9295 && (type = type_for_mode (mode, 1)) != 0
9296 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9297 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9298 != CODE_FOR_nothing))
9299 {
9300 do_jump (convert (type, exp), if_false_label, if_true_label);
9301 break;
9302 }
9303 goto normal;
9304
9305 case TRUTH_NOT_EXPR:
9306 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9307 break;
9308
9309 case TRUTH_ANDIF_EXPR:
9310 if (if_false_label == 0)
9311 if_false_label = drop_through_label = gen_label_rtx ();
9312 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9313 start_cleanup_deferral ();
9314 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9315 end_cleanup_deferral ();
9316 break;
9317
9318 case TRUTH_ORIF_EXPR:
9319 if (if_true_label == 0)
9320 if_true_label = drop_through_label = gen_label_rtx ();
9321 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9322 start_cleanup_deferral ();
9323 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9324 end_cleanup_deferral ();
9325 break;
9326
9327 case COMPOUND_EXPR:
9328 push_temp_slots ();
9329 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9330 preserve_temp_slots (NULL_RTX);
9331 free_temp_slots ();
9332 pop_temp_slots ();
9333 emit_queue ();
9334 do_pending_stack_adjust ();
9335 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9336 break;
9337
9338 case COMPONENT_REF:
9339 case BIT_FIELD_REF:
9340 case ARRAY_REF:
9341 {
9342 int bitsize, bitpos, unsignedp;
9343 enum machine_mode mode;
9344 tree type;
9345 tree offset;
9346 int volatilep = 0;
9347 unsigned int alignment;
9348
9349 /* Get description of this reference. We don't actually care
9350 about the underlying object here. */
9351 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9352 &mode, &unsignedp, &volatilep,
9353 &alignment);
9354
9355 type = type_for_size (bitsize, unsignedp);
9356 if (! SLOW_BYTE_ACCESS
9357 && type != 0 && bitsize >= 0
9358 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9359 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9360 != CODE_FOR_nothing))
9361 {
9362 do_jump (convert (type, exp), if_false_label, if_true_label);
9363 break;
9364 }
9365 goto normal;
9366 }
9367
9368 case COND_EXPR:
9369 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9370 if (integer_onep (TREE_OPERAND (exp, 1))
9371 && integer_zerop (TREE_OPERAND (exp, 2)))
9372 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9373
9374 else if (integer_zerop (TREE_OPERAND (exp, 1))
9375 && integer_onep (TREE_OPERAND (exp, 2)))
9376 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9377
9378 else
9379 {
9380 register rtx label1 = gen_label_rtx ();
9381 drop_through_label = gen_label_rtx ();
9382
9383 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9384
9385 start_cleanup_deferral ();
9386 /* Now the THEN-expression. */
9387 do_jump (TREE_OPERAND (exp, 1),
9388 if_false_label ? if_false_label : drop_through_label,
9389 if_true_label ? if_true_label : drop_through_label);
9390 /* In case the do_jump just above never jumps. */
9391 do_pending_stack_adjust ();
9392 emit_label (label1);
9393
9394 /* Now the ELSE-expression. */
9395 do_jump (TREE_OPERAND (exp, 2),
9396 if_false_label ? if_false_label : drop_through_label,
9397 if_true_label ? if_true_label : drop_through_label);
9398 end_cleanup_deferral ();
9399 }
9400 break;
9401
9402 case EQ_EXPR:
9403 {
9404 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9405
9406 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9407 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9408 {
9409 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9410 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9411 do_jump
9412 (fold
9413 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9414 fold (build (EQ_EXPR, TREE_TYPE (exp),
9415 fold (build1 (REALPART_EXPR,
9416 TREE_TYPE (inner_type),
9417 exp0)),
9418 fold (build1 (REALPART_EXPR,
9419 TREE_TYPE (inner_type),
9420 exp1)))),
9421 fold (build (EQ_EXPR, TREE_TYPE (exp),
9422 fold (build1 (IMAGPART_EXPR,
9423 TREE_TYPE (inner_type),
9424 exp0)),
9425 fold (build1 (IMAGPART_EXPR,
9426 TREE_TYPE (inner_type),
9427 exp1)))))),
9428 if_false_label, if_true_label);
9429 }
9430
9431 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9432 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9433
9434 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9435 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9436 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9437 else
9438 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9439 break;
9440 }
9441
9442 case NE_EXPR:
9443 {
9444 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9445
9446 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9447 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9448 {
9449 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9450 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9451 do_jump
9452 (fold
9453 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9454 fold (build (NE_EXPR, TREE_TYPE (exp),
9455 fold (build1 (REALPART_EXPR,
9456 TREE_TYPE (inner_type),
9457 exp0)),
9458 fold (build1 (REALPART_EXPR,
9459 TREE_TYPE (inner_type),
9460 exp1)))),
9461 fold (build (NE_EXPR, TREE_TYPE (exp),
9462 fold (build1 (IMAGPART_EXPR,
9463 TREE_TYPE (inner_type),
9464 exp0)),
9465 fold (build1 (IMAGPART_EXPR,
9466 TREE_TYPE (inner_type),
9467 exp1)))))),
9468 if_false_label, if_true_label);
9469 }
9470
9471 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9472 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9473
9474 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9475 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9476 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9477 else
9478 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9479 break;
9480 }
9481
9482 case LT_EXPR:
9483 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9484 if (GET_MODE_CLASS (mode) == MODE_INT
9485 && ! can_compare_p (LT, mode, ccp_jump))
9486 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9487 else
9488 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9489 break;
9490
9491 case LE_EXPR:
9492 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9493 if (GET_MODE_CLASS (mode) == MODE_INT
9494 && ! can_compare_p (LE, mode, ccp_jump))
9495 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9496 else
9497 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9498 break;
9499
9500 case GT_EXPR:
9501 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9502 if (GET_MODE_CLASS (mode) == MODE_INT
9503 && ! can_compare_p (GT, mode, ccp_jump))
9504 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9505 else
9506 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9507 break;
9508
9509 case GE_EXPR:
9510 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9511 if (GET_MODE_CLASS (mode) == MODE_INT
9512 && ! can_compare_p (GE, mode, ccp_jump))
9513 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9514 else
9515 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9516 break;
9517
9518 case UNORDERED_EXPR:
9519 case ORDERED_EXPR:
9520 {
9521 enum rtx_code cmp, rcmp;
9522 int do_rev;
9523
9524 if (code == UNORDERED_EXPR)
9525 cmp = UNORDERED, rcmp = ORDERED;
9526 else
9527 cmp = ORDERED, rcmp = UNORDERED;
9528 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9529
9530 do_rev = 0;
9531 if (! can_compare_p (cmp, mode, ccp_jump)
9532 && (can_compare_p (rcmp, mode, ccp_jump)
9533 /* If the target doesn't provide either UNORDERED or ORDERED
9534 comparisons, canonicalize on UNORDERED for the library. */
9535 || rcmp == UNORDERED))
9536 do_rev = 1;
9537
9538 if (! do_rev)
9539 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9540 else
9541 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9542 }
9543 break;
9544
9545 {
9546 enum rtx_code rcode1;
9547 enum tree_code tcode2;
9548
9549 case UNLT_EXPR:
9550 rcode1 = UNLT;
9551 tcode2 = LT_EXPR;
9552 goto unordered_bcc;
9553 case UNLE_EXPR:
9554 rcode1 = UNLE;
9555 tcode2 = LE_EXPR;
9556 goto unordered_bcc;
9557 case UNGT_EXPR:
9558 rcode1 = UNGT;
9559 tcode2 = GT_EXPR;
9560 goto unordered_bcc;
9561 case UNGE_EXPR:
9562 rcode1 = UNGE;
9563 tcode2 = GE_EXPR;
9564 goto unordered_bcc;
9565 case UNEQ_EXPR:
9566 rcode1 = UNEQ;
9567 tcode2 = EQ_EXPR;
9568 goto unordered_bcc;
9569
9570 unordered_bcc:
9571 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9572 if (can_compare_p (rcode1, mode, ccp_jump))
9573 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9574 if_true_label);
9575 else
9576 {
9577 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9578 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9579 tree cmp0, cmp1;
9580
9581 /* If the target doesn't support combined unordered
9582 compares, decompose into UNORDERED + comparison. */
9583 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9584 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9585 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9586 do_jump (exp, if_false_label, if_true_label);
9587 }
9588 }
9589 break;
9590
9591 default:
9592 normal:
9593 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9594 #if 0
9595 /* This is not needed any more and causes poor code since it causes
9596 comparisons and tests from non-SI objects to have different code
9597 sequences. */
9598 /* Copy to register to avoid generating bad insns by cse
9599 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9600 if (!cse_not_expected && GET_CODE (temp) == MEM)
9601 temp = copy_to_reg (temp);
9602 #endif
9603 do_pending_stack_adjust ();
9604 /* Do any postincrements in the expression that was tested. */
9605 emit_queue ();
9606
9607 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9608 {
9609 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9610 if (target)
9611 emit_jump (target);
9612 }
9613 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9614 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9615 /* Note swapping the labels gives us not-equal. */
9616 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9617 else if (GET_MODE (temp) != VOIDmode)
9618 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9619 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9620 GET_MODE (temp), NULL_RTX, 0,
9621 if_false_label, if_true_label);
9622 else
9623 abort ();
9624 }
9625
9626 if (drop_through_label)
9627 {
9628 /* If do_jump produces code that might be jumped around,
9629 do any stack adjusts from that code, before the place
9630 where control merges in. */
9631 do_pending_stack_adjust ();
9632 emit_label (drop_through_label);
9633 }
9634 }
9635 \f
9636 /* Given a comparison expression EXP for values too wide to be compared
9637 with one insn, test the comparison and jump to the appropriate label.
9638 The code of EXP is ignored; we always test GT if SWAP is 0,
9639 and LT if SWAP is 1. */
9640
9641 static void
9642 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9643 tree exp;
9644 int swap;
9645 rtx if_false_label, if_true_label;
9646 {
9647 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9648 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9649 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9650 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9651
9652 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9653 }
9654
9655 /* Compare OP0 with OP1, word at a time, in mode MODE.
9656 UNSIGNEDP says to do unsigned comparison.
9657 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
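
/* Sketch of the scheme (illustrative only): comparing two DImode values
   A and B on a 32-bit target works on the SImode halves:

     if (A.high > B.high)  goto if_true_label;
     if (A.high != B.high) goto if_false_label;
     if (A.low  > B.low)   goto if_true_label;   (unsigned compare)
     goto if_false_label;

   Only the high-order word uses the signedness of the original
   comparison; the lower words are always compared unsigned.  */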
9658
9659 void
9660 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9661 enum machine_mode mode;
9662 int unsignedp;
9663 rtx op0, op1;
9664 rtx if_false_label, if_true_label;
9665 {
9666 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9667 rtx drop_through_label = 0;
9668 int i;
9669
9670 if (! if_true_label || ! if_false_label)
9671 drop_through_label = gen_label_rtx ();
9672 if (! if_true_label)
9673 if_true_label = drop_through_label;
9674 if (! if_false_label)
9675 if_false_label = drop_through_label;
9676
9677 /* Compare a word at a time, high order first. */
9678 for (i = 0; i < nwords; i++)
9679 {
9680 rtx op0_word, op1_word;
9681
9682 if (WORDS_BIG_ENDIAN)
9683 {
9684 op0_word = operand_subword_force (op0, i, mode);
9685 op1_word = operand_subword_force (op1, i, mode);
9686 }
9687 else
9688 {
9689 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9690 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9691 }
9692
9693 /* All but high-order word must be compared as unsigned. */
9694 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9695 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9696 NULL_RTX, if_true_label);
9697
9698 /* Consider lower words only if these are equal. */
9699 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9700 NULL_RTX, 0, NULL_RTX, if_false_label);
9701 }
9702
9703 if (if_false_label)
9704 emit_jump (if_false_label);
9705 if (drop_through_label)
9706 emit_label (drop_through_label);
9707 }
9708
9709 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9710 with one insn, test the comparison and jump to the appropriate label. */
9711
9712 static void
9713 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9714 tree exp;
9715 rtx if_false_label, if_true_label;
9716 {
9717 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9718 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9719 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9720 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9721 int i;
9722 rtx drop_through_label = 0;
9723
9724 if (! if_false_label)
9725 drop_through_label = if_false_label = gen_label_rtx ();
9726
9727 for (i = 0; i < nwords; i++)
9728 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9729 operand_subword_force (op1, i, mode),
9730 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9731 word_mode, NULL_RTX, 0, if_false_label,
9732 NULL_RTX);
9733
9734 if (if_true_label)
9735 emit_jump (if_true_label);
9736 if (drop_through_label)
9737 emit_label (drop_through_label);
9738 }
9739 \f
9740 /* Jump according to whether OP0 is 0.
9741 We assume that OP0 has an integer mode that is too wide
9742 for the available compare insns. */
9743
9744 void
9745 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9746 rtx op0;
9747 rtx if_false_label, if_true_label;
9748 {
9749 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9750 rtx part;
9751 int i;
9752 rtx drop_through_label = 0;
9753
9754 /* The fastest way of doing this comparison on almost any machine is to
9755 "or" all the words and compare the result. If all have to be loaded
9756 from memory and this is a very wide item, it's possible this may
9757 be slower, but that's highly unlikely. */
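
  /* Illustrative sketch: for a DImode OP0 on a 32-bit target the test
     becomes

       if ((OP0.high | OP0.low) == 0) goto if_true_label;
       else goto if_false_label;

     one IOR plus a single word-sized compare against zero, instead of
     a compare and branch for every word.  */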
9758
9759 part = gen_reg_rtx (word_mode);
9760 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9761 for (i = 1; i < nwords && part != 0; i++)
9762 part = expand_binop (word_mode, ior_optab, part,
9763 operand_subword_force (op0, i, GET_MODE (op0)),
9764 part, 1, OPTAB_WIDEN);
9765
9766 if (part != 0)
9767 {
9768 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9769 NULL_RTX, 0, if_false_label, if_true_label);
9770
9771 return;
9772 }
9773
9774 /* If we couldn't do the "or" simply, do this with a series of compares. */
9775 if (! if_false_label)
9776 drop_through_label = if_false_label = gen_label_rtx ();
9777
9778 for (i = 0; i < nwords; i++)
9779 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9780 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9781 if_false_label, NULL_RTX);
9782
9783 if (if_true_label)
9784 emit_jump (if_true_label);
9785
9786 if (drop_through_label)
9787 emit_label (drop_through_label);
9788 }
9789 \f
9790 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9791 (including code to compute the values to be compared)
9792 and set (CC0) according to the result.
9793 The decision as to signed or unsigned comparison must be made by the caller.
9794
9795 We force a stack adjustment unless there are currently
9796 things pushed on the stack that aren't yet used.
9797
9798 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9799 compared.
9800
9801 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9802 size of MODE should be used. */
9803
9804 rtx
9805 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9806 register rtx op0, op1;
9807 enum rtx_code code;
9808 int unsignedp;
9809 enum machine_mode mode;
9810 rtx size;
9811 unsigned int align;
9812 {
9813 rtx tem;
9814
9815 /* If one operand is constant, make it the second one. Only do this
9816 if the other operand is not constant as well. */
9817
9818 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9819 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9820 {
9821 tem = op0;
9822 op0 = op1;
9823 op1 = tem;
9824 code = swap_condition (code);
9825 }
9826
9827 if (flag_force_mem)
9828 {
9829 op0 = force_not_mem (op0);
9830 op1 = force_not_mem (op1);
9831 }
9832
9833 do_pending_stack_adjust ();
9834
9835 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9836 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9837 return tem;
9838
9839 #if 0
9840 /* There's no need to do this now that combine.c can eliminate lots of
9841 sign extensions. This can be less efficient in certain cases on other
9842 machines. */
9843
9844 /* If this is a signed equality comparison, we can do it as an
9845 unsigned comparison since zero-extension is cheaper than sign
9846 extension and comparisons with zero are done as unsigned. This is
9847 the case even on machines that can do fast sign extension, since
9848 zero-extension is easier to combine with other operations than
9849 sign-extension is. If we are comparing against a constant, we must
9850 convert it to what it would look like unsigned. */
9851 if ((code == EQ || code == NE) && ! unsignedp
9852 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9853 {
9854 if (GET_CODE (op1) == CONST_INT
9855 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9856 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9857 unsignedp = 1;
9858 }
9859 #endif
9860
9861 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9862
9863 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9864 }
9865
9866 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9867 The decision as to signed or unsigned comparison must be made by the caller.
9868
9869 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9870 compared.
9871
9872 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9873 size of MODE should be used. */
9874
9875 void
9876 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9877 if_false_label, if_true_label)
9878 register rtx op0, op1;
9879 enum rtx_code code;
9880 int unsignedp;
9881 enum machine_mode mode;
9882 rtx size;
9883 unsigned int align;
9884 rtx if_false_label, if_true_label;
9885 {
9886 rtx tem;
9887 int dummy_true_label = 0;
9888
9889 /* Reverse the comparison if that is safe and we want to jump if it is
9890 false. */
9891 if (! if_true_label && ! FLOAT_MODE_P (mode))
9892 {
9893 if_true_label = if_false_label;
9894 if_false_label = 0;
9895 code = reverse_condition (code);
9896 }
9897
9898 /* If one operand is constant, make it the second one. Only do this
9899 if the other operand is not constant as well. */
9900
9901 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9902 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9903 {
9904 tem = op0;
9905 op0 = op1;
9906 op1 = tem;
9907 code = swap_condition (code);
9908 }
9909
9910 if (flag_force_mem)
9911 {
9912 op0 = force_not_mem (op0);
9913 op1 = force_not_mem (op1);
9914 }
9915
9916 do_pending_stack_adjust ();
9917
9918 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9919 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9920 {
9921 if (tem == const_true_rtx)
9922 {
9923 if (if_true_label)
9924 emit_jump (if_true_label);
9925 }
9926 else
9927 {
9928 if (if_false_label)
9929 emit_jump (if_false_label);
9930 }
9931 return;
9932 }
9933
9934 #if 0
9935 /* There's no need to do this now that combine.c can eliminate lots of
9936 sign extensions. This can be less efficient in certain cases on other
9937 machines. */
9938
9939 /* If this is a signed equality comparison, we can do it as an
9940 unsigned comparison since zero-extension is cheaper than sign
9941 extension and comparisons with zero are done as unsigned. This is
9942 the case even on machines that can do fast sign extension, since
9943 zero-extension is easier to combine with other operations than
9944 sign-extension is. If we are comparing against a constant, we must
9945 convert it to what it would look like unsigned. */
9946 if ((code == EQ || code == NE) && ! unsignedp
9947 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9948 {
9949 if (GET_CODE (op1) == CONST_INT
9950 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9951 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9952 unsignedp = 1;
9953 }
9954 #endif
9955
9956 if (! if_true_label)
9957 {
9958 dummy_true_label = 1;
9959 if_true_label = gen_label_rtx ();
9960 }
9961
9962 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9963 if_true_label);
9964
9965 if (if_false_label)
9966 emit_jump (if_false_label);
9967 if (dummy_true_label)
9968 emit_label (if_true_label);
9969 }
9970
9971 /* Generate code for a comparison expression EXP (including code to compute
9972 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9973 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9974 generated code will drop through.
9975 SIGNED_CODE should be the rtx operation for this comparison for
9976 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9977
9978 We force a stack adjustment unless there are currently
9979 things pushed on the stack that aren't yet used. */
9980
9981 static void
9982 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9983 if_true_label)
9984 register tree exp;
9985 enum rtx_code signed_code, unsigned_code;
9986 rtx if_false_label, if_true_label;
9987 {
9988 unsigned int align0, align1;
9989 register rtx op0, op1;
9990 register tree type;
9991 register enum machine_mode mode;
9992 int unsignedp;
9993 enum rtx_code code;
9994
9995 /* Don't crash if the comparison was erroneous. */
9996 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9997 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9998 return;
9999
10000 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10001 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10002 mode = TYPE_MODE (type);
10003 unsignedp = TREE_UNSIGNED (type);
10004 code = unsignedp ? unsigned_code : signed_code;
10005
10006 #ifdef HAVE_canonicalize_funcptr_for_compare
10007 /* If function pointers need to be "canonicalized" before they can
10008 be reliably compared, then canonicalize them. */
10009 if (HAVE_canonicalize_funcptr_for_compare
10010 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10011 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10012 == FUNCTION_TYPE))
10013 {
10014 rtx new_op0 = gen_reg_rtx (mode);
10015
10016 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10017 op0 = new_op0;
10018 }
10019
10020 if (HAVE_canonicalize_funcptr_for_compare
10021 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10022 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10023 == FUNCTION_TYPE))
10024 {
10025 rtx new_op1 = gen_reg_rtx (mode);
10026
10027 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10028 op1 = new_op1;
10029 }
10030 #endif
10031
10032 /* Do any postincrements in the expression that was tested. */
10033 emit_queue ();
10034
10035 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10036 ((mode == BLKmode)
10037 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10038 MIN (align0, align1) / BITS_PER_UNIT,
10039 if_false_label, if_true_label);
10040 }
10041 \f
10042 /* Generate code to calculate EXP using a store-flag instruction
10043 and return an rtx for the result. EXP is either a comparison
10044 or a TRUTH_NOT_EXPR whose operand is a comparison.
10045
10046 If TARGET is nonzero, store the result there if convenient.
10047
10048 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10049 cheap.
10050
10051 Return zero if there is no suitable set-flag instruction
10052 available on this machine.
10053
10054 Once expand_expr has been called on the arguments of the comparison,
10055 we are committed to doing the store flag, since it is not safe to
10056 re-evaluate the expression. We emit the store-flag insn by calling
10057 emit_store_flag, but only expand the arguments if we have a reason
10058 to believe that emit_store_flag will be successful. If we think that
10059 it will, but it isn't, we have to simulate the store-flag with a
10060 set/jump/set sequence. */
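
/* Illustrative sketch only: for C source such as

     flag = (a < b);

   a machine with an scc ("set on condition") instruction can produce

     cmp  a, b
     slt  flag		; flag = 1 if a < b, else 0

   with no branch at all; otherwise we fall back to the set/jump/set
   sequence emitted at the end of this function.  The mnemonics are made
   up for the example and do not refer to any particular port.  */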
10061
10062 static rtx
10063 do_store_flag (exp, target, mode, only_cheap)
10064 tree exp;
10065 rtx target;
10066 enum machine_mode mode;
10067 int only_cheap;
10068 {
10069 enum rtx_code code;
10070 tree arg0, arg1, type;
10071 tree tem;
10072 enum machine_mode operand_mode;
10073 int invert = 0;
10074 int unsignedp;
10075 rtx op0, op1;
10076 enum insn_code icode;
10077 rtx subtarget = target;
10078 rtx result, label;
10079
10080 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10081 result at the end. We can't simply invert the test since it would
10082 have already been inverted if it were valid. This case occurs for
10083 some floating-point comparisons. */
10084
10085 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10086 invert = 1, exp = TREE_OPERAND (exp, 0);
10087
10088 arg0 = TREE_OPERAND (exp, 0);
10089 arg1 = TREE_OPERAND (exp, 1);
10090 type = TREE_TYPE (arg0);
10091 operand_mode = TYPE_MODE (type);
10092 unsignedp = TREE_UNSIGNED (type);
10093
10094 /* We won't bother with BLKmode store-flag operations because it would mean
10095 passing a lot of information to emit_store_flag. */
10096 if (operand_mode == BLKmode)
10097 return 0;
10098
10099 /* We won't bother with store-flag operations involving function pointers
10100 when function pointers must be canonicalized before comparisons. */
10101 #ifdef HAVE_canonicalize_funcptr_for_compare
10102 if (HAVE_canonicalize_funcptr_for_compare
10103 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10104 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10105 == FUNCTION_TYPE))
10106 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10107 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10108 == FUNCTION_TYPE))))
10109 return 0;
10110 #endif
10111
10112 STRIP_NOPS (arg0);
10113 STRIP_NOPS (arg1);
10114
10115 /* Get the rtx comparison code to use. We know that EXP is a comparison
10116 operation of some type. Some comparisons against 1 and -1 can be
10117 converted to comparisons with zero. Do so here so that the tests
10118 below will be aware that we have a comparison with zero. These
10119 tests will not catch constants in the first operand, but constants
10120 are rarely passed as the first operand. */
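
  /* For example (illustrative only):

       x < 1    becomes   x <= 0
       x >= 1   becomes   x > 0
       x <= -1  becomes   x < 0     (signed only)
       x > -1   becomes   x >= 0    (signed only)

     so the tests further down only have to recognize comparisons
     against zero.  */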
10121
10122 switch (TREE_CODE (exp))
10123 {
10124 case EQ_EXPR:
10125 code = EQ;
10126 break;
10127 case NE_EXPR:
10128 code = NE;
10129 break;
10130 case LT_EXPR:
10131 if (integer_onep (arg1))
10132 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10133 else
10134 code = unsignedp ? LTU : LT;
10135 break;
10136 case LE_EXPR:
10137 if (! unsignedp && integer_all_onesp (arg1))
10138 arg1 = integer_zero_node, code = LT;
10139 else
10140 code = unsignedp ? LEU : LE;
10141 break;
10142 case GT_EXPR:
10143 if (! unsignedp && integer_all_onesp (arg1))
10144 arg1 = integer_zero_node, code = GE;
10145 else
10146 code = unsignedp ? GTU : GT;
10147 break;
10148 case GE_EXPR:
10149 if (integer_onep (arg1))
10150 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10151 else
10152 code = unsignedp ? GEU : GE;
10153 break;
10154
10155 case UNORDERED_EXPR:
10156 code = UNORDERED;
10157 break;
10158 case ORDERED_EXPR:
10159 code = ORDERED;
10160 break;
10161 case UNLT_EXPR:
10162 code = UNLT;
10163 break;
10164 case UNLE_EXPR:
10165 code = UNLE;
10166 break;
10167 case UNGT_EXPR:
10168 code = UNGT;
10169 break;
10170 case UNGE_EXPR:
10171 code = UNGE;
10172 break;
10173 case UNEQ_EXPR:
10174 code = UNEQ;
10175 break;
10176
10177 default:
10178 abort ();
10179 }
10180
10181 /* Put a constant second. */
10182 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10183 {
10184 tem = arg0; arg0 = arg1; arg1 = tem;
10185 code = swap_condition (code);
10186 }
10187
10188 /* If this is an equality or inequality test of a single bit, we can
10189 do this by shifting the bit being tested to the low-order bit and
10190 masking the result with the constant 1. If the condition was EQ,
10191 we xor it with 1. This does not require an scc insn and is faster
10192 than an scc insn even if we have it. */
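
  /* For example (illustrative only):

       (x & 0x08) != 0   becomes   (x >> 3) & 1
       (x & 0x08) == 0   becomes   ((x >> 3) ^ 1) & 1

     and when the bit being tested is the sign bit, the final AND can be
     omitted, which is why the operations are done unsigned in that
     case.  */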
10193
10194 if ((code == NE || code == EQ)
10195 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10196 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10197 {
10198 tree inner = TREE_OPERAND (arg0, 0);
10199 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10200 int ops_unsignedp;
10201
10202 /* If INNER is a right shift of a constant and it plus BITNUM does
10203 not overflow, adjust BITNUM and INNER. */
10204
10205 if (TREE_CODE (inner) == RSHIFT_EXPR
10206 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10207 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10208 && bitnum < TYPE_PRECISION (type)
10209 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10210 bitnum - TYPE_PRECISION (type)))
10211 {
10212 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10213 inner = TREE_OPERAND (inner, 0);
10214 }
10215
10216 /* If we are going to be able to omit the AND below, we must do our
10217 operations as unsigned. If we must use the AND, we have a choice.
10218 Normally unsigned is faster, but for some machines signed is. */
10219 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10220 #ifdef LOAD_EXTEND_OP
10221 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10222 #else
10223 : 1
10224 #endif
10225 );
10226
10227 if (subtarget == 0 || GET_CODE (subtarget) != REG
10228 || GET_MODE (subtarget) != operand_mode
10229 || ! safe_from_p (subtarget, inner, 1))
10230 subtarget = 0;
10231
10232 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10233
10234 if (bitnum != 0)
10235 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10236 size_int (bitnum), subtarget, ops_unsignedp);
10237
10238 if (GET_MODE (op0) != mode)
10239 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10240
10241 if ((code == EQ && ! invert) || (code == NE && invert))
10242 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10243 ops_unsignedp, OPTAB_LIB_WIDEN);
10244
10245 /* Put the AND last so it can combine with more things. */
10246 if (bitnum != TYPE_PRECISION (type) - 1)
10247 op0 = expand_and (op0, const1_rtx, subtarget);
10248
10249 return op0;
10250 }
10251
10252 /* Now see if we are likely to be able to do this. Return if not. */
10253 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10254 return 0;
10255
10256 icode = setcc_gen_code[(int) code];
10257 if (icode == CODE_FOR_nothing
10258 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10259 {
10260 /* We can only do this if it is one of the special cases that
10261 can be handled without an scc insn. */
10262 if ((code == LT && integer_zerop (arg1))
10263 || (! only_cheap && code == GE && integer_zerop (arg1)))
10264 ;
10265 else if (BRANCH_COST >= 0
10266 && ! only_cheap && (code == NE || code == EQ)
10267 && TREE_CODE (type) != REAL_TYPE
10268 && ((abs_optab->handlers[(int) operand_mode].insn_code
10269 != CODE_FOR_nothing)
10270 || (ffs_optab->handlers[(int) operand_mode].insn_code
10271 != CODE_FOR_nothing)))
10272 ;
10273 else
10274 return 0;
10275 }
10276
10277 preexpand_calls (exp);
10278 if (subtarget == 0 || GET_CODE (subtarget) != REG
10279 || GET_MODE (subtarget) != operand_mode
10280 || ! safe_from_p (subtarget, arg1, 1))
10281 subtarget = 0;
10282
10283 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10284 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10285
10286 if (target == 0)
10287 target = gen_reg_rtx (mode);
10288
10289 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10290 because, if emit_store_flag does anything it will succeed and
10291 OP0 and OP1 will not be used subsequently. */
10292
10293 result = emit_store_flag (target, code,
10294 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10295 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10296 operand_mode, unsignedp, 1);
10297
10298 if (result)
10299 {
10300 if (invert)
10301 result = expand_binop (mode, xor_optab, result, const1_rtx,
10302 result, 0, OPTAB_LIB_WIDEN);
10303 return result;
10304 }
10305
10306 /* If this failed, we have to do this with set/compare/jump/set code. */
10307 if (GET_CODE (target) != REG
10308 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10309 target = gen_reg_rtx (GET_MODE (target));
10310
10311 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10312 result = compare_from_rtx (op0, op1, code, unsignedp,
10313 operand_mode, NULL_RTX, 0);
10314 if (GET_CODE (result) == CONST_INT)
10315 return (((result == const0_rtx && ! invert)
10316 || (result != const0_rtx && invert))
10317 ? const0_rtx : const1_rtx);
10318
10319 label = gen_label_rtx ();
10320 if (bcc_gen_fctn[(int) code] == 0)
10321 abort ();
10322
10323 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10324 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10325 emit_label (label);
10326
10327 return target;
10328 }
10329 \f
10330 /* Generate a tablejump instruction (used for switch statements). */
10331
10332 #ifdef HAVE_tablejump
10333
10334 /* INDEX is the value being switched on, with the lowest value
10335 in the table already subtracted.
10336 MODE is its expected mode (needed if INDEX is constant).
10337 RANGE is the length of the jump table.
10338 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10339
10340 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10341 index value is out of range. */
10342
10343 void
10344 do_tablejump (index, mode, range, table_label, default_label)
10345 rtx index, range, table_label, default_label;
10346 enum machine_mode mode;
10347 {
10348 register rtx temp, vector;
10349
10350 /* Do an unsigned comparison (in the proper mode) between the index
10351 expression and the value which represents the length of the range.
10352 Since we just finished subtracting the lower bound of the range
10353 from the index expression, this comparison allows us to simultaneously
10354 check that the original index expression value is both greater than
10355 or equal to the minimum value of the range and less than or equal to
10356 the maximum value of the range. */
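 /* For example, if the case values run from 4 to 7, the caller has already
    subtracted 4, so valid indices are 0..3; an original value of 2 wraps to
    a huge unsigned number after the subtraction, and the single GTU
    comparison against RANGE rejects it just as it rejects values above 7.  */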
10357
10358 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10359 0, default_label);
10360
10361 /* If index is in range, it must fit in Pmode.
10362 Convert to Pmode so we can index with it. */
10363 if (mode != Pmode)
10364 index = convert_to_mode (Pmode, index, 1);
10365
10366 /* Don't let a MEM slip through, because then the INDEX that comes
10367 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10368 and break_out_memory_refs will go to work on it and mess it up. */
10369 #ifdef PIC_CASE_VECTOR_ADDRESS
10370 if (flag_pic && GET_CODE (index) != REG)
10371 index = copy_to_mode_reg (Pmode, index);
10372 #endif
10373
10374 /* If flag_force_addr were to affect this address
10375 it could interfere with the tricky assumptions made
10376 about addresses that contain label-refs,
10377 which may be valid only very near the tablejump itself. */
10378 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10379 GET_MODE_SIZE, because this indicates how large insns are. The other
10380 uses should all be Pmode, because they are addresses. This code
10381 could fail if addresses and insns are not the same size. */
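 /* The address built below has the RTL form
	(plus (mult INDEX (const_int SIZE)) (label_ref TABLE_LABEL)),
    where SIZE is GET_MODE_SIZE (CASE_VECTOR_MODE).  */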
10382 index = gen_rtx_PLUS (Pmode,
10383 gen_rtx_MULT (Pmode, index,
10384 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10385 gen_rtx_LABEL_REF (Pmode, table_label));
10386 #ifdef PIC_CASE_VECTOR_ADDRESS
10387 if (flag_pic)
10388 index = PIC_CASE_VECTOR_ADDRESS (index);
10389 else
10390 #endif
10391 index = memory_address_noforce (CASE_VECTOR_MODE, index);
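 /* Fetch the dispatch-table entry: VECTOR is a read-only MEM (hence
    RTX_UNCHANGING_P) and convert_move copies it into the pseudo TEMP.  */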
10392 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10393 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10394 RTX_UNCHANGING_P (vector) = 1;
10395 convert_move (temp, vector, 0);
10396
10397 emit_jump_insn (gen_tablejump (temp, table_label));
10398
10399 /* If we are generating PIC code or if the table is PC-relative, the
10400 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10401 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10402 emit_barrier ();
10403 }
10404
10405 #endif /* HAVE_tablejump */