1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
47
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
51
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
60
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
63
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
66
67 #ifdef PUSH_ROUNDING
68
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first */
71 #endif
72
73 #endif
74
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
82
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
87
88 /* If this is nonzero, we do not bother generating VOLATILE
89 around volatile memory references, and we are willing to
90 output indirect addresses. If cse is to follow, we reject
91 indirect addresses so a useful potential cse is generated;
92 if it is used only once, instruction combination will produce
93 the same indirect address eventually. */
94 int cse_not_expected;
95
96 /* Nonzero to generate code for all the subroutines within an
97 expression before generating the upper levels of the expression.
98 Nowadays this is never zero. */
99 int do_preexpand_calls = 1;
100
101 /* Don't check memory usage, since code is being emitted to check memory
102 usage. Used when current_function_check_memory_usage is true, to avoid
103 infinite recursion. */
104 static int in_check_memory_usage;
105
106 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
107 static tree placeholder_list = 0;
108
109 /* This structure is used by move_by_pieces to describe the move to
110 be performed. */
111 struct move_by_pieces
112 {
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 int to_struct;
118 int to_readonly;
119 rtx from;
120 rtx from_addr;
121 int autinc_from;
122 int explicit_inc_from;
123 int from_struct;
124 int from_readonly;
125 int len;
126 int offset;
127 int reverse;
128 };
129
130 /* This structure is used by clear_by_pieces to describe the clear to
131 be performed. */
132
133 struct clear_by_pieces
134 {
135 rtx to;
136 rtx to_addr;
137 int autinc_to;
138 int explicit_inc_to;
139 int to_struct;
140 int len;
141 int offset;
142 int reverse;
143 };
144
145 extern struct obstack permanent_obstack;
146
147 static rtx get_push_address PARAMS ((int));
148
149 static rtx enqueue_insn PARAMS ((rtx, rtx));
150 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
151 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
152 struct move_by_pieces *));
153 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
154 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
155 enum machine_mode,
156 struct clear_by_pieces *));
157 static int is_zeros_p PARAMS ((tree));
158 static int mostly_zeros_p PARAMS ((tree));
159 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
160 HOST_WIDE_INT, enum machine_mode,
161 tree, tree, unsigned int, int));
162 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
163 HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int,
167 unsigned int, HOST_WIDE_INT, int));
168 static enum memory_use_mode
169 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
170 static tree save_noncopied_parts PARAMS ((tree, tree));
171 static tree init_noncopied_parts PARAMS ((tree, tree));
172 static int safe_from_p PARAMS ((rtx, tree, int));
173 static int fixed_type_p PARAMS ((tree));
174 static rtx var_rtx PARAMS ((tree));
175 static int readonly_fields_p PARAMS ((tree));
176 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
177 static rtx expand_increment PARAMS ((tree, int, int));
178 static void preexpand_calls PARAMS ((tree));
179 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
180 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
181 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
182 rtx, rtx));
183 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
184
185 /* Record for each mode whether we can move a register directly to or
186 from an object of that mode in memory. If we can't, we won't try
187 to use that mode directly when accessing a field of that mode. */
188
189 static char direct_load[NUM_MACHINE_MODES];
190 static char direct_store[NUM_MACHINE_MODES];
191
192 /* If a memory-to-memory move would take MOVE_RATIO or more simple
193 move-instruction sequences, we will do a movstr or libcall instead. */
194
195 #ifndef MOVE_RATIO
196 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
197 #define MOVE_RATIO 2
198 #else
199 /* If we are optimizing for space (-Os), cut down the default move ratio. */
200 #define MOVE_RATIO (optimize_size ? 3 : 15)
201 #endif
202 #endif
203
204 /* This macro is used to determine whether move_by_pieces should be called
205 to perform a structure copy. */
206 #ifndef MOVE_BY_PIECES_P
207 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
208 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
209 #endif
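
/* As a hedged illustration only (not taken from any real port): a target
   whose movstr pattern is profitable only for large blocks could override
   MOVE_BY_PIECES_P in its target header along these lines; the 64-byte
   cutoff is invented for the example.  */
#if 0
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO && (SIZE) < 64)
#endif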
210
211 /* This array records the insn_code of insns to perform block moves. */
212 enum insn_code movstr_optab[NUM_MACHINE_MODES];
213
214 /* This array records the insn_code of insns to perform block clears. */
215 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
216
217 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
218
219 #ifndef SLOW_UNALIGNED_ACCESS
220 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
221 #endif
222 \f
223 /* This is run once per compilation to set up which modes can be used
224 directly in memory and to initialize the block move optab. */
225
226 void
227 init_expr_once ()
228 {
229 rtx insn, pat;
230 enum machine_mode mode;
231 int num_clobbers;
232 rtx mem, mem1;
233 char *free_point;
234
235 start_sequence ();
236
237 /* Since we are on the permanent obstack, we must be sure we save this
238 spot AFTER we call start_sequence, since it will reuse the rtl it
239 makes. */
240 free_point = (char *) oballoc (0);
241
242 /* Try indexing by frame ptr and try by stack ptr.
243 It is known that on the Convex the stack ptr isn't a valid index.
244 With luck, one or the other is valid on any machine. */
245 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
246 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
247
248 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
249 pat = PATTERN (insn);
250
251 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
252 mode = (enum machine_mode) ((int) mode + 1))
253 {
254 int regno;
255 rtx reg;
256
257 direct_load[(int) mode] = direct_store[(int) mode] = 0;
258 PUT_MODE (mem, mode);
259 PUT_MODE (mem1, mode);
260
261 /* See if there is some register that can be used in this mode and
262 directly loaded or stored from memory. */
263
264 if (mode != VOIDmode && mode != BLKmode)
265 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
266 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
267 regno++)
268 {
269 if (! HARD_REGNO_MODE_OK (regno, mode))
270 continue;
271
272 reg = gen_rtx_REG (mode, regno);
273
274 SET_SRC (pat) = mem;
275 SET_DEST (pat) = reg;
276 if (recog (pat, insn, &num_clobbers) >= 0)
277 direct_load[(int) mode] = 1;
278
279 SET_SRC (pat) = mem1;
280 SET_DEST (pat) = reg;
281 if (recog (pat, insn, &num_clobbers) >= 0)
282 direct_load[(int) mode] = 1;
283
284 SET_SRC (pat) = reg;
285 SET_DEST (pat) = mem;
286 if (recog (pat, insn, &num_clobbers) >= 0)
287 direct_store[(int) mode] = 1;
288
289 SET_SRC (pat) = reg;
290 SET_DEST (pat) = mem1;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_store[(int) mode] = 1;
293 }
294 }
295
296 end_sequence ();
297 obfree (free_point);
298 }
299
300 /* This is run at the start of compiling a function. */
301
302 void
303 init_expr ()
304 {
305 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
306
307 pending_chain = 0;
308 pending_stack_adjust = 0;
309 stack_pointer_delta = 0;
310 inhibit_defer_pop = 0;
311 saveregs_value = 0;
312 apply_args_value = 0;
313 forced_labels = 0;
314 }
315
316 void
317 mark_expr_status (p)
318 struct expr_status *p;
319 {
320 if (p == NULL)
321 return;
322
323 ggc_mark_rtx (p->x_saveregs_value);
324 ggc_mark_rtx (p->x_apply_args_value);
325 ggc_mark_rtx (p->x_forced_labels);
326 }
327
328 void
329 free_expr_status (f)
330 struct function *f;
331 {
332 free (f->expr);
333 f->expr = NULL;
334 }
335
336 /* Small sanity check that the queue is empty at the end of a function. */
337 void
338 finish_expr_for_function ()
339 {
340 if (pending_chain)
341 abort ();
342 }
343 \f
344 /* Manage the queue of increment instructions to be output
345 for POSTINCREMENT_EXPR expressions, etc. */
346
347 /* Queue up to increment (or change) VAR later. BODY says how:
348 BODY should be the same thing you would pass to emit_insn
349 to increment right away. It will go to emit_insn later on.
350
351 The value is a QUEUED expression to be used in place of VAR
352 where you want to guarantee the pre-incrementation value of VAR. */
353
354 static rtx
355 enqueue_insn (var, body)
356 rtx var, body;
357 {
358 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
359 body, pending_chain);
360 return pending_chain;
361 }
362
363 /* Use protect_from_queue to convert a QUEUED expression
364 into something that you can put immediately into an instruction.
365 If the queued incrementation has not happened yet,
366 protect_from_queue returns the variable itself.
367 If the incrementation has happened, protect_from_queue returns a temp
368 that contains a copy of the old value of the variable.
369
370 Any time an rtx which might possibly be a QUEUED is to be put
371 into an instruction, it must be passed through protect_from_queue first.
372 QUEUED expressions are not meaningful in instructions.
373
374 Do not pass a value through protect_from_queue and then hold
375 on to it for a while before putting it in an instruction!
376 If the queue is flushed in between, incorrect code will result. */
377
378 rtx
379 protect_from_queue (x, modify)
380 register rtx x;
381 int modify;
382 {
383 register RTX_CODE code = GET_CODE (x);
384
385 #if 0 /* A QUEUED can hang around after the queue is forced out. */
386 /* Shortcut for most common case. */
387 if (pending_chain == 0)
388 return x;
389 #endif
390
391 if (code != QUEUED)
392 {
393 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
394 use of autoincrement. Make a copy of the contents of the memory
395 location rather than a copy of the address, but not if the value is
396 of mode BLKmode. Don't modify X in place since it might be
397 shared. */
398 if (code == MEM && GET_MODE (x) != BLKmode
399 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
400 {
401 register rtx y = XEXP (x, 0);
402 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
403
404 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
405 MEM_COPY_ATTRIBUTES (new, x);
406 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
407
408 if (QUEUED_INSN (y))
409 {
410 register rtx temp = gen_reg_rtx (GET_MODE (new));
411 emit_insn_before (gen_move_insn (temp, new),
412 QUEUED_INSN (y));
413 return temp;
414 }
415 return new;
416 }
417 /* Otherwise, recursively protect the subexpressions of all
418 the kinds of rtx's that can contain a QUEUED. */
419 if (code == MEM)
420 {
421 rtx tem = protect_from_queue (XEXP (x, 0), 0);
422 if (tem != XEXP (x, 0))
423 {
424 x = copy_rtx (x);
425 XEXP (x, 0) = tem;
426 }
427 }
428 else if (code == PLUS || code == MULT)
429 {
430 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
431 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
432 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
433 {
434 x = copy_rtx (x);
435 XEXP (x, 0) = new0;
436 XEXP (x, 1) = new1;
437 }
438 }
439 return x;
440 }
441 /* If the increment has not happened, use the variable itself. */
442 if (QUEUED_INSN (x) == 0)
443 return QUEUED_VAR (x);
444 /* If the increment has happened and a pre-increment copy exists,
445 use that copy. */
446 if (QUEUED_COPY (x) != 0)
447 return QUEUED_COPY (x);
448 /* The increment has happened but we haven't set up a pre-increment copy.
449 Set one up now, and use it. */
450 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
451 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
452 QUEUED_INSN (x));
453 return QUEUED_COPY (x);
454 }
455
456 /* Return nonzero if X contains a QUEUED expression:
457 if it contains anything that will be altered by a queued increment.
458 We handle only combinations of MEM, PLUS, MINUS and MULT operators
459 since memory addresses generally contain only those. */
460
461 int
462 queued_subexp_p (x)
463 rtx x;
464 {
465 register enum rtx_code code = GET_CODE (x);
466 switch (code)
467 {
468 case QUEUED:
469 return 1;
470 case MEM:
471 return queued_subexp_p (XEXP (x, 0));
472 case MULT:
473 case PLUS:
474 case MINUS:
475 return (queued_subexp_p (XEXP (x, 0))
476 || queued_subexp_p (XEXP (x, 1)));
477 default:
478 return 0;
479 }
480 }
481
482 /* Perform all the pending incrementations. */
483
484 void
485 emit_queue ()
486 {
487 register rtx p;
488 while ((p = pending_chain))
489 {
490 rtx body = QUEUED_BODY (p);
491
492 if (GET_CODE (body) == SEQUENCE)
493 {
494 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
495 emit_insn (QUEUED_BODY (p));
496 }
497 else
498 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
499 pending_chain = QUEUED_NEXT (p);
500 }
501 }
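
/* A hedged sketch, kept out of the build, of how enqueue_insn,
   protect_from_queue and emit_queue cooperate when expanding a
   post-increment.  The helper name and the way the increment body is
   built are invented for the example; the real logic lives in
   expand_increment and its callers.  */
#if 0
static rtx
example_post_increment (var)
     rtx var;
{
  rtx queued, value;

  /* Queue "VAR = VAR + 1"; the QUEUED rtx returned stands for the
     pre-increment value of VAR.  */
  queued = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));

  /* Before the QUEUED value goes into an insn it must be filtered
     through protect_from_queue, which yields VAR itself if the
     increment has not been emitted yet, or a temporary holding the
     old value if it has.  */
  value = protect_from_queue (queued, 0);

  /* At the end of the statement all pending increments are flushed.  */
  emit_queue ();

  return value;
}
#endif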
502 \f
503 /* Copy data from FROM to TO, where the machine modes are not the same.
504 Both modes may be integer, or both may be floating.
505 UNSIGNEDP should be nonzero if FROM is an unsigned type.
506 This causes zero-extension instead of sign-extension. */
507
508 void
509 convert_move (to, from, unsignedp)
510 register rtx to, from;
511 int unsignedp;
512 {
513 enum machine_mode to_mode = GET_MODE (to);
514 enum machine_mode from_mode = GET_MODE (from);
515 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
516 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
517 enum insn_code code;
518 rtx libcall;
519
520 /* rtx code for making an equivalent value. */
521 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
522
523 to = protect_from_queue (to, 1);
524 from = protect_from_queue (from, 0);
525
526 if (to_real != from_real)
527 abort ();
528
529 /* If FROM is a SUBREG that indicates that we have already done at least
530 the required extension, strip it. We don't handle such SUBREGs as
531 TO here. */
532
533 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
534 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
535 >= GET_MODE_SIZE (to_mode))
536 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
537 from = gen_lowpart (to_mode, from), from_mode = to_mode;
538
539 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
540 abort ();
541
542 if (to_mode == from_mode
543 || (from_mode == VOIDmode && CONSTANT_P (from)))
544 {
545 emit_move_insn (to, from);
546 return;
547 }
548
549 if (to_real)
550 {
551 rtx value;
552
553 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
554 {
555 /* Try converting directly if the insn is supported. */
556 if ((code = can_extend_p (to_mode, from_mode, 0))
557 != CODE_FOR_nothing)
558 {
559 emit_unop_insn (code, to, from, UNKNOWN);
560 return;
561 }
562 }
563
564 #ifdef HAVE_trunchfqf2
565 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
566 {
567 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
568 return;
569 }
570 #endif
571 #ifdef HAVE_trunctqfqf2
572 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
573 {
574 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
575 return;
576 }
577 #endif
578 #ifdef HAVE_truncsfqf2
579 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
580 {
581 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
582 return;
583 }
584 #endif
585 #ifdef HAVE_truncdfqf2
586 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
587 {
588 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
589 return;
590 }
591 #endif
592 #ifdef HAVE_truncxfqf2
593 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
594 {
595 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
596 return;
597 }
598 #endif
599 #ifdef HAVE_trunctfqf2
600 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
601 {
602 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
603 return;
604 }
605 #endif
606
607 #ifdef HAVE_trunctqfhf2
608 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
609 {
610 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
611 return;
612 }
613 #endif
614 #ifdef HAVE_truncsfhf2
615 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
616 {
617 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
618 return;
619 }
620 #endif
621 #ifdef HAVE_truncdfhf2
622 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
623 {
624 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
625 return;
626 }
627 #endif
628 #ifdef HAVE_truncxfhf2
629 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
630 {
631 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
632 return;
633 }
634 #endif
635 #ifdef HAVE_trunctfhf2
636 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
637 {
638 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
639 return;
640 }
641 #endif
642
643 #ifdef HAVE_truncsftqf2
644 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
645 {
646 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
647 return;
648 }
649 #endif
650 #ifdef HAVE_truncdftqf2
651 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
652 {
653 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
654 return;
655 }
656 #endif
657 #ifdef HAVE_truncxftqf2
658 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
659 {
660 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
661 return;
662 }
663 #endif
664 #ifdef HAVE_trunctftqf2
665 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
666 {
667 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
668 return;
669 }
670 #endif
671
672 #ifdef HAVE_truncdfsf2
673 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
674 {
675 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
676 return;
677 }
678 #endif
679 #ifdef HAVE_truncxfsf2
680 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
681 {
682 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
683 return;
684 }
685 #endif
686 #ifdef HAVE_trunctfsf2
687 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
688 {
689 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
690 return;
691 }
692 #endif
693 #ifdef HAVE_truncxfdf2
694 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
695 {
696 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
697 return;
698 }
699 #endif
700 #ifdef HAVE_trunctfdf2
701 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
702 {
703 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
704 return;
705 }
706 #endif
707
708 libcall = (rtx) 0;
709 switch (from_mode)
710 {
711 case SFmode:
712 switch (to_mode)
713 {
714 case DFmode:
715 libcall = extendsfdf2_libfunc;
716 break;
717
718 case XFmode:
719 libcall = extendsfxf2_libfunc;
720 break;
721
722 case TFmode:
723 libcall = extendsftf2_libfunc;
724 break;
725
726 default:
727 break;
728 }
729 break;
730
731 case DFmode:
732 switch (to_mode)
733 {
734 case SFmode:
735 libcall = truncdfsf2_libfunc;
736 break;
737
738 case XFmode:
739 libcall = extenddfxf2_libfunc;
740 break;
741
742 case TFmode:
743 libcall = extenddftf2_libfunc;
744 break;
745
746 default:
747 break;
748 }
749 break;
750
751 case XFmode:
752 switch (to_mode)
753 {
754 case SFmode:
755 libcall = truncxfsf2_libfunc;
756 break;
757
758 case DFmode:
759 libcall = truncxfdf2_libfunc;
760 break;
761
762 default:
763 break;
764 }
765 break;
766
767 case TFmode:
768 switch (to_mode)
769 {
770 case SFmode:
771 libcall = trunctfsf2_libfunc;
772 break;
773
774 case DFmode:
775 libcall = trunctfdf2_libfunc;
776 break;
777
778 default:
779 break;
780 }
781 break;
782
783 default:
784 break;
785 }
786
787 if (libcall == (rtx) 0)
788 /* This conversion is not implemented yet. */
789 abort ();
790
791 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
792 1, from, from_mode);
793 emit_move_insn (to, value);
794 return;
795 }
796
797 /* Now both modes are integers. */
798
799 /* Handle expanding beyond a word. */
800 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
801 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
802 {
803 rtx insns;
804 rtx lowpart;
805 rtx fill_value;
806 rtx lowfrom;
807 int i;
808 enum machine_mode lowpart_mode;
809 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
810
811 /* Try converting directly if the insn is supported. */
812 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
813 != CODE_FOR_nothing)
814 {
815 /* If FROM is a SUBREG, put it into a register. Do this
816 so that we always generate the same set of insns for
817 better cse'ing; if an intermediate assignment occurred,
818 we won't be doing the operation directly on the SUBREG. */
819 if (optimize > 0 && GET_CODE (from) == SUBREG)
820 from = force_reg (from_mode, from);
821 emit_unop_insn (code, to, from, equiv_code);
822 return;
823 }
824 /* Next, try converting via full word. */
825 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
826 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
827 != CODE_FOR_nothing))
828 {
829 if (GET_CODE (to) == REG)
830 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
831 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
832 emit_unop_insn (code, to,
833 gen_lowpart (word_mode, to), equiv_code);
834 return;
835 }
836
837 /* No special multiword conversion insn; do it by hand. */
838 start_sequence ();
839
840 /* Since we will turn this into a no conflict block, we must ensure
841 that the source does not overlap the target. */
842
843 if (reg_overlap_mentioned_p (to, from))
844 from = force_reg (from_mode, from);
845
846 /* Get a copy of FROM widened to a word, if necessary. */
847 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
848 lowpart_mode = word_mode;
849 else
850 lowpart_mode = from_mode;
851
852 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
853
854 lowpart = gen_lowpart (lowpart_mode, to);
855 emit_move_insn (lowpart, lowfrom);
856
857 /* Compute the value to put in each remaining word. */
858 if (unsignedp)
859 fill_value = const0_rtx;
860 else
861 {
862 #ifdef HAVE_slt
863 if (HAVE_slt
864 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
865 && STORE_FLAG_VALUE == -1)
866 {
867 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
868 lowpart_mode, 0, 0);
869 fill_value = gen_reg_rtx (word_mode);
870 emit_insn (gen_slt (fill_value));
871 }
872 else
873 #endif
874 {
875 fill_value
876 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
877 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
878 NULL_RTX, 0);
879 fill_value = convert_to_mode (word_mode, fill_value, 1);
880 }
881 }
882
883 /* Fill the remaining words. */
884 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
885 {
886 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
887 rtx subword = operand_subword (to, index, 1, to_mode);
888
889 if (subword == 0)
890 abort ();
891
892 if (fill_value != subword)
893 emit_move_insn (subword, fill_value);
894 }
895
896 insns = get_insns ();
897 end_sequence ();
898
899 emit_no_conflict_block (insns, to, from, NULL_RTX,
900 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
901 return;
902 }
903
904 /* Truncating multi-word to a word or less. */
905 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
906 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
907 {
908 if (!((GET_CODE (from) == MEM
909 && ! MEM_VOLATILE_P (from)
910 && direct_load[(int) to_mode]
911 && ! mode_dependent_address_p (XEXP (from, 0)))
912 || GET_CODE (from) == REG
913 || GET_CODE (from) == SUBREG))
914 from = force_reg (from_mode, from);
915 convert_move (to, gen_lowpart (word_mode, from), 0);
916 return;
917 }
918
919 /* Handle pointer conversion */ /* SPEE 900220 */
920 if (to_mode == PQImode)
921 {
922 if (from_mode != QImode)
923 from = convert_to_mode (QImode, from, unsignedp);
924
925 #ifdef HAVE_truncqipqi2
926 if (HAVE_truncqipqi2)
927 {
928 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
929 return;
930 }
931 #endif /* HAVE_truncqipqi2 */
932 abort ();
933 }
934
935 if (from_mode == PQImode)
936 {
937 if (to_mode != QImode)
938 {
939 from = convert_to_mode (QImode, from, unsignedp);
940 from_mode = QImode;
941 }
942 else
943 {
944 #ifdef HAVE_extendpqiqi2
945 if (HAVE_extendpqiqi2)
946 {
947 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
948 return;
949 }
950 #endif /* HAVE_extendpqiqi2 */
951 abort ();
952 }
953 }
954
955 if (to_mode == PSImode)
956 {
957 if (from_mode != SImode)
958 from = convert_to_mode (SImode, from, unsignedp);
959
960 #ifdef HAVE_truncsipsi2
961 if (HAVE_truncsipsi2)
962 {
963 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
964 return;
965 }
966 #endif /* HAVE_truncsipsi2 */
967 abort ();
968 }
969
970 if (from_mode == PSImode)
971 {
972 if (to_mode != SImode)
973 {
974 from = convert_to_mode (SImode, from, unsignedp);
975 from_mode = SImode;
976 }
977 else
978 {
979 #ifdef HAVE_extendpsisi2
980 if (HAVE_extendpsisi2)
981 {
982 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
983 return;
984 }
985 #endif /* HAVE_extendpsisi2 */
986 abort ();
987 }
988 }
989
990 if (to_mode == PDImode)
991 {
992 if (from_mode != DImode)
993 from = convert_to_mode (DImode, from, unsignedp);
994
995 #ifdef HAVE_truncdipdi2
996 if (HAVE_truncdipdi2)
997 {
998 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
999 return;
1000 }
1001 #endif /* HAVE_truncdipdi2 */
1002 abort ();
1003 }
1004
1005 if (from_mode == PDImode)
1006 {
1007 if (to_mode != DImode)
1008 {
1009 from = convert_to_mode (DImode, from, unsignedp);
1010 from_mode = DImode;
1011 }
1012 else
1013 {
1014 #ifdef HAVE_extendpdidi2
1015 if (HAVE_extendpdidi2)
1016 {
1017 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1018 return;
1019 }
1020 #endif /* HAVE_extendpdidi2 */
1021 abort ();
1022 }
1023 }
1024
1025 /* Now follow all the conversions between integers
1026 no more than a word long. */
1027
1028 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1029 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1030 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1031 GET_MODE_BITSIZE (from_mode)))
1032 {
1033 if (!((GET_CODE (from) == MEM
1034 && ! MEM_VOLATILE_P (from)
1035 && direct_load[(int) to_mode]
1036 && ! mode_dependent_address_p (XEXP (from, 0)))
1037 || GET_CODE (from) == REG
1038 || GET_CODE (from) == SUBREG))
1039 from = force_reg (from_mode, from);
1040 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1041 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1042 from = copy_to_reg (from);
1043 emit_move_insn (to, gen_lowpart (to_mode, from));
1044 return;
1045 }
1046
1047 /* Handle extension. */
1048 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1049 {
1050 /* Convert directly if that works. */
1051 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1052 != CODE_FOR_nothing)
1053 {
1054 emit_unop_insn (code, to, from, equiv_code);
1055 return;
1056 }
1057 else
1058 {
1059 enum machine_mode intermediate;
1060 rtx tmp;
1061 tree shift_amount;
1062
1063 /* Search for a mode to convert via. */
1064 for (intermediate = from_mode; intermediate != VOIDmode;
1065 intermediate = GET_MODE_WIDER_MODE (intermediate))
1066 if (((can_extend_p (to_mode, intermediate, unsignedp)
1067 != CODE_FOR_nothing)
1068 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1069 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1070 GET_MODE_BITSIZE (intermediate))))
1071 && (can_extend_p (intermediate, from_mode, unsignedp)
1072 != CODE_FOR_nothing))
1073 {
1074 convert_move (to, convert_to_mode (intermediate, from,
1075 unsignedp), unsignedp);
1076 return;
1077 }
1078
1079 /* No suitable intermediate mode.
1080 Generate what we need with shifts. */
1081 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1082 - GET_MODE_BITSIZE (from_mode), 0);
1083 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1084 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1085 to, unsignedp);
1086 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1087 to, unsignedp);
1088 if (tmp != to)
1089 emit_move_insn (to, tmp);
1090 return;
1091 }
1092 }
1093
1094 /* Support special truncate insns for certain modes. */
1095
1096 if (from_mode == DImode && to_mode == SImode)
1097 {
1098 #ifdef HAVE_truncdisi2
1099 if (HAVE_truncdisi2)
1100 {
1101 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1102 return;
1103 }
1104 #endif
1105 convert_move (to, force_reg (from_mode, from), unsignedp);
1106 return;
1107 }
1108
1109 if (from_mode == DImode && to_mode == HImode)
1110 {
1111 #ifdef HAVE_truncdihi2
1112 if (HAVE_truncdihi2)
1113 {
1114 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1115 return;
1116 }
1117 #endif
1118 convert_move (to, force_reg (from_mode, from), unsignedp);
1119 return;
1120 }
1121
1122 if (from_mode == DImode && to_mode == QImode)
1123 {
1124 #ifdef HAVE_truncdiqi2
1125 if (HAVE_truncdiqi2)
1126 {
1127 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1128 return;
1129 }
1130 #endif
1131 convert_move (to, force_reg (from_mode, from), unsignedp);
1132 return;
1133 }
1134
1135 if (from_mode == SImode && to_mode == HImode)
1136 {
1137 #ifdef HAVE_truncsihi2
1138 if (HAVE_truncsihi2)
1139 {
1140 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1141 return;
1142 }
1143 #endif
1144 convert_move (to, force_reg (from_mode, from), unsignedp);
1145 return;
1146 }
1147
1148 if (from_mode == SImode && to_mode == QImode)
1149 {
1150 #ifdef HAVE_truncsiqi2
1151 if (HAVE_truncsiqi2)
1152 {
1153 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1154 return;
1155 }
1156 #endif
1157 convert_move (to, force_reg (from_mode, from), unsignedp);
1158 return;
1159 }
1160
1161 if (from_mode == HImode && to_mode == QImode)
1162 {
1163 #ifdef HAVE_trunchiqi2
1164 if (HAVE_trunchiqi2)
1165 {
1166 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1167 return;
1168 }
1169 #endif
1170 convert_move (to, force_reg (from_mode, from), unsignedp);
1171 return;
1172 }
1173
1174 if (from_mode == TImode && to_mode == DImode)
1175 {
1176 #ifdef HAVE_trunctidi2
1177 if (HAVE_trunctidi2)
1178 {
1179 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1180 return;
1181 }
1182 #endif
1183 convert_move (to, force_reg (from_mode, from), unsignedp);
1184 return;
1185 }
1186
1187 if (from_mode == TImode && to_mode == SImode)
1188 {
1189 #ifdef HAVE_trunctisi2
1190 if (HAVE_trunctisi2)
1191 {
1192 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1193 return;
1194 }
1195 #endif
1196 convert_move (to, force_reg (from_mode, from), unsignedp);
1197 return;
1198 }
1199
1200 if (from_mode == TImode && to_mode == HImode)
1201 {
1202 #ifdef HAVE_trunctihi2
1203 if (HAVE_trunctihi2)
1204 {
1205 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1206 return;
1207 }
1208 #endif
1209 convert_move (to, force_reg (from_mode, from), unsignedp);
1210 return;
1211 }
1212
1213 if (from_mode == TImode && to_mode == QImode)
1214 {
1215 #ifdef HAVE_trunctiqi2
1216 if (HAVE_trunctiqi2)
1217 {
1218 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1219 return;
1220 }
1221 #endif
1222 convert_move (to, force_reg (from_mode, from), unsignedp);
1223 return;
1224 }
1225
1226 /* Handle truncation of volatile memrefs, and so on;
1227 the things that couldn't be truncated directly,
1228 and for which there was no special instruction. */
1229 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1230 {
1231 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1232 emit_move_insn (to, temp);
1233 return;
1234 }
1235
1236 /* Mode combination is not recognized. */
1237 abort ();
1238 }
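
/* A hedged usage sketch (not part of the compiler): sign-extending a
   HImode register FROM into a fresh SImode pseudo with convert_move.
   The helper name is invented.  */
#if 0
static rtx
example_sign_extend_hi_to_si (from)
     rtx from;
{
  rtx to = gen_reg_rtx (SImode);

  /* UNSIGNEDP of zero asks for sign extension.  */
  convert_move (to, from, 0);
  return to;
}
#endif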
1239
1240 /* Return an rtx for a value that would result
1241 from converting X to mode MODE.
1242 Both X and MODE may be floating, or both integer.
1243 UNSIGNEDP is nonzero if X is an unsigned value.
1244 This can be done by referring to a part of X in place
1245 or by copying to a new temporary with conversion.
1246
1247 This function *must not* call protect_from_queue
1248 except when putting X into an insn (in which case convert_move does it). */
1249
1250 rtx
1251 convert_to_mode (mode, x, unsignedp)
1252 enum machine_mode mode;
1253 rtx x;
1254 int unsignedp;
1255 {
1256 return convert_modes (mode, VOIDmode, x, unsignedp);
1257 }
1258
1259 /* Return an rtx for a value that would result
1260 from converting X from mode OLDMODE to mode MODE.
1261 Both modes may be floating, or both integer.
1262 UNSIGNEDP is nonzero if X is an unsigned value.
1263
1264 This can be done by referring to a part of X in place
1265 or by copying to a new temporary with conversion.
1266
1267 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1268
1269 This function *must not* call protect_from_queue
1270 except when putting X into an insn (in which case convert_move does it). */
1271
1272 rtx
1273 convert_modes (mode, oldmode, x, unsignedp)
1274 enum machine_mode mode, oldmode;
1275 rtx x;
1276 int unsignedp;
1277 {
1278 register rtx temp;
1279
1280 /* If FROM is a SUBREG that indicates that we have already done at least
1281 the required extension, strip it. */
1282
1283 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1284 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1285 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1286 x = gen_lowpart (mode, x);
1287
1288 if (GET_MODE (x) != VOIDmode)
1289 oldmode = GET_MODE (x);
1290
1291 if (mode == oldmode)
1292 return x;
1293
1294 /* There is one case that we must handle specially: If we are converting
1295 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1296 we are to interpret the constant as unsigned, gen_lowpart will do
1297 the wrong if the constant appears negative. What we want to do is
1298 make the high-order word of the constant zero, not all ones. */
1299
1300 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1301 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1302 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1303 {
1304 HOST_WIDE_INT val = INTVAL (x);
1305
1306 if (oldmode != VOIDmode
1307 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1308 {
1309 int width = GET_MODE_BITSIZE (oldmode);
1310
1311 /* We need to zero extend VAL. */
1312 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1313 }
1314
1315 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1316 }
1317
1318 /* We can do this with a gen_lowpart if both desired and current modes
1319 are integer, and this is either a constant integer, a register, or a
1320 non-volatile MEM. Except for the constant case where MODE is no
1321 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1322
1323 if ((GET_CODE (x) == CONST_INT
1324 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1325 || (GET_MODE_CLASS (mode) == MODE_INT
1326 && GET_MODE_CLASS (oldmode) == MODE_INT
1327 && (GET_CODE (x) == CONST_DOUBLE
1328 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1329 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1330 && direct_load[(int) mode])
1331 || (GET_CODE (x) == REG
1332 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1333 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1334 {
1335 /* ?? If we don't know OLDMODE, we have to assume here that
1336 X does not need sign- or zero-extension. This may not be
1337 the case, but it's the best we can do. */
1338 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1339 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1340 {
1341 HOST_WIDE_INT val = INTVAL (x);
1342 int width = GET_MODE_BITSIZE (oldmode);
1343
1344 /* We must sign or zero-extend in this case. Start by
1345 zero-extending, then sign extend if we need to. */
1346 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1347 if (! unsignedp
1348 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1349 val |= (HOST_WIDE_INT) (-1) << width;
1350
1351 return GEN_INT (val);
1352 }
1353
1354 return gen_lowpart (mode, x);
1355 }
1356
1357 temp = gen_reg_rtx (mode);
1358 convert_move (temp, x, unsignedp);
1359 return temp;
1360 }
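
/* A hedged usage sketch (invented helper): widening X, known to be a
   QImode quantity even if it is a VOIDmode CONST_INT, to SImode with
   zero extension.  Passing QImode as OLDMODE lets convert_modes
   interpret such constants correctly.  */
#if 0
static rtx
example_widen_qi_to_si (x)
     rtx x;
{
  return convert_modes (SImode, QImode, x, 1);
}
#endif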
1361 \f
1362
1363 /* This macro determines the largest unit size that
1364 move_by_pieces can use. */
1365
1366 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1367 move efficiently, as opposed to MOVE_MAX which is the maximum
1368 number of bytes we can move with a single instruction. */
1369
1370 #ifndef MOVE_MAX_PIECES
1371 #define MOVE_MAX_PIECES MOVE_MAX
1372 #endif
1373
1374 /* Generate several move instructions to copy LEN bytes
1375 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1376 The caller must pass FROM and TO
1377 through protect_from_queue before calling.
1378 ALIGN is maximum alignment we can assume. */
1379
1380 void
1381 move_by_pieces (to, from, len, align)
1382 rtx to, from;
1383 int len;
1384 unsigned int align;
1385 {
1386 struct move_by_pieces data;
1387 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1388 unsigned int max_size = MOVE_MAX_PIECES + 1;
1389 enum machine_mode mode = VOIDmode, tmode;
1390 enum insn_code icode;
1391
1392 data.offset = 0;
1393 data.to_addr = to_addr;
1394 data.from_addr = from_addr;
1395 data.to = to;
1396 data.from = from;
1397 data.autinc_to
1398 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1399 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1400 data.autinc_from
1401 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1402 || GET_CODE (from_addr) == POST_INC
1403 || GET_CODE (from_addr) == POST_DEC);
1404
1405 data.explicit_inc_from = 0;
1406 data.explicit_inc_to = 0;
1407 data.reverse
1408 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1409 if (data.reverse) data.offset = len;
1410 data.len = len;
1411
1412 data.to_struct = MEM_IN_STRUCT_P (to);
1413 data.from_struct = MEM_IN_STRUCT_P (from);
1414 data.to_readonly = RTX_UNCHANGING_P (to);
1415 data.from_readonly = RTX_UNCHANGING_P (from);
1416
1417 /* If copying requires more than two move insns,
1418 copy addresses to registers (to make displacements shorter)
1419 and use post-increment if available. */
1420 if (!(data.autinc_from && data.autinc_to)
1421 && move_by_pieces_ninsns (len, align) > 2)
1422 {
1423 /* Find the mode of the largest move... */
1424 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1425 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1426 if (GET_MODE_SIZE (tmode) < max_size)
1427 mode = tmode;
1428
1429 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1430 {
1431 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1432 data.autinc_from = 1;
1433 data.explicit_inc_from = -1;
1434 }
1435 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1436 {
1437 data.from_addr = copy_addr_to_reg (from_addr);
1438 data.autinc_from = 1;
1439 data.explicit_inc_from = 1;
1440 }
1441 if (!data.autinc_from && CONSTANT_P (from_addr))
1442 data.from_addr = copy_addr_to_reg (from_addr);
1443 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1444 {
1445 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1446 data.autinc_to = 1;
1447 data.explicit_inc_to = -1;
1448 }
1449 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1450 {
1451 data.to_addr = copy_addr_to_reg (to_addr);
1452 data.autinc_to = 1;
1453 data.explicit_inc_to = 1;
1454 }
1455 if (!data.autinc_to && CONSTANT_P (to_addr))
1456 data.to_addr = copy_addr_to_reg (to_addr);
1457 }
1458
1459 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1460 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1461 align = MOVE_MAX * BITS_PER_UNIT;
1462
1463 /* First move what we can in the largest integer mode, then go to
1464 successively smaller modes. */
1465
1466 while (max_size > 1)
1467 {
1468 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1469 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1470 if (GET_MODE_SIZE (tmode) < max_size)
1471 mode = tmode;
1472
1473 if (mode == VOIDmode)
1474 break;
1475
1476 icode = mov_optab->handlers[(int) mode].insn_code;
1477 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1478 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1479
1480 max_size = GET_MODE_SIZE (mode);
1481 }
1482
1483 /* The code above should have handled everything. */
1484 if (data.len > 0)
1485 abort ();
1486 }
1487
1488 /* Return number of insns required to move L bytes by pieces.
1489 ALIGN (in bits) is the maximum alignment we can assume. */
1490
1491 static int
1492 move_by_pieces_ninsns (l, align)
1493 unsigned int l;
1494 unsigned int align;
1495 {
1496 register int n_insns = 0;
1497 unsigned int max_size = MOVE_MAX + 1;
1498
1499 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1500 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1501 align = MOVE_MAX * BITS_PER_UNIT;
1502
1503 while (max_size > 1)
1504 {
1505 enum machine_mode mode = VOIDmode, tmode;
1506 enum insn_code icode;
1507
1508 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1509 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1510 if (GET_MODE_SIZE (tmode) < max_size)
1511 mode = tmode;
1512
1513 if (mode == VOIDmode)
1514 break;
1515
1516 icode = mov_optab->handlers[(int) mode].insn_code;
1517 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1518 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1519
1520 max_size = GET_MODE_SIZE (mode);
1521 }
1522
1523 return n_insns;
1524 }
1525
1526 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1527 with move instructions for mode MODE. GENFUN is the gen_... function
1528 to make a move insn for that mode. DATA has all the other info. */
1529
1530 static void
1531 move_by_pieces_1 (genfun, mode, data)
1532 rtx (*genfun) PARAMS ((rtx, ...));
1533 enum machine_mode mode;
1534 struct move_by_pieces *data;
1535 {
1536 register int size = GET_MODE_SIZE (mode);
1537 register rtx to1, from1;
1538
1539 while (data->len >= size)
1540 {
1541 if (data->reverse) data->offset -= size;
1542
1543 to1 = (data->autinc_to
1544 ? gen_rtx_MEM (mode, data->to_addr)
1545 : copy_rtx (change_address (data->to, mode,
1546 plus_constant (data->to_addr,
1547 data->offset))));
1548 MEM_IN_STRUCT_P (to1) = data->to_struct;
1549 RTX_UNCHANGING_P (to1) = data->to_readonly;
1550
1551 from1
1552 = (data->autinc_from
1553 ? gen_rtx_MEM (mode, data->from_addr)
1554 : copy_rtx (change_address (data->from, mode,
1555 plus_constant (data->from_addr,
1556 data->offset))));
1557 MEM_IN_STRUCT_P (from1) = data->from_struct;
1558 RTX_UNCHANGING_P (from1) = data->from_readonly;
1559
1560 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1561 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1562 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1563 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1564
1565 emit_insn ((*genfun) (to1, from1));
1566 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1567 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1568 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1569 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1570
1571 if (! data->reverse) data->offset += size;
1572
1573 data->len -= size;
1574 }
1575 }
1576 \f
1577 /* Emit code to move a block Y to a block X.
1578 This may be done with string-move instructions,
1579 with multiple scalar move instructions, or with a library call.
1580
1581 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1582 with mode BLKmode.
1583 SIZE is an rtx that says how long they are.
1584 ALIGN is the maximum alignment we can assume they have.
1585
1586 Return the address of the new block, if memcpy is called and returns it,
1587 0 otherwise. */
1588
1589 rtx
1590 emit_block_move (x, y, size, align)
1591 rtx x, y;
1592 rtx size;
1593 unsigned int align;
1594 {
1595 rtx retval = 0;
1596 #ifdef TARGET_MEM_FUNCTIONS
1597 static tree fn;
1598 tree call_expr, arg_list;
1599 #endif
1600
1601 if (GET_MODE (x) != BLKmode)
1602 abort ();
1603
1604 if (GET_MODE (y) != BLKmode)
1605 abort ();
1606
1607 x = protect_from_queue (x, 1);
1608 y = protect_from_queue (y, 0);
1609 size = protect_from_queue (size, 0);
1610
1611 if (GET_CODE (x) != MEM)
1612 abort ();
1613 if (GET_CODE (y) != MEM)
1614 abort ();
1615 if (size == 0)
1616 abort ();
1617
1618 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1619 move_by_pieces (x, y, INTVAL (size), align);
1620 else
1621 {
1622 /* Try the most limited insn first, because there's no point
1623 including more than one in the machine description unless
1624 the more limited one has some advantage. */
1625
1626 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1627 enum machine_mode mode;
1628
1629 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1630 mode = GET_MODE_WIDER_MODE (mode))
1631 {
1632 enum insn_code code = movstr_optab[(int) mode];
1633 insn_operand_predicate_fn pred;
1634
1635 if (code != CODE_FOR_nothing
1636 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1637 here because if SIZE is less than the mode mask, as it is
1638 returned by the macro, it will definitely be less than the
1639 actual mode mask. */
1640 && ((GET_CODE (size) == CONST_INT
1641 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1642 <= (GET_MODE_MASK (mode) >> 1)))
1643 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1644 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1645 || (*pred) (x, BLKmode))
1646 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1647 || (*pred) (y, BLKmode))
1648 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1649 || (*pred) (opalign, VOIDmode)))
1650 {
1651 rtx op2;
1652 rtx last = get_last_insn ();
1653 rtx pat;
1654
1655 op2 = convert_to_mode (mode, size, 1);
1656 pred = insn_data[(int) code].operand[2].predicate;
1657 if (pred != 0 && ! (*pred) (op2, mode))
1658 op2 = copy_to_mode_reg (mode, op2);
1659
1660 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1661 if (pat)
1662 {
1663 emit_insn (pat);
1664 return 0;
1665 }
1666 else
1667 delete_insns_since (last);
1668 }
1669 }
1670
1671 /* X, Y, or SIZE may have been passed through protect_from_queue.
1672
1673 It is unsafe to save the value generated by protect_from_queue
1674 and reuse it later. Consider what happens if emit_queue is
1675 called before the return value from protect_from_queue is used.
1676
1677 Expansion of the CALL_EXPR below will call emit_queue before
1678 we are finished emitting RTL for argument setup. So if we are
1679 not careful we could get the wrong value for an argument.
1680
1681 To avoid this problem we go ahead and emit code to copy X, Y &
1682 SIZE into new pseudos. We can then place those new pseudos
1683 into an RTL_EXPR and use them later, even after a call to
1684 emit_queue.
1685
1686 Note this is not strictly needed for library calls since they
1687 do not call emit_queue before loading their arguments. However,
1688 we may need to have library calls call emit_queue in the future
1689 since failing to do so could cause problems for targets which
1690 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1691 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1692 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1693
1694 #ifdef TARGET_MEM_FUNCTIONS
1695 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1696 #else
1697 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1698 TREE_UNSIGNED (integer_type_node));
1699 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1700 #endif
1701
1702 #ifdef TARGET_MEM_FUNCTIONS
1703 /* It is incorrect to use the libcall calling conventions to call
1704 memcpy in this context.
1705
1706 This could be a user call to memcpy and the user may wish to
1707 examine the return value from memcpy.
1708
1709 For targets where libcalls and normal calls have different conventions
1710 for returning pointers, we could end up generating incorrect code.
1711
1712 So instead of using a libcall sequence we build up a suitable
1713 CALL_EXPR and expand the call in the normal fashion. */
1714 if (fn == NULL_TREE)
1715 {
1716 tree fntype;
1717
1718 /* This was copied from except.c, I don't know if all this is
1719 necessary in this context or not. */
1720 fn = get_identifier ("memcpy");
1721 push_obstacks_nochange ();
1722 end_temporary_allocation ();
1723 fntype = build_pointer_type (void_type_node);
1724 fntype = build_function_type (fntype, NULL_TREE);
1725 fn = build_decl (FUNCTION_DECL, fn, fntype);
1726 ggc_add_tree_root (&fn, 1);
1727 DECL_EXTERNAL (fn) = 1;
1728 TREE_PUBLIC (fn) = 1;
1729 DECL_ARTIFICIAL (fn) = 1;
1730 make_decl_rtl (fn, NULL_PTR, 1);
1731 assemble_external (fn);
1732 pop_obstacks ();
1733 }
1734
1735 /* We need to make an argument list for the function call.
1736
1737 memcpy has three arguments, the first two are void * addresses and
1738 the last is a size_t byte count for the copy. */
1739 arg_list
1740 = build_tree_list (NULL_TREE,
1741 make_tree (build_pointer_type (void_type_node), x));
1742 TREE_CHAIN (arg_list)
1743 = build_tree_list (NULL_TREE,
1744 make_tree (build_pointer_type (void_type_node), y));
1745 TREE_CHAIN (TREE_CHAIN (arg_list))
1746 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1747 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1748
1749 /* Now we have to build up the CALL_EXPR itself. */
1750 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1751 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1752 call_expr, arg_list, NULL_TREE);
1753 TREE_SIDE_EFFECTS (call_expr) = 1;
1754
1755 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1756 #else
1757 emit_library_call (bcopy_libfunc, 0,
1758 VOIDmode, 3, y, Pmode, x, Pmode,
1759 convert_to_mode (TYPE_MODE (integer_type_node), size,
1760 TREE_UNSIGNED (integer_type_node)),
1761 TYPE_MODE (integer_type_node));
1762 #endif
1763 }
1764
1765 return retval;
1766 }
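
/* A hedged usage sketch (names invented): copying a SIZE-byte block from
   address SRCADDR to address DSTADDR when only byte alignment is known.
   emit_block_move then chooses between move_by_pieces, a movstr pattern
   and a memcpy/bcopy call.  */
#if 0
static void
example_copy_block (dstaddr, srcaddr, size)
     rtx dstaddr, srcaddr;
     HOST_WIDE_INT size;
{
  rtx dst = gen_rtx_MEM (BLKmode, dstaddr);
  rtx src = gen_rtx_MEM (BLKmode, srcaddr);

  /* The alignment argument is in bits; BITS_PER_UNIT means "only byte
     aligned".  */
  emit_block_move (dst, src, GEN_INT (size), BITS_PER_UNIT);
}
#endif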
1767 \f
1768 /* Copy all or part of a value X into registers starting at REGNO.
1769 The number of registers to be filled is NREGS. */
1770
1771 void
1772 move_block_to_reg (regno, x, nregs, mode)
1773 int regno;
1774 rtx x;
1775 int nregs;
1776 enum machine_mode mode;
1777 {
1778 int i;
1779 #ifdef HAVE_load_multiple
1780 rtx pat;
1781 rtx last;
1782 #endif
1783
1784 if (nregs == 0)
1785 return;
1786
1787 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1788 x = validize_mem (force_const_mem (mode, x));
1789
1790 /* See if the machine can do this with a load multiple insn. */
1791 #ifdef HAVE_load_multiple
1792 if (HAVE_load_multiple)
1793 {
1794 last = get_last_insn ();
1795 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1796 GEN_INT (nregs));
1797 if (pat)
1798 {
1799 emit_insn (pat);
1800 return;
1801 }
1802 else
1803 delete_insns_since (last);
1804 }
1805 #endif
1806
1807 for (i = 0; i < nregs; i++)
1808 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1809 operand_subword_force (x, i, mode));
1810 }
1811
1812 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1813 The number of registers to be filled is NREGS. SIZE indicates the number
1814 of bytes in the object X. */
1815
1816
1817 void
1818 move_block_from_reg (regno, x, nregs, size)
1819 int regno;
1820 rtx x;
1821 int nregs;
1822 int size;
1823 {
1824 int i;
1825 #ifdef HAVE_store_multiple
1826 rtx pat;
1827 rtx last;
1828 #endif
1829 enum machine_mode mode;
1830
1831 /* If SIZE is that of a mode no bigger than a word, just use that
1832 mode's store operation. */
1833 if (size <= UNITS_PER_WORD
1834 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1835 {
1836 emit_move_insn (change_address (x, mode, NULL),
1837 gen_rtx_REG (mode, regno));
1838 return;
1839 }
1840
1841 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1842 to the left before storing to memory. Note that the previous test
1843 doesn't handle all cases (e.g. SIZE == 3). */
1844 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1845 {
1846 rtx tem = operand_subword (x, 0, 1, BLKmode);
1847 rtx shift;
1848
1849 if (tem == 0)
1850 abort ();
1851
1852 shift = expand_shift (LSHIFT_EXPR, word_mode,
1853 gen_rtx_REG (word_mode, regno),
1854 build_int_2 ((UNITS_PER_WORD - size)
1855 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1856 emit_move_insn (tem, shift);
1857 return;
1858 }
1859
1860 /* See if the machine can do this with a store multiple insn. */
1861 #ifdef HAVE_store_multiple
1862 if (HAVE_store_multiple)
1863 {
1864 last = get_last_insn ();
1865 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1866 GEN_INT (nregs));
1867 if (pat)
1868 {
1869 emit_insn (pat);
1870 return;
1871 }
1872 else
1873 delete_insns_since (last);
1874 }
1875 #endif
1876
1877 for (i = 0; i < nregs; i++)
1878 {
1879 rtx tem = operand_subword (x, i, 1, BLKmode);
1880
1881 if (tem == 0)
1882 abort ();
1883
1884 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1885 }
1886 }
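
/* A hedged usage sketch (invented helper): spilling a value that arrives
   in consecutive hard registers starting at REGNO into the BLKmode
   memory X, which is SIZE bytes long.  */
#if 0
static void
example_spill_incoming_value (x, regno, size)
     rtx x;
     int regno;
     int size;
{
  int nregs = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  move_block_from_reg (regno, x, nregs, size);
}
#endif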
1887
1888 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1889 registers represented by a PARALLEL. SSIZE represents the total size of
1890 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1891 SRC in bits. */
1892 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1893 the balance will be in what would be the low-order memory addresses, i.e.
1894 left justified for big endian, right justified for little endian. This
1895 happens to be true for the targets currently using this support. If this
1896 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1897 would be needed. */
1898
1899 void
1900 emit_group_load (dst, orig_src, ssize, align)
1901 rtx dst, orig_src;
1902 unsigned int align;
1903 int ssize;
1904 {
1905 rtx *tmps, src;
1906 int start, i;
1907
1908 if (GET_CODE (dst) != PARALLEL)
1909 abort ();
1910
1911 /* Check for a NULL entry, used to indicate that the parameter goes
1912 both on the stack and in registers. */
1913 if (XEXP (XVECEXP (dst, 0, 0), 0))
1914 start = 0;
1915 else
1916 start = 1;
1917
1918 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1919
1920 /* If we won't be loading directly from memory, protect the real source
1921 from strange tricks we might play. */
1922 src = orig_src;
1923 if (GET_CODE (src) != MEM)
1924 {
1925 if (GET_MODE (src) == VOIDmode)
1926 src = gen_reg_rtx (GET_MODE (dst));
1927 else
1928 src = gen_reg_rtx (GET_MODE (orig_src));
1929 emit_move_insn (src, orig_src);
1930 }
1931
1932 /* Process the pieces. */
1933 for (i = start; i < XVECLEN (dst, 0); i++)
1934 {
1935 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1936 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1937 unsigned int bytelen = GET_MODE_SIZE (mode);
1938 int shift = 0;
1939
1940 /* Handle trailing fragments that run over the size of the struct. */
1941 if (ssize >= 0 && bytepos + bytelen > ssize)
1942 {
1943 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1944 bytelen = ssize - bytepos;
1945 if (bytelen <= 0)
1946 abort ();
1947 }
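/* Added illustration (assuming a 4-byte SImode piece): with SSIZE == 6
   and BYTEPOS == 4 only 2 bytes remain, so BYTELEN becomes 2 and SHIFT
   becomes (4 - 2) * BITS_PER_UNIT == 16; on a big-endian target the
   fragment is shifted left by that amount further below to left-justify
   it in the register. */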
1948
1949 /* Optimize the access just a bit. */
1950 if (GET_CODE (src) == MEM
1951 && align >= GET_MODE_ALIGNMENT (mode)
1952 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1953 && bytelen == GET_MODE_SIZE (mode))
1954 {
1955 tmps[i] = gen_reg_rtx (mode);
1956 emit_move_insn (tmps[i],
1957 change_address (src, mode,
1958 plus_constant (XEXP (src, 0),
1959 bytepos)));
1960 }
1961 else if (GET_CODE (src) == CONCAT)
1962 {
1963 if (bytepos == 0
1964 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1965 tmps[i] = XEXP (src, 0);
1966 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1967 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1968 tmps[i] = XEXP (src, 1);
1969 else
1970 abort ();
1971 }
1972 else
1973 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1974 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1975 mode, mode, align, ssize);
1976
1977 if (BYTES_BIG_ENDIAN && shift)
1978 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1979 tmps[i], 0, OPTAB_WIDEN);
1980 }
1981
1982 emit_queue ();
1983
1984 /* Copy the extracted pieces into the proper (probable) hard regs. */
1985 for (i = start; i < XVECLEN (dst, 0); i++)
1986 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1987 }
1988
1989 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1990 registers represented by a PARALLEL. SSIZE represents the total size of
1991 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1992
1993 void
1994 emit_group_store (orig_dst, src, ssize, align)
1995 rtx orig_dst, src;
1996 int ssize;
1997 unsigned int align;
1998 {
1999 rtx *tmps, dst;
2000 int start, i;
2001
2002 if (GET_CODE (src) != PARALLEL)
2003 abort ();
2004
2005 /* Check for a NULL entry, used to indicate that the parameter goes
2006 both on the stack and in registers. */
2007 if (XEXP (XVECEXP (src, 0, 0), 0))
2008 start = 0;
2009 else
2010 start = 1;
2011
2012 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2013
2014 /* Copy the (probable) hard regs into pseudos. */
2015 for (i = start; i < XVECLEN (src, 0); i++)
2016 {
2017 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2018 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2019 emit_move_insn (tmps[i], reg);
2020 }
2021 emit_queue ();
2022
2023 /* If we won't be storing directly into memory, protect the real destination
2024 from strange tricks we might play. */
2025 dst = orig_dst;
2026 if (GET_CODE (dst) == PARALLEL)
2027 {
2028 rtx temp;
2029
2030 /* We can get a PARALLEL dst if there is a conditional expression in
2031 a return statement. In that case, the dst and src are the same,
2032 so no action is necessary. */
2033 if (rtx_equal_p (dst, src))
2034 return;
2035
2036 /* It is unclear if we can ever reach here, but we may as well handle
2037 it. Allocate a temporary, and split this into a store/load to/from
2038 the temporary. */
2039
2040 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2041 emit_group_store (temp, src, ssize, align);
2042 emit_group_load (dst, temp, ssize, align);
2043 return;
2044 }
2045 else if (GET_CODE (dst) != MEM)
2046 {
2047 dst = gen_reg_rtx (GET_MODE (orig_dst));
2048 /* Make life a bit easier for combine. */
2049 emit_move_insn (dst, const0_rtx);
2050 }
2051 else if (! MEM_IN_STRUCT_P (dst))
2052 {
2053 /* store_bit_field requires that memory operations have
2054 mem_in_struct_p set; we might not. */
2055
2056 dst = copy_rtx (orig_dst);
2057 MEM_SET_IN_STRUCT_P (dst, 1);
2058 }
2059
2060 /* Process the pieces. */
2061 for (i = start; i < XVECLEN (src, 0); i++)
2062 {
2063 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2064 enum machine_mode mode = GET_MODE (tmps[i]);
2065 unsigned int bytelen = GET_MODE_SIZE (mode);
2066
2067 /* Handle trailing fragments that run over the size of the struct. */
2068 if (ssize >= 0 && bytepos + bytelen > ssize)
2069 {
2070 if (BYTES_BIG_ENDIAN)
2071 {
2072 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2073 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2074 tmps[i], 0, OPTAB_WIDEN);
2075 }
2076 bytelen = ssize - bytepos;
2077 }
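/* Added illustration (assuming a 4-byte SImode piece with 2 bytes left):
   on a big-endian target the valid bytes occupy the high-order half of
   TMPS[i], so the arithmetic shift right by 16 bits above moves them to
   the low-order half, where the 2-byte store below expects them. */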
2078
2079 /* Optimize the access just a bit. */
2080 if (GET_CODE (dst) == MEM
2081 && align >= GET_MODE_ALIGNMENT (mode)
2082 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2083 && bytelen == GET_MODE_SIZE (mode))
2084 emit_move_insn (change_address (dst, mode,
2085 plus_constant (XEXP (dst, 0),
2086 bytepos)),
2087 tmps[i]);
2088 else
2089 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2090 mode, tmps[i], align, ssize);
2091 }
2092
2093 emit_queue ();
2094
2095 /* Copy from the pseudo into the (probable) hard reg. */
2096 if (GET_CODE (dst) == REG)
2097 emit_move_insn (orig_dst, dst);
2098 }
2099
2100 /* Generate code to copy a BLKmode object of TYPE out of a
2101 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2102 is null, a stack temporary is created. TGTBLK is returned.
2103
2104 The primary purpose of this routine is to handle functions
2105 that return BLKmode structures in registers. Some machines
2106 (the PA for example) want to return all small structures
2107 in registers regardless of the structure's alignment. */
2108
2109 rtx
2110 copy_blkmode_from_reg (tgtblk, srcreg, type)
2111 rtx tgtblk;
2112 rtx srcreg;
2113 tree type;
2114 {
2115 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2116 rtx src = NULL, dst = NULL;
2117 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2118 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2119
2120 if (tgtblk == 0)
2121 {
2122 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2123 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2124 preserve_temp_slots (tgtblk);
2125 }
2126
2127 /* This code assumes srcreg is at least a full word. If it isn't,
2128 copy it into a new pseudo which is a full word. */
2129 if (GET_MODE (srcreg) != BLKmode
2130 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2131 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2132
2133 /* Structures whose size is not a multiple of a word are aligned
2134 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2135 machine, this means we must skip the empty high order bytes when
2136 calculating the bit offset. */
2137 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2138 big_endian_correction
2139 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
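/* Added illustration (assuming 32-bit words): for a 6-byte structure,
   bytes % UNITS_PER_WORD == 2, so the correction is 32 - 2 * 8 == 16
   bits; extraction from SRCREG therefore starts 16 bits in, skipping
   the unused high-order bytes. */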
2140
2141 /* Copy the structure BITSIZE bits at a time.
2142
2143 We could probably emit more efficient code for machines which do not use
2144 strict alignment, but it doesn't seem worth the effort at the current
2145 time. */
2146 for (bitpos = 0, xbitpos = big_endian_correction;
2147 bitpos < bytes * BITS_PER_UNIT;
2148 bitpos += bitsize, xbitpos += bitsize)
2149 {
2150 /* We need a new source operand each time xbitpos is on a
2151 word boundary and when xbitpos == big_endian_correction
2152 (the first time through). */
2153 if (xbitpos % BITS_PER_WORD == 0
2154 || xbitpos == big_endian_correction)
2155 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2156
2157 /* We need a new destination operand each time bitpos is on
2158 a word boundary. */
2159 if (bitpos % BITS_PER_WORD == 0)
2160 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2161
2162 /* Use xbitpos for the source extraction (right justified) and
2163 bitpos for the destination store (left justified). */
2164 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2165 extract_bit_field (src, bitsize,
2166 xbitpos % BITS_PER_WORD, 1,
2167 NULL_RTX, word_mode, word_mode,
2168 bitsize, BITS_PER_WORD),
2169 bitsize, BITS_PER_WORD);
2170 }
2171
2172 return tgtblk;
2173 }
2174
2175
2176 /* Add a USE expression for REG to the (possibly empty) list pointed
2177 to by CALL_FUSAGE. REG must denote a hard register. */
2178
2179 void
2180 use_reg (call_fusage, reg)
2181 rtx *call_fusage, reg;
2182 {
2183 if (GET_CODE (reg) != REG
2184 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2185 abort ();
2186
2187 *call_fusage
2188 = gen_rtx_EXPR_LIST (VOIDmode,
2189 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2190 }
2191
2192 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2193 starting at REGNO. All of these registers must be hard registers. */
2194
2195 void
2196 use_regs (call_fusage, regno, nregs)
2197 rtx *call_fusage;
2198 int regno;
2199 int nregs;
2200 {
2201 int i;
2202
2203 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2204 abort ();
2205
2206 for (i = 0; i < nregs; i++)
2207 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2208 }
2209
2210 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2211 PARALLEL REGS. This is for calls that pass values in multiple
2212 non-contiguous locations. The Irix 6 ABI has examples of this. */
2213
2214 void
2215 use_group_regs (call_fusage, regs)
2216 rtx *call_fusage;
2217 rtx regs;
2218 {
2219 int i;
2220
2221 for (i = 0; i < XVECLEN (regs, 0); i++)
2222 {
2223 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2224
2225 /* A NULL entry means the parameter goes both on the stack and in
2226 registers. This can also be a MEM for targets that pass values
2227 partially on the stack and partially in registers. */
2228 if (reg != 0 && GET_CODE (reg) == REG)
2229 use_reg (call_fusage, reg);
2230 }
2231 }
2232 \f
2233 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2234 rtx with BLKmode). The caller must pass TO through protect_from_queue
2235 before calling. ALIGN is the maximum alignment we can assume. */
2236
2237 static void
2238 clear_by_pieces (to, len, align)
2239 rtx to;
2240 int len;
2241 unsigned int align;
2242 {
2243 struct clear_by_pieces data;
2244 rtx to_addr = XEXP (to, 0);
2245 unsigned int max_size = MOVE_MAX_PIECES + 1;
2246 enum machine_mode mode = VOIDmode, tmode;
2247 enum insn_code icode;
2248
2249 data.offset = 0;
2250 data.to_addr = to_addr;
2251 data.to = to;
2252 data.autinc_to
2253 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2254 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2255
2256 data.explicit_inc_to = 0;
2257 data.reverse
2258 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2259 if (data.reverse) data.offset = len;
2260 data.len = len;
2261
2262 data.to_struct = MEM_IN_STRUCT_P (to);
2263
2264 /* If clearing requires more than two store insns,
2265 copy the address to a register (to make displacements shorter)
2266 and use post-increment if available. */
2267 if (!data.autinc_to
2268 && move_by_pieces_ninsns (len, align) > 2)
2269 {
2270 /* Determine the main mode we'll be using */
2271 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2272 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2273 if (GET_MODE_SIZE (tmode) < max_size)
2274 mode = tmode;
2275
2276 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2277 {
2278 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2279 data.autinc_to = 1;
2280 data.explicit_inc_to = -1;
2281 }
2282 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2283 {
2284 data.to_addr = copy_addr_to_reg (to_addr);
2285 data.autinc_to = 1;
2286 data.explicit_inc_to = 1;
2287 }
2288 if (!data.autinc_to && CONSTANT_P (to_addr))
2289 data.to_addr = copy_addr_to_reg (to_addr);
2290 }
2291
2292 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2293 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2294 align = MOVE_MAX * BITS_PER_UNIT;
2295
2296 /* First clear what we can in the largest integer mode, then go to
2297 successively smaller modes. */
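/* Added illustration (assuming a 32-bit target, MOVE_MAX_PIECES == 4 and
   sufficient alignment): LEN == 7 is handled as one SImode store, then
   one HImode store, then one QImode store, as MAX_SIZE shrinks on each
   pass of the loop below. */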
2298
2299 while (max_size > 1)
2300 {
2301 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2302 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2303 if (GET_MODE_SIZE (tmode) < max_size)
2304 mode = tmode;
2305
2306 if (mode == VOIDmode)
2307 break;
2308
2309 icode = mov_optab->handlers[(int) mode].insn_code;
2310 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2311 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2312
2313 max_size = GET_MODE_SIZE (mode);
2314 }
2315
2316 /* The code above should have handled everything. */
2317 if (data.len != 0)
2318 abort ();
2319 }
2320
2321 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2322 with move instructions for mode MODE. GENFUN is the gen_... function
2323 to make a move insn for that mode. DATA has all the other info. */
2324
2325 static void
2326 clear_by_pieces_1 (genfun, mode, data)
2327 rtx (*genfun) PARAMS ((rtx, ...));
2328 enum machine_mode mode;
2329 struct clear_by_pieces *data;
2330 {
2331 register int size = GET_MODE_SIZE (mode);
2332 register rtx to1;
2333
2334 while (data->len >= size)
2335 {
2336 if (data->reverse) data->offset -= size;
2337
2338 to1 = (data->autinc_to
2339 ? gen_rtx_MEM (mode, data->to_addr)
2340 : copy_rtx (change_address (data->to, mode,
2341 plus_constant (data->to_addr,
2342 data->offset))));
2343 MEM_IN_STRUCT_P (to1) = data->to_struct;
2344
2345 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2346 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2347
2348 emit_insn ((*genfun) (to1, const0_rtx));
2349 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2350 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2351
2352 if (! data->reverse) data->offset += size;
2353
2354 data->len -= size;
2355 }
2356 }
2357 \f
2358 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2359 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2360
2361 If we call a function that returns the length of the block, return it. */
2362
2363 rtx
2364 clear_storage (object, size, align)
2365 rtx object;
2366 rtx size;
2367 unsigned int align;
2368 {
2369 #ifdef TARGET_MEM_FUNCTIONS
2370 static tree fn;
2371 tree call_expr, arg_list;
2372 #endif
2373 rtx retval = 0;
2374
2375 if (GET_MODE (object) == BLKmode)
2376 {
2377 object = protect_from_queue (object, 1);
2378 size = protect_from_queue (size, 0);
2379
2380 if (GET_CODE (size) == CONST_INT
2381 && MOVE_BY_PIECES_P (INTVAL (size), align))
2382 clear_by_pieces (object, INTVAL (size), align);
2383 else
2384 {
2385 /* Try the most limited insn first, because there's no point
2386 including more than one in the machine description unless
2387 the more limited one has some advantage. */
2388
2389 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2390 enum machine_mode mode;
2391
2392 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2393 mode = GET_MODE_WIDER_MODE (mode))
2394 {
2395 enum insn_code code = clrstr_optab[(int) mode];
2396 insn_operand_predicate_fn pred;
2397
2398 if (code != CODE_FOR_nothing
2399 /* We don't need MODE to be narrower than
2400 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2401 the mode mask, as it is returned by the macro, it will
2402 definitely be less than the actual mode mask. */
2403 && ((GET_CODE (size) == CONST_INT
2404 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2405 <= (GET_MODE_MASK (mode) >> 1)))
2406 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2407 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2408 || (*pred) (object, BLKmode))
2409 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2410 || (*pred) (opalign, VOIDmode)))
2411 {
2412 rtx op1;
2413 rtx last = get_last_insn ();
2414 rtx pat;
2415
2416 op1 = convert_to_mode (mode, size, 1);
2417 pred = insn_data[(int) code].operand[1].predicate;
2418 if (pred != 0 && ! (*pred) (op1, mode))
2419 op1 = copy_to_mode_reg (mode, op1);
2420
2421 pat = GEN_FCN ((int) code) (object, op1, opalign);
2422 if (pat)
2423 {
2424 emit_insn (pat);
2425 return 0;
2426 }
2427 else
2428 delete_insns_since (last);
2429 }
2430 }
2431
2432 /* OBJECT or SIZE may have been passed through protect_from_queue.
2433
2434 It is unsafe to save the value generated by protect_from_queue
2435 and reuse it later. Consider what happens if emit_queue is
2436 called before the return value from protect_from_queue is used.
2437
2438 Expansion of the CALL_EXPR below will call emit_queue before
2439 we are finished emitting RTL for argument setup. So if we are
2440 not careful we could get the wrong value for an argument.
2441
2442 To avoid this problem we go ahead and emit code to copy OBJECT
2443 and SIZE into new pseudos. We can then place those new pseudos
2444 into an RTL_EXPR and use them later, even after a call to
2445 emit_queue.
2446
2447 Note this is not strictly needed for library calls since they
2448 do not call emit_queue before loading their arguments. However,
2449 we may need to have library calls call emit_queue in the future
2450 since failing to do so could cause problems for targets which
2451 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2452 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2453
2454 #ifdef TARGET_MEM_FUNCTIONS
2455 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2456 #else
2457 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2458 TREE_UNSIGNED (integer_type_node));
2459 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2460 #endif
2461
2462
2463 #ifdef TARGET_MEM_FUNCTIONS
2464 /* It is incorrect to use the libcall calling conventions to call
2465 memset in this context.
2466
2467 This could be a user call to memset and the user may wish to
2468 examine the return value from memset.
2469
2470 For targets where libcalls and normal calls have different
2471 conventions for returning pointers, we could end up generating
2472 incorrect code.
2473
2474 So instead of using a libcall sequence we build up a suitable
2475 CALL_EXPR and expand the call in the normal fashion. */
2476 if (fn == NULL_TREE)
2477 {
2478 tree fntype;
2479
2480 /* This was copied from except.c; I don't know if all this is
2481 necessary in this context or not. */
2482 fn = get_identifier ("memset");
2483 push_obstacks_nochange ();
2484 end_temporary_allocation ();
2485 fntype = build_pointer_type (void_type_node);
2486 fntype = build_function_type (fntype, NULL_TREE);
2487 fn = build_decl (FUNCTION_DECL, fn, fntype);
2488 ggc_add_tree_root (&fn, 1);
2489 DECL_EXTERNAL (fn) = 1;
2490 TREE_PUBLIC (fn) = 1;
2491 DECL_ARTIFICIAL (fn) = 1;
2492 make_decl_rtl (fn, NULL_PTR, 1);
2493 assemble_external (fn);
2494 pop_obstacks ();
2495 }
2496
2497 /* We need to make an argument list for the function call.
2498
2499 memset has three arguments: the first is a void * address, the
2500 second an integer with the initialization value, and the last is a
2501 size_t count of bytes to set. */
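/* In effect the code below builds the tree for the ordinary call
   memset ((void *) OBJECT, 0, (size_t) SIZE)
   so that it can be expanded with the normal calling conventions. */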
2502 arg_list
2503 = build_tree_list (NULL_TREE,
2504 make_tree (build_pointer_type (void_type_node),
2505 object));
2506 TREE_CHAIN (arg_list)
2507 = build_tree_list (NULL_TREE,
2508 make_tree (integer_type_node, const0_rtx));
2509 TREE_CHAIN (TREE_CHAIN (arg_list))
2510 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2511 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2512
2513 /* Now we have to build up the CALL_EXPR itself. */
2514 call_expr = build1 (ADDR_EXPR,
2515 build_pointer_type (TREE_TYPE (fn)), fn);
2516 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2517 call_expr, arg_list, NULL_TREE);
2518 TREE_SIDE_EFFECTS (call_expr) = 1;
2519
2520 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2521 #else
2522 emit_library_call (bzero_libfunc, 0,
2523 VOIDmode, 2, object, Pmode, size,
2524 TYPE_MODE (integer_type_node));
2525 #endif
2526 }
2527 }
2528 else
2529 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2530
2531 return retval;
2532 }
2533
2534 /* Generate code to copy Y into X.
2535 Both Y and X must have the same mode, except that
2536 Y can be a constant with VOIDmode.
2537 This mode cannot be BLKmode; use emit_block_move for that.
2538
2539 Return the last instruction emitted. */
2540
2541 rtx
2542 emit_move_insn (x, y)
2543 rtx x, y;
2544 {
2545 enum machine_mode mode = GET_MODE (x);
2546
2547 x = protect_from_queue (x, 1);
2548 y = protect_from_queue (y, 0);
2549
2550 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2551 abort ();
2552
2553 /* Never force constant_p_rtx to memory. */
2554 if (GET_CODE (y) == CONSTANT_P_RTX)
2555 ;
2556 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2557 y = force_const_mem (mode, y);
2558
2559 /* If X or Y are memory references, verify that their addresses are valid
2560 for the machine. */
2561 if (GET_CODE (x) == MEM
2562 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2563 && ! push_operand (x, GET_MODE (x)))
2564 || (flag_force_addr
2565 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2566 x = change_address (x, VOIDmode, XEXP (x, 0));
2567
2568 if (GET_CODE (y) == MEM
2569 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2570 || (flag_force_addr
2571 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2572 y = change_address (y, VOIDmode, XEXP (y, 0));
2573
2574 if (mode == BLKmode)
2575 abort ();
2576
2577 return emit_move_insn_1 (x, y);
2578 }
2579
2580 /* Low level part of emit_move_insn.
2581 Called just like emit_move_insn, but assumes X and Y
2582 are basically valid. */
2583
2584 rtx
2585 emit_move_insn_1 (x, y)
2586 rtx x, y;
2587 {
2588 enum machine_mode mode = GET_MODE (x);
2589 enum machine_mode submode;
2590 enum mode_class class = GET_MODE_CLASS (mode);
2591 unsigned int i;
2592
2593 if (mode >= MAX_MACHINE_MODE)
2594 abort ();
2595
2596 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2597 return
2598 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2599
2600 /* Expand complex moves by moving real part and imag part, if possible. */
2601 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2602 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2603 * BITS_PER_UNIT),
2604 (class == MODE_COMPLEX_INT
2605 ? MODE_INT : MODE_FLOAT),
2606 0))
2607 && (mov_optab->handlers[(int) submode].insn_code
2608 != CODE_FOR_nothing))
2609 {
2610 /* Don't split destination if it is a stack push. */
2611 int stack = push_operand (x, GET_MODE (x));
2612
2613 /* If this is a stack push, push the highpart first, so it
2614 will be in the argument order.
2615
2616 In that case, change_address is used only to convert
2617 the mode, not to change the address. */
2618 if (stack)
2619 {
2620 /* Note that the real part always precedes the imag part in memory
2621 regardless of machine's endianness. */
2622 #ifdef STACK_GROWS_DOWNWARD
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2625 gen_imagpart (submode, y)));
2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2628 gen_realpart (submode, y)));
2629 #else
2630 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2631 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2632 gen_realpart (submode, y)));
2633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2634 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2635 gen_imagpart (submode, y)));
2636 #endif
2637 }
2638 else
2639 {
2640 rtx realpart_x, realpart_y;
2641 rtx imagpart_x, imagpart_y;
2642
2643 /* If this is a complex value with each part being smaller than a
2644 word, the usual calling sequence will likely pack the pieces into
2645 a single register. Unfortunately, SUBREG of hard registers only
2646 deals in terms of words, so we have a problem converting input
2647 arguments to the CONCAT of two registers that is used elsewhere
2648 for complex values. If this is before reload, we can copy it into
2649 memory and reload. FIXME, we should see about using extract and
2650 insert on integer registers, but complex short and complex char
2651 variables should be rarely used. */
2652 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2653 && (reload_in_progress | reload_completed) == 0)
2654 {
2655 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2656 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2657
2658 if (packed_dest_p || packed_src_p)
2659 {
2660 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2661 ? MODE_FLOAT : MODE_INT);
2662
2663 enum machine_mode reg_mode =
2664 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2665
2666 if (reg_mode != BLKmode)
2667 {
2668 rtx mem = assign_stack_temp (reg_mode,
2669 GET_MODE_SIZE (mode), 0);
2670
2671 rtx cmem = change_address (mem, mode, NULL_RTX);
2672
2673 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2674
2675 if (packed_dest_p)
2676 {
2677 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2678 emit_move_insn_1 (cmem, y);
2679 return emit_move_insn_1 (sreg, mem);
2680 }
2681 else
2682 {
2683 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2684 emit_move_insn_1 (mem, sreg);
2685 return emit_move_insn_1 (x, cmem);
2686 }
2687 }
2688 }
2689 }
2690
2691 realpart_x = gen_realpart (submode, x);
2692 realpart_y = gen_realpart (submode, y);
2693 imagpart_x = gen_imagpart (submode, x);
2694 imagpart_y = gen_imagpart (submode, y);
2695
2696 /* Show the output dies here. This is necessary for SUBREGs
2697 of pseudos since we cannot track their lifetimes correctly;
2698 hard regs shouldn't appear here except as return values.
2699 We never want to emit such a clobber after reload. */
2700 if (x != y
2701 && ! (reload_in_progress || reload_completed)
2702 && (GET_CODE (realpart_x) == SUBREG
2703 || GET_CODE (imagpart_x) == SUBREG))
2704 {
2705 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2706 }
2707
2708 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2709 (realpart_x, realpart_y));
2710 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2711 (imagpart_x, imagpart_y));
2712 }
2713
2714 return get_last_insn ();
2715 }
2716
2717 /* This will handle any multi-word mode that lacks a move_insn pattern.
2718 However, you will get better code if you define such patterns,
2719 even if they must turn into multiple assembler instructions. */
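/* Added illustration (assuming 32-bit words): a DImode move with no movdi
   pattern is split by the loop below into two word-sized moves, one per
   operand_subword of X and Y. */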
2720 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2721 {
2722 rtx last_insn = 0;
2723 rtx seq;
2724 int need_clobber;
2725
2726 #ifdef PUSH_ROUNDING
2727
2728 /* If X is a push on the stack, do the push now and replace
2729 X with a reference to the stack pointer. */
2730 if (push_operand (x, GET_MODE (x)))
2731 {
2732 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2733 x = change_address (x, VOIDmode, stack_pointer_rtx);
2734 }
2735 #endif
2736
2737 start_sequence ();
2738
2739 need_clobber = 0;
2740 for (i = 0;
2741 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2742 i++)
2743 {
2744 rtx xpart = operand_subword (x, i, 1, mode);
2745 rtx ypart = operand_subword (y, i, 1, mode);
2746
2747 /* If we can't get a part of Y, put Y into memory if it is a
2748 constant. Otherwise, force it into a register. If we still
2749 can't get a part of Y, abort. */
2750 if (ypart == 0 && CONSTANT_P (y))
2751 {
2752 y = force_const_mem (mode, y);
2753 ypart = operand_subword (y, i, 1, mode);
2754 }
2755 else if (ypart == 0)
2756 ypart = operand_subword_force (y, i, mode);
2757
2758 if (xpart == 0 || ypart == 0)
2759 abort ();
2760
2761 need_clobber |= (GET_CODE (xpart) == SUBREG);
2762
2763 last_insn = emit_move_insn (xpart, ypart);
2764 }
2765
2766 seq = gen_sequence ();
2767 end_sequence ();
2768
2769 /* Show the output dies here. This is necessary for SUBREGs
2770 of pseudos since we cannot track their lifetimes correctly;
2771 hard regs shouldn't appear here except as return values.
2772 We never want to emit such a clobber after reload. */
2773 if (x != y
2774 && ! (reload_in_progress || reload_completed)
2775 && need_clobber != 0)
2776 {
2777 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2778 }
2779
2780 emit_insn (seq);
2781
2782 return last_insn;
2783 }
2784 else
2785 abort ();
2786 }
2787 \f
2788 /* Pushing data onto the stack. */
2789
2790 /* Push a block of length SIZE (perhaps variable)
2791 and return an rtx to address the beginning of the block.
2792 Note that it is not possible for the value returned to be a QUEUED.
2793 The value may be virtual_outgoing_args_rtx.
2794
2795 EXTRA is the number of bytes of padding to push in addition to SIZE.
2796 BELOW nonzero means this padding comes at low addresses;
2797 otherwise, the padding comes at high addresses. */
2798
2799 rtx
2800 push_block (size, extra, below)
2801 rtx size;
2802 int extra, below;
2803 {
2804 register rtx temp;
2805
2806 size = convert_modes (Pmode, ptr_mode, size, 1);
2807 if (CONSTANT_P (size))
2808 anti_adjust_stack (plus_constant (size, extra));
2809 else if (GET_CODE (size) == REG && extra == 0)
2810 anti_adjust_stack (size);
2811 else
2812 {
2813 rtx temp = copy_to_mode_reg (Pmode, size);
2814 if (extra != 0)
2815 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2816 temp, 0, OPTAB_LIB_WIDEN);
2817 anti_adjust_stack (temp);
2818 }
2819
2820 #ifndef STACK_GROWS_DOWNWARD
2821 #ifdef ARGS_GROW_DOWNWARD
2822 if (!ACCUMULATE_OUTGOING_ARGS)
2823 #else
2824 if (0)
2825 #endif
2826 #else
2827 if (1)
2828 #endif
2829 {
2830
2831 /* Return the lowest stack address when STACK or ARGS grow downward and
2832 we are not accumulating outgoing arguments (the c4x port uses such
2833 conventions). */
2834 temp = virtual_outgoing_args_rtx;
2835 if (extra != 0 && below)
2836 temp = plus_constant (temp, extra);
2837 }
2838 else
2839 {
2840 if (GET_CODE (size) == CONST_INT)
2841 temp = plus_constant (virtual_outgoing_args_rtx,
2842 - INTVAL (size) - (below ? 0 : extra));
2843 else if (extra != 0 && !below)
2844 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2845 negate_rtx (Pmode, plus_constant (size, extra)));
2846 else
2847 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2848 negate_rtx (Pmode, size));
2849 }
2850
2851 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2852 }
2853
2854 rtx
2855 gen_push_operand ()
2856 {
2857 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2858 }
2859
2860 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2861 block of SIZE bytes. */
2862
2863 static rtx
2864 get_push_address (size)
2865 int size;
2866 {
2867 register rtx temp;
2868
2869 if (STACK_PUSH_CODE == POST_DEC)
2870 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2871 else if (STACK_PUSH_CODE == POST_INC)
2872 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2873 else
2874 temp = stack_pointer_rtx;
2875
2876 return copy_to_reg (temp);
2877 }
2878
2879 /* Generate code to push X onto the stack, assuming it has mode MODE and
2880 type TYPE.
2881 MODE is redundant except when X is a CONST_INT (since they don't
2882 carry mode info).
2883 SIZE is an rtx for the size of data to be copied (in bytes),
2884 needed only if X is BLKmode.
2885
2886 ALIGN is the maximum alignment we can assume.
2887
2888 If PARTIAL and REG are both nonzero, then copy that many of the first
2889 words of X into registers starting with REG, and push the rest of X.
2890 The amount of space pushed is decreased by PARTIAL words,
2891 rounded *down* to a multiple of PARM_BOUNDARY.
2892 REG must be a hard register in this case.
2893 If REG is zero but PARTIAL is not, take all other actions for an
2894 argument partially in registers, but do not actually load any
2895 registers.
2896
2897 EXTRA is the amount in bytes of extra space to leave next to this arg.
2898 This is ignored if an argument block has already been allocated.
2899
2900 On a machine that lacks real push insns, ARGS_ADDR is the address of
2901 the bottom of the argument block for this call. We use indexing off there
2902 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2903 argument block has not been preallocated.
2904
2905 ARGS_SO_FAR is the size of args previously pushed for this call.
2906
2907 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2908 for arguments passed in registers. If nonzero, it will be the number
2909 of bytes required. */
2910
2911 void
2912 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2913 args_addr, args_so_far, reg_parm_stack_space,
2914 alignment_pad)
2915 register rtx x;
2916 enum machine_mode mode;
2917 tree type;
2918 rtx size;
2919 unsigned int align;
2920 int partial;
2921 rtx reg;
2922 int extra;
2923 rtx args_addr;
2924 rtx args_so_far;
2925 int reg_parm_stack_space;
2926 rtx alignment_pad;
2927 {
2928 rtx xinner;
2929 enum direction stack_direction
2930 #ifdef STACK_GROWS_DOWNWARD
2931 = downward;
2932 #else
2933 = upward;
2934 #endif
2935
2936 /* Decide where to pad the argument: `downward' for below,
2937 `upward' for above, or `none' for don't pad it.
2938 Default is below for small data on big-endian machines; else above. */
2939 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2940
2941 /* Invert direction if stack is post-update. */
2942 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2943 if (where_pad != none)
2944 where_pad = (where_pad == downward ? upward : downward);
2945
2946 xinner = x = protect_from_queue (x, 0);
2947
2948 if (mode == BLKmode)
2949 {
2950 /* Copy a block into the stack, entirely or partially. */
2951
2952 register rtx temp;
2953 int used = partial * UNITS_PER_WORD;
2954 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2955 int skip;
2956
2957 if (size == 0)
2958 abort ();
2959
2960 used -= offset;
2961
2962 /* USED is now the # of bytes we need not copy to the stack
2963 because registers will take care of them. */
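/* Added illustration (assuming 4-byte words and PARM_BOUNDARY == 64):
   PARTIAL == 3 gives USED = 12 and OFFSET = 12 % 8 = 4, so USED drops
   to 8; only those 8 register-covered bytes are skipped, and the odd 4
   bytes are still copied to the stack, matching the round-down to a
   PARM_BOUNDARY multiple described in the function comment. */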
2964
2965 if (partial != 0)
2966 xinner = change_address (xinner, BLKmode,
2967 plus_constant (XEXP (xinner, 0), used));
2968
2969 /* If the partial register-part of the arg counts in its stack size,
2970 skip the part of stack space corresponding to the registers.
2971 Otherwise, start copying to the beginning of the stack space,
2972 by setting SKIP to 0. */
2973 skip = (reg_parm_stack_space == 0) ? 0 : used;
2974
2975 #ifdef PUSH_ROUNDING
2976 /* Do it with several push insns if that doesn't take lots of insns
2977 and if there is no difficulty with push insns that skip bytes
2978 on the stack for alignment purposes. */
2979 if (args_addr == 0
2980 && PUSH_ARGS
2981 && GET_CODE (size) == CONST_INT
2982 && skip == 0
2983 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2984 /* Here we avoid the case of a structure whose weak alignment
2985 forces many pushes of a small amount of data,
2986 and such small pushes do rounding that causes trouble. */
2987 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
2988 || align >= BIGGEST_ALIGNMENT
2989 || PUSH_ROUNDING (align) == align)
2990 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2991 {
2992 /* Push padding now if padding above and stack grows down,
2993 or if padding below and stack grows up.
2994 But if space already allocated, this has already been done. */
2995 if (extra && args_addr == 0
2996 && where_pad != none && where_pad != stack_direction)
2997 anti_adjust_stack (GEN_INT (extra));
2998
2999 stack_pointer_delta += INTVAL (size) - used;
3000 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3001 INTVAL (size) - used, align);
3002
3003 if (current_function_check_memory_usage && ! in_check_memory_usage)
3004 {
3005 rtx temp;
3006
3007 in_check_memory_usage = 1;
3008 temp = get_push_address (INTVAL(size) - used);
3009 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3010 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3011 temp, Pmode,
3012 XEXP (xinner, 0), Pmode,
3013 GEN_INT (INTVAL(size) - used),
3014 TYPE_MODE (sizetype));
3015 else
3016 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3017 temp, Pmode,
3018 GEN_INT (INTVAL(size) - used),
3019 TYPE_MODE (sizetype),
3020 GEN_INT (MEMORY_USE_RW),
3021 TYPE_MODE (integer_type_node));
3022 in_check_memory_usage = 0;
3023 }
3024 }
3025 else
3026 #endif /* PUSH_ROUNDING */
3027 {
3028 /* Otherwise make space on the stack and copy the data
3029 to the address of that space. */
3030
3031 /* Deduct words put into registers from the size we must copy. */
3032 if (partial != 0)
3033 {
3034 if (GET_CODE (size) == CONST_INT)
3035 size = GEN_INT (INTVAL (size) - used);
3036 else
3037 size = expand_binop (GET_MODE (size), sub_optab, size,
3038 GEN_INT (used), NULL_RTX, 0,
3039 OPTAB_LIB_WIDEN);
3040 }
3041
3042 /* Get the address of the stack space.
3043 In this case, we do not deal with EXTRA separately.
3044 A single stack adjust will do. */
3045 if (! args_addr)
3046 {
3047 temp = push_block (size, extra, where_pad == downward);
3048 extra = 0;
3049 }
3050 else if (GET_CODE (args_so_far) == CONST_INT)
3051 temp = memory_address (BLKmode,
3052 plus_constant (args_addr,
3053 skip + INTVAL (args_so_far)));
3054 else
3055 temp = memory_address (BLKmode,
3056 plus_constant (gen_rtx_PLUS (Pmode,
3057 args_addr,
3058 args_so_far),
3059 skip));
3060 if (current_function_check_memory_usage && ! in_check_memory_usage)
3061 {
3062 rtx target;
3063
3064 in_check_memory_usage = 1;
3065 target = copy_to_reg (temp);
3066 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3067 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3068 target, Pmode,
3069 XEXP (xinner, 0), Pmode,
3070 size, TYPE_MODE (sizetype));
3071 else
3072 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3073 target, Pmode,
3074 size, TYPE_MODE (sizetype),
3075 GEN_INT (MEMORY_USE_RW),
3076 TYPE_MODE (integer_type_node));
3077 in_check_memory_usage = 0;
3078 }
3079
3080 /* TEMP is the address of the block. Copy the data there. */
3081 if (GET_CODE (size) == CONST_INT
3082 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3083 {
3084 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3085 INTVAL (size), align);
3086 goto ret;
3087 }
3088 else
3089 {
3090 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3091 enum machine_mode mode;
3092 rtx target = gen_rtx_MEM (BLKmode, temp);
3093
3094 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3095 mode != VOIDmode;
3096 mode = GET_MODE_WIDER_MODE (mode))
3097 {
3098 enum insn_code code = movstr_optab[(int) mode];
3099 insn_operand_predicate_fn pred;
3100
3101 if (code != CODE_FOR_nothing
3102 && ((GET_CODE (size) == CONST_INT
3103 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3104 <= (GET_MODE_MASK (mode) >> 1)))
3105 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3106 && (!(pred = insn_data[(int) code].operand[0].predicate)
3107 || ((*pred) (target, BLKmode)))
3108 && (!(pred = insn_data[(int) code].operand[1].predicate)
3109 || ((*pred) (xinner, BLKmode)))
3110 && (!(pred = insn_data[(int) code].operand[3].predicate)
3111 || ((*pred) (opalign, VOIDmode))))
3112 {
3113 rtx op2 = convert_to_mode (mode, size, 1);
3114 rtx last = get_last_insn ();
3115 rtx pat;
3116
3117 pred = insn_data[(int) code].operand[2].predicate;
3118 if (pred != 0 && ! (*pred) (op2, mode))
3119 op2 = copy_to_mode_reg (mode, op2);
3120
3121 pat = GEN_FCN ((int) code) (target, xinner,
3122 op2, opalign);
3123 if (pat)
3124 {
3125 emit_insn (pat);
3126 goto ret;
3127 }
3128 else
3129 delete_insns_since (last);
3130 }
3131 }
3132 }
3133
3134 if (!ACCUMULATE_OUTGOING_ARGS)
3135 {
3136 /* If the source is referenced relative to the stack pointer,
3137 copy it to another register to stabilize it. We do not need
3138 to do this if we know that we won't be changing sp. */
3139
3140 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3141 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3142 temp = copy_to_reg (temp);
3143 }
3144
3145 /* Make inhibit_defer_pop nonzero around the library call
3146 to force it to pop the bcopy-arguments right away. */
3147 NO_DEFER_POP;
3148 #ifdef TARGET_MEM_FUNCTIONS
3149 emit_library_call (memcpy_libfunc, 0,
3150 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3151 convert_to_mode (TYPE_MODE (sizetype),
3152 size, TREE_UNSIGNED (sizetype)),
3153 TYPE_MODE (sizetype));
3154 #else
3155 emit_library_call (bcopy_libfunc, 0,
3156 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3157 convert_to_mode (TYPE_MODE (integer_type_node),
3158 size,
3159 TREE_UNSIGNED (integer_type_node)),
3160 TYPE_MODE (integer_type_node));
3161 #endif
3162 OK_DEFER_POP;
3163 }
3164 }
3165 else if (partial > 0)
3166 {
3167 /* Scalar partly in registers. */
3168
3169 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3170 int i;
3171 int not_stack;
3172 /* # words of start of argument
3173 that we must make space for but need not store. */
3174 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3175 int args_offset = INTVAL (args_so_far);
3176 int skip;
3177
3178 /* Push padding now if padding above and stack grows down,
3179 or if padding below and stack grows up.
3180 But if space already allocated, this has already been done. */
3181 if (extra && args_addr == 0
3182 && where_pad != none && where_pad != stack_direction)
3183 anti_adjust_stack (GEN_INT (extra));
3184
3185 /* If we make space by pushing it, we might as well push
3186 the real data. Otherwise, we can leave OFFSET nonzero
3187 and leave the space uninitialized. */
3188 if (args_addr == 0)
3189 offset = 0;
3190
3191 /* Now NOT_STACK gets the number of words that we don't need to
3192 allocate on the stack. */
3193 not_stack = partial - offset;
3194
3195 /* If the partial register-part of the arg counts in its stack size,
3196 skip the part of stack space corresponding to the registers.
3197 Otherwise, start copying to the beginning of the stack space,
3198 by setting SKIP to 0. */
3199 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3200
3201 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3202 x = validize_mem (force_const_mem (mode, x));
3203
3204 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3205 SUBREGs of such registers are not allowed. */
3206 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3207 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3208 x = copy_to_reg (x);
3209
3210 /* Loop over all the words allocated on the stack for this arg. */
3211 /* We can do it by words, because any scalar bigger than a word
3212 has a size a multiple of a word. */
3213 #ifndef PUSH_ARGS_REVERSED
3214 for (i = not_stack; i < size; i++)
3215 #else
3216 for (i = size - 1; i >= not_stack; i--)
3217 #endif
3218 if (i >= not_stack + offset)
3219 emit_push_insn (operand_subword_force (x, i, mode),
3220 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3221 0, args_addr,
3222 GEN_INT (args_offset + ((i - not_stack + skip)
3223 * UNITS_PER_WORD)),
3224 reg_parm_stack_space, alignment_pad);
3225 }
3226 else
3227 {
3228 rtx addr;
3229 rtx target = NULL_RTX;
3230
3231 /* Push padding now if padding above and stack grows down,
3232 or if padding below and stack grows up.
3233 But if space already allocated, this has already been done. */
3234 if (extra && args_addr == 0
3235 && where_pad != none && where_pad != stack_direction)
3236 anti_adjust_stack (GEN_INT (extra));
3237
3238 #ifdef PUSH_ROUNDING
3239 if (args_addr == 0 && PUSH_ARGS)
3240 {
3241 addr = gen_push_operand ();
3242 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3243 }
3244 else
3245 #endif
3246 {
3247 if (GET_CODE (args_so_far) == CONST_INT)
3248 addr
3249 = memory_address (mode,
3250 plus_constant (args_addr,
3251 INTVAL (args_so_far)));
3252 else
3253 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3254 args_so_far));
3255 target = addr;
3256 }
3257
3258 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3259
3260 if (current_function_check_memory_usage && ! in_check_memory_usage)
3261 {
3262 in_check_memory_usage = 1;
3263 if (target == 0)
3264 target = get_push_address (GET_MODE_SIZE (mode));
3265
3266 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3267 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3268 target, Pmode,
3269 XEXP (x, 0), Pmode,
3270 GEN_INT (GET_MODE_SIZE (mode)),
3271 TYPE_MODE (sizetype));
3272 else
3273 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3274 target, Pmode,
3275 GEN_INT (GET_MODE_SIZE (mode)),
3276 TYPE_MODE (sizetype),
3277 GEN_INT (MEMORY_USE_RW),
3278 TYPE_MODE (integer_type_node));
3279 in_check_memory_usage = 0;
3280 }
3281 }
3282
3283 ret:
3284 /* If part should go in registers, copy that part
3285 into the appropriate registers. Do this now, at the end,
3286 since mem-to-mem copies above may do function calls. */
3287 if (partial > 0 && reg != 0)
3288 {
3289 /* Handle calls that pass values in multiple non-contiguous locations.
3290 The Irix 6 ABI has examples of this. */
3291 if (GET_CODE (reg) == PARALLEL)
3292 emit_group_load (reg, x, -1, align); /* ??? size? */
3293 else
3294 move_block_to_reg (REGNO (reg), x, partial, mode);
3295 }
3296
3297 if (extra && args_addr == 0 && where_pad == stack_direction)
3298 anti_adjust_stack (GEN_INT (extra));
3299
3300 if (alignment_pad)
3301 anti_adjust_stack (alignment_pad);
3302 }
3303 \f
3304 /* Expand an assignment that stores the value of FROM into TO.
3305 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3306 (This may contain a QUEUED rtx;
3307 if the value is constant, this rtx is a constant.)
3308 Otherwise, the returned value is NULL_RTX.
3309
3310 SUGGEST_REG is no longer actually used.
3311 It used to mean, copy the value through a register
3312 and return that register, if that is possible.
3313 We now use WANT_VALUE to decide whether to do this. */
3314
3315 rtx
3316 expand_assignment (to, from, want_value, suggest_reg)
3317 tree to, from;
3318 int want_value;
3319 int suggest_reg ATTRIBUTE_UNUSED;
3320 {
3321 register rtx to_rtx = 0;
3322 rtx result;
3323
3324 /* Don't crash if the lhs of the assignment was erroneous. */
3325
3326 if (TREE_CODE (to) == ERROR_MARK)
3327 {
3328 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3329 return want_value ? result : NULL_RTX;
3330 }
3331
3332 /* Assignment of a structure component needs special treatment
3333 if the structure component's rtx is not simply a MEM.
3334 Assignment of an array element at a constant index, and assignment of
3335 an array element in an unaligned packed structure field, has the same
3336 problem. */
3337
3338 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3339 || TREE_CODE (to) == ARRAY_REF)
3340 {
3341 enum machine_mode mode1;
3342 HOST_WIDE_INT bitsize, bitpos;
3343 tree offset;
3344 int unsignedp;
3345 int volatilep = 0;
3346 tree tem;
3347 unsigned int alignment;
3348
3349 push_temp_slots ();
3350 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3351 &unsignedp, &volatilep, &alignment);
3352
3353 /* If we are going to use store_bit_field and extract_bit_field,
3354 make sure to_rtx will be safe for multiple use. */
3355
3356 if (mode1 == VOIDmode && want_value)
3357 tem = stabilize_reference (tem);
3358
3359 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3360 if (offset != 0)
3361 {
3362 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3363
3364 if (GET_CODE (to_rtx) != MEM)
3365 abort ();
3366
3367 if (GET_MODE (offset_rtx) != ptr_mode)
3368 {
3369 #ifdef POINTERS_EXTEND_UNSIGNED
3370 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3371 #else
3372 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3373 #endif
3374 }
3375
3376 /* A constant address in TO_RTX can have VOIDmode, we must not try
3377 to call force_reg for that case. Avoid that case. */
3378 if (GET_CODE (to_rtx) == MEM
3379 && GET_MODE (to_rtx) == BLKmode
3380 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3381 && bitsize
3382 && (bitpos % bitsize) == 0
3383 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3384 && alignment == GET_MODE_ALIGNMENT (mode1))
3385 {
3386 rtx temp = change_address (to_rtx, mode1,
3387 plus_constant (XEXP (to_rtx, 0),
3388 (bitpos /
3389 BITS_PER_UNIT)));
3390 if (GET_CODE (XEXP (temp, 0)) == REG)
3391 to_rtx = temp;
3392 else
3393 to_rtx = change_address (to_rtx, mode1,
3394 force_reg (GET_MODE (XEXP (temp, 0)),
3395 XEXP (temp, 0)));
3396 bitpos = 0;
3397 }
3398
3399 to_rtx = change_address (to_rtx, VOIDmode,
3400 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3401 force_reg (ptr_mode,
3402 offset_rtx)));
3403 }
3404
3405 if (volatilep)
3406 {
3407 if (GET_CODE (to_rtx) == MEM)
3408 {
3409 /* When the offset is zero, to_rtx is the address of the
3410 structure we are storing into, and hence may be shared.
3411 We must make a new MEM before setting the volatile bit. */
3412 if (offset == 0)
3413 to_rtx = copy_rtx (to_rtx);
3414
3415 MEM_VOLATILE_P (to_rtx) = 1;
3416 }
3417 #if 0 /* This was turned off because, when a field is volatile
3418 in an object which is not volatile, the object may be in a register,
3419 and then we would abort over here. */
3420 else
3421 abort ();
3422 #endif
3423 }
3424
3425 if (TREE_CODE (to) == COMPONENT_REF
3426 && TREE_READONLY (TREE_OPERAND (to, 1)))
3427 {
3428 if (offset == 0)
3429 to_rtx = copy_rtx (to_rtx);
3430
3431 RTX_UNCHANGING_P (to_rtx) = 1;
3432 }
3433
3434 /* Check the access. */
3435 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3436 {
3437 rtx to_addr;
3438 int size;
3439 int best_mode_size;
3440 enum machine_mode best_mode;
3441
3442 best_mode = get_best_mode (bitsize, bitpos,
3443 TYPE_ALIGN (TREE_TYPE (tem)),
3444 mode1, volatilep);
3445 if (best_mode == VOIDmode)
3446 best_mode = QImode;
3447
3448 best_mode_size = GET_MODE_BITSIZE (best_mode);
3449 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3450 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3451 size *= GET_MODE_SIZE (best_mode);
3452
3453 /* Check the access right of the pointer. */
3454 if (size)
3455 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3456 to_addr, Pmode,
3457 GEN_INT (size), TYPE_MODE (sizetype),
3458 GEN_INT (MEMORY_USE_WO),
3459 TYPE_MODE (integer_type_node));
3460 }
3461
3462 /* If this is a varying-length object, we must get the address of
3463 the source and do an explicit block move. */
3464 if (bitsize < 0)
3465 {
3466 unsigned int from_align;
3467 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3468 rtx inner_to_rtx
3469 = change_address (to_rtx, VOIDmode,
3470 plus_constant (XEXP (to_rtx, 0),
3471 bitpos / BITS_PER_UNIT));
3472
3473 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3474 MIN (alignment, from_align));
3475 free_temp_slots ();
3476 pop_temp_slots ();
3477 return to_rtx;
3478 }
3479 else
3480 {
3481 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3482 (want_value
3483 /* Spurious cast for HPUX compiler. */
3484 ? ((enum machine_mode)
3485 TYPE_MODE (TREE_TYPE (to)))
3486 : VOIDmode),
3487 unsignedp,
3488 alignment,
3489 int_size_in_bytes (TREE_TYPE (tem)),
3490 get_alias_set (to));
3491
3492 preserve_temp_slots (result);
3493 free_temp_slots ();
3494 pop_temp_slots ();
3495
3496 /* If the value is meaningful, convert RESULT to the proper mode.
3497 Otherwise, return nothing. */
3498 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3499 TYPE_MODE (TREE_TYPE (from)),
3500 result,
3501 TREE_UNSIGNED (TREE_TYPE (to)))
3502 : NULL_RTX);
3503 }
3504 }
3505
3506 /* If the rhs is a function call and its value is not an aggregate,
3507 call the function before we start to compute the lhs.
3508 This is needed for correct code for cases such as
3509 val = setjmp (buf) on machines where reference to val
3510 requires loading up part of an address in a separate insn.
3511
3512 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3513 since it might be a promoted variable where the zero- or sign- extension
3514 needs to be done. Handling this in the normal way is safe because no
3515 computation is done before the call. */
3516 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3517 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3518 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3519 && GET_CODE (DECL_RTL (to)) == REG))
3520 {
3521 rtx value;
3522
3523 push_temp_slots ();
3524 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3525 if (to_rtx == 0)
3526 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3527
3528 /* Handle calls that return values in multiple non-contiguous locations.
3529 The Irix 6 ABI has examples of this. */
3530 if (GET_CODE (to_rtx) == PARALLEL)
3531 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3532 TYPE_ALIGN (TREE_TYPE (from)));
3533 else if (GET_MODE (to_rtx) == BLKmode)
3534 emit_block_move (to_rtx, value, expr_size (from),
3535 TYPE_ALIGN (TREE_TYPE (from)));
3536 else
3537 {
3538 #ifdef POINTERS_EXTEND_UNSIGNED
3539 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3540 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3541 value = convert_memory_address (GET_MODE (to_rtx), value);
3542 #endif
3543 emit_move_insn (to_rtx, value);
3544 }
3545 preserve_temp_slots (to_rtx);
3546 free_temp_slots ();
3547 pop_temp_slots ();
3548 return want_value ? to_rtx : NULL_RTX;
3549 }
3550
3551 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3552 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3553
3554 if (to_rtx == 0)
3555 {
3556 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3557 if (GET_CODE (to_rtx) == MEM)
3558 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3559 }
3560
3561 /* Don't move directly into a return register. */
3562 if (TREE_CODE (to) == RESULT_DECL
3563 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3564 {
3565 rtx temp;
3566
3567 push_temp_slots ();
3568 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3569
3570 if (GET_CODE (to_rtx) == PARALLEL)
3571 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3572 TYPE_ALIGN (TREE_TYPE (from)));
3573 else
3574 emit_move_insn (to_rtx, temp);
3575
3576 preserve_temp_slots (to_rtx);
3577 free_temp_slots ();
3578 pop_temp_slots ();
3579 return want_value ? to_rtx : NULL_RTX;
3580 }
3581
3582 /* In case we are returning the contents of an object which overlaps
3583 the place the value is being stored, use a safe function when copying
3584 a value through a pointer into a structure value return block. */
3585 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3586 && current_function_returns_struct
3587 && !current_function_returns_pcc_struct)
3588 {
3589 rtx from_rtx, size;
3590
3591 push_temp_slots ();
3592 size = expr_size (from);
3593 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3594 EXPAND_MEMORY_USE_DONT);
3595
3596 /* Copy the rights of the bitmap. */
3597 if (current_function_check_memory_usage)
3598 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3599 XEXP (to_rtx, 0), Pmode,
3600 XEXP (from_rtx, 0), Pmode,
3601 convert_to_mode (TYPE_MODE (sizetype),
3602 size, TREE_UNSIGNED (sizetype)),
3603 TYPE_MODE (sizetype));
3604
3605 #ifdef TARGET_MEM_FUNCTIONS
3606 emit_library_call (memcpy_libfunc, 0,
3607 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3608 XEXP (from_rtx, 0), Pmode,
3609 convert_to_mode (TYPE_MODE (sizetype),
3610 size, TREE_UNSIGNED (sizetype)),
3611 TYPE_MODE (sizetype));
3612 #else
3613 emit_library_call (bcopy_libfunc, 0,
3614 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3615 XEXP (to_rtx, 0), Pmode,
3616 convert_to_mode (TYPE_MODE (integer_type_node),
3617 size, TREE_UNSIGNED (integer_type_node)),
3618 TYPE_MODE (integer_type_node));
3619 #endif
3620
3621 preserve_temp_slots (to_rtx);
3622 free_temp_slots ();
3623 pop_temp_slots ();
3624 return want_value ? to_rtx : NULL_RTX;
3625 }
3626
3627 /* Compute FROM and store the value in the rtx we got. */
3628
3629 push_temp_slots ();
3630 result = store_expr (from, to_rtx, want_value);
3631 preserve_temp_slots (result);
3632 free_temp_slots ();
3633 pop_temp_slots ();
3634 return want_value ? result : NULL_RTX;
3635 }
3636
3637 /* Generate code for computing expression EXP,
3638 and storing the value into TARGET.
3639 TARGET may contain a QUEUED rtx.
3640
3641 If WANT_VALUE is nonzero, return a copy of the value
3642 not in TARGET, so that we can be sure to use the proper
3643 value in a containing expression even if TARGET has something
3644 else stored in it. If possible, we copy the value through a pseudo
3645 and return that pseudo. Or, if the value is constant, we try to
3646 return the constant. In some cases, we return a pseudo
3647 copied *from* TARGET.
3648
3649 If the mode is BLKmode then we may return TARGET itself.
3650 It turns out that in BLKmode it doesn't cause a problem,
3651 because C has no operators that could combine two different
3652 assignments into the same BLKmode object with different values
3653 with no sequence point. Will other languages need this to
3654 be more thorough?
3655
3656 If WANT_VALUE is 0, we return NULL, to make sure
3657 to catch quickly any cases where the caller uses the value
3658 and fails to set WANT_VALUE. */
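/* For example (purely illustrative), when expanding the C statement

       a = (b = c);

   the inner assignment is expanded with WANT_VALUE nonzero so that its
   result can feed the outer store, while the outer assignment, whose
   value is discarded, is expanded with WANT_VALUE zero.  */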
3659
3660 rtx
3661 store_expr (exp, target, want_value)
3662 register tree exp;
3663 register rtx target;
3664 int want_value;
3665 {
3666 register rtx temp;
3667 int dont_return_target = 0;
3668
3669 if (TREE_CODE (exp) == COMPOUND_EXPR)
3670 {
3671 /* Perform first part of compound expression, then assign from second
3672 part. */
3673 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3674 emit_queue ();
3675 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3676 }
3677 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3678 {
3679 /* For conditional expression, get safe form of the target. Then
3680 test the condition, doing the appropriate assignment on either
3681 side. This avoids the creation of unnecessary temporaries.
3682 For non-BLKmode, it is more efficient not to do this. */
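/* As an illustrative case, a structure assignment such as

       struct S s, s1, s2;
       ...
       s = cond ? s1 : s2;

   is expanded here as a test of COND followed by a direct block copy of
   either S1 or S2 into S, instead of copying the chosen operand into a
   temporary and then into S.  */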
3683
3684 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3685
3686 emit_queue ();
3687 target = protect_from_queue (target, 1);
3688
3689 do_pending_stack_adjust ();
3690 NO_DEFER_POP;
3691 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3692 start_cleanup_deferral ();
3693 store_expr (TREE_OPERAND (exp, 1), target, 0);
3694 end_cleanup_deferral ();
3695 emit_queue ();
3696 emit_jump_insn (gen_jump (lab2));
3697 emit_barrier ();
3698 emit_label (lab1);
3699 start_cleanup_deferral ();
3700 store_expr (TREE_OPERAND (exp, 2), target, 0);
3701 end_cleanup_deferral ();
3702 emit_queue ();
3703 emit_label (lab2);
3704 OK_DEFER_POP;
3705
3706 return want_value ? target : NULL_RTX;
3707 }
3708 else if (queued_subexp_p (target))
3709 /* If target contains a postincrement, let's not risk
3710 using it as the place to generate the rhs. */
3711 {
3712 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3713 {
3714 /* Expand EXP into a new pseudo. */
3715 temp = gen_reg_rtx (GET_MODE (target));
3716 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3717 }
3718 else
3719 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3720
3721 /* If target is volatile, ANSI requires accessing the value
3722 *from* the target, if it is accessed. So make that happen.
3723 In no case return the target itself. */
3724 if (! MEM_VOLATILE_P (target) && want_value)
3725 dont_return_target = 1;
3726 }
3727 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3728 && GET_MODE (target) != BLKmode)
3729 /* If target is in memory and caller wants value in a register instead,
3730 arrange that. Pass TARGET as target for expand_expr so that,
3731 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3732 We know expand_expr will not use the target in that case.
3733 Don't do this if TARGET is volatile because we are supposed
3734 to write it and then read it. */
3735 {
3736 temp = expand_expr (exp, target, GET_MODE (target), 0);
3737 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3738 temp = copy_to_reg (temp);
3739 dont_return_target = 1;
3740 }
3741 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3742 /* If this is a scalar in a register that is stored in a wider mode
3743 than the declared mode, compute the result into its declared mode
3744 and then convert to the wider mode. Our value is the computed
3745 expression. */
3746 {
3747 /* If we don't want a value, we can do the conversion inside EXP,
3748 which will often result in some optimizations. Do the conversion
3749 in two steps: first change the signedness, if needed, then
3750 the extend. But don't do this if the type of EXP is a subtype
3751 of something else since then the conversion might involve
3752 more than just converting modes. */
3753 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3754 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3755 {
3756 if (TREE_UNSIGNED (TREE_TYPE (exp))
3757 != SUBREG_PROMOTED_UNSIGNED_P (target))
3758 exp
3759 = convert
3760 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3761 TREE_TYPE (exp)),
3762 exp);
3763
3764 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3765 SUBREG_PROMOTED_UNSIGNED_P (target)),
3766 exp);
3767 }
3768
3769 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3770
3771 /* If TEMP is a volatile MEM and we want a result value, make
3772 the access now so it gets done only once. Likewise if
3773 it contains TARGET. */
3774 if (GET_CODE (temp) == MEM && want_value
3775 && (MEM_VOLATILE_P (temp)
3776 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3777 temp = copy_to_reg (temp);
3778
3779 /* If TEMP is a VOIDmode constant, use convert_modes to make
3780 sure that we properly convert it. */
3781 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3782 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3783 TYPE_MODE (TREE_TYPE (exp)), temp,
3784 SUBREG_PROMOTED_UNSIGNED_P (target));
3785
3786 convert_move (SUBREG_REG (target), temp,
3787 SUBREG_PROMOTED_UNSIGNED_P (target));
3788
3789 /* If we promoted a constant, change the mode back down to match
3790 target. Otherwise, the caller might get confused by a result whose
3791 mode is larger than expected. */
3792
3793 if (want_value && GET_MODE (temp) != GET_MODE (target)
3794 && GET_MODE (temp) != VOIDmode)
3795 {
3796 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3797 SUBREG_PROMOTED_VAR_P (temp) = 1;
3798 SUBREG_PROMOTED_UNSIGNED_P (temp)
3799 = SUBREG_PROMOTED_UNSIGNED_P (target);
3800 }
3801
3802 return want_value ? temp : NULL_RTX;
3803 }
3804 else
3805 {
3806 temp = expand_expr (exp, target, GET_MODE (target), 0);
3807 /* Return TARGET if it's a specified hardware register.
3808 If TARGET is a volatile mem ref, either return TARGET
3809 or return a reg copied *from* TARGET; ANSI requires this.
3810
3811 Otherwise, if TEMP is not TARGET, return TEMP
3812 if it is constant (for efficiency),
3813 or if we really want the correct value. */
3814 if (!(target && GET_CODE (target) == REG
3815 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3816 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3817 && ! rtx_equal_p (temp, target)
3818 && (CONSTANT_P (temp) || want_value))
3819 dont_return_target = 1;
3820 }
3821
3822 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3823 the same as that of TARGET, adjust the constant. This is needed, for
3824 example, in case it is a CONST_DOUBLE and we want only a word-sized
3825 value. */
3826 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3827 && TREE_CODE (exp) != ERROR_MARK
3828 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3829 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3830 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3831
3832 if (current_function_check_memory_usage
3833 && GET_CODE (target) == MEM
3834 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3835 {
3836 if (GET_CODE (temp) == MEM)
3837 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3838 XEXP (target, 0), Pmode,
3839 XEXP (temp, 0), Pmode,
3840 expr_size (exp), TYPE_MODE (sizetype));
3841 else
3842 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3843 XEXP (target, 0), Pmode,
3844 expr_size (exp), TYPE_MODE (sizetype),
3845 GEN_INT (MEMORY_USE_WO),
3846 TYPE_MODE (integer_type_node));
3847 }
3848
3849 /* If value was not generated in the target, store it there.
3850 Convert the value to TARGET's type first if necessary. */
3851 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3852 one or both of them are volatile memory refs, we have to distinguish
3853 two cases:
3854 - expand_expr has used TARGET. In this case, we must not generate
3855 another copy. This can be detected by TEMP being equal to TARGET
3856 according to == .
3857 - expand_expr has not used TARGET - that means that the source just
3858 happens to have the same RTX form. Since temp will have been created
3859 by expand_expr, it will compare unequal according to == .
3860 We must generate a copy in this case, to reach the correct number
3861 of volatile memory references. */
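/* For instance (hypothetical rtl), if TARGET and TEMP are two distinct
   volatile MEMs for the same address, rtx_equal_p treats them as equal,
   yet the store below must still be emitted so that the required number
   of volatile accesses takes place; only when TEMP is the very same rtx
   as TARGET (equal under ==) has expand_expr already used TARGET and the
   copy may be omitted.  */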
3862
3863 if ((! rtx_equal_p (temp, target)
3864 || (temp != target && (side_effects_p (temp)
3865 || side_effects_p (target))))
3866 && TREE_CODE (exp) != ERROR_MARK)
3867 {
3868 target = protect_from_queue (target, 1);
3869 if (GET_MODE (temp) != GET_MODE (target)
3870 && GET_MODE (temp) != VOIDmode)
3871 {
3872 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3873 if (dont_return_target)
3874 {
3875 /* In this case, we will return TEMP,
3876 so make sure it has the proper mode.
3877 But don't forget to store the value into TARGET. */
3878 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3879 emit_move_insn (target, temp);
3880 }
3881 else
3882 convert_move (target, temp, unsignedp);
3883 }
3884
3885 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3886 {
3887 /* Handle copying a string constant into an array.
3888 The string constant may be shorter than the array.
3889 So copy just the string's actual length, and clear the rest. */
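/* For example, the initialization

       char buf[8] = "abc";

   copies the four bytes of the string constant (including the trailing
   nul) into BUF and then clears the remaining four bytes; BUF here is
   only an illustrative name.  */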
3890 rtx size;
3891 rtx addr;
3892
3893 /* Get the size of the data type of the string,
3894 which is actually the size of the target. */
3895 size = expr_size (exp);
3896 if (GET_CODE (size) == CONST_INT
3897 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3898 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3899 else
3900 {
3901 /* Compute the size of the data to copy from the string. */
3902 tree copy_size
3903 = size_binop (MIN_EXPR,
3904 make_tree (sizetype, size),
3905 size_int (TREE_STRING_LENGTH (exp)));
3906 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3907 VOIDmode, 0);
3908 rtx label = 0;
3909
3910 /* Copy that much. */
3911 emit_block_move (target, temp, copy_size_rtx,
3912 TYPE_ALIGN (TREE_TYPE (exp)));
3913
3914 /* Figure out how much is left in TARGET that we have to clear.
3915 Do all calculations in ptr_mode. */
3916
3917 addr = XEXP (target, 0);
3918 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3919
3920 if (GET_CODE (copy_size_rtx) == CONST_INT)
3921 {
3922 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3923 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3924 }
3925 else
3926 {
3927 addr = force_reg (ptr_mode, addr);
3928 addr = expand_binop (ptr_mode, add_optab, addr,
3929 copy_size_rtx, NULL_RTX, 0,
3930 OPTAB_LIB_WIDEN);
3931
3932 size = expand_binop (ptr_mode, sub_optab, size,
3933 copy_size_rtx, NULL_RTX, 0,
3934 OPTAB_LIB_WIDEN);
3935
3936 label = gen_label_rtx ();
3937 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3938 GET_MODE (size), 0, 0, label);
3939 }
3940
3941 if (size != const0_rtx)
3942 {
3943 /* Be sure we can write on ADDR. */
3944 if (current_function_check_memory_usage)
3945 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3946 addr, Pmode,
3947 size, TYPE_MODE (sizetype),
3948 GEN_INT (MEMORY_USE_WO),
3949 TYPE_MODE (integer_type_node));
3950 #ifdef TARGET_MEM_FUNCTIONS
3951 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3952 addr, ptr_mode,
3953 const0_rtx, TYPE_MODE (integer_type_node),
3954 convert_to_mode (TYPE_MODE (sizetype),
3955 size,
3956 TREE_UNSIGNED (sizetype)),
3957 TYPE_MODE (sizetype));
3958 #else
3959 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3960 addr, ptr_mode,
3961 convert_to_mode (TYPE_MODE (integer_type_node),
3962 size,
3963 TREE_UNSIGNED (integer_type_node)),
3964 TYPE_MODE (integer_type_node));
3965 #endif
3966 }
3967
3968 if (label)
3969 emit_label (label);
3970 }
3971 }
3972 /* Handle calls that return values in multiple non-contiguous locations.
3973 The Irix 6 ABI has examples of this. */
3974 else if (GET_CODE (target) == PARALLEL)
3975 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3976 TYPE_ALIGN (TREE_TYPE (exp)));
3977 else if (GET_MODE (temp) == BLKmode)
3978 emit_block_move (target, temp, expr_size (exp),
3979 TYPE_ALIGN (TREE_TYPE (exp)));
3980 else
3981 emit_move_insn (target, temp);
3982 }
3983
3984 /* If we don't want a value, return NULL_RTX. */
3985 if (! want_value)
3986 return NULL_RTX;
3987
3988 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3989 ??? The latter test doesn't seem to make sense. */
3990 else if (dont_return_target && GET_CODE (temp) != MEM)
3991 return temp;
3992
3993 /* Return TARGET itself if it is a hard register. */
3994 else if (want_value && GET_MODE (target) != BLKmode
3995 && ! (GET_CODE (target) == REG
3996 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3997 return copy_to_reg (target);
3998
3999 else
4000 return target;
4001 }
4002 \f
4003 /* Return 1 if EXP just contains zeros. */
4004
4005 static int
4006 is_zeros_p (exp)
4007 tree exp;
4008 {
4009 tree elt;
4010
4011 switch (TREE_CODE (exp))
4012 {
4013 case CONVERT_EXPR:
4014 case NOP_EXPR:
4015 case NON_LVALUE_EXPR:
4016 return is_zeros_p (TREE_OPERAND (exp, 0));
4017
4018 case INTEGER_CST:
4019 return integer_zerop (exp);
4020
4021 case COMPLEX_CST:
4022 return
4023 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4024
4025 case REAL_CST:
4026 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4027
4028 case CONSTRUCTOR:
4029 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4030 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4031 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4032 if (! is_zeros_p (TREE_VALUE (elt)))
4033 return 0;
4034
4035 return 1;
4036
4037 default:
4038 return 0;
4039 }
4040 }
4041
4042 /* Return 1 if EXP consists mostly (at least 3/4) of zeros. */
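/* E.g. the constructor for the illustrative declaration

       int v[8] = { 0, 0, 5, 0, 0, 0, 0, 0 };

   has seven zero elements out of eight, so it counts as mostly zeros,
   whereas { 1, 2, 3, 0 } does not.  */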
4043
4044 static int
4045 mostly_zeros_p (exp)
4046 tree exp;
4047 {
4048 if (TREE_CODE (exp) == CONSTRUCTOR)
4049 {
4050 int elts = 0, zeros = 0;
4051 tree elt = CONSTRUCTOR_ELTS (exp);
4052 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4053 {
4054 /* If there are no ranges of true bits, it is all zero. */
4055 return elt == NULL_TREE;
4056 }
4057 for (; elt; elt = TREE_CHAIN (elt))
4058 {
4059 /* We do not handle the case where the index is a RANGE_EXPR,
4060 so the statistic will be somewhat inaccurate.
4061 We do make a more accurate count in store_constructor itself,
4062 and since this function is only used for nested array elements,
4063 this should be close enough. */
4064 if (mostly_zeros_p (TREE_VALUE (elt)))
4065 zeros++;
4066 elts++;
4067 }
4068
4069 return 4 * zeros >= 3 * elts;
4070 }
4071
4072 return is_zeros_p (exp);
4073 }
4074 \f
4075 /* Helper function for store_constructor.
4076 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4077 TYPE is the type of the CONSTRUCTOR, not the element type.
4078 ALIGN and CLEARED are as for store_constructor.
4079
4080 This provides a recursive shortcut back to store_constructor when it isn't
4081 necessary to go through store_field. This is so that we can pass through
4082 the cleared field to let store_constructor know that we may not have to
4083 clear a substructure if the outer structure has already been cleared. */
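/* A sketch of the case this shortcut helps (hypothetical source):

       struct inner { int x, y; };
       struct outer { struct inner a; int b; } o = { { 1, 0 } };

   The outer constructor causes all of O to be cleared (it supplies only
   one of two fields), so the nested constructor { 1, 0 } is handled with
   CLEARED set and need only store the 1, without clearing or re-storing
   the zero field Y.  */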
4084
4085 static void
4086 store_constructor_field (target, bitsize, bitpos,
4087 mode, exp, type, align, cleared)
4088 rtx target;
4089 unsigned HOST_WIDE_INT bitsize;
4090 HOST_WIDE_INT bitpos;
4091 enum machine_mode mode;
4092 tree exp, type;
4093 unsigned int align;
4094 int cleared;
4095 {
4096 if (TREE_CODE (exp) == CONSTRUCTOR
4097 && bitpos % BITS_PER_UNIT == 0
4098 /* If we have a non-zero bitpos for a register target, then we just
4099 let store_field do the bitfield handling. This is unlikely to
4100 generate unnecessary clear instructions anyway. */
4101 && (bitpos == 0 || GET_CODE (target) == MEM))
4102 {
4103 if (bitpos != 0)
4104 target
4105 = change_address (target,
4106 GET_MODE (target) == BLKmode
4107 || 0 != (bitpos
4108 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4109 ? BLKmode : VOIDmode,
4110 plus_constant (XEXP (target, 0),
4111 bitpos / BITS_PER_UNIT));
4112 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4113 }
4114 else
4115 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4116 int_size_in_bytes (type), 0);
4117 }
4118
4119 /* Store the value of constructor EXP into the rtx TARGET.
4120 TARGET is either a REG or a MEM.
4121 ALIGN is the maximum known alignment for TARGET.
4122 CLEARED is true if TARGET is known to have been zero'd.
4123 SIZE is the number of bytes of TARGET we are allowed to modify: this
4124 may not be the same as the size of EXP if we are assigning to a field
4125 which has been packed to exclude padding bits. */
4126
4127 static void
4128 store_constructor (exp, target, align, cleared, size)
4129 tree exp;
4130 rtx target;
4131 unsigned int align;
4132 int cleared;
4133 HOST_WIDE_INT size;
4134 {
4135 tree type = TREE_TYPE (exp);
4136 #ifdef WORD_REGISTER_OPERATIONS
4137 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4138 #endif
4139
4140 /* We know our target cannot conflict, since safe_from_p has been called. */
4141 #if 0
4142 /* Don't try copying piece by piece into a hard register
4143 since that is vulnerable to being clobbered by EXP.
4144 Instead, construct in a pseudo register and then copy it all. */
4145 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4146 {
4147 rtx temp = gen_reg_rtx (GET_MODE (target));
4148 store_constructor (exp, temp, align, cleared, size);
4149 emit_move_insn (target, temp);
4150 return;
4151 }
4152 #endif
4153
4154 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4155 || TREE_CODE (type) == QUAL_UNION_TYPE)
4156 {
4157 register tree elt;
4158
4159 /* Inform later passes that the whole union value is dead. */
4160 if ((TREE_CODE (type) == UNION_TYPE
4161 || TREE_CODE (type) == QUAL_UNION_TYPE)
4162 && ! cleared)
4163 {
4164 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4165
4166 /* If the constructor is empty, clear the union. */
4167 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4168 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4169 }
4170
4171 /* If we are building a static constructor into a register,
4172 set the initial value as zero so we can fold the value into
4173 a constant. But if more than one register is involved,
4174 this probably loses. */
4175 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4176 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4177 {
4178 if (! cleared)
4179 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4180
4181 cleared = 1;
4182 }
4183
4184 /* If the constructor has fewer fields than the structure
4185 or if we are initializing the structure to mostly zeros,
4186 clear the whole structure first. */
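/* For example (illustrative), given

       struct { int a, b, c, d; } s = { 1 };

   the constructor supplies only one of four fields, so all of S is
   cleared here and only the store of 1 into S.a is emitted below.  */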
4187 else if (size > 0
4188 && ((list_length (CONSTRUCTOR_ELTS (exp))
4189 != fields_length (type))
4190 || mostly_zeros_p (exp)))
4191 {
4192 if (! cleared)
4193 clear_storage (target, GEN_INT (size), align);
4194
4195 cleared = 1;
4196 }
4197 else if (! cleared)
4198 /* Inform later passes that the old value is dead. */
4199 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4200
4201 /* Store each element of the constructor into
4202 the corresponding field of TARGET. */
4203
4204 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4205 {
4206 register tree field = TREE_PURPOSE (elt);
4207 #ifdef WORD_REGISTER_OPERATIONS
4208 tree value = TREE_VALUE (elt);
4209 #endif
4210 register enum machine_mode mode;
4211 HOST_WIDE_INT bitsize;
4212 HOST_WIDE_INT bitpos = 0;
4213 int unsignedp;
4214 tree offset;
4215 rtx to_rtx = target;
4216
4217 /* Just ignore missing fields.
4218 We cleared the whole structure, above,
4219 if any fields are missing. */
4220 if (field == 0)
4221 continue;
4222
4223 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4224 continue;
4225
4226 if (host_integerp (DECL_SIZE (field), 1))
4227 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4228 else
4229 bitsize = -1;
4230
4231 unsignedp = TREE_UNSIGNED (field);
4232 mode = DECL_MODE (field);
4233 if (DECL_BIT_FIELD (field))
4234 mode = VOIDmode;
4235
4236 offset = DECL_FIELD_OFFSET (field);
4237 if (host_integerp (offset, 0)
4238 && host_integerp (bit_position (field), 0))
4239 {
4240 bitpos = int_bit_position (field);
4241 offset = 0;
4242 }
4243 else
4244 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4245
4246 if (offset)
4247 {
4248 rtx offset_rtx;
4249
4250 if (contains_placeholder_p (offset))
4251 offset = build (WITH_RECORD_EXPR, sizetype,
4252 offset, make_tree (TREE_TYPE (exp), target));
4253
4254 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4255 if (GET_CODE (to_rtx) != MEM)
4256 abort ();
4257
4258 if (GET_MODE (offset_rtx) != ptr_mode)
4259 {
4260 #ifdef POINTERS_EXTEND_UNSIGNED
4261 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4262 #else
4263 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4264 #endif
4265 }
4266
4267 to_rtx
4268 = change_address (to_rtx, VOIDmode,
4269 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4270 force_reg (ptr_mode,
4271 offset_rtx)));
4272 align = DECL_OFFSET_ALIGN (field);
4273 }
4274
4275 if (TREE_READONLY (field))
4276 {
4277 if (GET_CODE (to_rtx) == MEM)
4278 to_rtx = copy_rtx (to_rtx);
4279
4280 RTX_UNCHANGING_P (to_rtx) = 1;
4281 }
4282
4283 #ifdef WORD_REGISTER_OPERATIONS
4284 /* If this initializes a field that is smaller than a word, at the
4285 start of a word, try to widen it to a full word.
4286 This special case allows us to output C++ member function
4287 initializations in a form that the optimizers can understand. */
4288 if (GET_CODE (target) == REG
4289 && bitsize < BITS_PER_WORD
4290 && bitpos % BITS_PER_WORD == 0
4291 && GET_MODE_CLASS (mode) == MODE_INT
4292 && TREE_CODE (value) == INTEGER_CST
4293 && exp_size >= 0
4294 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4295 {
4296 tree type = TREE_TYPE (value);
4297 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4298 {
4299 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4300 value = convert (type, value);
4301 }
4302 if (BYTES_BIG_ENDIAN)
4303 value
4304 = fold (build (LSHIFT_EXPR, type, value,
4305 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4306 bitsize = BITS_PER_WORD;
4307 mode = word_mode;
4308 }
4309 #endif
4310 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4311 TREE_VALUE (elt), type, align, cleared);
4312 }
4313 }
4314 else if (TREE_CODE (type) == ARRAY_TYPE)
4315 {
4316 register tree elt;
4317 register int i;
4318 int need_to_clear;
4319 tree domain = TYPE_DOMAIN (type);
4320 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4321 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4322 tree elttype = TREE_TYPE (type);
4323
4324 /* If the constructor has fewer elements than the array,
4325 clear the whole array first. Similarly if this is
4326 a static constructor of a non-BLKmode object. */
4327 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4328 need_to_clear = 1;
4329 else
4330 {
4331 HOST_WIDE_INT count = 0, zero_count = 0;
4332 need_to_clear = 0;
4333 /* This loop is a more accurate version of the loop in
4334 mostly_zeros_p (it handles RANGE_EXPR in an index).
4335 It is also needed to check for missing elements. */
4336 for (elt = CONSTRUCTOR_ELTS (exp);
4337 elt != NULL_TREE;
4338 elt = TREE_CHAIN (elt))
4339 {
4340 tree index = TREE_PURPOSE (elt);
4341 HOST_WIDE_INT this_node_count;
4342
4343 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4344 {
4345 tree lo_index = TREE_OPERAND (index, 0);
4346 tree hi_index = TREE_OPERAND (index, 1);
4347
4348 if (! host_integerp (lo_index, 1)
4349 || ! host_integerp (hi_index, 1))
4350 {
4351 need_to_clear = 1;
4352 break;
4353 }
4354
4355 this_node_count = (tree_low_cst (hi_index, 1)
4356 - tree_low_cst (lo_index, 1) + 1);
4357 }
4358 else
4359 this_node_count = 1;
4360 count += this_node_count;
4361 if (mostly_zeros_p (TREE_VALUE (elt)))
4362 zero_count += this_node_count;
4363 }
4364 /* Clear the entire array first if there are any missing elements,
4365 or if the incidence of zero elements is >= 75%. */
4366 if (count < maxelt - minelt + 1
4367 || 4 * zero_count >= 3 * count)
4368 need_to_clear = 1;
4369 }
4370 if (need_to_clear && size > 0)
4371 {
4372 if (! cleared)
4373 clear_storage (target, GEN_INT (size), align);
4374 cleared = 1;
4375 }
4376 else
4377 /* Inform later passes that the old value is dead. */
4378 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4379
4380 /* Store each element of the constructor into
4381 the corresponding element of TARGET, determined
4382 by counting the elements. */
4383 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4384 elt;
4385 elt = TREE_CHAIN (elt), i++)
4386 {
4387 register enum machine_mode mode;
4388 HOST_WIDE_INT bitsize;
4389 HOST_WIDE_INT bitpos;
4390 int unsignedp;
4391 tree value = TREE_VALUE (elt);
4392 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4393 tree index = TREE_PURPOSE (elt);
4394 rtx xtarget = target;
4395
4396 if (cleared && is_zeros_p (value))
4397 continue;
4398
4399 unsignedp = TREE_UNSIGNED (elttype);
4400 mode = TYPE_MODE (elttype);
4401 if (mode == BLKmode)
4402 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4403 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4404 : -1);
4405 else
4406 bitsize = GET_MODE_BITSIZE (mode);
4407
4408 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4409 {
4410 tree lo_index = TREE_OPERAND (index, 0);
4411 tree hi_index = TREE_OPERAND (index, 1);
4412 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4413 struct nesting *loop;
4414 HOST_WIDE_INT lo, hi, count;
4415 tree position;
4416
4417 /* If the range is constant and "small", unroll the loop. */
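/* E.g. a GNU C range designator such as

       int a[16] = { [2 ... 5] = 7 };

   gives a RANGE_EXPR index; for a small constant range like this one
   the loop below is not generated and four individual element stores
   are emitted instead.  (Illustrative source only.)  */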
4418 if (host_integerp (lo_index, 0)
4419 && host_integerp (hi_index, 0)
4420 && (lo = tree_low_cst (lo_index, 0),
4421 hi = tree_low_cst (hi_index, 0),
4422 count = hi - lo + 1,
4423 (GET_CODE (target) != MEM
4424 || count <= 2
4425 || (host_integerp (TYPE_SIZE (elttype), 1)
4426 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4427 <= 40 * 8)))))
4428 {
4429 lo -= minelt; hi -= minelt;
4430 for (; lo <= hi; lo++)
4431 {
4432 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4433 store_constructor_field (target, bitsize, bitpos, mode,
4434 value, type, align, cleared);
4435 }
4436 }
4437 else
4438 {
4439 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4440 loop_top = gen_label_rtx ();
4441 loop_end = gen_label_rtx ();
4442
4443 unsignedp = TREE_UNSIGNED (domain);
4444
4445 index = build_decl (VAR_DECL, NULL_TREE, domain);
4446
4447 DECL_RTL (index) = index_r
4448 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4449 &unsignedp, 0));
4450
4451 if (TREE_CODE (value) == SAVE_EXPR
4452 && SAVE_EXPR_RTL (value) == 0)
4453 {
4454 /* Make sure value gets expanded once before the
4455 loop. */
4456 expand_expr (value, const0_rtx, VOIDmode, 0);
4457 emit_queue ();
4458 }
4459 store_expr (lo_index, index_r, 0);
4460 loop = expand_start_loop (0);
4461
4462 /* Assign value to element index. */
4463 position
4464 = convert (ssizetype,
4465 fold (build (MINUS_EXPR, TREE_TYPE (index),
4466 index, TYPE_MIN_VALUE (domain))));
4467 position = size_binop (MULT_EXPR, position,
4468 convert (ssizetype,
4469 TYPE_SIZE_UNIT (elttype)));
4470
4471 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4472 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4473 xtarget = change_address (target, mode, addr);
4474 if (TREE_CODE (value) == CONSTRUCTOR)
4475 store_constructor (value, xtarget, align, cleared,
4476 bitsize / BITS_PER_UNIT);
4477 else
4478 store_expr (value, xtarget, 0);
4479
4480 expand_exit_loop_if_false (loop,
4481 build (LT_EXPR, integer_type_node,
4482 index, hi_index));
4483
4484 expand_increment (build (PREINCREMENT_EXPR,
4485 TREE_TYPE (index),
4486 index, integer_one_node), 0, 0);
4487 expand_end_loop ();
4488 emit_label (loop_end);
4489 }
4490 }
4491 else if ((index != 0 && ! host_integerp (index, 0))
4492 || ! host_integerp (TYPE_SIZE (elttype), 1))
4493 {
4494 rtx pos_rtx, addr;
4495 tree position;
4496
4497 if (index == 0)
4498 index = ssize_int (i);
4499
4500 if (minelt)
4501 index = convert (ssizetype,
4502 fold (build (MINUS_EXPR, TREE_TYPE (index),
4503 index, TYPE_MIN_VALUE (domain))));
4504
4505 position = size_binop (MULT_EXPR, index,
4506 convert (ssizetype,
4507 TYPE_SIZE_UNIT (elttype)));
4508 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4509 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4510 xtarget = change_address (target, mode, addr);
4511 store_expr (value, xtarget, 0);
4512 }
4513 else
4514 {
4515 if (index != 0)
4516 bitpos = ((tree_low_cst (index, 0) - minelt)
4517 * tree_low_cst (TYPE_SIZE (elttype), 1));
4518 else
4519 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4520
4521 store_constructor_field (target, bitsize, bitpos, mode, value,
4522 type, align, cleared);
4523 }
4524 }
4525 }
4526
4527 /* Set constructor assignments.  */
4528 else if (TREE_CODE (type) == SET_TYPE)
4529 {
4530 tree elt = CONSTRUCTOR_ELTS (exp);
4531 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4532 tree domain = TYPE_DOMAIN (type);
4533 tree domain_min, domain_max, bitlength;
4534
4535 /* The default implementation strategy is to extract the constant
4536 parts of the constructor, use that to initialize the target,
4537 and then "or" in whatever non-constant ranges we need in addition.
4538
4539 If a large set is all zero or all ones, it is
4540 probably better to set it using memset (if available) or bzero.
4541 Also, if a large set has just a single range, it may also be
4542 better to first clear the whole set (using
4543 bzero/memset), and then set the bits we want. */
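/* As a hypothetical illustration, a Pascal- or CHILL-style set
   constructor such as ['a' .. 'z'] over a small domain is emitted by the
   code below as one or two word-sized constant stores, whereas a large
   set with a non-constant range falls back to clearing the object and
   then calling __setbits (or memset when the endpoints are byte-aligned
   constants).  */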
4544
4545 /* Check for all zeros. */
4546 if (elt == NULL_TREE && size > 0)
4547 {
4548 if (!cleared)
4549 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4550 return;
4551 }
4552
4553 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4554 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4555 bitlength = size_binop (PLUS_EXPR,
4556 size_diffop (domain_max, domain_min),
4557 ssize_int (1));
4558
4559 nbits = tree_low_cst (bitlength, 1);
4560
4561 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4562 are "complicated" (more than one range), initialize (the
4563 constant parts) by copying from a constant. */
4564 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4565 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4566 {
4567 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4568 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4569 char *bit_buffer = (char *) alloca (nbits);
4570 HOST_WIDE_INT word = 0;
4571 unsigned int bit_pos = 0;
4572 unsigned int ibit = 0;
4573 unsigned int offset = 0; /* In bytes from beginning of set. */
4574
4575 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4576 for (;;)
4577 {
4578 if (bit_buffer[ibit])
4579 {
4580 if (BYTES_BIG_ENDIAN)
4581 word |= (1 << (set_word_size - 1 - bit_pos));
4582 else
4583 word |= 1 << bit_pos;
4584 }
4585
4586 bit_pos++; ibit++;
4587 if (bit_pos >= set_word_size || ibit == nbits)
4588 {
4589 if (word != 0 || ! cleared)
4590 {
4591 rtx datum = GEN_INT (word);
4592 rtx to_rtx;
4593
4594 /* The assumption here is that it is safe to use
4595 XEXP if the set is multi-word, but not if
4596 it's single-word. */
4597 if (GET_CODE (target) == MEM)
4598 {
4599 to_rtx = plus_constant (XEXP (target, 0), offset);
4600 to_rtx = change_address (target, mode, to_rtx);
4601 }
4602 else if (offset == 0)
4603 to_rtx = target;
4604 else
4605 abort ();
4606 emit_move_insn (to_rtx, datum);
4607 }
4608
4609 if (ibit == nbits)
4610 break;
4611 word = 0;
4612 bit_pos = 0;
4613 offset += set_word_size / BITS_PER_UNIT;
4614 }
4615 }
4616 }
4617 else if (!cleared)
4618 /* Don't bother clearing storage if the set is all ones. */
4619 if (TREE_CHAIN (elt) != NULL_TREE
4620 || (TREE_PURPOSE (elt) == NULL_TREE
4621 ? nbits != 1
4622 : ( ! host_integerp (TREE_VALUE (elt), 0)
4623 || ! host_integerp (TREE_PURPOSE (elt), 0)
4624 || (tree_low_cst (TREE_VALUE (elt), 0)
4625 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4626 != (HOST_WIDE_INT) nbits))))
4627 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4628
4629 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4630 {
4631 /* start of range of element or NULL */
4632 tree startbit = TREE_PURPOSE (elt);
4633 /* end of range of element, or element value */
4634 tree endbit = TREE_VALUE (elt);
4635 #ifdef TARGET_MEM_FUNCTIONS
4636 HOST_WIDE_INT startb, endb;
4637 #endif
4638 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4639
4640 bitlength_rtx = expand_expr (bitlength,
4641 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4642
4643 /* handle non-range tuple element like [ expr ] */
4644 if (startbit == NULL_TREE)
4645 {
4646 startbit = save_expr (endbit);
4647 endbit = startbit;
4648 }
4649
4650 startbit = convert (sizetype, startbit);
4651 endbit = convert (sizetype, endbit);
4652 if (! integer_zerop (domain_min))
4653 {
4654 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4655 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4656 }
4657 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4658 EXPAND_CONST_ADDRESS);
4659 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4660 EXPAND_CONST_ADDRESS);
4661
4662 if (REG_P (target))
4663 {
4664 targetx = assign_stack_temp (GET_MODE (target),
4665 GET_MODE_SIZE (GET_MODE (target)),
4666 0);
4667 emit_move_insn (targetx, target);
4668 }
4669
4670 else if (GET_CODE (target) == MEM)
4671 targetx = target;
4672 else
4673 abort ();
4674
4675 #ifdef TARGET_MEM_FUNCTIONS
4676 /* Optimization: If startbit and endbit are
4677 constants divisible by BITS_PER_UNIT,
4678 call memset instead. */
4679 if (TREE_CODE (startbit) == INTEGER_CST
4680 && TREE_CODE (endbit) == INTEGER_CST
4681 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4682 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4683 {
4684 emit_library_call (memset_libfunc, 0,
4685 VOIDmode, 3,
4686 plus_constant (XEXP (targetx, 0),
4687 startb / BITS_PER_UNIT),
4688 Pmode,
4689 constm1_rtx, TYPE_MODE (integer_type_node),
4690 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4691 TYPE_MODE (sizetype));
4692 }
4693 else
4694 #endif
4695 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4696 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4697 bitlength_rtx, TYPE_MODE (sizetype),
4698 startbit_rtx, TYPE_MODE (sizetype),
4699 endbit_rtx, TYPE_MODE (sizetype));
4700
4701 if (REG_P (target))
4702 emit_move_insn (target, targetx);
4703 }
4704 }
4705
4706 else
4707 abort ();
4708 }
4709
4710 /* Store the value of EXP (an expression tree)
4711 into a subfield of TARGET which has mode MODE and occupies
4712 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4713 If MODE is VOIDmode, it means that we are storing into a bit-field.
4714
4715 If VALUE_MODE is VOIDmode, return nothing in particular.
4716 UNSIGNEDP is not used in this case.
4717
4718 Otherwise, return an rtx for the value stored. This rtx
4719 has mode VALUE_MODE if that is convenient to do.
4720 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4721
4722 ALIGN is the alignment that TARGET is known to have.
4723 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4724
4725 ALIAS_SET is the alias set for the destination. This value will
4726 (in general) be different from that for TARGET, since TARGET is a
4727 reference to the containing structure. */
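/* For instance (illustrative only), expanding the assignment

       struct { unsigned int f : 3; } s;
       s.f = 5;

   reaches this function with BITSIZE == 3, BITPOS == 0 and MODE ==
   VOIDmode, so the value is inserted with store_bit_field rather than
   through an ordinary memory reference.  */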
4728
4729 static rtx
4730 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4731 unsignedp, align, total_size, alias_set)
4732 rtx target;
4733 HOST_WIDE_INT bitsize;
4734 HOST_WIDE_INT bitpos;
4735 enum machine_mode mode;
4736 tree exp;
4737 enum machine_mode value_mode;
4738 int unsignedp;
4739 unsigned int align;
4740 HOST_WIDE_INT total_size;
4741 int alias_set;
4742 {
4743 HOST_WIDE_INT width_mask = 0;
4744
4745 if (TREE_CODE (exp) == ERROR_MARK)
4746 return const0_rtx;
4747
4748 if (bitsize < HOST_BITS_PER_WIDE_INT)
4749 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4750
4751 /* If we are storing into an unaligned field of an aligned union that is
4752 in a register, we may have the mode of TARGET being an integer mode but
4753 MODE == BLKmode. In that case, get an aligned object whose size and
4754 alignment are the same as TARGET and store TARGET into it (we can avoid
4755 the store if the field being stored is the entire width of TARGET). Then
4756 call ourselves recursively to store the field into a BLKmode version of
4757 that object. Finally, load from the object into TARGET. This is not
4758 very efficient in general, but should only be slightly more expensive
4759 than the otherwise-required unaligned accesses. Perhaps this can be
4760 cleaned up later. */
4761
4762 if (mode == BLKmode
4763 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4764 {
4765 rtx object = assign_stack_temp (GET_MODE (target),
4766 GET_MODE_SIZE (GET_MODE (target)), 0);
4767 rtx blk_object = copy_rtx (object);
4768
4769 MEM_SET_IN_STRUCT_P (object, 1);
4770 MEM_SET_IN_STRUCT_P (blk_object, 1);
4771 PUT_MODE (blk_object, BLKmode);
4772
4773 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4774 emit_move_insn (object, target);
4775
4776 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4777 align, total_size, alias_set);
4778
4779 /* Even though we aren't returning target, we need to
4780 give it the updated value. */
4781 emit_move_insn (target, object);
4782
4783 return blk_object;
4784 }
4785
4786 if (GET_CODE (target) == CONCAT)
4787 {
4788 /* We're storing into a struct containing a single __complex. */
4789
4790 if (bitpos != 0)
4791 abort ();
4792 return store_expr (exp, target, 0);
4793 }
4794
4795 /* If the structure is in a register or if the component
4796 is a bit field, we cannot use addressing to access it.
4797 Use bit-field techniques or SUBREG to store in it. */
4798
4799 if (mode == VOIDmode
4800 || (mode != BLKmode && ! direct_store[(int) mode]
4801 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4802 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4803 || GET_CODE (target) == REG
4804 || GET_CODE (target) == SUBREG
4805 /* If the field isn't aligned enough to store as an ordinary memref,
4806 store it as a bit field. */
4807 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4808 && (align < GET_MODE_ALIGNMENT (mode)
4809 || bitpos % GET_MODE_ALIGNMENT (mode)))
4810 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4811 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4812 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4813 /* If the RHS and field are a constant size and the size of the
4814 RHS isn't the same size as the bitfield, we must use bitfield
4815 operations. */
4816 || (bitsize >= 0
4817 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4818 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4819 {
4820 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4821
4822 /* If BITSIZE is narrower than the size of the type of EXP
4823 we will be narrowing TEMP. Normally, what's wanted are the
4824 low-order bits. However, if EXP's type is a record and this is a
4825 big-endian machine, we want the upper BITSIZE bits. */
4826 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4827 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4828 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4829 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4830 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4831 - bitsize),
4832 temp, 1);
4833
4834 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4835 MODE. */
4836 if (mode != VOIDmode && mode != BLKmode
4837 && mode != TYPE_MODE (TREE_TYPE (exp)))
4838 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4839
4840 /* If the modes of TARGET and TEMP are both BLKmode, both
4841 must be in memory and BITPOS must be aligned on a byte
4842 boundary. If so, we simply do a block copy. */
4843 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4844 {
4845 unsigned int exp_align = expr_align (exp);
4846
4847 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4848 || bitpos % BITS_PER_UNIT != 0)
4849 abort ();
4850
4851 target = change_address (target, VOIDmode,
4852 plus_constant (XEXP (target, 0),
4853 bitpos / BITS_PER_UNIT));
4854
4855 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4856 align = MIN (exp_align, align);
4857
4858 /* Find an alignment that is consistent with the bit position. */
4859 while ((bitpos % align) != 0)
4860 align >>= 1;
4861
4862 emit_block_move (target, temp,
4863 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4864 / BITS_PER_UNIT),
4865 align);
4866
4867 return value_mode == VOIDmode ? const0_rtx : target;
4868 }
4869
4870 /* Store the value in the bitfield. */
4871 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4872 if (value_mode != VOIDmode)
4873 {
4874 /* The caller wants an rtx for the value. */
4875 /* If possible, avoid refetching from the bitfield itself. */
4876 if (width_mask != 0
4877 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4878 {
4879 tree count;
4880 enum machine_mode tmode;
4881
4882 if (unsignedp)
4883 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4884 tmode = GET_MODE (temp);
4885 if (tmode == VOIDmode)
4886 tmode = value_mode;
4887 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4888 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4889 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4890 }
4891 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4892 NULL_RTX, value_mode, 0, align,
4893 total_size);
4894 }
4895 return const0_rtx;
4896 }
4897 else
4898 {
4899 rtx addr = XEXP (target, 0);
4900 rtx to_rtx;
4901
4902 /* If a value is wanted, it must be the lhs;
4903 so make the address stable for multiple use. */
4904
4905 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4906 && ! CONSTANT_ADDRESS_P (addr)
4907 /* A frame-pointer reference is already stable. */
4908 && ! (GET_CODE (addr) == PLUS
4909 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4910 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4911 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4912 addr = copy_to_reg (addr);
4913
4914 /* Now build a reference to just the desired component. */
4915
4916 to_rtx = copy_rtx (change_address (target, mode,
4917 plus_constant (addr,
4918 (bitpos
4919 / BITS_PER_UNIT))));
4920 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4921 MEM_ALIAS_SET (to_rtx) = alias_set;
4922
4923 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4924 }
4925 }
4926 \f
4927 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4928 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4929 ARRAY_REFs and find the ultimate containing object, which we return.
4930
4931 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4932 bit position, and *PUNSIGNEDP to the signedness of the field.
4933 If the position of the field is variable, we store a tree
4934 giving the variable offset (in units) in *POFFSET.
4935 This offset is in addition to the bit position.
4936 If the position is not variable, we store 0 in *POFFSET.
4937 We set *PALIGNMENT to the alignment of the address that will be
4938 computed. This is the alignment of the thing we return if *POFFSET
4939 is zero, but it can be less strictly aligned if *POFFSET is nonzero.
4940
4941 If any of the extraction expressions is volatile,
4942 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4943
4944 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4945 is a mode that can be used to access the field. In that case, *PBITSIZE
4946 is redundant.
4947
4948 If the field describes a variable-sized object, *PMODE is set to
4949 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4950 this case, but the address of the object can be found. */
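/* As an illustrative example, for a reference such as

       struct S { int pad; short f[10]; } *p;
       ... p->f[i] ...

   the containing object returned is the INDIRECT_REF *p, *PBITSIZE is
   the size of a short in bits, and, because the array index I is not
   constant, the byte displacement (the offset of F plus I * 2) comes
   back through *POFFSET instead of being folded into *PBITPOS.  */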
4951
4952 tree
4953 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4954 punsignedp, pvolatilep, palignment)
4955 tree exp;
4956 HOST_WIDE_INT *pbitsize;
4957 HOST_WIDE_INT *pbitpos;
4958 tree *poffset;
4959 enum machine_mode *pmode;
4960 int *punsignedp;
4961 int *pvolatilep;
4962 unsigned int *palignment;
4963 {
4964 tree size_tree = 0;
4965 enum machine_mode mode = VOIDmode;
4966 tree offset = size_zero_node;
4967 tree bit_offset = bitsize_zero_node;
4968 unsigned int alignment = BIGGEST_ALIGNMENT;
4969 tree tem;
4970
4971 /* First get the mode, signedness, and size. We do this from just the
4972 outermost expression. */
4973 if (TREE_CODE (exp) == COMPONENT_REF)
4974 {
4975 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4976 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4977 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4978
4979 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4980 }
4981 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4982 {
4983 size_tree = TREE_OPERAND (exp, 1);
4984 *punsignedp = TREE_UNSIGNED (exp);
4985 }
4986 else
4987 {
4988 mode = TYPE_MODE (TREE_TYPE (exp));
4989 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4990
4991 if (mode == BLKmode)
4992 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4993 else
4994 *pbitsize = GET_MODE_BITSIZE (mode);
4995 }
4996
4997 if (size_tree != 0)
4998 {
4999 if (! host_integerp (size_tree, 1))
5000 mode = BLKmode, *pbitsize = -1;
5001 else
5002 *pbitsize = tree_low_cst (size_tree, 1);
5003 }
5004
5005 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5006 and find the ultimate containing object. */
5007 while (1)
5008 {
5009 if (TREE_CODE (exp) == BIT_FIELD_REF)
5010 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5011 else if (TREE_CODE (exp) == COMPONENT_REF)
5012 {
5013 tree field = TREE_OPERAND (exp, 1);
5014 tree this_offset = DECL_FIELD_OFFSET (field);
5015
5016 /* If this field hasn't been filled in yet, don't go
5017 past it. This should only happen when folding expressions
5018 made during type construction. */
5019 if (this_offset == 0)
5020 break;
5021 else if (! TREE_CONSTANT (this_offset)
5022 && contains_placeholder_p (this_offset))
5023 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5024
5025 offset = size_binop (PLUS_EXPR, offset, DECL_FIELD_OFFSET (field));
5026 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5027 DECL_FIELD_BIT_OFFSET (field));
5028
5029 if (! host_integerp (offset, 0))
5030 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5031 }
5032 else if (TREE_CODE (exp) == ARRAY_REF)
5033 {
5034 tree index = TREE_OPERAND (exp, 1);
5035 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5036 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5037
5038 /* We assume all arrays have sizes that are a multiple of a byte.
5039 First subtract the lower bound, if any, in the type of the
5040 index, then convert to sizetype and multiply by the size of the
5041 array element. */
5042 if (low_bound != 0 && ! integer_zerop (low_bound))
5043 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5044 index, low_bound));
5045
5046 if (! TREE_CONSTANT (index)
5047 && contains_placeholder_p (index))
5048 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5049
5050 offset = size_binop (PLUS_EXPR, offset,
5051 size_binop (MULT_EXPR,
5052 convert (sizetype, index),
5053 TYPE_SIZE_UNIT (TREE_TYPE (exp))));
5054 }
5055 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5056 && ! ((TREE_CODE (exp) == NOP_EXPR
5057 || TREE_CODE (exp) == CONVERT_EXPR)
5058 && (TYPE_MODE (TREE_TYPE (exp))
5059 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5060 break;
5061
5062 /* If any reference in the chain is volatile, the effect is volatile. */
5063 if (TREE_THIS_VOLATILE (exp))
5064 *pvolatilep = 1;
5065
5066 /* If the offset is non-constant already, then we can't assume any
5067 alignment more than the alignment here. */
5068 if (! TREE_CONSTANT (offset))
5069 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5070
5071 exp = TREE_OPERAND (exp, 0);
5072 }
5073
5074 if (DECL_P (exp))
5075 alignment = MIN (alignment, DECL_ALIGN (exp));
5076 else if (TREE_TYPE (exp) != 0)
5077 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5078
5079 /* If OFFSET is constant, see if we can return the whole thing as a
5080 constant bit position. Otherwise, split it up. */
5081 if (host_integerp (offset, 0)
5082 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5083 bitsize_unit_node))
5084 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5085 && host_integerp (tem, 0))
5086 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5087 else
5088 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5089
5090 *pmode = mode;
5091 *palignment = alignment;
5092 return exp;
5093 }
5094
5095 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5096
5097 static enum memory_use_mode
5098 get_memory_usage_from_modifier (modifier)
5099 enum expand_modifier modifier;
5100 {
5101 switch (modifier)
5102 {
5103 case EXPAND_NORMAL:
5104 case EXPAND_SUM:
5105 return MEMORY_USE_RO;
5106 break;
5107 case EXPAND_MEMORY_USE_WO:
5108 return MEMORY_USE_WO;
5109 break;
5110 case EXPAND_MEMORY_USE_RW:
5111 return MEMORY_USE_RW;
5112 break;
5113 case EXPAND_MEMORY_USE_DONT:
5114 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5115 MEMORY_USE_DONT, because they are modifiers to a call of
5116 expand_expr in the ADDR_EXPR case of expand_expr. */
5117 case EXPAND_CONST_ADDRESS:
5118 case EXPAND_INITIALIZER:
5119 return MEMORY_USE_DONT;
5120 case EXPAND_MEMORY_USE_BAD:
5121 default:
5122 abort ();
5123 }
5124 }
5125 \f
5126 /* Given an rtx VALUE that may contain additions and multiplications,
5127 return an equivalent value that just refers to a register or memory.
5128 This is done by generating instructions to perform the arithmetic
5129 and returning a pseudo-register containing the value.
5130
5131 The returned value may be a REG, SUBREG, MEM or constant. */
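/* For example (hypothetical rtl), given the value

       (plus:SI (reg:SI 100) (const_int 4))

   this function emits an add instruction computing the sum into a pseudo
   register and returns that pseudo, leaving the caller with a plain REG
   instead of an arithmetic expression.  */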
5132
5133 rtx
5134 force_operand (value, target)
5135 rtx value, target;
5136 {
5137 register optab binoptab = 0;
5138 /* Use a temporary to force order of execution of calls to
5139 `force_operand'. */
5140 rtx tmp;
5141 register rtx op2;
5142 /* Use subtarget as the target for operand 0 of a binary operation. */
5143 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5144
5145 /* Check for a PIC address load. */
5146 if (flag_pic
5147 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5148 && XEXP (value, 0) == pic_offset_table_rtx
5149 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5150 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5151 || GET_CODE (XEXP (value, 1)) == CONST))
5152 {
5153 if (!subtarget)
5154 subtarget = gen_reg_rtx (GET_MODE (value));
5155 emit_move_insn (subtarget, value);
5156 return subtarget;
5157 }
5158
5159 if (GET_CODE (value) == PLUS)
5160 binoptab = add_optab;
5161 else if (GET_CODE (value) == MINUS)
5162 binoptab = sub_optab;
5163 else if (GET_CODE (value) == MULT)
5164 {
5165 op2 = XEXP (value, 1);
5166 if (!CONSTANT_P (op2)
5167 && !(GET_CODE (op2) == REG && op2 != subtarget))
5168 subtarget = 0;
5169 tmp = force_operand (XEXP (value, 0), subtarget);
5170 return expand_mult (GET_MODE (value), tmp,
5171 force_operand (op2, NULL_RTX),
5172 target, 0);
5173 }
5174
5175 if (binoptab)
5176 {
5177 op2 = XEXP (value, 1);
5178 if (!CONSTANT_P (op2)
5179 && !(GET_CODE (op2) == REG && op2 != subtarget))
5180 subtarget = 0;
5181 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5182 {
5183 binoptab = add_optab;
5184 op2 = negate_rtx (GET_MODE (value), op2);
5185 }
5186
5187 /* Check for an addition with OP2 a constant integer and our first
5188 operand a PLUS of a virtual register and something else. In that
5189 case, we want to emit the sum of the virtual register and the
5190 constant first and then add the other value. This allows virtual
5191 register instantiation to simply modify the constant rather than
5192 creating another one around this addition. */
5193 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5194 && GET_CODE (XEXP (value, 0)) == PLUS
5195 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5196 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5197 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5198 {
5199 rtx temp = expand_binop (GET_MODE (value), binoptab,
5200 XEXP (XEXP (value, 0), 0), op2,
5201 subtarget, 0, OPTAB_LIB_WIDEN);
5202 return expand_binop (GET_MODE (value), binoptab, temp,
5203 force_operand (XEXP (XEXP (value, 0), 1), 0),
5204 target, 0, OPTAB_LIB_WIDEN);
5205 }
5206
5207 tmp = force_operand (XEXP (value, 0), subtarget);
5208 return expand_binop (GET_MODE (value), binoptab, tmp,
5209 force_operand (op2, NULL_RTX),
5210 target, 0, OPTAB_LIB_WIDEN);
5211 /* We give UNSIGNEDP = 0 to expand_binop
5212 because the only operations we are expanding here are signed ones. */
5213 }
5214 return value;
5215 }
5216 \f
5217 /* Subroutine of expand_expr:
5218 save the non-copied parts (LIST) of an expr (LHS), and return a list
5219 which can restore these values to their previous values,
5220 should something modify their storage. */
5221
5222 static tree
5223 save_noncopied_parts (lhs, list)
5224 tree lhs;
5225 tree list;
5226 {
5227 tree tail;
5228 tree parts = 0;
5229
5230 for (tail = list; tail; tail = TREE_CHAIN (tail))
5231 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5232 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5233 else
5234 {
5235 tree part = TREE_VALUE (tail);
5236 tree part_type = TREE_TYPE (part);
5237 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5238 rtx target = assign_temp (part_type, 0, 1, 1);
5239 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5240 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5241 parts = tree_cons (to_be_saved,
5242 build (RTL_EXPR, part_type, NULL_TREE,
5243 (tree) target),
5244 parts);
5245 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5246 }
5247 return parts;
5248 }
5249
5250 /* Subroutine of expand_expr:
5251 record the non-copied parts (LIST) of an expr (LHS), and return a list
5252 which specifies the initial values of these parts. */
5253
5254 static tree
5255 init_noncopied_parts (lhs, list)
5256 tree lhs;
5257 tree list;
5258 {
5259 tree tail;
5260 tree parts = 0;
5261
5262 for (tail = list; tail; tail = TREE_CHAIN (tail))
5263 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5264 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5265 else if (TREE_PURPOSE (tail))
5266 {
5267 tree part = TREE_VALUE (tail);
5268 tree part_type = TREE_TYPE (part);
5269 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5270 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5271 }
5272 return parts;
5273 }
5274
5275 /* Subroutine of expand_expr: return nonzero iff there is no way that
5276 EXP can reference X, which is being modified. TOP_P is nonzero if this
5277 call is going to be used to determine whether we need a temporary
5278 for EXP, as opposed to a recursive call to this function.
5279
5280 It is always safe for this routine to return zero since it merely
5281 searches for optimization opportunities. */
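/* A small hypothetical example: when expanding

       *p = *p + 1;

   with X the MEM for the left-hand *p, the INDIRECT_REF on the right
   might refer to the same storage, so this function returns 0 and the
   caller will not compute the right-hand side directly into X.  For
   a = b + 1  with X a pseudo register, it can return 1.  */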
5282
5283 static int
5284 safe_from_p (x, exp, top_p)
5285 rtx x;
5286 tree exp;
5287 int top_p;
5288 {
5289 rtx exp_rtl = 0;
5290 int i, nops;
5291 static int save_expr_count;
5292 static int save_expr_size = 0;
5293 static tree *save_expr_rewritten;
5294 static tree save_expr_trees[256];
5295
5296 if (x == 0
5297 /* If EXP has varying size, we MUST use a target since we currently
5298 have no way of allocating temporaries of variable size
5299 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5300 So we assume here that something at a higher level has prevented a
5301 clash. This is somewhat bogus, but the best we can do. Only
5302 do this when X is BLKmode and when we are at the top level. */
5303 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5304 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5305 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5306 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5307 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5308 != INTEGER_CST)
5309 && GET_MODE (x) == BLKmode))
5310 return 1;
5311
5312 if (top_p && save_expr_size == 0)
5313 {
5314 int rtn;
5315
5316 save_expr_count = 0;
5317 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5318 save_expr_rewritten = &save_expr_trees[0];
5319
5320 rtn = safe_from_p (x, exp, 1);
5321
5322 for (i = 0; i < save_expr_count; ++i)
5323 {
5324 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5325 abort ();
5326 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5327 }
5328
5329 save_expr_size = 0;
5330
5331 return rtn;
5332 }
5333
5334 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5335 find the underlying pseudo. */
5336 if (GET_CODE (x) == SUBREG)
5337 {
5338 x = SUBREG_REG (x);
5339 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5340 return 0;
5341 }
5342
5343 /* If X is a location in the outgoing argument area, it is always safe. */
5344 if (GET_CODE (x) == MEM
5345 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5346 || (GET_CODE (XEXP (x, 0)) == PLUS
5347 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5348 return 1;
5349
5350 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5351 {
5352 case 'd':
5353 exp_rtl = DECL_RTL (exp);
5354 break;
5355
5356 case 'c':
5357 return 1;
5358
5359 case 'x':
5360 if (TREE_CODE (exp) == TREE_LIST)
5361 return ((TREE_VALUE (exp) == 0
5362 || safe_from_p (x, TREE_VALUE (exp), 0))
5363 && (TREE_CHAIN (exp) == 0
5364 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5365 else if (TREE_CODE (exp) == ERROR_MARK)
5366 return 1; /* An already-visited SAVE_EXPR? */
5367 else
5368 return 0;
5369
5370 case '1':
5371 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5372
5373 case '2':
5374 case '<':
5375 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5376 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5377
5378 case 'e':
5379 case 'r':
5380 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5381 the expression. If it is set, we conflict iff we are that rtx or
5382 both are in memory. Otherwise, we check all operands of the
5383 expression recursively. */
5384
5385 switch (TREE_CODE (exp))
5386 {
5387 case ADDR_EXPR:
5388 return (staticp (TREE_OPERAND (exp, 0))
5389 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5390 || TREE_STATIC (exp));
5391
5392 case INDIRECT_REF:
5393 if (GET_CODE (x) == MEM)
5394 return 0;
5395 break;
5396
5397 case CALL_EXPR:
5398 exp_rtl = CALL_EXPR_RTL (exp);
5399 if (exp_rtl == 0)
5400 {
5401 /* Assume that the call will clobber all hard registers and
5402 all of memory. */
5403 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5404 || GET_CODE (x) == MEM)
5405 return 0;
5406 }
5407
5408 break;
5409
5410 case RTL_EXPR:
5411 /* If a sequence exists, we would have to scan every instruction
5412 in the sequence to see if it was safe. This is probably not
5413 worthwhile. */
5414 if (RTL_EXPR_SEQUENCE (exp))
5415 return 0;
5416
5417 exp_rtl = RTL_EXPR_RTL (exp);
5418 break;
5419
5420 case WITH_CLEANUP_EXPR:
5421 exp_rtl = RTL_EXPR_RTL (exp);
5422 break;
5423
5424 case CLEANUP_POINT_EXPR:
5425 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5426
5427 case SAVE_EXPR:
5428 exp_rtl = SAVE_EXPR_RTL (exp);
5429 if (exp_rtl)
5430 break;
5431
5432 /* This SAVE_EXPR might appear many times in the top-level
5433 safe_from_p() expression, and if it has a complex
5434 subexpression, examining it multiple times could result
5435 in a combinatorial explosion. E.g. on an Alpha
5436 running at least 200MHz, a Fortran test case compiled with
5437 optimization took about 28 minutes to compile -- even though
5438 it was only a few lines long, and the complicated line causing
5439 so much time to be spent in the earlier version of safe_from_p()
5440 had only 293 or so unique nodes.
5441
5442 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5443 where it is so we can turn it back in the top-level safe_from_p()
5444 when we're done. */
5445
5446 /* For now, don't bother re-sizing the array. */
5447 if (save_expr_count >= save_expr_size)
5448 return 0;
5449 save_expr_rewritten[save_expr_count++] = exp;
5450
5451 nops = tree_code_length[(int) SAVE_EXPR];
5452 for (i = 0; i < nops; i++)
5453 {
5454 tree operand = TREE_OPERAND (exp, i);
5455 if (operand == NULL_TREE)
5456 continue;
5457 TREE_SET_CODE (exp, ERROR_MARK);
5458 if (!safe_from_p (x, operand, 0))
5459 return 0;
5460 TREE_SET_CODE (exp, SAVE_EXPR);
5461 }
5462 TREE_SET_CODE (exp, ERROR_MARK);
5463 return 1;
5464
5465 case BIND_EXPR:
5466 /* The only operand we look at is operand 1. The rest aren't
5467 part of the expression. */
5468 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5469
5470 case METHOD_CALL_EXPR:
5471 /* This takes a rtx argument, but shouldn't appear here. */
5472 abort ();
5473
5474 default:
5475 break;
5476 }
5477
5478 /* If we have an rtx, we do not need to scan our operands. */
5479 if (exp_rtl)
5480 break;
5481
5482 nops = tree_code_length[(int) TREE_CODE (exp)];
5483 for (i = 0; i < nops; i++)
5484 if (TREE_OPERAND (exp, i) != 0
5485 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5486 return 0;
5487 }
5488
5489 /* If we have an rtl, find any enclosed object. Then see if we conflict
5490 with it. */
5491 if (exp_rtl)
5492 {
5493 if (GET_CODE (exp_rtl) == SUBREG)
5494 {
5495 exp_rtl = SUBREG_REG (exp_rtl);
5496 if (GET_CODE (exp_rtl) == REG
5497 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5498 return 0;
5499 }
5500
5501 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5502 are memory and EXP is not readonly. */
5503 return ! (rtx_equal_p (x, exp_rtl)
5504 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5505 && ! TREE_READONLY (exp)));
5506 }
5507
5508 /* If we reach here, it is safe. */
5509 return 1;
5510 }
5511
5512 /* Subroutine of expand_expr: return nonzero iff EXP is an
5513 expression whose type is statically determinable. */
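
/* For example (purely illustrative): this returns 1 for a VAR_DECL or a
   COMPONENT_REF such as "obj.field", but 0 for a plain INDIRECT_REF such
   as "*ptr", since INDIRECT_REF is not among the codes accepted below.  */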
5514
5515 static int
5516 fixed_type_p (exp)
5517 tree exp;
5518 {
5519 if (TREE_CODE (exp) == PARM_DECL
5520 || TREE_CODE (exp) == VAR_DECL
5521 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5522 || TREE_CODE (exp) == COMPONENT_REF
5523 || TREE_CODE (exp) == ARRAY_REF)
5524 return 1;
5525 return 0;
5526 }
5527
5528 /* Subroutine of expand_expr: return rtx if EXP is a
5529 variable or parameter; else return 0. */
5530
5531 static rtx
5532 var_rtx (exp)
5533 tree exp;
5534 {
5535 STRIP_NOPS (exp);
5536 switch (TREE_CODE (exp))
5537 {
5538 case PARM_DECL:
5539 case VAR_DECL:
5540 return DECL_RTL (exp);
5541 default:
5542 return 0;
5543 }
5544 }
5545
5546 #ifdef MAX_INTEGER_COMPUTATION_MODE
5547 void
5548 check_max_integer_computation_mode (exp)
5549 tree exp;
5550 {
5551 enum tree_code code;
5552 enum machine_mode mode;
5553
5554 /* Strip any NOPs that don't change the mode. */
5555 STRIP_NOPS (exp);
5556 code = TREE_CODE (exp);
5557
5558 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5559 if (code == NOP_EXPR
5560 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5561 return;
5562
5563 /* First check the type of the overall operation. We need only look at
5564 unary, binary and relational operations. */
5565 if (TREE_CODE_CLASS (code) == '1'
5566 || TREE_CODE_CLASS (code) == '2'
5567 || TREE_CODE_CLASS (code) == '<')
5568 {
5569 mode = TYPE_MODE (TREE_TYPE (exp));
5570 if (GET_MODE_CLASS (mode) == MODE_INT
5571 && mode > MAX_INTEGER_COMPUTATION_MODE)
5572 fatal ("unsupported wide integer operation");
5573 }
5574
5575 /* Check operand of a unary op. */
5576 if (TREE_CODE_CLASS (code) == '1')
5577 {
5578 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5579 if (GET_MODE_CLASS (mode) == MODE_INT
5580 && mode > MAX_INTEGER_COMPUTATION_MODE)
5581 fatal ("unsupported wide integer operation");
5582 }
5583
5584 /* Check operands of a binary/comparison op. */
5585 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5586 {
5587 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5588 if (GET_MODE_CLASS (mode) == MODE_INT
5589 && mode > MAX_INTEGER_COMPUTATION_MODE)
5590 fatal ("unsupported wide integer operation");
5591
5592 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5593 if (GET_MODE_CLASS (mode) == MODE_INT
5594 && mode > MAX_INTEGER_COMPUTATION_MODE)
5595 fatal ("unsupported wide integer operation");
5596 }
5597 }
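
/* A concrete (hypothetical) example: on a target that defined
   MAX_INTEGER_COMPUTATION_MODE as SImode, reaching this routine with the
   tree for a DImode addition such as "a + b" on 64-bit operands would
   satisfy the class checks above and call
   fatal ("unsupported wide integer operation").  */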
5598 #endif
5599
5600 \f
5601 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5602 has any readonly fields. If any of the fields have types that
5603 contain readonly fields, return true as well. */
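
/* A small illustration (types invented for this comment): given

	struct inner { const int id; };
	struct outer { struct inner in; int count; };

   this returns 1 for "struct inner" because of its const field, and also
   for "struct outer" because its field "in" has a RECORD_TYPE that itself
   contains a readonly field.  */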
5604
5605 static int
5606 readonly_fields_p (type)
5607 tree type;
5608 {
5609 tree field;
5610
5611 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5612 if (TREE_CODE (field) == FIELD_DECL
5613 && (TREE_READONLY (field)
5614 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5615 && readonly_fields_p (TREE_TYPE (field)))))
5616 return 1;
5617
5618 return 0;
5619 }
5620 \f
5621 /* expand_expr: generate code for computing expression EXP.
5622 An rtx for the computed value is returned. The value is never null.
5623 In the case of a void EXP, const0_rtx is returned.
5624
5625 The value may be stored in TARGET if TARGET is nonzero.
5626 TARGET is just a suggestion; callers must assume that
5627 the rtx returned may not be the same as TARGET.
5628
5629 If TARGET is CONST0_RTX, it means that the value will be ignored.
5630
5631 If TMODE is not VOIDmode, it suggests generating the
5632 result in mode TMODE. But this is done only when convenient.
5633    Otherwise, TMODE is ignored and the value is generated in its natural mode.
5634 TMODE is just a suggestion; callers must assume that
5635 the rtx returned may not have mode TMODE.
5636
5637 Note that TARGET may have neither TMODE nor MODE. In that case, it
5638 probably will not be used.
5639
5640 If MODIFIER is EXPAND_SUM then when EXP is an addition
5641 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5642 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5643 products as above, or REG or MEM, or constant.
5644 Ordinarily in such cases we would output mul or add instructions
5645 and then return a pseudo reg containing the sum.
5646
5647 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5648 it also marks a label as absolutely required (it can't be dead).
5649 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5650 This is used for outputting expressions used in initializers.
5651
5652 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5653 with a constant address even if that address is not normally legitimate.
5654 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
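
/* A minimal calling sketch (the surrounding context is hypothetical): a
   caller that just wants the value of EXP in its natural mode can use

	rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   and must then use VAL as the result rather than assuming anything about
   TARGET or TMODE, since both are only hints as described above.  */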
5655
5656 rtx
5657 expand_expr (exp, target, tmode, modifier)
5658 register tree exp;
5659 rtx target;
5660 enum machine_mode tmode;
5661 enum expand_modifier modifier;
5662 {
5663 register rtx op0, op1, temp;
5664 tree type = TREE_TYPE (exp);
5665 int unsignedp = TREE_UNSIGNED (type);
5666 register enum machine_mode mode;
5667 register enum tree_code code = TREE_CODE (exp);
5668 optab this_optab;
5669 rtx subtarget, original_target;
5670 int ignore;
5671 tree context;
5672 /* Used by check-memory-usage to make modifier read only. */
5673 enum expand_modifier ro_modifier;
5674
5675 /* Handle ERROR_MARK before anybody tries to access its type. */
5676 if (TREE_CODE (exp) == ERROR_MARK)
5677 {
5678 op0 = CONST0_RTX (tmode);
5679 if (op0 != 0)
5680 return op0;
5681 return const0_rtx;
5682 }
5683
5684 mode = TYPE_MODE (type);
5685 /* Use subtarget as the target for operand 0 of a binary operation. */
5686 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5687 original_target = target;
5688 ignore = (target == const0_rtx
5689 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5690 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5691 || code == COND_EXPR)
5692 && TREE_CODE (type) == VOID_TYPE));
5693
5694 /* Make a read-only version of the modifier. */
5695 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5696 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5697 ro_modifier = modifier;
5698 else
5699 ro_modifier = EXPAND_NORMAL;
5700
5701 /* Don't use hard regs as subtargets, because the combiner
5702 can only handle pseudo regs. */
5703 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5704 subtarget = 0;
5705 /* Avoid subtargets inside loops,
5706 since they hide some invariant expressions. */
5707 if (preserve_subexpressions_p ())
5708 subtarget = 0;
5709
5710 /* If we are going to ignore this result, we need only do something
5711 if there is a side-effect somewhere in the expression. If there
5712 is, short-circuit the most common cases here. Note that we must
5713 not call expand_expr with anything but const0_rtx in case this
5714 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
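
  /* Illustration (the expressions are invented for this comment): for a
     statement expression such as "(void) (a + b)" with no side effects the
     code below returns const0_rtx immediately, whereas for "(void) *p"
     with a volatile *p the load is still emitted before const0_rtx is
     returned, so the volatile access is not lost.  */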
5715
5716 if (ignore)
5717 {
5718 if (! TREE_SIDE_EFFECTS (exp))
5719 return const0_rtx;
5720
5721 /* Ensure we reference a volatile object even if value is ignored, but
5722 don't do this if all we are doing is taking its address. */
5723 if (TREE_THIS_VOLATILE (exp)
5724 && TREE_CODE (exp) != FUNCTION_DECL
5725 && mode != VOIDmode && mode != BLKmode
5726 && modifier != EXPAND_CONST_ADDRESS)
5727 {
5728 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5729 if (GET_CODE (temp) == MEM)
5730 temp = copy_to_reg (temp);
5731 return const0_rtx;
5732 }
5733
5734 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5735 || code == INDIRECT_REF || code == BUFFER_REF)
5736 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5737 VOIDmode, ro_modifier);
5738 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5739 || code == ARRAY_REF)
5740 {
5741 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5742 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5743 return const0_rtx;
5744 }
5745 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5746 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5747 /* If the second operand has no side effects, just evaluate
5748 the first. */
5749 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5750 VOIDmode, ro_modifier);
5751 else if (code == BIT_FIELD_REF)
5752 {
5753 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5754 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5755 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5756 return const0_rtx;
5757 }
5759 target = 0;
5760 }
5761
5762 #ifdef MAX_INTEGER_COMPUTATION_MODE
5763 /* Only check stuff here if the mode we want is different from the mode
5764      of the expression; if it's the same, check_max_integer_computation_mode
5765 will handle it. Do we really need to check this stuff at all? */
5766
5767 if (target
5768 && GET_MODE (target) != mode
5769 && TREE_CODE (exp) != INTEGER_CST
5770 && TREE_CODE (exp) != PARM_DECL
5771 && TREE_CODE (exp) != ARRAY_REF
5772 && TREE_CODE (exp) != COMPONENT_REF
5773 && TREE_CODE (exp) != BIT_FIELD_REF
5774 && TREE_CODE (exp) != INDIRECT_REF
5775 && TREE_CODE (exp) != CALL_EXPR
5776 && TREE_CODE (exp) != VAR_DECL
5777 && TREE_CODE (exp) != RTL_EXPR)
5778 {
5779 enum machine_mode mode = GET_MODE (target);
5780
5781 if (GET_MODE_CLASS (mode) == MODE_INT
5782 && mode > MAX_INTEGER_COMPUTATION_MODE)
5783 fatal ("unsupported wide integer operation");
5784 }
5785
5786 if (tmode != mode
5787 && TREE_CODE (exp) != INTEGER_CST
5788 && TREE_CODE (exp) != PARM_DECL
5789 && TREE_CODE (exp) != ARRAY_REF
5790 && TREE_CODE (exp) != COMPONENT_REF
5791 && TREE_CODE (exp) != BIT_FIELD_REF
5792 && TREE_CODE (exp) != INDIRECT_REF
5793 && TREE_CODE (exp) != VAR_DECL
5794 && TREE_CODE (exp) != CALL_EXPR
5795 && TREE_CODE (exp) != RTL_EXPR
5796 && GET_MODE_CLASS (tmode) == MODE_INT
5797 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5798 fatal ("unsupported wide integer operation");
5799
5800 check_max_integer_computation_mode (exp);
5801 #endif
5802
5803 /* If will do cse, generate all results into pseudo registers
5804 since 1) that allows cse to find more things
5805 and 2) otherwise cse could produce an insn the machine
5806 cannot support. */
5807
5808 if (! cse_not_expected && mode != BLKmode && target
5809 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5810 target = subtarget;
5811
5812 switch (code)
5813 {
5814 case LABEL_DECL:
5815 {
5816 tree function = decl_function_context (exp);
5817 /* Handle using a label in a containing function. */
5818 if (function != current_function_decl
5819 && function != inline_function_decl && function != 0)
5820 {
5821 struct function *p = find_function_data (function);
5822 /* Allocate in the memory associated with the function
5823 that the label is in. */
5824 push_obstacks (p->function_obstack,
5825 p->function_maybepermanent_obstack);
5826
5827 p->expr->x_forced_labels
5828 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5829 p->expr->x_forced_labels);
5830 pop_obstacks ();
5831 }
5832 else
5833 {
5834 if (modifier == EXPAND_INITIALIZER)
5835 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5836 label_rtx (exp),
5837 forced_labels);
5838 }
5839
5840 temp = gen_rtx_MEM (FUNCTION_MODE,
5841 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5842 if (function != current_function_decl
5843 && function != inline_function_decl && function != 0)
5844 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5845 return temp;
5846 }
5847
5848 case PARM_DECL:
5849 if (DECL_RTL (exp) == 0)
5850 {
5851 error_with_decl (exp, "prior parameter's size depends on `%s'");
5852 return CONST0_RTX (mode);
5853 }
5854
5855 /* ... fall through ... */
5856
5857 case VAR_DECL:
5858 /* If a static var's type was incomplete when the decl was written,
5859 but the type is complete now, lay out the decl now. */
5860 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5861 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5862 {
5863 push_obstacks_nochange ();
5864 end_temporary_allocation ();
5865 layout_decl (exp, 0);
5866 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5867 pop_obstacks ();
5868 }
5869
5870 /* Although static-storage variables start off initialized, according to
5871 ANSI C, a memcpy could overwrite them with uninitialized values. So
5872 we check them too. This also lets us check for read-only variables
5873 accessed via a non-const declaration, in case it won't be detected
5874 any other way (e.g., in an embedded system or OS kernel without
5875 memory protection).
5876
5877 Aggregates are not checked here; they're handled elsewhere. */
5878 if (cfun && current_function_check_memory_usage
5879 && code == VAR_DECL
5880 && GET_CODE (DECL_RTL (exp)) == MEM
5881 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5882 {
5883 enum memory_use_mode memory_usage;
5884 memory_usage = get_memory_usage_from_modifier (modifier);
5885
5886 if (memory_usage != MEMORY_USE_DONT)
5887 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5888 XEXP (DECL_RTL (exp), 0), Pmode,
5889 GEN_INT (int_size_in_bytes (type)),
5890 TYPE_MODE (sizetype),
5891 GEN_INT (memory_usage),
5892 TYPE_MODE (integer_type_node));
5893 }
5894
5895 /* ... fall through ... */
5896
5897 case FUNCTION_DECL:
5898 case RESULT_DECL:
5899 if (DECL_RTL (exp) == 0)
5900 abort ();
5901
5902       /* Ensure the variable is marked as used even if it doesn't go through
5903 	 a parser.  If it hasn't been used yet, write out an external
5904 definition. */
5905 if (! TREE_USED (exp))
5906 {
5907 assemble_external (exp);
5908 TREE_USED (exp) = 1;
5909 }
5910
5911 /* Show we haven't gotten RTL for this yet. */
5912 temp = 0;
5913
5914 /* Handle variables inherited from containing functions. */
5915 context = decl_function_context (exp);
5916
5917 /* We treat inline_function_decl as an alias for the current function
5918 because that is the inline function whose vars, types, etc.
5919 are being merged into the current function.
5920 See expand_inline_function. */
5921
5922 if (context != 0 && context != current_function_decl
5923 && context != inline_function_decl
5924 /* If var is static, we don't need a static chain to access it. */
5925 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5926 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5927 {
5928 rtx addr;
5929
5930 /* Mark as non-local and addressable. */
5931 DECL_NONLOCAL (exp) = 1;
5932 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5933 abort ();
5934 mark_addressable (exp);
5935 if (GET_CODE (DECL_RTL (exp)) != MEM)
5936 abort ();
5937 addr = XEXP (DECL_RTL (exp), 0);
5938 if (GET_CODE (addr) == MEM)
5939 addr = gen_rtx_MEM (Pmode,
5940 fix_lexical_addr (XEXP (addr, 0), exp));
5941 else
5942 addr = fix_lexical_addr (addr, exp);
5943 temp = change_address (DECL_RTL (exp), mode, addr);
5944 }
5945
5946 /* This is the case of an array whose size is to be determined
5947 from its initializer, while the initializer is still being parsed.
5948 See expand_decl. */
5949
5950 else if (GET_CODE (DECL_RTL (exp)) == MEM
5951 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5952 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5953 XEXP (DECL_RTL (exp), 0));
5954
5955       /* If DECL_RTL is memory, we are in the normal case: if either the
5956 	 address is not valid, or it is not a register and -fforce-addr is
5957 	 specified, get the address into a register.  */
5958
5959 else if (GET_CODE (DECL_RTL (exp)) == MEM
5960 && modifier != EXPAND_CONST_ADDRESS
5961 && modifier != EXPAND_SUM
5962 && modifier != EXPAND_INITIALIZER
5963 && (! memory_address_p (DECL_MODE (exp),
5964 XEXP (DECL_RTL (exp), 0))
5965 || (flag_force_addr
5966 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5967 temp = change_address (DECL_RTL (exp), VOIDmode,
5968 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5969
5970       /* If we got something, return it.  But first, set the alignment
5971 	 if the address is a register.  */
5972 if (temp != 0)
5973 {
5974 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5975 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
5976
5977 return temp;
5978 }
5979
5980 /* If the mode of DECL_RTL does not match that of the decl, it
5981 must be a promoted value. We return a SUBREG of the wanted mode,
5982 but mark it so that we know that it was already extended. */
5983
5984 if (GET_CODE (DECL_RTL (exp)) == REG
5985 && GET_MODE (DECL_RTL (exp)) != mode)
5986 {
5987 /* Get the signedness used for this variable. Ensure we get the
5988 same mode we got when the variable was declared. */
5989 if (GET_MODE (DECL_RTL (exp))
5990 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5991 abort ();
5992
5993 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5994 SUBREG_PROMOTED_VAR_P (temp) = 1;
5995 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5996 return temp;
5997 }
5998
5999 return DECL_RTL (exp);
6000
6001 case INTEGER_CST:
6002 return immed_double_const (TREE_INT_CST_LOW (exp),
6003 TREE_INT_CST_HIGH (exp), mode);
6004
6005 case CONST_DECL:
6006 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6007 EXPAND_MEMORY_USE_BAD);
6008
6009 case REAL_CST:
6010 /* If optimized, generate immediate CONST_DOUBLE
6011 which will be turned into memory by reload if necessary.
6012
6013 We used to force a register so that loop.c could see it. But
6014 this does not allow gen_* patterns to perform optimizations with
6015 the constants. It also produces two insns in cases like "x = 1.0;".
6016 On most machines, floating-point constants are not permitted in
6017 many insns, so we'd end up copying it to a register in any case.
6018
6019 Now, we do the copying in expand_binop, if appropriate. */
6020 return immed_real_const (exp);
6021
6022 case COMPLEX_CST:
6023 case STRING_CST:
6024 if (! TREE_CST_RTL (exp))
6025 output_constant_def (exp);
6026
6027 /* TREE_CST_RTL probably contains a constant address.
6028 On RISC machines where a constant address isn't valid,
6029 make some insns to get that address into a register. */
6030 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6031 && modifier != EXPAND_CONST_ADDRESS
6032 && modifier != EXPAND_INITIALIZER
6033 && modifier != EXPAND_SUM
6034 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6035 || (flag_force_addr
6036 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6037 return change_address (TREE_CST_RTL (exp), VOIDmode,
6038 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6039 return TREE_CST_RTL (exp);
6040
6041 case EXPR_WITH_FILE_LOCATION:
6042 {
6043 rtx to_return;
6044 char *saved_input_filename = input_filename;
6045 int saved_lineno = lineno;
6046 input_filename = EXPR_WFL_FILENAME (exp);
6047 lineno = EXPR_WFL_LINENO (exp);
6048 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6049 emit_line_note (input_filename, lineno);
6050 	/* Possibly avoid switching back and forth here.  */
6051 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6052 input_filename = saved_input_filename;
6053 lineno = saved_lineno;
6054 return to_return;
6055 }
6056
6057 case SAVE_EXPR:
6058 context = decl_function_context (exp);
6059
6060 /* If this SAVE_EXPR was at global context, assume we are an
6061 initialization function and move it into our context. */
6062 if (context == 0)
6063 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6064
6065 /* We treat inline_function_decl as an alias for the current function
6066 because that is the inline function whose vars, types, etc.
6067 are being merged into the current function.
6068 See expand_inline_function. */
6069 if (context == current_function_decl || context == inline_function_decl)
6070 context = 0;
6071
6072 /* If this is non-local, handle it. */
6073 if (context)
6074 {
6075 /* The following call just exists to abort if the context is
6076 not of a containing function. */
6077 find_function_data (context);
6078
6079 temp = SAVE_EXPR_RTL (exp);
6080 if (temp && GET_CODE (temp) == REG)
6081 {
6082 put_var_into_stack (exp);
6083 temp = SAVE_EXPR_RTL (exp);
6084 }
6085 if (temp == 0 || GET_CODE (temp) != MEM)
6086 abort ();
6087 return change_address (temp, mode,
6088 fix_lexical_addr (XEXP (temp, 0), exp));
6089 }
6090 if (SAVE_EXPR_RTL (exp) == 0)
6091 {
6092 if (mode == VOIDmode)
6093 temp = const0_rtx;
6094 else
6095 temp = assign_temp (type, 3, 0, 0);
6096
6097 SAVE_EXPR_RTL (exp) = temp;
6098 if (!optimize && GET_CODE (temp) == REG)
6099 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6100 save_expr_regs);
6101
6102 /* If the mode of TEMP does not match that of the expression, it
6103 must be a promoted value. We pass store_expr a SUBREG of the
6104 wanted mode but mark it so that we know that it was already
6105 extended. Note that `unsignedp' was modified above in
6106 this case. */
6107
6108 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6109 {
6110 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6111 SUBREG_PROMOTED_VAR_P (temp) = 1;
6112 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6113 }
6114
6115 if (temp == const0_rtx)
6116 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6117 EXPAND_MEMORY_USE_BAD);
6118 else
6119 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6120
6121 TREE_USED (exp) = 1;
6122 }
6123
6124 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6125 must be a promoted value. We return a SUBREG of the wanted mode,
6126 but mark it so that we know that it was already extended. */
6127
6128 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6129 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6130 {
6131 /* Compute the signedness and make the proper SUBREG. */
6132 promote_mode (type, mode, &unsignedp, 0);
6133 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6134 SUBREG_PROMOTED_VAR_P (temp) = 1;
6135 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6136 return temp;
6137 }
6138
6139 return SAVE_EXPR_RTL (exp);
6140
6141 case UNSAVE_EXPR:
6142 {
6143 rtx temp;
6144 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6145 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6146 return temp;
6147 }
6148
6149 case PLACEHOLDER_EXPR:
6150 {
6151 tree placeholder_expr;
6152
6153 /* If there is an object on the head of the placeholder list,
6154 	   see if any object in it is of type TYPE or a pointer to it.  For
6155 further information, see tree.def. */
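
	/* A hedged note on how this is typically reached: a variable-sized
	   field (e.g. in an Ada record) has a size or position expression
	   that refers to the containing object through a PLACEHOLDER_EXPR;
	   a WITH_RECORD_EXPR (handled below) pushes the actual record
	   object onto placeholder_list, and this loop substitutes that
	   object, or a pointer to it, for the placeholder.  */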
6156 for (placeholder_expr = placeholder_list;
6157 placeholder_expr != 0;
6158 placeholder_expr = TREE_CHAIN (placeholder_expr))
6159 {
6160 tree need_type = TYPE_MAIN_VARIANT (type);
6161 tree object = 0;
6162 tree old_list = placeholder_list;
6163 tree elt;
6164
6165 /* Find the outermost reference that is of the type we want.
6166 If none, see if any object has a type that is a pointer to
6167 the type we want. */
6168 for (elt = TREE_PURPOSE (placeholder_expr);
6169 elt != 0 && object == 0;
6170 elt
6171 = ((TREE_CODE (elt) == COMPOUND_EXPR
6172 || TREE_CODE (elt) == COND_EXPR)
6173 ? TREE_OPERAND (elt, 1)
6174 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6175 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6176 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6177 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6178 ? TREE_OPERAND (elt, 0) : 0))
6179 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6180 object = elt;
6181
6182 for (elt = TREE_PURPOSE (placeholder_expr);
6183 elt != 0 && object == 0;
6184 elt
6185 = ((TREE_CODE (elt) == COMPOUND_EXPR
6186 || TREE_CODE (elt) == COND_EXPR)
6187 ? TREE_OPERAND (elt, 1)
6188 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6189 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6190 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6191 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6192 ? TREE_OPERAND (elt, 0) : 0))
6193 if (POINTER_TYPE_P (TREE_TYPE (elt))
6194 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6195 == need_type))
6196 object = build1 (INDIRECT_REF, need_type, elt);
6197
6198 if (object != 0)
6199 {
6200 /* Expand this object skipping the list entries before
6201 it was found in case it is also a PLACEHOLDER_EXPR.
6202 In that case, we want to translate it using subsequent
6203 entries. */
6204 placeholder_list = TREE_CHAIN (placeholder_expr);
6205 temp = expand_expr (object, original_target, tmode,
6206 ro_modifier);
6207 placeholder_list = old_list;
6208 return temp;
6209 }
6210 }
6211 }
6212
6213 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6214 abort ();
6215
6216 case WITH_RECORD_EXPR:
6217 /* Put the object on the placeholder list, expand our first operand,
6218 and pop the list. */
6219 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6220 placeholder_list);
6221 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6222 tmode, ro_modifier);
6223 placeholder_list = TREE_CHAIN (placeholder_list);
6224 return target;
6225
6226 case GOTO_EXPR:
6227 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6228 expand_goto (TREE_OPERAND (exp, 0));
6229 else
6230 expand_computed_goto (TREE_OPERAND (exp, 0));
6231 return const0_rtx;
6232
6233 case EXIT_EXPR:
6234 expand_exit_loop_if_false (NULL_PTR,
6235 invert_truthvalue (TREE_OPERAND (exp, 0)));
6236 return const0_rtx;
6237
6238 case LABELED_BLOCK_EXPR:
6239 if (LABELED_BLOCK_BODY (exp))
6240 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6241 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6242 return const0_rtx;
6243
6244 case EXIT_BLOCK_EXPR:
6245 if (EXIT_BLOCK_RETURN (exp))
6246 sorry ("returned value in block_exit_expr");
6247 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6248 return const0_rtx;
6249
6250 case LOOP_EXPR:
6251 push_temp_slots ();
6252 expand_start_loop (1);
6253 expand_expr_stmt (TREE_OPERAND (exp, 0));
6254 expand_end_loop ();
6255 pop_temp_slots ();
6256
6257 return const0_rtx;
6258
6259 case BIND_EXPR:
6260 {
6261 tree vars = TREE_OPERAND (exp, 0);
6262 int vars_need_expansion = 0;
6263
6264 /* Need to open a binding contour here because
6265 if there are any cleanups they must be contained here. */
6266 expand_start_bindings (2);
6267
6268 /* Mark the corresponding BLOCK for output in its proper place. */
6269 if (TREE_OPERAND (exp, 2) != 0
6270 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6271 insert_block (TREE_OPERAND (exp, 2));
6272
6273 /* If VARS have not yet been expanded, expand them now. */
6274 while (vars)
6275 {
6276 if (DECL_RTL (vars) == 0)
6277 {
6278 vars_need_expansion = 1;
6279 expand_decl (vars);
6280 }
6281 expand_decl_init (vars);
6282 vars = TREE_CHAIN (vars);
6283 }
6284
6285 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6286
6287 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6288
6289 return temp;
6290 }
6291
6292 case RTL_EXPR:
6293 if (RTL_EXPR_SEQUENCE (exp))
6294 {
6295 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6296 abort ();
6297 emit_insns (RTL_EXPR_SEQUENCE (exp));
6298 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6299 }
6300 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6301 free_temps_for_rtl_expr (exp);
6302 return RTL_EXPR_RTL (exp);
6303
6304 case CONSTRUCTOR:
6305 /* If we don't need the result, just ensure we evaluate any
6306 subexpressions. */
6307 if (ignore)
6308 {
6309 tree elt;
6310 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6311 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6312 EXPAND_MEMORY_USE_BAD);
6313 return const0_rtx;
6314 }
6315
6316 /* All elts simple constants => refer to a constant in memory. But
6317 if this is a non-BLKmode mode, let it store a field at a time
6318 since that should make a CONST_INT or CONST_DOUBLE when we
6319 fold. Likewise, if we have a target we can use, it is best to
6320 store directly into the target unless the type is large enough
6321 that memcpy will be used. If we are making an initializer and
6322 all operands are constant, put it in memory as well. */
6323 else if ((TREE_STATIC (exp)
6324 && ((mode == BLKmode
6325 && ! (target != 0 && safe_from_p (target, exp, 1)))
6326 || TREE_ADDRESSABLE (exp)
6327 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6328 && (! MOVE_BY_PIECES_P
6329 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6330 TYPE_ALIGN (type)))
6331 && ! mostly_zeros_p (exp))))
6332 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6333 {
6334 rtx constructor = output_constant_def (exp);
6335
6336 if (modifier != EXPAND_CONST_ADDRESS
6337 && modifier != EXPAND_INITIALIZER
6338 && modifier != EXPAND_SUM
6339 && (! memory_address_p (GET_MODE (constructor),
6340 XEXP (constructor, 0))
6341 || (flag_force_addr
6342 && GET_CODE (XEXP (constructor, 0)) != REG)))
6343 constructor = change_address (constructor, VOIDmode,
6344 XEXP (constructor, 0));
6345 return constructor;
6346 }
6347
6348 else
6349 {
6350 /* Handle calls that pass values in multiple non-contiguous
6351 locations. The Irix 6 ABI has examples of this. */
6352 if (target == 0 || ! safe_from_p (target, exp, 1)
6353 || GET_CODE (target) == PARALLEL)
6354 {
6355 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6356 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6357 else
6358 target = assign_temp (type, 0, 1, 1);
6359 }
6360
6361 if (TREE_READONLY (exp))
6362 {
6363 if (GET_CODE (target) == MEM)
6364 target = copy_rtx (target);
6365
6366 RTX_UNCHANGING_P (target) = 1;
6367 }
6368
6369 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6370 int_size_in_bytes (TREE_TYPE (exp)));
6371 return target;
6372 }
6373
6374 case INDIRECT_REF:
6375 {
6376 tree exp1 = TREE_OPERAND (exp, 0);
6377 tree exp2;
6378 tree index;
6379 tree string = string_constant (exp1, &index);
6380
6381 /* Try to optimize reads from const strings. */
6382 if (string
6383 && TREE_CODE (string) == STRING_CST
6384 && TREE_CODE (index) == INTEGER_CST
6385 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6386 && GET_MODE_CLASS (mode) == MODE_INT
6387 && GET_MODE_SIZE (mode) == 1
6388 && modifier != EXPAND_MEMORY_USE_WO)
6389 return
6390 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6391
6392 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6393 op0 = memory_address (mode, op0);
6394
6395 if (cfun && current_function_check_memory_usage
6396 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6397 {
6398 enum memory_use_mode memory_usage;
6399 memory_usage = get_memory_usage_from_modifier (modifier);
6400
6401 if (memory_usage != MEMORY_USE_DONT)
6402 {
6403 in_check_memory_usage = 1;
6404 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6405 op0, Pmode,
6406 GEN_INT (int_size_in_bytes (type)),
6407 TYPE_MODE (sizetype),
6408 GEN_INT (memory_usage),
6409 TYPE_MODE (integer_type_node));
6410 in_check_memory_usage = 0;
6411 }
6412 }
6413
6414 temp = gen_rtx_MEM (mode, op0);
6415 /* If address was computed by addition,
6416 mark this as an element of an aggregate. */
6417 if (TREE_CODE (exp1) == PLUS_EXPR
6418 || (TREE_CODE (exp1) == SAVE_EXPR
6419 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6420 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6421 || (TREE_CODE (exp1) == ADDR_EXPR
6422 && (exp2 = TREE_OPERAND (exp1, 0))
6423 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6424 MEM_SET_IN_STRUCT_P (temp, 1);
6425
6426 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6427 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6428
6429 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6430 here, because, in C and C++, the fact that a location is accessed
6431 through a pointer to const does not mean that the value there can
6432 never change. Languages where it can never change should
6433 also set TREE_STATIC. */
6434 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6435
6436 /* If we are writing to this object and its type is a record with
6437 readonly fields, we must mark it as readonly so it will
6438 conflict with readonly references to those fields. */
6439 if (modifier == EXPAND_MEMORY_USE_WO
6440 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6441 RTX_UNCHANGING_P (temp) = 1;
6442
6443 return temp;
6444 }
6445
6446 case ARRAY_REF:
6447 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6448 abort ();
6449
6450 {
6451 tree array = TREE_OPERAND (exp, 0);
6452 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6453 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6454 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6455 HOST_WIDE_INT i;
6456
6457 /* Optimize the special-case of a zero lower bound.
6458
6459 We convert the low_bound to sizetype to avoid some problems
6460 with constant folding. (E.g. suppose the lower bound is 1,
6461 and its mode is QI. Without the conversion, (ARRAY
6462 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6463 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6464
6465 if (! integer_zerop (low_bound))
6466 index = size_diffop (index, convert (sizetype, low_bound));
6467
6468 /* Fold an expression like: "foo"[2].
6469 This is not done in fold so it won't happen inside &.
6470 Don't fold if this is for wide characters since it's too
6471 difficult to do correctly and this is a very rare case. */
6472
6473 if (TREE_CODE (array) == STRING_CST
6474 && TREE_CODE (index) == INTEGER_CST
6475 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6476 && GET_MODE_CLASS (mode) == MODE_INT
6477 && GET_MODE_SIZE (mode) == 1)
6478 return
6479 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6480
6481 /* If this is a constant index into a constant array,
6482 just get the value from the array. Handle both the cases when
6483 we have an explicit constructor and when our operand is a variable
6484 that was declared const. */
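
	/* For instance (illustrative only), with optimization enabled and

		static const char msg[] = "hi";

	   an access such as msg[0] is expanded by the branch below that
	   looks at DECL_INITIAL of the array, yielding GEN_INT ('h') with
	   no memory reference emitted.  */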
6485
6486 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6487 && TREE_CODE (index) == INTEGER_CST
6488 && 0 > compare_tree_int (index,
6489 list_length (CONSTRUCTOR_ELTS
6490 (TREE_OPERAND (exp, 0)))))
6491 {
6492 tree elem;
6493
6494 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6495 i = TREE_INT_CST_LOW (index);
6496 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6497 ;
6498
6499 if (elem)
6500 return expand_expr (fold (TREE_VALUE (elem)), target,
6501 tmode, ro_modifier);
6502 }
6503
6504 else if (optimize >= 1
6505 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6506 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6507 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6508 {
6509 if (TREE_CODE (index) == INTEGER_CST)
6510 {
6511 tree init = DECL_INITIAL (array);
6512
6513 if (TREE_CODE (init) == CONSTRUCTOR)
6514 {
6515 tree elem;
6516
6517 for (elem = CONSTRUCTOR_ELTS (init);
6518 (elem
6519 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6520 elem = TREE_CHAIN (elem))
6521 ;
6522
6523 if (elem)
6524 return expand_expr (fold (TREE_VALUE (elem)), target,
6525 tmode, ro_modifier);
6526 }
6527 else if (TREE_CODE (init) == STRING_CST
6528 && 0 > compare_tree_int (index,
6529 TREE_STRING_LENGTH (init)))
6530 return (GEN_INT
6531 (TREE_STRING_POINTER
6532 (init)[TREE_INT_CST_LOW (index)]));
6533 }
6534 }
6535 }
6536
6537 /* ... fall through ... */
6538
6539 case COMPONENT_REF:
6540 case BIT_FIELD_REF:
6541 /* If the operand is a CONSTRUCTOR, we can just extract the
6542 appropriate field if it is present. Don't do this if we have
6543 already written the data since we want to refer to that copy
6544 and varasm.c assumes that's what we'll do. */
6545 if (code != ARRAY_REF
6546 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6547 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6548 {
6549 tree elt;
6550
6551 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6552 elt = TREE_CHAIN (elt))
6553 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6554 /* We can normally use the value of the field in the
6555 CONSTRUCTOR. However, if this is a bitfield in
6556 an integral mode that we can fit in a HOST_WIDE_INT,
6557 we must mask only the number of bits in the bitfield,
6558 since this is done implicitly by the constructor. If
6559 the bitfield does not meet either of those conditions,
6560 we can't do this optimization. */
6561 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6562 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6563 == MODE_INT)
6564 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6565 <= HOST_BITS_PER_WIDE_INT))))
6566 {
6567 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6568 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6569 {
6570 HOST_WIDE_INT bitsize
6571 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6572
6573 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6574 {
6575 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6576 op0 = expand_and (op0, op1, target);
6577 }
6578 else
6579 {
6580 enum machine_mode imode
6581 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6582 tree count
6583 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6584 0);
6585
6586 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6587 target, 0);
6588 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6589 target, 0);
6590 }
6591 }
6592
6593 return op0;
6594 }
6595 }
6596
6597 {
6598 enum machine_mode mode1;
6599 HOST_WIDE_INT bitsize, bitpos;
6600 tree offset;
6601 int volatilep = 0;
6602 unsigned int alignment;
6603 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6604 &mode1, &unsignedp, &volatilep,
6605 &alignment);
6606
6607 /* If we got back the original object, something is wrong. Perhaps
6608 we are evaluating an expression too early. In any event, don't
6609 infinitely recurse. */
6610 if (tem == exp)
6611 abort ();
6612
6613 /* If TEM's type is a union of variable size, pass TARGET to the inner
6614 	   computation, since it will need a temporary and TARGET is known
6615 	   to suffice.  This occurs in unchecked conversion in Ada.  */
6616
6617 op0 = expand_expr (tem,
6618 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6619 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6620 != INTEGER_CST)
6621 ? target : NULL_RTX),
6622 VOIDmode,
6623 (modifier == EXPAND_INITIALIZER
6624 || modifier == EXPAND_CONST_ADDRESS)
6625 ? modifier : EXPAND_NORMAL);
6626
6627 /* If this is a constant, put it into a register if it is a
6628 legitimate constant and OFFSET is 0 and memory if it isn't. */
6629 if (CONSTANT_P (op0))
6630 {
6631 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6632 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6633 && offset == 0)
6634 op0 = force_reg (mode, op0);
6635 else
6636 op0 = validize_mem (force_const_mem (mode, op0));
6637 }
6638
6639 if (offset != 0)
6640 {
6641 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6642
6643 /* If this object is in memory, put it into a register.
6644 This case can't occur in C, but can in Ada if we have
6645 unchecked conversion of an expression from a scalar type to
6646 an array or record type. */
6647 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6648 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6649 {
6650 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6651
6652 mark_temp_addr_taken (memloc);
6653 emit_move_insn (memloc, op0);
6654 op0 = memloc;
6655 }
6656
6657 if (GET_CODE (op0) != MEM)
6658 abort ();
6659
6660 if (GET_MODE (offset_rtx) != ptr_mode)
6661 {
6662 #ifdef POINTERS_EXTEND_UNSIGNED
6663 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6664 #else
6665 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6666 #endif
6667 }
6668
6669 	      /* A constant address in OP0 can have VOIDmode; we must not try
6670 		 to call force_reg for that case, so avoid it.  */
6671 if (GET_CODE (op0) == MEM
6672 && GET_MODE (op0) == BLKmode
6673 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6674 && bitsize != 0
6675 && (bitpos % bitsize) == 0
6676 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6677 && alignment == GET_MODE_ALIGNMENT (mode1))
6678 {
6679 rtx temp = change_address (op0, mode1,
6680 plus_constant (XEXP (op0, 0),
6681 (bitpos /
6682 BITS_PER_UNIT)));
6683 if (GET_CODE (XEXP (temp, 0)) == REG)
6684 op0 = temp;
6685 else
6686 op0 = change_address (op0, mode1,
6687 force_reg (GET_MODE (XEXP (temp, 0)),
6688 XEXP (temp, 0)));
6689 bitpos = 0;
6690 }
6691
6692
6693 op0 = change_address (op0, VOIDmode,
6694 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6695 force_reg (ptr_mode,
6696 offset_rtx)));
6697 }
6698
6699 /* Don't forget about volatility even if this is a bitfield. */
6700 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6701 {
6702 op0 = copy_rtx (op0);
6703 MEM_VOLATILE_P (op0) = 1;
6704 }
6705
6706 /* Check the access. */
6707 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6708 {
6709 enum memory_use_mode memory_usage;
6710 memory_usage = get_memory_usage_from_modifier (modifier);
6711
6712 if (memory_usage != MEMORY_USE_DONT)
6713 {
6714 rtx to;
6715 int size;
6716
6717 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6718 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6719
6720 /* Check the access right of the pointer. */
6721 if (size > BITS_PER_UNIT)
6722 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6723 to, Pmode,
6724 GEN_INT (size / BITS_PER_UNIT),
6725 TYPE_MODE (sizetype),
6726 GEN_INT (memory_usage),
6727 TYPE_MODE (integer_type_node));
6728 }
6729 }
6730
6731 /* In cases where an aligned union has an unaligned object
6732 as a field, we might be extracting a BLKmode value from
6733 an integer-mode (e.g., SImode) object. Handle this case
6734 by doing the extract into an object as wide as the field
6735 (which we know to be the width of a basic mode), then
6736 storing into memory, and changing the mode to BLKmode.
6737 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6738 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6739 if (mode1 == VOIDmode
6740 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6741 || (modifier != EXPAND_CONST_ADDRESS
6742 && modifier != EXPAND_INITIALIZER
6743 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6744 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6745 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6746 /* If the field isn't aligned enough to fetch as a memref,
6747 fetch it as a bit field. */
6748 || (mode1 != BLKmode
6749 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6750 && ((TYPE_ALIGN (TREE_TYPE (tem))
6751 < GET_MODE_ALIGNMENT (mode))
6752 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6753 /* If the type and the field are a constant size and the
6754 size of the type isn't the same size as the bitfield,
6755 we must use bitfield operations. */
6756 || ((bitsize >= 0
6757 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6758 == INTEGER_CST)
6759 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6760 bitsize)))))
6761 || (modifier != EXPAND_CONST_ADDRESS
6762 && modifier != EXPAND_INITIALIZER
6763 && mode == BLKmode
6764 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6765 && (TYPE_ALIGN (type) > alignment
6766 || bitpos % TYPE_ALIGN (type) != 0)))
6767 {
6768 enum machine_mode ext_mode = mode;
6769
6770 if (ext_mode == BLKmode
6771 && ! (target != 0 && GET_CODE (op0) == MEM
6772 && GET_CODE (target) == MEM
6773 && bitpos % BITS_PER_UNIT == 0))
6774 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6775
6776 if (ext_mode == BLKmode)
6777 {
6778 /* In this case, BITPOS must start at a byte boundary and
6779 TARGET, if specified, must be a MEM. */
6780 if (GET_CODE (op0) != MEM
6781 || (target != 0 && GET_CODE (target) != MEM)
6782 || bitpos % BITS_PER_UNIT != 0)
6783 abort ();
6784
6785 op0 = change_address (op0, VOIDmode,
6786 plus_constant (XEXP (op0, 0),
6787 bitpos / BITS_PER_UNIT));
6788 if (target == 0)
6789 target = assign_temp (type, 0, 1, 1);
6790
6791 emit_block_move (target, op0,
6792 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6793 / BITS_PER_UNIT),
6794 BITS_PER_UNIT);
6795
6796 return target;
6797 }
6798
6799 op0 = validize_mem (op0);
6800
6801 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6802 mark_reg_pointer (XEXP (op0, 0), alignment);
6803
6804 op0 = extract_bit_field (op0, bitsize, bitpos,
6805 unsignedp, target, ext_mode, ext_mode,
6806 alignment,
6807 int_size_in_bytes (TREE_TYPE (tem)));
6808
6809 /* If the result is a record type and BITSIZE is narrower than
6810 the mode of OP0, an integral mode, and this is a big endian
6811 machine, we must put the field into the high-order bits. */
6812 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6813 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6814 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6815 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6816 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6817 - bitsize),
6818 op0, 1);
6819
6820 if (mode == BLKmode)
6821 {
6822 rtx new = assign_stack_temp (ext_mode,
6823 bitsize / BITS_PER_UNIT, 0);
6824
6825 emit_move_insn (new, op0);
6826 op0 = copy_rtx (new);
6827 PUT_MODE (op0, BLKmode);
6828 MEM_SET_IN_STRUCT_P (op0, 1);
6829 }
6830
6831 return op0;
6832 }
6833
6834 /* If the result is BLKmode, use that to access the object
6835 now as well. */
6836 if (mode == BLKmode)
6837 mode1 = BLKmode;
6838
6839 /* Get a reference to just this component. */
6840 if (modifier == EXPAND_CONST_ADDRESS
6841 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6842 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6843 (bitpos / BITS_PER_UNIT)));
6844 else
6845 op0 = change_address (op0, mode1,
6846 plus_constant (XEXP (op0, 0),
6847 (bitpos / BITS_PER_UNIT)));
6848
6849 if (GET_CODE (op0) == MEM)
6850 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6851
6852 if (GET_CODE (XEXP (op0, 0)) == REG)
6853 mark_reg_pointer (XEXP (op0, 0), alignment);
6854
6855 MEM_SET_IN_STRUCT_P (op0, 1);
6856 MEM_VOLATILE_P (op0) |= volatilep;
6857 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6858 || modifier == EXPAND_CONST_ADDRESS
6859 || modifier == EXPAND_INITIALIZER)
6860 return op0;
6861 else if (target == 0)
6862 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6863
6864 convert_move (target, op0, unsignedp);
6865 return target;
6866 }
6867
6868 /* Intended for a reference to a buffer of a file-object in Pascal.
6869 But it's not certain that a special tree code will really be
6870 necessary for these. INDIRECT_REF might work for them. */
6871 case BUFFER_REF:
6872 abort ();
6873
6874 case IN_EXPR:
6875 {
6876 /* Pascal set IN expression.
6877
6878 Algorithm:
6879 rlo = set_low - (set_low%bits_per_word);
6880 the_word = set [ (index - rlo)/bits_per_word ];
6881 bit_index = index % bits_per_word;
6882 bitmask = 1 << bit_index;
6883 return !!(the_word & bitmask); */
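
	/* A worked instance of the sketch above (numbers invented), taking
	   bits_per_word as BITS_PER_UNIT == 8: with set_low = 10 and
	   index = 21, rlo = 10 - (10 % 8) = 8, the word index is
	   (21 - 8) / 8 = 1, bit_index = 21 % 8 = 5, and bitmask = 1 << 5 =
	   0x20, so the membership test inspects bit 5 of the second byte
	   of the set.  */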
6884
6885 tree set = TREE_OPERAND (exp, 0);
6886 tree index = TREE_OPERAND (exp, 1);
6887 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6888 tree set_type = TREE_TYPE (set);
6889 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6890 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6891 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6892 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6893 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6894 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6895 rtx setaddr = XEXP (setval, 0);
6896 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6897 rtx rlow;
6898 rtx diff, quo, rem, addr, bit, result;
6899
6900 preexpand_calls (exp);
6901
6902 /* If domain is empty, answer is no. Likewise if index is constant
6903 and out of bounds. */
6904 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6905 && TREE_CODE (set_low_bound) == INTEGER_CST
6906 && tree_int_cst_lt (set_high_bound, set_low_bound))
6907 || (TREE_CODE (index) == INTEGER_CST
6908 && TREE_CODE (set_low_bound) == INTEGER_CST
6909 && tree_int_cst_lt (index, set_low_bound))
6910 || (TREE_CODE (set_high_bound) == INTEGER_CST
6911 && TREE_CODE (index) == INTEGER_CST
6912 && tree_int_cst_lt (set_high_bound, index))))
6913 return const0_rtx;
6914
6915 if (target == 0)
6916 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6917
6918 /* If we get here, we have to generate the code for both cases
6919 (in range and out of range). */
6920
6921 op0 = gen_label_rtx ();
6922 op1 = gen_label_rtx ();
6923
6924 if (! (GET_CODE (index_val) == CONST_INT
6925 && GET_CODE (lo_r) == CONST_INT))
6926 {
6927 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6928 GET_MODE (index_val), iunsignedp, 0, op1);
6929 }
6930
6931 if (! (GET_CODE (index_val) == CONST_INT
6932 && GET_CODE (hi_r) == CONST_INT))
6933 {
6934 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6935 GET_MODE (index_val), iunsignedp, 0, op1);
6936 }
6937
6938 /* Calculate the element number of bit zero in the first word
6939 of the set. */
6940 if (GET_CODE (lo_r) == CONST_INT)
6941 rlow = GEN_INT (INTVAL (lo_r)
6942 			  & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
6943 else
6944 rlow = expand_binop (index_mode, and_optab, lo_r,
6945 			       GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
6946 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6947
6948 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6949 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6950
6951 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6952 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6953 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6954 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6955
6956 addr = memory_address (byte_mode,
6957 				  expand_binop (index_mode, add_optab, quo,
6958 setaddr, NULL_RTX, iunsignedp,
6959 OPTAB_LIB_WIDEN));
6960
6961 /* Extract the bit we want to examine */
6962 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6963 gen_rtx_MEM (byte_mode, addr),
6964 make_tree (TREE_TYPE (index), rem),
6965 NULL_RTX, 1);
6966 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6967 GET_MODE (target) == byte_mode ? target : 0,
6968 1, OPTAB_LIB_WIDEN);
6969
6970 if (result != target)
6971 convert_move (target, result, 1);
6972
6973 /* Output the code to handle the out-of-range case. */
6974 emit_jump (op0);
6975 emit_label (op1);
6976 emit_move_insn (target, const0_rtx);
6977 emit_label (op0);
6978 return target;
6979 }
6980
6981 case WITH_CLEANUP_EXPR:
6982 if (RTL_EXPR_RTL (exp) == 0)
6983 {
6984 RTL_EXPR_RTL (exp)
6985 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6986 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6987
6988 /* That's it for this cleanup. */
6989 TREE_OPERAND (exp, 2) = 0;
6990 }
6991 return RTL_EXPR_RTL (exp);
6992
6993 case CLEANUP_POINT_EXPR:
6994 {
6995 /* Start a new binding layer that will keep track of all cleanup
6996 actions to be performed. */
6997 expand_start_bindings (2);
6998
6999 target_temp_slot_level = temp_slot_level;
7000
7001 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7002 /* If we're going to use this value, load it up now. */
7003 if (! ignore)
7004 op0 = force_not_mem (op0);
7005 preserve_temp_slots (op0);
7006 expand_end_bindings (NULL_TREE, 0, 0);
7007 }
7008 return op0;
7009
7010 case CALL_EXPR:
7011 /* Check for a built-in function. */
7012 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7013 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7014 == FUNCTION_DECL)
7015 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7016 return expand_builtin (exp, target, subtarget, tmode, ignore);
7017
7018 /* If this call was expanded already by preexpand_calls,
7019 just return the result we got. */
7020 if (CALL_EXPR_RTL (exp) != 0)
7021 return CALL_EXPR_RTL (exp);
7022
7023 return expand_call (exp, target, ignore);
7024
7025 case NON_LVALUE_EXPR:
7026 case NOP_EXPR:
7027 case CONVERT_EXPR:
7028 case REFERENCE_EXPR:
7029 if (TREE_CODE (type) == UNION_TYPE)
7030 {
7031 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7032
7033 /* If both input and output are BLKmode, this conversion
7034 isn't actually doing anything unless we need to make the
7035 alignment stricter. */
7036 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7037 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7038 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7039 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7040 modifier);
7041
7042 if (target == 0)
7043 {
7044 if (mode != BLKmode)
7045 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7046 else
7047 target = assign_temp (type, 0, 1, 1);
7048 }
7049
7050 if (GET_CODE (target) == MEM)
7051 /* Store data into beginning of memory target. */
7052 store_expr (TREE_OPERAND (exp, 0),
7053 change_address (target, TYPE_MODE (valtype), 0), 0);
7054
7055 else if (GET_CODE (target) == REG)
7056 /* Store this field into a union of the proper type. */
7057 store_field (target,
7058 MIN ((int_size_in_bytes (TREE_TYPE
7059 (TREE_OPERAND (exp, 0)))
7060 * BITS_PER_UNIT),
7061 GET_MODE_BITSIZE (mode)),
7062 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7063 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7064 else
7065 abort ();
7066
7067 /* Return the entire union. */
7068 return target;
7069 }
7070
7071 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7072 {
7073 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7074 ro_modifier);
7075
7076 /* If the signedness of the conversion differs and OP0 is
7077 a promoted SUBREG, clear that indication since we now
7078 have to do the proper extension. */
7079 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7080 && GET_CODE (op0) == SUBREG)
7081 SUBREG_PROMOTED_VAR_P (op0) = 0;
7082
7083 return op0;
7084 }
7085
7086 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7087 if (GET_MODE (op0) == mode)
7088 return op0;
7089
7090 /* If OP0 is a constant, just convert it into the proper mode. */
7091 if (CONSTANT_P (op0))
7092 return
7093 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7094 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7095
7096 if (modifier == EXPAND_INITIALIZER)
7097 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7098
7099 if (target == 0)
7100 return
7101 convert_to_mode (mode, op0,
7102 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7103 else
7104 convert_move (target, op0,
7105 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7106 return target;
7107
7108 case PLUS_EXPR:
7109 /* We come here from MINUS_EXPR when the second operand is a
7110 constant. */
7111 plus_expr:
7112 this_optab = add_optab;
7113
7114 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7115 something else, make sure we add the register to the constant and
7116 then to the other thing. This case can occur during strength
7117 reduction and doing it this way will produce better code if the
7118 frame pointer or argument pointer is eliminated.
7119
7120 fold-const.c will ensure that the constant is always in the inner
7121 PLUS_EXPR, so the only case we need to do anything about is if
7122 sp, ap, or fp is our second argument, in which case we must swap
7123 the innermost first argument and our second argument. */
7124
7125 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7126 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7127 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7128 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7129 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7130 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7131 {
7132 tree t = TREE_OPERAND (exp, 1);
7133
7134 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7135 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7136 }
7137
7138 /* If the result is to be ptr_mode and we are adding an integer to
7139 something, we might be forming a constant. So try to use
7140 plus_constant. If it produces a sum and we can't accept it,
7141 use force_operand. This allows P = &ARR[const] to generate
7142 efficient code on machines where a SYMBOL_REF is not a valid
7143 address.
7144
7145 If this is an EXPAND_SUM call, always return the sum. */
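      /* For instance, with a global array ARR of 4-byte elements,
	 P = &ARR[3] expands the address of ARR to (symbol_ref ARR) and
	 plus_constant then folds the offset in, giving roughly
	 (const (plus (symbol_ref ARR) (const_int 12))).  */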
7146 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7147 || mode == ptr_mode)
7148 {
7149 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7150 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7151 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7152 {
7153 rtx constant_part;
7154
7155 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7156 EXPAND_SUM);
7157 /* Use immed_double_const to ensure that the constant is
7158 truncated according to the mode of OP1, then sign extended
7159 to a HOST_WIDE_INT. Using the constant directly can result
7160 in non-canonical RTL in a 64x32 cross compile. */
7161 constant_part
7162 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7163 (HOST_WIDE_INT) 0,
7164 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7165 op1 = plus_constant (op1, INTVAL (constant_part));
7166 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7167 op1 = force_operand (op1, target);
7168 return op1;
7169 }
7170
7171 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7172 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7173 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7174 {
7175 rtx constant_part;
7176
7177 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7178 EXPAND_SUM);
7179 if (! CONSTANT_P (op0))
7180 {
7181 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7182 VOIDmode, modifier);
7183 /* Don't go to both_summands if modifier
7184 says it's not right to return a PLUS. */
7185 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7186 goto binop2;
7187 goto both_summands;
7188 }
7189 /* Use immed_double_const to ensure that the constant is
7190 	      truncated according to the mode of OP0, then sign extended
7191 to a HOST_WIDE_INT. Using the constant directly can result
7192 in non-canonical RTL in a 64x32 cross compile. */
7193 constant_part
7194 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7195 (HOST_WIDE_INT) 0,
7196 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7197 op0 = plus_constant (op0, INTVAL (constant_part));
7198 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7199 op0 = force_operand (op0, target);
7200 return op0;
7201 }
7202 }
7203
7204 /* No sense saving up arithmetic to be done
7205 if it's all in the wrong mode to form part of an address.
7206 And force_operand won't know whether to sign-extend or
7207 zero-extend. */
7208 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7209 || mode != ptr_mode)
7210 goto binop;
7211
7212 preexpand_calls (exp);
7213 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7214 subtarget = 0;
7215
7216 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7217 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7218
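      /* At this point OP0 and OP1 hold RTL for the two summands.  The
	 code below puts the sum into the canonical shape used for
	 addresses: any MULT term first and any constant term last,
	 e.g. (plus (plus (mult R1 (const_int 4)) R2) (const_int 8)),
	 where R1 and R2 stand for arbitrary registers.  */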
7219 both_summands:
7220 /* Make sure any term that's a sum with a constant comes last. */
7221 if (GET_CODE (op0) == PLUS
7222 && CONSTANT_P (XEXP (op0, 1)))
7223 {
7224 temp = op0;
7225 op0 = op1;
7226 op1 = temp;
7227 }
7228 /* If adding to a sum including a constant,
7229 associate it to put the constant outside. */
7230 if (GET_CODE (op1) == PLUS
7231 && CONSTANT_P (XEXP (op1, 1)))
7232 {
7233 rtx constant_term = const0_rtx;
7234
7235 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7236 if (temp != 0)
7237 op0 = temp;
7238 /* Ensure that MULT comes first if there is one. */
7239 else if (GET_CODE (op0) == MULT)
7240 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7241 else
7242 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7243
7244 /* Let's also eliminate constants from op0 if possible. */
7245 op0 = eliminate_constant_term (op0, &constant_term);
7246
7247 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7248 their sum should be a constant. Form it into OP1, since the
7249 result we want will then be OP0 + OP1. */
7250
7251 temp = simplify_binary_operation (PLUS, mode, constant_term,
7252 XEXP (op1, 1));
7253 if (temp != 0)
7254 op1 = temp;
7255 else
7256 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7257 }
7258
7259 /* Put a constant term last and put a multiplication first. */
7260 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7261 temp = op1, op1 = op0, op0 = temp;
7262
7263 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7264 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7265
7266 case MINUS_EXPR:
7267 /* For initializers, we are allowed to return a MINUS of two
7268 symbolic constants. Here we handle all cases when both operands
7269 are constant. */
7270 /* Handle difference of two symbolic constants,
7271 for the sake of an initializer. */
7272 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7273 && really_constant_p (TREE_OPERAND (exp, 0))
7274 && really_constant_p (TREE_OPERAND (exp, 1)))
7275 {
7276 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7277 VOIDmode, ro_modifier);
7278 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7279 VOIDmode, ro_modifier);
7280
7281 /* If the last operand is a CONST_INT, use plus_constant of
7282 the negated constant. Else make the MINUS. */
7283 if (GET_CODE (op1) == CONST_INT)
7284 return plus_constant (op0, - INTVAL (op1));
7285 else
7286 return gen_rtx_MINUS (mode, op0, op1);
7287 }
7288 /* Convert A - const to A + (-const). */
7289 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7290 {
7291 tree negated = fold (build1 (NEGATE_EXPR, type,
7292 TREE_OPERAND (exp, 1)));
7293
7294 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7295 /* If we can't negate the constant in TYPE, leave it alone and
7296 expand_binop will negate it for us. We used to try to do it
7297 here in the signed version of TYPE, but that doesn't work
7298 on POINTER_TYPEs. */;
7299 else
7300 {
7301 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7302 goto plus_expr;
7303 }
7304 }
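      /* For instance, A - 5 becomes A + -5 and is handled by the
	 PLUS_EXPR code above; for unsigned types, or when negating the
	 constant would overflow (e.g. subtracting the most negative
	 integer), we fall through and emit a plain subtraction.  */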
7305 this_optab = sub_optab;
7306 goto binop;
7307
7308 case MULT_EXPR:
7309 preexpand_calls (exp);
7310 /* If first operand is constant, swap them.
7311 Thus the following special case checks need only
7312 check the second operand. */
7313 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7314 {
7315 register tree t1 = TREE_OPERAND (exp, 0);
7316 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7317 TREE_OPERAND (exp, 1) = t1;
7318 }
7319
7320 /* Attempt to return something suitable for generating an
7321 indexed address, for machines that support that. */
7322
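      /* For instance, when (i + 1) * 4 is expanded for an address,
	 OP0 becomes (plus I (const_int 1)), where I is the pseudo
	 holding i, and the distributive law below rewrites the product
	 as (plus (mult I (const_int 4)) (const_int 4)), a shape that
	 fits indexed addressing modes.  */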
7323 if (modifier == EXPAND_SUM && mode == ptr_mode
7324 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7326 {
7327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7328 EXPAND_SUM);
7329
7330 /* Apply distributive law if OP0 is x+c. */
7331 if (GET_CODE (op0) == PLUS
7332 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7333 return
7334 gen_rtx_PLUS
7335 (mode,
7336 gen_rtx_MULT
7337 (mode, XEXP (op0, 0),
7338 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7339 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7340 * INTVAL (XEXP (op0, 1))));
7341
7342 if (GET_CODE (op0) != REG)
7343 op0 = force_operand (op0, NULL_RTX);
7344 if (GET_CODE (op0) != REG)
7345 op0 = copy_to_mode_reg (mode, op0);
7346
7347 return
7348 gen_rtx_MULT (mode, op0,
7349 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7350 }
7351
7352 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7353 subtarget = 0;
7354
7355 /* Check for multiplying things that have been extended
7356 from a narrower type. If this machine supports multiplying
7357 in that narrower type with a result in the desired type,
7358 do it that way, and avoid the explicit type-conversion. */
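      /* For instance, (int) (short) a * (int) (short) b on a machine
	 with a 16x16->32 multiply can use the widening multiply
	 directly instead of extending both operands first; likewise
	 for a narrow operand times a small constant.  */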
7359 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7360 && TREE_CODE (type) == INTEGER_TYPE
7361 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7362 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7363 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7364 && int_fits_type_p (TREE_OPERAND (exp, 1),
7365 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7366 /* Don't use a widening multiply if a shift will do. */
7367 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7368 > HOST_BITS_PER_WIDE_INT)
7369 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7370 ||
7371 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7372 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7373 ==
7374 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7375 /* If both operands are extended, they must either both
7376 be zero-extended or both be sign-extended. */
7377 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7378 ==
7379 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7380 {
7381 enum machine_mode innermode
7382 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7383 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7384 ? smul_widen_optab : umul_widen_optab);
7385 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7386 ? umul_widen_optab : smul_widen_optab);
7387 if (mode == GET_MODE_WIDER_MODE (innermode))
7388 {
7389 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7390 {
7391 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7392 NULL_RTX, VOIDmode, 0);
7393 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7394 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7395 VOIDmode, 0);
7396 else
7397 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7398 NULL_RTX, VOIDmode, 0);
7399 goto binop2;
7400 }
7401 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7402 && innermode == word_mode)
7403 {
7404 rtx htem;
7405 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7406 NULL_RTX, VOIDmode, 0);
7407 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7408 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7409 VOIDmode, 0);
7410 else
7411 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7412 NULL_RTX, VOIDmode, 0);
7413 temp = expand_binop (mode, other_optab, op0, op1, target,
7414 unsignedp, OPTAB_LIB_WIDEN);
7415 htem = expand_mult_highpart_adjust (innermode,
7416 gen_highpart (innermode, temp),
7417 op0, op1,
7418 gen_highpart (innermode, temp),
7419 unsignedp);
7420 emit_move_insn (gen_highpart (innermode, temp), htem);
7421 return temp;
7422 }
7423 }
7424 }
7425 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7426 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7427 return expand_mult (mode, op0, op1, target, unsignedp);
7428
7429 case TRUNC_DIV_EXPR:
7430 case FLOOR_DIV_EXPR:
7431 case CEIL_DIV_EXPR:
7432 case ROUND_DIV_EXPR:
7433 case EXACT_DIV_EXPR:
7434 preexpand_calls (exp);
7435 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7436 subtarget = 0;
7437 /* Possible optimization: compute the dividend with EXPAND_SUM
7438 	 then, if the divisor is constant, we can optimize the case
7439 	 where some terms of the dividend have coefficients divisible by it.  */
7440 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7441 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7442 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7443
7444 case RDIV_EXPR:
7445 this_optab = flodiv_optab;
7446 goto binop;
7447
7448 case TRUNC_MOD_EXPR:
7449 case FLOOR_MOD_EXPR:
7450 case CEIL_MOD_EXPR:
7451 case ROUND_MOD_EXPR:
7452 preexpand_calls (exp);
7453 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7454 subtarget = 0;
7455 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7456 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7457 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7458
7459 case FIX_ROUND_EXPR:
7460 case FIX_FLOOR_EXPR:
7461 case FIX_CEIL_EXPR:
7462 abort (); /* Not used for C. */
7463
7464 case FIX_TRUNC_EXPR:
7465 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7466 if (target == 0)
7467 target = gen_reg_rtx (mode);
7468 expand_fix (target, op0, unsignedp);
7469 return target;
7470
7471 case FLOAT_EXPR:
7472 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7473 if (target == 0)
7474 target = gen_reg_rtx (mode);
7475 /* expand_float can't figure out what to do if FROM has VOIDmode.
7476 So give it the correct mode. With -O, cse will optimize this. */
7477 if (GET_MODE (op0) == VOIDmode)
7478 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7479 op0);
7480 expand_float (target, op0,
7481 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7482 return target;
7483
7484 case NEGATE_EXPR:
7485 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7486 temp = expand_unop (mode, neg_optab, op0, target, 0);
7487 if (temp == 0)
7488 abort ();
7489 return temp;
7490
7491 case ABS_EXPR:
7492 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7493
7494 /* Handle complex values specially. */
7495 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7496 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7497 return expand_complex_abs (mode, op0, target, unsignedp);
7498
7499 /* Unsigned abs is simply the operand. Testing here means we don't
7500 risk generating incorrect code below. */
7501 if (TREE_UNSIGNED (type))
7502 return op0;
7503
7504 return expand_abs (mode, op0, target,
7505 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7506
7507 case MAX_EXPR:
7508 case MIN_EXPR:
7509 target = original_target;
7510 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7511 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7512 || GET_MODE (target) != mode
7513 || (GET_CODE (target) == REG
7514 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7515 target = gen_reg_rtx (mode);
7516 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7517 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7518
7519 /* First try to do it with a special MIN or MAX instruction.
7520 If that does not win, use a conditional jump to select the proper
7521 value. */
7522 this_optab = (TREE_UNSIGNED (type)
7523 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7524 : (code == MIN_EXPR ? smin_optab : smax_optab));
7525
7526 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7527 OPTAB_WIDEN);
7528 if (temp != 0)
7529 return temp;
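      /* Otherwise fall back to an explicit compare and branch.  For
	 MAX this is roughly: target = A; if (A >= B) goto done;
	 target = B; done:  (and with <= for MIN).  */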
7530
7531 /* At this point, a MEM target is no longer useful; we will get better
7532 code without it. */
7533
7534 if (GET_CODE (target) == MEM)
7535 target = gen_reg_rtx (mode);
7536
7537 if (target != op0)
7538 emit_move_insn (target, op0);
7539
7540 op0 = gen_label_rtx ();
7541
7542 /* If this mode is an integer too wide to compare properly,
7543 compare word by word. Rely on cse to optimize constant cases. */
7544 if (GET_MODE_CLASS (mode) == MODE_INT
7545 && ! can_compare_p (GE, mode, ccp_jump))
7546 {
7547 if (code == MAX_EXPR)
7548 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7549 target, op1, NULL_RTX, op0);
7550 else
7551 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7552 op1, target, NULL_RTX, op0);
7553 }
7554 else
7555 {
7556 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7557 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7558 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7559 op0);
7560 }
7561 emit_move_insn (target, op1);
7562 emit_label (op0);
7563 return target;
7564
7565 case BIT_NOT_EXPR:
7566 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7567 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7568 if (temp == 0)
7569 abort ();
7570 return temp;
7571
7572 case FFS_EXPR:
7573 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7574 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7575 if (temp == 0)
7576 abort ();
7577 return temp;
7578
7579 /* ??? Can optimize bitwise operations with one arg constant.
7580 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7581 and (a bitwise1 b) bitwise2 b (etc)
7582 	 but that is probably not worthwhile.  */
7583
7584 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7585 boolean values when we want in all cases to compute both of them. In
7586 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7587 as actual zero-or-1 values and then bitwise anding. In cases where
7588 there cannot be any side effects, better code would be made by
7589 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7590 how to recognize those cases. */
7591
7592 case TRUTH_AND_EXPR:
7593 case BIT_AND_EXPR:
7594 this_optab = and_optab;
7595 goto binop;
7596
7597 case TRUTH_OR_EXPR:
7598 case BIT_IOR_EXPR:
7599 this_optab = ior_optab;
7600 goto binop;
7601
7602 case TRUTH_XOR_EXPR:
7603 case BIT_XOR_EXPR:
7604 this_optab = xor_optab;
7605 goto binop;
7606
7607 case LSHIFT_EXPR:
7608 case RSHIFT_EXPR:
7609 case LROTATE_EXPR:
7610 case RROTATE_EXPR:
7611 preexpand_calls (exp);
7612 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7613 subtarget = 0;
7614 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7615 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7616 unsignedp);
7617
7618 /* Could determine the answer when only additive constants differ. Also,
7619 the addition of one can be handled by changing the condition. */
7620 case LT_EXPR:
7621 case LE_EXPR:
7622 case GT_EXPR:
7623 case GE_EXPR:
7624 case EQ_EXPR:
7625 case NE_EXPR:
7626 case UNORDERED_EXPR:
7627 case ORDERED_EXPR:
7628 case UNLT_EXPR:
7629 case UNLE_EXPR:
7630 case UNGT_EXPR:
7631 case UNGE_EXPR:
7632 case UNEQ_EXPR:
7633 preexpand_calls (exp);
7634 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7635 if (temp != 0)
7636 return temp;
7637
7638 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
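      /* That is, roughly: temp = foo; if (temp == 0) goto L;
	 temp = 1; L:  */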
7639 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7640 && original_target
7641 && GET_CODE (original_target) == REG
7642 && (GET_MODE (original_target)
7643 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7644 {
7645 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7646 VOIDmode, 0);
7647
7648 if (temp != original_target)
7649 temp = copy_to_reg (temp);
7650
7651 op1 = gen_label_rtx ();
7652 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7653 GET_MODE (temp), unsignedp, 0, op1);
7654 emit_move_insn (temp, const1_rtx);
7655 emit_label (op1);
7656 return temp;
7657 }
7658
7659 /* If no set-flag instruction, must generate a conditional
7660 store into a temporary variable. Drop through
7661 and handle this like && and ||. */
7662
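      /* Schematically, for a && b with no store-flag instruction this
	 emits: target = 0; if (!(a && b)) goto L; target = 1; L:
	 with jumpifnot producing the short-circuit branches.  */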
7663 case TRUTH_ANDIF_EXPR:
7664 case TRUTH_ORIF_EXPR:
7665 if (! ignore
7666 && (target == 0 || ! safe_from_p (target, exp, 1)
7667 /* Make sure we don't have a hard reg (such as function's return
7668 value) live across basic blocks, if not optimizing. */
7669 || (!optimize && GET_CODE (target) == REG
7670 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7671 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7672
7673 if (target)
7674 emit_clr_insn (target);
7675
7676 op1 = gen_label_rtx ();
7677 jumpifnot (exp, op1);
7678
7679 if (target)
7680 emit_0_to_1_insn (target);
7681
7682 emit_label (op1);
7683 return ignore ? const0_rtx : target;
7684
7685 case TRUTH_NOT_EXPR:
7686 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7687 /* The parser is careful to generate TRUTH_NOT_EXPR
7688 only with operands that are always zero or one. */
7689 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7690 target, 1, OPTAB_LIB_WIDEN);
7691 if (temp == 0)
7692 abort ();
7693 return temp;
7694
7695 case COMPOUND_EXPR:
7696 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7697 emit_queue ();
7698 return expand_expr (TREE_OPERAND (exp, 1),
7699 (ignore ? const0_rtx : target),
7700 VOIDmode, 0);
7701
7702 case COND_EXPR:
7703 /* If we would have a "singleton" (see below) were it not for a
7704 conversion in each arm, bring that conversion back out. */
7705 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7706 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7707 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7708 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7709 {
7710 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7711 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7712
7713 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7714 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7715 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7716 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7717 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7718 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7719 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7720 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7721 return expand_expr (build1 (NOP_EXPR, type,
7722 build (COND_EXPR, TREE_TYPE (true),
7723 TREE_OPERAND (exp, 0),
7724 true, false)),
7725 target, tmode, modifier);
7726 }
7727
7728 {
7729 /* Note that COND_EXPRs whose type is a structure or union
7730 are required to be constructed to contain assignments of
7731 a temporary variable, so that we can evaluate them here
7732 for side effect only. If type is void, we must do likewise. */
7733
7734 /* If an arm of the branch requires a cleanup,
7735 only that cleanup is performed. */
7736
7737 tree singleton = 0;
7738 tree binary_op = 0, unary_op = 0;
7739
7740 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7741 convert it to our mode, if necessary. */
7742 if (integer_onep (TREE_OPERAND (exp, 1))
7743 && integer_zerop (TREE_OPERAND (exp, 2))
7744 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7745 {
7746 if (ignore)
7747 {
7748 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7749 ro_modifier);
7750 return const0_rtx;
7751 }
7752
7753 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7754 if (GET_MODE (op0) == mode)
7755 return op0;
7756
7757 if (target == 0)
7758 target = gen_reg_rtx (mode);
7759 convert_move (target, op0, unsignedp);
7760 return target;
7761 }
7762
7763 /* Check for X ? A + B : A. If we have this, we can copy A to the
7764 output and conditionally add B. Similarly for unary operations.
7765 Don't do this if X has side-effects because those side effects
7766 might affect A or B and the "?" operation is a sequence point in
7767 ANSI. (operand_equal_p tests for side effects.) */
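	 /* For example, in x ? a + 3 : a, SINGLETON is the plain a and
	    BINARY_OP is a + 3; we store a into the target and add 3
	    only on the path where x is true.  */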
7768
7769 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7770 && operand_equal_p (TREE_OPERAND (exp, 2),
7771 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7772 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7773 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7774 && operand_equal_p (TREE_OPERAND (exp, 1),
7775 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7776 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7777 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7778 && operand_equal_p (TREE_OPERAND (exp, 2),
7779 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7780 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7781 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7782 && operand_equal_p (TREE_OPERAND (exp, 1),
7783 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7784 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7785
7786 /* If we are not to produce a result, we have no target. Otherwise,
7787 if a target was specified use it; it will not be used as an
7788 intermediate target unless it is safe. If no target, use a
7789 temporary. */
7790
7791 if (ignore)
7792 temp = 0;
7793 else if (original_target
7794 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7795 || (singleton && GET_CODE (original_target) == REG
7796 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7797 && original_target == var_rtx (singleton)))
7798 && GET_MODE (original_target) == mode
7799 #ifdef HAVE_conditional_move
7800 && (! can_conditionally_move_p (mode)
7801 || GET_CODE (original_target) == REG
7802 || TREE_ADDRESSABLE (type))
7803 #endif
7804 && ! (GET_CODE (original_target) == MEM
7805 && MEM_VOLATILE_P (original_target)))
7806 temp = original_target;
7807 else if (TREE_ADDRESSABLE (type))
7808 abort ();
7809 else
7810 temp = assign_temp (type, 0, 0, 1);
7811
7812 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7813 do the test of X as a store-flag operation, do this as
7814 A + ((X != 0) << log C). Similarly for other simple binary
7815 operators. Only do for C == 1 if BRANCH_COST is low. */
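	/* E.g. x ? a + 4 : a can become a + ((x != 0) << 2) when
	   do_store_flag can compute x != 0 without a branch; the
	   x ? a : a + 4 form is handled by inverting the condition
	   first, giving a + ((x == 0) << 2).  */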
7816 if (temp && singleton && binary_op
7817 && (TREE_CODE (binary_op) == PLUS_EXPR
7818 || TREE_CODE (binary_op) == MINUS_EXPR
7819 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7820 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7821 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7822 : integer_onep (TREE_OPERAND (binary_op, 1)))
7823 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7824 {
7825 rtx result;
7826 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7827 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7828 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7829 : xor_optab);
7830
7831 /* If we had X ? A : A + 1, do this as A + (X == 0).
7832
7833 We have to invert the truth value here and then put it
7834 back later if do_store_flag fails. We cannot simply copy
7835 TREE_OPERAND (exp, 0) to another variable and modify that
7836 because invert_truthvalue can modify the tree pointed to
7837 by its argument. */
7838 if (singleton == TREE_OPERAND (exp, 1))
7839 TREE_OPERAND (exp, 0)
7840 = invert_truthvalue (TREE_OPERAND (exp, 0));
7841
7842 result = do_store_flag (TREE_OPERAND (exp, 0),
7843 (safe_from_p (temp, singleton, 1)
7844 ? temp : NULL_RTX),
7845 mode, BRANCH_COST <= 1);
7846
7847 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7848 result = expand_shift (LSHIFT_EXPR, mode, result,
7849 build_int_2 (tree_log2
7850 (TREE_OPERAND
7851 (binary_op, 1)),
7852 0),
7853 (safe_from_p (temp, singleton, 1)
7854 ? temp : NULL_RTX), 0);
7855
7856 if (result)
7857 {
7858 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7859 return expand_binop (mode, boptab, op1, result, temp,
7860 unsignedp, OPTAB_LIB_WIDEN);
7861 }
7862 else if (singleton == TREE_OPERAND (exp, 1))
7863 TREE_OPERAND (exp, 0)
7864 = invert_truthvalue (TREE_OPERAND (exp, 0));
7865 }
7866
7867 do_pending_stack_adjust ();
7868 NO_DEFER_POP;
7869 op0 = gen_label_rtx ();
7870
7871 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7872 {
7873 if (temp != 0)
7874 {
7875 /* If the target conflicts with the other operand of the
7876 binary op, we can't use it. Also, we can't use the target
7877 if it is a hard register, because evaluating the condition
7878 might clobber it. */
7879 if ((binary_op
7880 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7881 || (GET_CODE (temp) == REG
7882 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7883 temp = gen_reg_rtx (mode);
7884 store_expr (singleton, temp, 0);
7885 }
7886 else
7887 expand_expr (singleton,
7888 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7889 if (singleton == TREE_OPERAND (exp, 1))
7890 jumpif (TREE_OPERAND (exp, 0), op0);
7891 else
7892 jumpifnot (TREE_OPERAND (exp, 0), op0);
7893
7894 start_cleanup_deferral ();
7895 if (binary_op && temp == 0)
7896 /* Just touch the other operand. */
7897 expand_expr (TREE_OPERAND (binary_op, 1),
7898 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7899 else if (binary_op)
7900 store_expr (build (TREE_CODE (binary_op), type,
7901 make_tree (type, temp),
7902 TREE_OPERAND (binary_op, 1)),
7903 temp, 0);
7904 else
7905 store_expr (build1 (TREE_CODE (unary_op), type,
7906 make_tree (type, temp)),
7907 temp, 0);
7908 op1 = op0;
7909 }
7910 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7911 comparison operator. If we have one of these cases, set the
7912 output to A, branch on A (cse will merge these two references),
7913 then set the output to FOO. */
7914 else if (temp
7915 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7916 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7917 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7918 TREE_OPERAND (exp, 1), 0)
7919 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7920 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7921 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7922 {
7923 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7924 temp = gen_reg_rtx (mode);
7925 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7926 jumpif (TREE_OPERAND (exp, 0), op0);
7927
7928 start_cleanup_deferral ();
7929 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7930 op1 = op0;
7931 }
7932 else if (temp
7933 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7934 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7935 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7936 TREE_OPERAND (exp, 2), 0)
7937 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7938 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7939 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7940 {
7941 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7942 temp = gen_reg_rtx (mode);
7943 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7944 jumpifnot (TREE_OPERAND (exp, 0), op0);
7945
7946 start_cleanup_deferral ();
7947 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7948 op1 = op0;
7949 }
7950 else
7951 {
7952 op1 = gen_label_rtx ();
7953 jumpifnot (TREE_OPERAND (exp, 0), op0);
7954
7955 start_cleanup_deferral ();
7956
7957 	    /* One branch of the cond can be void if it never returns.  For
7958 	       example, A ? throw : E.  */
7959 if (temp != 0
7960 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7961 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7962 else
7963 expand_expr (TREE_OPERAND (exp, 1),
7964 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7965 end_cleanup_deferral ();
7966 emit_queue ();
7967 emit_jump_insn (gen_jump (op1));
7968 emit_barrier ();
7969 emit_label (op0);
7970 start_cleanup_deferral ();
7971 if (temp != 0
7972 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7973 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7974 else
7975 expand_expr (TREE_OPERAND (exp, 2),
7976 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7977 }
7978
7979 end_cleanup_deferral ();
7980
7981 emit_queue ();
7982 emit_label (op1);
7983 OK_DEFER_POP;
7984
7985 return temp;
7986 }
7987
7988 case TARGET_EXPR:
7989 {
7990 /* Something needs to be initialized, but we didn't know
7991 where that thing was when building the tree. For example,
7992 it could be the return value of a function, or a parameter
7993 	 to a function which is laid out on the stack, or a temporary
7994 variable which must be passed by reference.
7995
7996 We guarantee that the expression will either be constructed
7997 or copied into our original target. */
7998
7999 tree slot = TREE_OPERAND (exp, 0);
8000 tree cleanups = NULL_TREE;
8001 tree exp1;
8002
8003 if (TREE_CODE (slot) != VAR_DECL)
8004 abort ();
8005
8006 if (! ignore)
8007 target = original_target;
8008
8009 /* Set this here so that if we get a target that refers to a
8010 register variable that's already been used, put_reg_into_stack
8011 knows that it should fix up those uses. */
8012 TREE_USED (slot) = 1;
8013
8014 if (target == 0)
8015 {
8016 if (DECL_RTL (slot) != 0)
8017 {
8018 target = DECL_RTL (slot);
8019 		/* If we have already expanded the slot, don't do
8020 		   it again.  (mrs)  */
8021 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8022 return target;
8023 }
8024 else
8025 {
8026 target = assign_temp (type, 2, 0, 1);
8027 /* All temp slots at this level must not conflict. */
8028 preserve_temp_slots (target);
8029 DECL_RTL (slot) = target;
8030 if (TREE_ADDRESSABLE (slot))
8031 {
8032 TREE_ADDRESSABLE (slot) = 0;
8033 mark_addressable (slot);
8034 }
8035
8036 /* Since SLOT is not known to the called function
8037 to belong to its stack frame, we must build an explicit
8038 cleanup. This case occurs when we must build up a reference
8039 		   to pass as an argument.  In this case,
8040 it is very likely that such a reference need not be
8041 built here. */
8042
8043 if (TREE_OPERAND (exp, 2) == 0)
8044 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8045 cleanups = TREE_OPERAND (exp, 2);
8046 }
8047 }
8048 else
8049 {
8050 	    /* This case does occur when expanding a parameter which
8051 needs to be constructed on the stack. The target
8052 is the actual stack address that we want to initialize.
8053 The function we call will perform the cleanup in this case. */
8054
8055 /* If we have already assigned it space, use that space,
8056 	       not the target that we were passed in, as our target
8057 parameter is only a hint. */
8058 if (DECL_RTL (slot) != 0)
8059 {
8060 target = DECL_RTL (slot);
8061 		/* If we have already expanded the slot, don't do
8062 		   it again.  (mrs)  */
8063 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8064 return target;
8065 }
8066 else
8067 {
8068 DECL_RTL (slot) = target;
8069 /* If we must have an addressable slot, then make sure that
8070 the RTL that we just stored in slot is OK. */
8071 if (TREE_ADDRESSABLE (slot))
8072 {
8073 TREE_ADDRESSABLE (slot) = 0;
8074 mark_addressable (slot);
8075 }
8076 }
8077 }
8078
8079 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8080 /* Mark it as expanded. */
8081 TREE_OPERAND (exp, 1) = NULL_TREE;
8082
8083 store_expr (exp1, target, 0);
8084
8085 expand_decl_cleanup (NULL_TREE, cleanups);
8086
8087 return target;
8088 }
8089
8090 case INIT_EXPR:
8091 {
8092 tree lhs = TREE_OPERAND (exp, 0);
8093 tree rhs = TREE_OPERAND (exp, 1);
8094 tree noncopied_parts = 0;
8095 tree lhs_type = TREE_TYPE (lhs);
8096
8097 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8098 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8099 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8100 TYPE_NONCOPIED_PARTS (lhs_type));
8101 while (noncopied_parts != 0)
8102 {
8103 expand_assignment (TREE_VALUE (noncopied_parts),
8104 TREE_PURPOSE (noncopied_parts), 0, 0);
8105 noncopied_parts = TREE_CHAIN (noncopied_parts);
8106 }
8107 return temp;
8108 }
8109
8110 case MODIFY_EXPR:
8111 {
8112 /* If lhs is complex, expand calls in rhs before computing it.
8113 That's so we don't compute a pointer and save it over a call.
8114 If lhs is simple, compute it first so we can give it as a
8115 target if the rhs is just a call. This avoids an extra temp and copy
8116 and that prevents a partial-subsumption which makes bad code.
8117 Actually we could treat component_ref's of vars like vars. */
8118
8119 tree lhs = TREE_OPERAND (exp, 0);
8120 tree rhs = TREE_OPERAND (exp, 1);
8121 tree noncopied_parts = 0;
8122 tree lhs_type = TREE_TYPE (lhs);
8123
8124 temp = 0;
8125
8126 if (TREE_CODE (lhs) != VAR_DECL
8127 && TREE_CODE (lhs) != RESULT_DECL
8128 && TREE_CODE (lhs) != PARM_DECL
8129 && ! (TREE_CODE (lhs) == INDIRECT_REF
8130 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8131 preexpand_calls (exp);
8132
8133 	/* Check for |= or &= of a bitfield of size 1 into another bitfield
8134 	   of size 1.  In this case (unless we need the result of the
8135 	   assignment) we can do this more efficiently with a
8136 test followed by an assignment, if necessary.
8137
8138 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8139 things change so we do, this code should be enhanced to
8140 support it. */
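	/* For instance, for 1-bit fields, s.a |= s.b is emitted roughly
	   as: if (! s.b) goto L; s.a = 1; L:
	   and s.a &= s.b as: if (s.b) goto L; s.a = 0; L:  */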
8141 if (ignore
8142 && TREE_CODE (lhs) == COMPONENT_REF
8143 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8144 || TREE_CODE (rhs) == BIT_AND_EXPR)
8145 && TREE_OPERAND (rhs, 0) == lhs
8146 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8147 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8148 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8149 {
8150 rtx label = gen_label_rtx ();
8151
8152 do_jump (TREE_OPERAND (rhs, 1),
8153 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8154 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8155 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8156 (TREE_CODE (rhs) == BIT_IOR_EXPR
8157 ? integer_one_node
8158 : integer_zero_node)),
8159 0, 0);
8160 do_pending_stack_adjust ();
8161 emit_label (label);
8162 return const0_rtx;
8163 }
8164
8165 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8166 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8167 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8168 TYPE_NONCOPIED_PARTS (lhs_type));
8169
8170 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8171 while (noncopied_parts != 0)
8172 {
8173 expand_assignment (TREE_PURPOSE (noncopied_parts),
8174 TREE_VALUE (noncopied_parts), 0, 0);
8175 noncopied_parts = TREE_CHAIN (noncopied_parts);
8176 }
8177 return temp;
8178 }
8179
8180 case RETURN_EXPR:
8181 if (!TREE_OPERAND (exp, 0))
8182 expand_null_return ();
8183 else
8184 expand_return (TREE_OPERAND (exp, 0));
8185 return const0_rtx;
8186
8187 case PREINCREMENT_EXPR:
8188 case PREDECREMENT_EXPR:
8189 return expand_increment (exp, 0, ignore);
8190
8191 case POSTINCREMENT_EXPR:
8192 case POSTDECREMENT_EXPR:
8193 /* Faster to treat as pre-increment if result is not used. */
8194 return expand_increment (exp, ! ignore, ignore);
8195
8196 case ADDR_EXPR:
8197 /* If nonzero, TEMP will be set to the address of something that might
8198 be a MEM corresponding to a stack slot. */
8199 temp = 0;
8200
8201 /* Are we taking the address of a nested function? */
8202 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8203 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8204 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8205 && ! TREE_STATIC (exp))
8206 {
8207 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8208 op0 = force_operand (op0, target);
8209 }
8210 /* If we are taking the address of something erroneous, just
8211 return a zero. */
8212 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8213 return const0_rtx;
8214 else
8215 {
8216 /* We make sure to pass const0_rtx down if we came in with
8217 	     ignore set, to avoid doing the cleanups twice.  */
8218 op0 = expand_expr (TREE_OPERAND (exp, 0),
8219 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8220 (modifier == EXPAND_INITIALIZER
8221 ? modifier : EXPAND_CONST_ADDRESS));
8222
8223 /* If we are going to ignore the result, OP0 will have been set
8224 to const0_rtx, so just return it. Don't get confused and
8225 think we are taking the address of the constant. */
8226 if (ignore)
8227 return op0;
8228
8229 op0 = protect_from_queue (op0, 0);
8230
8231 /* We would like the object in memory. If it is a constant, we can
8232 have it be statically allocated into memory. For a non-constant,
8233 we need to allocate some memory and store the value into it. */
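	  /* For instance, taking &x when x has been allocated to a
	     pseudo register: the code below spills it to a fresh stack
	     temporary and returns the temporary's address.  */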
8234
8235 if (CONSTANT_P (op0))
8236 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8237 op0);
8238 else if (GET_CODE (op0) == MEM)
8239 {
8240 mark_temp_addr_taken (op0);
8241 temp = XEXP (op0, 0);
8242 }
8243
8244 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8245 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8246 {
8247 	      /* If this object is in a register, it must not
8248 		 be BLKmode.  */
8249 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8250 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8251
8252 mark_temp_addr_taken (memloc);
8253 emit_move_insn (memloc, op0);
8254 op0 = memloc;
8255 }
8256
8257 if (GET_CODE (op0) != MEM)
8258 abort ();
8259
8260 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8261 {
8262 temp = XEXP (op0, 0);
8263 #ifdef POINTERS_EXTEND_UNSIGNED
8264 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8265 && mode == ptr_mode)
8266 temp = convert_memory_address (ptr_mode, temp);
8267 #endif
8268 return temp;
8269 }
8270
8271 op0 = force_operand (XEXP (op0, 0), target);
8272 }
8273
8274 if (flag_force_addr && GET_CODE (op0) != REG)
8275 op0 = force_reg (Pmode, op0);
8276
8277 if (GET_CODE (op0) == REG
8278 && ! REG_USERVAR_P (op0))
8279 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8280
8281 /* If we might have had a temp slot, add an equivalent address
8282 for it. */
8283 if (temp != 0)
8284 update_temp_slot_address (temp, op0);
8285
8286 #ifdef POINTERS_EXTEND_UNSIGNED
8287 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8288 && mode == ptr_mode)
8289 op0 = convert_memory_address (ptr_mode, op0);
8290 #endif
8291
8292 return op0;
8293
8294 case ENTRY_VALUE_EXPR:
8295 abort ();
8296
8297 /* COMPLEX type for Extended Pascal & Fortran */
8298 case COMPLEX_EXPR:
8299 {
8300 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8301 rtx insns;
8302
8303 	/* Get the RTL for the operands.  */
8304 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8305 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8306
8307 if (! target)
8308 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8309
8310 start_sequence ();
8311
8312 /* Move the real (op0) and imaginary (op1) parts to their location. */
8313 emit_move_insn (gen_realpart (mode, target), op0);
8314 emit_move_insn (gen_imagpart (mode, target), op1);
8315
8316 insns = get_insns ();
8317 end_sequence ();
8318
8319 /* Complex construction should appear as a single unit. */
8320 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8321 each with a separate pseudo as destination.
8322 It's not correct for flow to treat them as a unit. */
8323 if (GET_CODE (target) != CONCAT)
8324 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8325 else
8326 emit_insns (insns);
8327
8328 return target;
8329 }
8330
8331 case REALPART_EXPR:
8332 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8333 return gen_realpart (mode, op0);
8334
8335 case IMAGPART_EXPR:
8336 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8337 return gen_imagpart (mode, op0);
8338
8339 case CONJ_EXPR:
8340 {
8341 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8342 rtx imag_t;
8343 rtx insns;
8344
8345 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8346
8347 if (! target)
8348 target = gen_reg_rtx (mode);
8349
8350 start_sequence ();
8351
8352 /* Store the realpart and the negated imagpart to target. */
8353 emit_move_insn (gen_realpart (partmode, target),
8354 gen_realpart (partmode, op0));
8355
8356 imag_t = gen_imagpart (partmode, target);
8357 temp = expand_unop (partmode, neg_optab,
8358 gen_imagpart (partmode, op0), imag_t, 0);
8359 if (temp != imag_t)
8360 emit_move_insn (imag_t, temp);
8361
8362 insns = get_insns ();
8363 end_sequence ();
8364
8365 	/* Conjugate should appear as a single unit.
8366 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8367 each with a separate pseudo as destination.
8368 It's not correct for flow to treat them as a unit. */
8369 if (GET_CODE (target) != CONCAT)
8370 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8371 else
8372 emit_insns (insns);
8373
8374 return target;
8375 }
8376
8377 case TRY_CATCH_EXPR:
8378 {
8379 tree handler = TREE_OPERAND (exp, 1);
8380
8381 expand_eh_region_start ();
8382
8383 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8384
8385 expand_eh_region_end (handler);
8386
8387 return op0;
8388 }
8389
8390 case TRY_FINALLY_EXPR:
8391 {
8392 tree try_block = TREE_OPERAND (exp, 0);
8393 tree finally_block = TREE_OPERAND (exp, 1);
8394 rtx finally_label = gen_label_rtx ();
8395 rtx done_label = gen_label_rtx ();
8396 rtx return_link = gen_reg_rtx (Pmode);
8397 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8398 (tree) finally_label, (tree) return_link);
8399 TREE_SIDE_EFFECTS (cleanup) = 1;
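	/* Schematically, the code emitted for the whole construct is:

	       <try-block, with CLEANUP registered so every exit
		"calls" the finally code through RETURN_LINK>
	       goto done;
	   finally:
	       <finally-block>
	       goto *return_link;
	   done:

	   The GOTO_SUBROUTINE_EXPR cleanup loads RETURN_LINK with the
	   address of a fresh label and jumps to FINALLY, so the
	   finally code runs once for each way of leaving the try
	   block.  */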
8400
8401 /* Start a new binding layer that will keep track of all cleanup
8402 actions to be performed. */
8403 expand_start_bindings (2);
8404
8405 target_temp_slot_level = temp_slot_level;
8406
8407 expand_decl_cleanup (NULL_TREE, cleanup);
8408 op0 = expand_expr (try_block, target, tmode, modifier);
8409
8410 preserve_temp_slots (op0);
8411 expand_end_bindings (NULL_TREE, 0, 0);
8412 emit_jump (done_label);
8413 emit_label (finally_label);
8414 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8415 emit_indirect_jump (return_link);
8416 emit_label (done_label);
8417 return op0;
8418 }
8419
8420 case GOTO_SUBROUTINE_EXPR:
8421 {
8422 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8423 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8424 rtx return_address = gen_label_rtx ();
8425 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8426 emit_jump (subr);
8427 emit_label (return_address);
8428 return const0_rtx;
8429 }
8430
8431 case POPDCC_EXPR:
8432 {
8433 rtx dcc = get_dynamic_cleanup_chain ();
8434 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8435 return const0_rtx;
8436 }
8437
8438 case POPDHC_EXPR:
8439 {
8440 rtx dhc = get_dynamic_handler_chain ();
8441 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8442 return const0_rtx;
8443 }
8444
8445 case VA_ARG_EXPR:
8446 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8447
8448 default:
8449 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8450 }
8451
8452 /* Here to do an ordinary binary operator, generating an instruction
8453 from the optab already placed in `this_optab'. */
8454 binop:
8455 preexpand_calls (exp);
8456 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8457 subtarget = 0;
8458 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8459 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8460 binop2:
8461 temp = expand_binop (mode, this_optab, op0, op1, target,
8462 unsignedp, OPTAB_LIB_WIDEN);
8463 if (temp == 0)
8464 abort ();
8465 return temp;
8466 }
8467 \f
8468 /* Similar to expand_expr, except that we don't specify a target, target
8469 mode, or modifier and we return the alignment of the inner type. This is
8470 used in cases where it is not necessary to align the result to the
8471 alignment of its type as long as we know the alignment of the result, for
8472 example for comparisons of BLKmode values. */
8473
8474 static rtx
8475 expand_expr_unaligned (exp, palign)
8476 register tree exp;
8477 unsigned int *palign;
8478 {
8479 register rtx op0;
8480 tree type = TREE_TYPE (exp);
8481 register enum machine_mode mode = TYPE_MODE (type);
8482
8483 /* Default the alignment we return to that of the type. */
8484 *palign = TYPE_ALIGN (type);
8485
8486   /* The only case in which we do anything special is when the resulting
8487      mode is BLKmode.  */
8488 if (mode != BLKmode)
8489 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8490
8491 switch (TREE_CODE (exp))
8492 {
8493 case CONVERT_EXPR:
8494 case NOP_EXPR:
8495 case NON_LVALUE_EXPR:
8496 /* Conversions between BLKmode values don't change the underlying
8497 alignment or value. */
8498 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8499 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8500 break;
8501
8502 case ARRAY_REF:
8503 /* Much of the code for this case is copied directly from expand_expr.
8504 We need to duplicate it here because we will do something different
8505 in the fall-through case, so we need to handle the same exceptions
8506 it does. */
8507 {
8508 tree array = TREE_OPERAND (exp, 0);
8509 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8510 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8511 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8512 HOST_WIDE_INT i;
8513
8514 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8515 abort ();
8516
8517 /* Optimize the special-case of a zero lower bound.
8518
8519 We convert the low_bound to sizetype to avoid some problems
8520 with constant folding. (E.g. suppose the lower bound is 1,
8521 and its mode is QI. Without the conversion, (ARRAY
8522 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8523 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8524
8525 if (! integer_zerop (low_bound))
8526 index = size_diffop (index, convert (sizetype, low_bound));
8527
8528 /* If this is a constant index into a constant array,
8529 just get the value from the array. Handle both the cases when
8530 we have an explicit constructor and when our operand is a variable
8531 that was declared const. */
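	/* For example, with
	     static const struct s tbl[] = { {1}, {2} };
	   a reference to tbl[1] is resolved here to the matching
	   CONSTRUCTOR element rather than being loaded from memory.
	   (Only BLKmode values reach this function; scalar arrays were
	   already handled by the ordinary expand_expr path.)  */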
8532
8533 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8534 && 0 > compare_tree_int (index,
8535 list_length (CONSTRUCTOR_ELTS
8536 (TREE_OPERAND (exp, 0)))))
8537 {
8538 tree elem;
8539
8540 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8541 i = TREE_INT_CST_LOW (index);
8542 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8543 ;
8544
8545 if (elem)
8546 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8547 }
8548
8549 else if (optimize >= 1
8550 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8551 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8552 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8553 {
8554 if (TREE_CODE (index) == INTEGER_CST)
8555 {
8556 tree init = DECL_INITIAL (array);
8557
8558 if (TREE_CODE (init) == CONSTRUCTOR)
8559 {
8560 tree elem;
8561
8562 for (elem = CONSTRUCTOR_ELTS (init);
8563 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8564 elem = TREE_CHAIN (elem))
8565 ;
8566
8567 if (elem)
8568 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8569 palign);
8570 }
8571 }
8572 }
8573 }
8574
8575 /* ... fall through ... */
8576
8577 case COMPONENT_REF:
8578 case BIT_FIELD_REF:
8579 /* If the operand is a CONSTRUCTOR, we can just extract the
8580 appropriate field if it is present. Don't do this if we have
8581 already written the data since we want to refer to that copy
8582 and varasm.c assumes that's what we'll do. */
8583 if (TREE_CODE (exp) != ARRAY_REF
8584 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8585 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8586 {
8587 tree elt;
8588
8589 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8590 elt = TREE_CHAIN (elt))
8591 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8592 /* Note that unlike the case in expand_expr, we know this is
8593 BLKmode and hence not an integer. */
8594 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8595 }
8596
8597 {
8598 enum machine_mode mode1;
8599 HOST_WIDE_INT bitsize, bitpos;
8600 tree offset;
8601 int volatilep = 0;
8602 unsigned int alignment;
8603 int unsignedp;
8604 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8605 &mode1, &unsignedp, &volatilep,
8606 &alignment);
8607
8608 /* If we got back the original object, something is wrong. Perhaps
8609 we are evaluating an expression too early. In any event, don't
8610 infinitely recurse. */
8611 if (tem == exp)
8612 abort ();
8613
8614 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8615
8616 /* If this is a constant, put it into a register if it is a
8617 	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
8618 if (CONSTANT_P (op0))
8619 {
8620 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8621
8622 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8623 && offset == 0)
8624 op0 = force_reg (inner_mode, op0);
8625 else
8626 op0 = validize_mem (force_const_mem (inner_mode, op0));
8627 }
8628
8629 if (offset != 0)
8630 {
8631 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8632
8633 /* If this object is in a register, put it into memory.
8634 This case can't occur in C, but can in Ada if we have
8635 unchecked conversion of an expression from a scalar type to
8636 an array or record type. */
8637 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8638 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8639 {
8640 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8641
8642 mark_temp_addr_taken (memloc);
8643 emit_move_insn (memloc, op0);
8644 op0 = memloc;
8645 }
8646
8647 if (GET_CODE (op0) != MEM)
8648 abort ();
8649
8650 if (GET_MODE (offset_rtx) != ptr_mode)
8651 {
8652 #ifdef POINTERS_EXTEND_UNSIGNED
8653 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8654 #else
8655 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8656 #endif
8657 }
8658
8659 op0 = change_address (op0, VOIDmode,
8660 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8661 force_reg (ptr_mode,
8662 offset_rtx)));
8663 }
8664
8665 /* Don't forget about volatility even if this is a bitfield. */
8666 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8667 {
8668 op0 = copy_rtx (op0);
8669 MEM_VOLATILE_P (op0) = 1;
8670 }
8671
8672 /* Check the access. */
8673 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8674 {
8675 rtx to;
8676 int size;
8677
8678 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8679 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8680
8681 /* Check the access right of the pointer. */
8682 if (size > BITS_PER_UNIT)
8683 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8684 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8685 TYPE_MODE (sizetype),
8686 GEN_INT (MEMORY_USE_RO),
8687 TYPE_MODE (integer_type_node));
8688 }
8689
8690 /* In cases where an aligned union has an unaligned object
8691 as a field, we might be extracting a BLKmode value from
8692 an integer-mode (e.g., SImode) object. Handle this case
8693 by doing the extract into an object as wide as the field
8694 (which we know to be the width of a basic mode), then
8695 storing into memory, and changing the mode to BLKmode.
8696 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8697 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8698 if (mode1 == VOIDmode
8699 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8700 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8701 && (TYPE_ALIGN (type) > alignment
8702 || bitpos % TYPE_ALIGN (type) != 0)))
8703 {
8704 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8705
8706 if (ext_mode == BLKmode)
8707 {
8708 /* In this case, BITPOS must start at a byte boundary. */
8709 if (GET_CODE (op0) != MEM
8710 || bitpos % BITS_PER_UNIT != 0)
8711 abort ();
8712
8713 op0 = change_address (op0, VOIDmode,
8714 plus_constant (XEXP (op0, 0),
8715 bitpos / BITS_PER_UNIT));
8716 }
8717 else
8718 {
8719 rtx new = assign_stack_temp (ext_mode,
8720 bitsize / BITS_PER_UNIT, 0);
8721
8722 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8723 unsignedp, NULL_RTX, ext_mode,
8724 ext_mode, alignment,
8725 int_size_in_bytes (TREE_TYPE (tem)));
8726
8727 /* If the result is a record type and BITSIZE is narrower than
8728 the mode of OP0, an integral mode, and this is a big endian
8729 machine, we must put the field into the high-order bits. */
8730 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8731 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8732 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8733 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8734 size_int (GET_MODE_BITSIZE
8735 (GET_MODE (op0))
8736 - bitsize),
8737 op0, 1);
8738
8739
8740 emit_move_insn (new, op0);
8741 op0 = copy_rtx (new);
8742 PUT_MODE (op0, BLKmode);
8743 }
8744 }
8745 else
8746 /* Get a reference to just this component. */
8747 op0 = change_address (op0, mode1,
8748 plus_constant (XEXP (op0, 0),
8749 (bitpos / BITS_PER_UNIT)));
8750
8751 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8752
8753 /* Adjust the alignment in case the bit position is not
8754 a multiple of the alignment of the inner object. */
8755 while (bitpos % alignment != 0)
8756 alignment >>= 1;
8757
8758 if (GET_CODE (XEXP (op0, 0)) == REG)
8759 mark_reg_pointer (XEXP (op0, 0), alignment);
8760
8761 MEM_IN_STRUCT_P (op0) = 1;
8762 MEM_VOLATILE_P (op0) |= volatilep;
8763
8764 *palign = alignment;
8765 return op0;
8766 }
8767
8768 default:
8769 break;
8770
8771 }
8772
8773 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8774 }
8775 \f
8776 /* Return the tree node if ARG corresponds to a string constant or zero
8777 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8778 in bytes within the string that ARG is accessing. The type of the
8779 offset will be `sizetype'. */
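/* For example, for the C expression "hello" + 3 -- an ADDR_EXPR of the
   STRING_CST with 3 added -- this returns the STRING_CST node and sets
   *PTR_OFFSET to the sizetype constant 3; the plain string "hello" used
   as a pointer gives a zero offset.  Anything else, say a pointer
   variable, makes us return zero.  */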
8780
8781 tree
8782 string_constant (arg, ptr_offset)
8783 tree arg;
8784 tree *ptr_offset;
8785 {
8786 STRIP_NOPS (arg);
8787
8788 if (TREE_CODE (arg) == ADDR_EXPR
8789 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8790 {
8791 *ptr_offset = size_zero_node;
8792 return TREE_OPERAND (arg, 0);
8793 }
8794 else if (TREE_CODE (arg) == PLUS_EXPR)
8795 {
8796 tree arg0 = TREE_OPERAND (arg, 0);
8797 tree arg1 = TREE_OPERAND (arg, 1);
8798
8799 STRIP_NOPS (arg0);
8800 STRIP_NOPS (arg1);
8801
8802 if (TREE_CODE (arg0) == ADDR_EXPR
8803 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8804 {
8805 *ptr_offset = convert (sizetype, arg1);
8806 return TREE_OPERAND (arg0, 0);
8807 }
8808 else if (TREE_CODE (arg1) == ADDR_EXPR
8809 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8810 {
8811 *ptr_offset = convert (sizetype, arg0);
8812 return TREE_OPERAND (arg1, 0);
8813 }
8814 }
8815
8816 return 0;
8817 }
8818 \f
8819 /* Expand code for a post- or pre- increment or decrement
8820 and return the RTX for the result.
8821 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
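/* Roughly: for a postincrement such as i++ the RTX returned is the old
   value of `i', while for a preincrement such as ++i it is the value
   after the increment; in both cases the store back into `i' is
   emitted (or queued) here as well.  */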
8822
8823 static rtx
8824 expand_increment (exp, post, ignore)
8825 register tree exp;
8826 int post, ignore;
8827 {
8828 register rtx op0, op1;
8829 register rtx temp, value;
8830 register tree incremented = TREE_OPERAND (exp, 0);
8831 optab this_optab = add_optab;
8832 int icode;
8833 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8834 int op0_is_copy = 0;
8835 int single_insn = 0;
8836 /* 1 means we can't store into OP0 directly,
8837 because it is a subreg narrower than a word,
8838 and we don't dare clobber the rest of the word. */
8839 int bad_subreg = 0;
8840
8841 /* Stabilize any component ref that might need to be
8842 evaluated more than once below. */
8843 if (!post
8844 || TREE_CODE (incremented) == BIT_FIELD_REF
8845 || (TREE_CODE (incremented) == COMPONENT_REF
8846 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8847 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8848 incremented = stabilize_reference (incremented);
8849 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8850 ones into save exprs so that they don't accidentally get evaluated
8851 more than once by the code below. */
8852 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8853 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8854 incremented = save_expr (incremented);
8855
8856 /* Compute the operands as RTX.
8857 Note whether OP0 is the actual lvalue or a copy of it:
8858 I believe it is a copy iff it is a register or subreg
8859 and insns were generated in computing it. */
8860
8861 temp = get_last_insn ();
8862 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8863
8864 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8865 in place but instead must do sign- or zero-extension during assignment,
8866 so we copy it into a new register and let the code below use it as
8867 a copy.
8868
8869 Note that we can safely modify this SUBREG since it is known not to be
8870 shared (it was made by the expand_expr call above). */
8871
8872 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8873 {
8874 if (post)
8875 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8876 else
8877 bad_subreg = 1;
8878 }
8879 else if (GET_CODE (op0) == SUBREG
8880 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8881 {
8882 /* We cannot increment this SUBREG in place. If we are
8883 post-incrementing, get a copy of the old value. Otherwise,
8884 just mark that we cannot increment in place. */
8885 if (post)
8886 op0 = copy_to_reg (op0);
8887 else
8888 bad_subreg = 1;
8889 }
8890
8891 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8892 && temp != get_last_insn ());
8893 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8894 EXPAND_MEMORY_USE_BAD);
8895
8896 /* Decide whether incrementing or decrementing. */
8897 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8898 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8899 this_optab = sub_optab;
8900
8901 /* Convert decrement by a constant into a negative increment. */
8902 if (this_optab == sub_optab
8903 && GET_CODE (op1) == CONST_INT)
8904 {
8905 op1 = GEN_INT (- INTVAL (op1));
8906 this_optab = add_optab;
8907 }
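  /* E.g. a decrement by the constant 4 has just become an increment
     by -4, so only the add_optab patterns need be considered below.  */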
8908
8909 /* For a preincrement, see if we can do this with a single instruction. */
8910 if (!post)
8911 {
8912 icode = (int) this_optab->handlers[(int) mode].insn_code;
8913 if (icode != (int) CODE_FOR_nothing
8914 /* Make sure that OP0 is valid for operands 0 and 1
8915 of the insn we want to queue. */
8916 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8917 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8918 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8919 single_insn = 1;
8920 }
8921
8922 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8923 then we cannot just increment OP0. We must therefore contrive to
8924 increment the original value. Then, for postincrement, we can return
8925 OP0 since it is a copy of the old value. For preincrement, expand here
8926 unless we can do it with a single insn.
8927
8928 Likewise if storing directly into OP0 would clobber high bits
8929 we need to preserve (bad_subreg). */
8930 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8931 {
8932 /* This is the easiest way to increment the value wherever it is.
8933 Problems with multiple evaluation of INCREMENTED are prevented
8934 because either (1) it is a component_ref or preincrement,
8935 in which case it was stabilized above, or (2) it is an array_ref
8936 with constant index in an array in a register, which is
8937 safe to reevaluate. */
8938 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8939 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8940 ? MINUS_EXPR : PLUS_EXPR),
8941 TREE_TYPE (exp),
8942 incremented,
8943 TREE_OPERAND (exp, 1));
8944
8945 while (TREE_CODE (incremented) == NOP_EXPR
8946 || TREE_CODE (incremented) == CONVERT_EXPR)
8947 {
8948 newexp = convert (TREE_TYPE (incremented), newexp);
8949 incremented = TREE_OPERAND (incremented, 0);
8950 }
8951
8952 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8953 return post ? op0 : temp;
8954 }
8955
8956 if (post)
8957 {
8958 /* We have a true reference to the value in OP0.
8959 If there is an insn to add or subtract in this mode, queue it.
8960 Queueing the increment insn avoids the register shuffling
8961 that often results if we must increment now and first save
8962 the old value for subsequent use. */
8963
8964 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8965 op0 = stabilize (op0);
8966 #endif
8967
8968 icode = (int) this_optab->handlers[(int) mode].insn_code;
8969 if (icode != (int) CODE_FOR_nothing
8970 /* Make sure that OP0 is valid for operands 0 and 1
8971 of the insn we want to queue. */
8972 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8973 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8974 {
8975 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8976 op1 = force_reg (mode, op1);
8977
8978 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8979 }
8980 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8981 {
8982 rtx addr = (general_operand (XEXP (op0, 0), mode)
8983 ? force_reg (Pmode, XEXP (op0, 0))
8984 : copy_to_reg (XEXP (op0, 0)));
8985 rtx temp, result;
8986
8987 op0 = change_address (op0, VOIDmode, addr);
8988 temp = force_reg (GET_MODE (op0), op0);
8989 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8990 op1 = force_reg (mode, op1);
8991
8992 /* The increment queue is LIFO, thus we have to `queue'
8993 the instructions in reverse order. */
8994 enqueue_insn (op0, gen_move_insn (op0, temp));
8995 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8996 return result;
8997 }
8998 }
8999
9000 /* Preincrement, or we can't increment with one simple insn. */
9001 if (post)
9002 /* Save a copy of the value before inc or dec, to return it later. */
9003 temp = value = copy_to_reg (op0);
9004 else
9005 /* Arrange to return the incremented value. */
9006 /* Copy the rtx because expand_binop will protect from the queue,
9007 and the results of that would be invalid for us to return
9008 if our caller does emit_queue before using our result. */
9009 temp = copy_rtx (value = op0);
9010
9011 /* Increment however we can. */
9012 op1 = expand_binop (mode, this_optab, value, op1,
9013 current_function_check_memory_usage ? NULL_RTX : op0,
9014 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9015 /* Make sure the value is stored into OP0. */
9016 if (op1 != op0)
9017 emit_move_insn (op0, op1);
9018
9019 return temp;
9020 }
9021 \f
9022 /* Expand all function calls contained within EXP, innermost ones first.
9023 But don't look within expressions that have sequence points.
9024 For each CALL_EXPR, record the rtx for its value
9025 in the CALL_EXPR_RTL field. */
9026
9027 static void
9028 preexpand_calls (exp)
9029 tree exp;
9030 {
9031 register int nops, i;
9032 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9033
9034 if (! do_preexpand_calls)
9035 return;
9036
9037 /* Only expressions and references can contain calls. */
9038
9039 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9040 return;
9041
9042 switch (TREE_CODE (exp))
9043 {
9044 case CALL_EXPR:
9045 /* Do nothing if already expanded. */
9046 if (CALL_EXPR_RTL (exp) != 0
9047 /* Do nothing if the call returns a variable-sized object. */
9048 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9049 /* Do nothing to built-in functions. */
9050 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9051 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9052 == FUNCTION_DECL)
9053 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9054 return;
9055
9056 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9057 return;
9058
9059 case COMPOUND_EXPR:
9060 case COND_EXPR:
9061 case TRUTH_ANDIF_EXPR:
9062 case TRUTH_ORIF_EXPR:
9063 /* If we find one of these, then we can be sure
9064 the stack adjustment will be done for it (since it makes jumps).
9065 Do it now, so that if this is inside an argument
9066 of a function, we don't get the stack adjustment
9067 after some other args have already been pushed. */
9068 do_pending_stack_adjust ();
9069 return;
9070
9071 case BLOCK:
9072 case RTL_EXPR:
9073 case WITH_CLEANUP_EXPR:
9074 case CLEANUP_POINT_EXPR:
9075 case TRY_CATCH_EXPR:
9076 return;
9077
9078 case SAVE_EXPR:
9079 if (SAVE_EXPR_RTL (exp) != 0)
9080 return;
9081
9082 default:
9083 break;
9084 }
9085
9086 nops = tree_code_length[(int) TREE_CODE (exp)];
9087 for (i = 0; i < nops; i++)
9088 if (TREE_OPERAND (exp, i) != 0)
9089 {
9090 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9091 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9092 It doesn't happen before the call is made. */
9093 ;
9094 else
9095 {
9096 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9097 if (type == 'e' || type == '<' || type == '1' || type == '2'
9098 || type == 'r')
9099 preexpand_calls (TREE_OPERAND (exp, i));
9100 }
9101 }
9102 }
9103 \f
9104 /* At the start of a function, record that we have no previously-pushed
9105 arguments waiting to be popped. */
9106
9107 void
9108 init_pending_stack_adjust ()
9109 {
9110 pending_stack_adjust = 0;
9111 }
9112
9113 /* When exiting from function, if safe, clear out any pending stack adjust
9114 so the adjustment won't get done.
9115
9116 Note, if the current function calls alloca, then it must have a
9117 frame pointer regardless of the value of flag_omit_frame_pointer. */
9118
9119 void
9120 clear_pending_stack_adjust ()
9121 {
9122 #ifdef EXIT_IGNORE_STACK
9123 if (optimize > 0
9124 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9125 && EXIT_IGNORE_STACK
9126 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9127 && ! flag_inline_functions)
9128 {
9129 stack_pointer_delta -= pending_stack_adjust;
9130 pending_stack_adjust = 0;
9131 }
9132 #endif
9133 }
9134
9135 /* Pop any previously-pushed arguments that have not been popped yet. */
9136
9137 void
9138 do_pending_stack_adjust ()
9139 {
9140 if (inhibit_defer_pop == 0)
9141 {
9142 if (pending_stack_adjust != 0)
9143 adjust_stack (GEN_INT (pending_stack_adjust));
9144 pending_stack_adjust = 0;
9145 }
9146 }
9147 \f
9148 /* Expand conditional expressions. */
9149
9150 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9151 LABEL is an rtx of code CODE_LABEL, in this function and all the
9152 functions here. */
9153
9154 void
9155 jumpifnot (exp, label)
9156 tree exp;
9157 rtx label;
9158 {
9159 do_jump (exp, label, NULL_RTX);
9160 }
9161
9162 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9163
9164 void
9165 jumpif (exp, label)
9166 tree exp;
9167 rtx label;
9168 {
9169 do_jump (exp, NULL_RTX, label);
9170 }
9171
9172 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9173 the result is zero, or IF_TRUE_LABEL if the result is one.
9174 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9175 meaning fall through in that case.
9176
9177 do_jump always does any pending stack adjust except when it does not
9178 actually perform a jump. An example where there is no jump
9179 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9180
9181 This function is responsible for optimizing cases such as
9182 &&, || and comparison operators in EXP. */
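/* As a sketch of the short-circuiting done here: for EXP of the form
   `a && b' we jump to IF_FALSE_LABEL as soon as `a' is zero, and test
   `b' only when `a' is nonzero; for `a || b' we jump to IF_TRUE_LABEL
   as soon as `a' is nonzero.  Plain comparisons generally end up in
   do_compare_and_jump, so no boolean value is ever materialized.  */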
9183
9184 void
9185 do_jump (exp, if_false_label, if_true_label)
9186 tree exp;
9187 rtx if_false_label, if_true_label;
9188 {
9189 register enum tree_code code = TREE_CODE (exp);
9190 /* Some cases need to create a label to jump to
9191 in order to properly fall through.
9192 These cases set DROP_THROUGH_LABEL nonzero. */
9193 rtx drop_through_label = 0;
9194 rtx temp;
9195 int i;
9196 tree type;
9197 enum machine_mode mode;
9198
9199 #ifdef MAX_INTEGER_COMPUTATION_MODE
9200 check_max_integer_computation_mode (exp);
9201 #endif
9202
9203 emit_queue ();
9204
9205 switch (code)
9206 {
9207 case ERROR_MARK:
9208 break;
9209
9210 case INTEGER_CST:
9211 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9212 if (temp)
9213 emit_jump (temp);
9214 break;
9215
9216 #if 0
9217 /* This is not true with #pragma weak */
9218 case ADDR_EXPR:
9219 /* The address of something can never be zero. */
9220 if (if_true_label)
9221 emit_jump (if_true_label);
9222 break;
9223 #endif
9224
9225 case NOP_EXPR:
9226 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9227 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9228 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9229 goto normal;
9230 case CONVERT_EXPR:
9231 /* If we are narrowing the operand, we have to do the compare in the
9232 narrower mode. */
9233 if ((TYPE_PRECISION (TREE_TYPE (exp))
9234 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9235 goto normal;
9236 case NON_LVALUE_EXPR:
9237 case REFERENCE_EXPR:
9238 case ABS_EXPR:
9239 case NEGATE_EXPR:
9240 case LROTATE_EXPR:
9241 case RROTATE_EXPR:
9242 /* These cannot change zero->non-zero or vice versa. */
9243 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9244 break;
9245
9246 case WITH_RECORD_EXPR:
9247 /* Put the object on the placeholder list, recurse through our first
9248 operand, and pop the list. */
9249 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9250 placeholder_list);
9251 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9252 placeholder_list = TREE_CHAIN (placeholder_list);
9253 break;
9254
9255 #if 0
9256 /* This is never less insns than evaluating the PLUS_EXPR followed by
9257 a test and can be longer if the test is eliminated. */
9258 case PLUS_EXPR:
9259 /* Reduce to minus. */
9260 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9261 TREE_OPERAND (exp, 0),
9262 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9263 TREE_OPERAND (exp, 1))));
9264 /* Process as MINUS. */
9265 #endif
9266
9267 case MINUS_EXPR:
9268 /* Non-zero iff operands of minus differ. */
9269 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9270 TREE_OPERAND (exp, 0),
9271 TREE_OPERAND (exp, 1)),
9272 NE, NE, if_false_label, if_true_label);
9273 break;
9274
9275 case BIT_AND_EXPR:
9276 /* If we are AND'ing with a small constant, do this comparison in the
9277 smallest type that fits. If the machine doesn't have comparisons
9278 that small, it will be converted back to the wider comparison.
9279 This helps if we are testing the sign bit of a narrower object.
9280 combine can't do this for us because it can't know whether a
9281 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
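      /* For instance, a test of `x & 0x80' on an int X can be done as a
	 QImode comparison when the target has a QImode compare insn,
	 since only the low-order byte of X affects the result.  */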
9282
9283 if (! SLOW_BYTE_ACCESS
9284 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9285 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9286 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9287 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9288 && (type = type_for_mode (mode, 1)) != 0
9289 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9290 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9291 != CODE_FOR_nothing))
9292 {
9293 do_jump (convert (type, exp), if_false_label, if_true_label);
9294 break;
9295 }
9296 goto normal;
9297
9298 case TRUTH_NOT_EXPR:
9299 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9300 break;
9301
9302 case TRUTH_ANDIF_EXPR:
9303 if (if_false_label == 0)
9304 if_false_label = drop_through_label = gen_label_rtx ();
9305 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9306 start_cleanup_deferral ();
9307 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9308 end_cleanup_deferral ();
9309 break;
9310
9311 case TRUTH_ORIF_EXPR:
9312 if (if_true_label == 0)
9313 if_true_label = drop_through_label = gen_label_rtx ();
9314 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9315 start_cleanup_deferral ();
9316 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9317 end_cleanup_deferral ();
9318 break;
9319
9320 case COMPOUND_EXPR:
9321 push_temp_slots ();
9322 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9323 preserve_temp_slots (NULL_RTX);
9324 free_temp_slots ();
9325 pop_temp_slots ();
9326 emit_queue ();
9327 do_pending_stack_adjust ();
9328 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9329 break;
9330
9331 case COMPONENT_REF:
9332 case BIT_FIELD_REF:
9333 case ARRAY_REF:
9334 {
9335 HOST_WIDE_INT bitsize, bitpos;
9336 int unsignedp;
9337 enum machine_mode mode;
9338 tree type;
9339 tree offset;
9340 int volatilep = 0;
9341 unsigned int alignment;
9342
9343 /* Get description of this reference. We don't actually care
9344 about the underlying object here. */
9345 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9346 &unsignedp, &volatilep, &alignment);
9347
9348 type = type_for_size (bitsize, unsignedp);
9349 if (! SLOW_BYTE_ACCESS
9350 && type != 0 && bitsize >= 0
9351 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9352 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9353 != CODE_FOR_nothing))
9354 {
9355 do_jump (convert (type, exp), if_false_label, if_true_label);
9356 break;
9357 }
9358 goto normal;
9359 }
9360
9361 case COND_EXPR:
9362 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9363 if (integer_onep (TREE_OPERAND (exp, 1))
9364 && integer_zerop (TREE_OPERAND (exp, 2)))
9365 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9366
9367 else if (integer_zerop (TREE_OPERAND (exp, 1))
9368 && integer_onep (TREE_OPERAND (exp, 2)))
9369 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9370
9371 else
9372 {
9373 register rtx label1 = gen_label_rtx ();
9374 drop_through_label = gen_label_rtx ();
9375
9376 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9377
9378 start_cleanup_deferral ();
9379 /* Now the THEN-expression. */
9380 do_jump (TREE_OPERAND (exp, 1),
9381 if_false_label ? if_false_label : drop_through_label,
9382 if_true_label ? if_true_label : drop_through_label);
9383 /* In case the do_jump just above never jumps. */
9384 do_pending_stack_adjust ();
9385 emit_label (label1);
9386
9387 /* Now the ELSE-expression. */
9388 do_jump (TREE_OPERAND (exp, 2),
9389 if_false_label ? if_false_label : drop_through_label,
9390 if_true_label ? if_true_label : drop_through_label);
9391 end_cleanup_deferral ();
9392 }
9393 break;
9394
9395 case EQ_EXPR:
9396 {
9397 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9398
9399 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9400 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9401 {
9402 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9403 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9404 do_jump
9405 (fold
9406 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9407 fold (build (EQ_EXPR, TREE_TYPE (exp),
9408 fold (build1 (REALPART_EXPR,
9409 TREE_TYPE (inner_type),
9410 exp0)),
9411 fold (build1 (REALPART_EXPR,
9412 TREE_TYPE (inner_type),
9413 exp1)))),
9414 fold (build (EQ_EXPR, TREE_TYPE (exp),
9415 fold (build1 (IMAGPART_EXPR,
9416 TREE_TYPE (inner_type),
9417 exp0)),
9418 fold (build1 (IMAGPART_EXPR,
9419 TREE_TYPE (inner_type),
9420 exp1)))))),
9421 if_false_label, if_true_label);
9422 }
9423
9424 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9425 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9426
9427 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9428 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9429 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9430 else
9431 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9432 break;
9433 }
9434
9435 case NE_EXPR:
9436 {
9437 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9438
9439 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9440 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9441 {
9442 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9443 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9444 do_jump
9445 (fold
9446 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9447 fold (build (NE_EXPR, TREE_TYPE (exp),
9448 fold (build1 (REALPART_EXPR,
9449 TREE_TYPE (inner_type),
9450 exp0)),
9451 fold (build1 (REALPART_EXPR,
9452 TREE_TYPE (inner_type),
9453 exp1)))),
9454 fold (build (NE_EXPR, TREE_TYPE (exp),
9455 fold (build1 (IMAGPART_EXPR,
9456 TREE_TYPE (inner_type),
9457 exp0)),
9458 fold (build1 (IMAGPART_EXPR,
9459 TREE_TYPE (inner_type),
9460 exp1)))))),
9461 if_false_label, if_true_label);
9462 }
9463
9464 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9465 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9466
9467 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9468 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9469 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9470 else
9471 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9472 break;
9473 }
9474
9475 case LT_EXPR:
9476 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9477 if (GET_MODE_CLASS (mode) == MODE_INT
9478 && ! can_compare_p (LT, mode, ccp_jump))
9479 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9480 else
9481 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9482 break;
9483
9484 case LE_EXPR:
9485 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9486 if (GET_MODE_CLASS (mode) == MODE_INT
9487 && ! can_compare_p (LE, mode, ccp_jump))
9488 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9489 else
9490 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9491 break;
9492
9493 case GT_EXPR:
9494 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9495 if (GET_MODE_CLASS (mode) == MODE_INT
9496 && ! can_compare_p (GT, mode, ccp_jump))
9497 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9498 else
9499 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9500 break;
9501
9502 case GE_EXPR:
9503 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9504 if (GET_MODE_CLASS (mode) == MODE_INT
9505 && ! can_compare_p (GE, mode, ccp_jump))
9506 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9507 else
9508 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9509 break;
9510
9511 case UNORDERED_EXPR:
9512 case ORDERED_EXPR:
9513 {
9514 enum rtx_code cmp, rcmp;
9515 int do_rev;
9516
9517 if (code == UNORDERED_EXPR)
9518 cmp = UNORDERED, rcmp = ORDERED;
9519 else
9520 cmp = ORDERED, rcmp = UNORDERED;
9521 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9522
9523 do_rev = 0;
9524 if (! can_compare_p (cmp, mode, ccp_jump)
9525 && (can_compare_p (rcmp, mode, ccp_jump)
9526 /* If the target doesn't provide either UNORDERED or ORDERED
9527 comparisons, canonicalize on UNORDERED for the library. */
9528 || rcmp == UNORDERED))
9529 do_rev = 1;
9530
9531 if (! do_rev)
9532 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9533 else
9534 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9535 }
9536 break;
9537
9538 {
9539 enum rtx_code rcode1;
9540 enum tree_code tcode2;
9541
9542 case UNLT_EXPR:
9543 rcode1 = UNLT;
9544 tcode2 = LT_EXPR;
9545 goto unordered_bcc;
9546 case UNLE_EXPR:
9547 rcode1 = UNLE;
9548 tcode2 = LE_EXPR;
9549 goto unordered_bcc;
9550 case UNGT_EXPR:
9551 rcode1 = UNGT;
9552 tcode2 = GT_EXPR;
9553 goto unordered_bcc;
9554 case UNGE_EXPR:
9555 rcode1 = UNGE;
9556 tcode2 = GE_EXPR;
9557 goto unordered_bcc;
9558 case UNEQ_EXPR:
9559 rcode1 = UNEQ;
9560 tcode2 = EQ_EXPR;
9561 goto unordered_bcc;
9562
9563 unordered_bcc:
9564 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9565 if (can_compare_p (rcode1, mode, ccp_jump))
9566 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9567 if_true_label);
9568 else
9569 {
9570 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9571 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9572 tree cmp0, cmp1;
9573
9574 /* If the target doesn't support combined unordered
9575 compares, decompose into UNORDERED + comparison. */
9576 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9577 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9578 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9579 do_jump (exp, if_false_label, if_true_label);
9580 }
9581 }
9582 break;
9583
9584 default:
9585 normal:
9586 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9587 #if 0
9588 /* This is not needed any more and causes poor code since it causes
9589 comparisons and tests from non-SI objects to have different code
9590 sequences. */
9591 /* Copy to register to avoid generating bad insns by cse
9592 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9593 if (!cse_not_expected && GET_CODE (temp) == MEM)
9594 temp = copy_to_reg (temp);
9595 #endif
9596 do_pending_stack_adjust ();
9597 /* Do any postincrements in the expression that was tested. */
9598 emit_queue ();
9599
9600 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9601 {
9602 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9603 if (target)
9604 emit_jump (target);
9605 }
9606 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9607 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9608 /* Note swapping the labels gives us not-equal. */
9609 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9610 else if (GET_MODE (temp) != VOIDmode)
9611 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9612 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9613 GET_MODE (temp), NULL_RTX, 0,
9614 if_false_label, if_true_label);
9615 else
9616 abort ();
9617 }
9618
9619 if (drop_through_label)
9620 {
9621 /* If do_jump produces code that might be jumped around,
9622 do any stack adjusts from that code, before the place
9623 where control merges in. */
9624 do_pending_stack_adjust ();
9625 emit_label (drop_through_label);
9626 }
9627 }
9628 \f
9629 /* Given a comparison expression EXP for values too wide to be compared
9630 with one insn, test the comparison and jump to the appropriate label.
9631 The code of EXP is ignored; we always test GT if SWAP is 0,
9632 and LT if SWAP is 1. */
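/* (Testing LT is done simply by expanding the operands in the opposite
   order and still testing GT, which is why only the SWAP flag is
   needed here.)  */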
9633
9634 static void
9635 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9636 tree exp;
9637 int swap;
9638 rtx if_false_label, if_true_label;
9639 {
9640 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9641 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9642 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9643 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9644
9645 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label);
9646 }
9647
9648 /* Compare OP0 with OP1, word at a time, in mode MODE.
9649 UNSIGNEDP says to do unsigned comparison.
9650 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9651
9652 void
9653 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9654 enum machine_mode mode;
9655 int unsignedp;
9656 rtx op0, op1;
9657 rtx if_false_label, if_true_label;
9658 {
9659 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9660 rtx drop_through_label = 0;
9661 int i;
9662
9663 if (! if_true_label || ! if_false_label)
9664 drop_through_label = gen_label_rtx ();
9665 if (! if_true_label)
9666 if_true_label = drop_through_label;
9667 if (! if_false_label)
9668 if_false_label = drop_through_label;
9669
9670 /* Compare a word at a time, high order first. */
9671 for (i = 0; i < nwords; i++)
9672 {
9673 rtx op0_word, op1_word;
9674
9675 if (WORDS_BIG_ENDIAN)
9676 {
9677 op0_word = operand_subword_force (op0, i, mode);
9678 op1_word = operand_subword_force (op1, i, mode);
9679 }
9680 else
9681 {
9682 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9683 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9684 }
9685
9686 /* All but high-order word must be compared as unsigned. */
9687 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9688 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9689 NULL_RTX, if_true_label);
9690
9691 /* Consider lower words only if these are equal. */
9692 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9693 NULL_RTX, 0, NULL_RTX, if_false_label);
9694 }
9695
9696 if (if_false_label)
9697 emit_jump (if_false_label);
9698 if (drop_through_label)
9699 emit_label (drop_through_label);
9700 }
9701
9702 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9703 with one insn, test the comparison and jump to the appropriate label. */
9704
9705 static void
9706 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9707 tree exp;
9708 rtx if_false_label, if_true_label;
9709 {
9710 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9711 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9712 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9713 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9714 int i;
9715 rtx drop_through_label = 0;
9716
9717 if (! if_false_label)
9718 drop_through_label = if_false_label = gen_label_rtx ();
9719
9720 for (i = 0; i < nwords; i++)
9721 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9722 operand_subword_force (op1, i, mode),
9723 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9724 word_mode, NULL_RTX, 0, if_false_label,
9725 NULL_RTX);
9726
9727 if (if_true_label)
9728 emit_jump (if_true_label);
9729 if (drop_through_label)
9730 emit_label (drop_through_label);
9731 }
9732 \f
9733 /* Jump according to whether OP0 is 0.
9734 We assume that OP0 has an integer mode that is too wide
9735 for the available compare insns. */
9736
9737 void
9738 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9739 rtx op0;
9740 rtx if_false_label, if_true_label;
9741 {
9742 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9743 rtx part;
9744 int i;
9745 rtx drop_through_label = 0;
9746
9747 /* The fastest way of doing this comparison on almost any machine is to
9748 "or" all the words and compare the result. If all have to be loaded
9749 from memory and this is a very wide item, it's possible this may
9750 be slower, but that's highly unlikely. */
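  /* (E.g. with 32-bit words, a DImode value is handled by OR'ing its
     two word-size halves into one register and comparing that single
     word against zero.)  */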
9751
9752 part = gen_reg_rtx (word_mode);
9753 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9754 for (i = 1; i < nwords && part != 0; i++)
9755 part = expand_binop (word_mode, ior_optab, part,
9756 operand_subword_force (op0, i, GET_MODE (op0)),
9757 part, 1, OPTAB_WIDEN);
9758
9759 if (part != 0)
9760 {
9761 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9762 NULL_RTX, 0, if_false_label, if_true_label);
9763
9764 return;
9765 }
9766
9767 /* If we couldn't do the "or" simply, do this with a series of compares. */
9768 if (! if_false_label)
9769 drop_through_label = if_false_label = gen_label_rtx ();
9770
9771 for (i = 0; i < nwords; i++)
9772 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9773 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9774 if_false_label, NULL_RTX);
9775
9776 if (if_true_label)
9777 emit_jump (if_true_label);
9778
9779 if (drop_through_label)
9780 emit_label (drop_through_label);
9781 }
9782 \f
9783 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9784 (including code to compute the values to be compared)
9785 and set (CC0) according to the result.
9786 The decision as to signed or unsigned comparison must be made by the caller.
9787
9788 We force a stack adjustment unless there are currently
9789 things pushed on the stack that aren't yet used.
9790
9791 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9792 compared.
9793
9794 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9795 size of MODE should be used. */
9796
9797 rtx
9798 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9799 register rtx op0, op1;
9800 enum rtx_code code;
9801 int unsignedp;
9802 enum machine_mode mode;
9803 rtx size;
9804 unsigned int align;
9805 {
9806 rtx tem;
9807
9808 /* If one operand is constant, make it the second one. Only do this
9809 if the other operand is not constant as well. */
9810
9811 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9812 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9813 {
9814 tem = op0;
9815 op0 = op1;
9816 op1 = tem;
9817 code = swap_condition (code);
9818 }
9819
9820 if (flag_force_mem)
9821 {
9822 op0 = force_not_mem (op0);
9823 op1 = force_not_mem (op1);
9824 }
9825
9826 do_pending_stack_adjust ();
9827
9828 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9829 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9830 return tem;
9831
9832 #if 0
9833 /* There's no need to do this now that combine.c can eliminate lots of
9834 sign extensions. This can be less efficient in certain cases on other
9835 machines. */
9836
9837 /* If this is a signed equality comparison, we can do it as an
9838 unsigned comparison since zero-extension is cheaper than sign
9839 extension and comparisons with zero are done as unsigned. This is
9840 the case even on machines that can do fast sign extension, since
9841 zero-extension is easier to combine with other operations than
9842 sign-extension is. If we are comparing against a constant, we must
9843 convert it to what it would look like unsigned. */
9844 if ((code == EQ || code == NE) && ! unsignedp
9845 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9846 {
9847 if (GET_CODE (op1) == CONST_INT
9848 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9849 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9850 unsignedp = 1;
9851 }
9852 #endif
9853
9854 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9855
9856 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9857 }
9858
9859 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9860 The decision as to signed or unsigned comparison must be made by the caller.
9861
9862 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9863 compared.
9864
9865 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9866 size of MODE should be used. */
9867
9868 void
9869 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9870 if_false_label, if_true_label)
9871 register rtx op0, op1;
9872 enum rtx_code code;
9873 int unsignedp;
9874 enum machine_mode mode;
9875 rtx size;
9876 unsigned int align;
9877 rtx if_false_label, if_true_label;
9878 {
9879 rtx tem;
9880 int dummy_true_label = 0;
9881
9882 /* Reverse the comparison if that is safe and we want to jump if it is
9883 false. */
9884 if (! if_true_label && ! FLOAT_MODE_P (mode))
9885 {
9886 if_true_label = if_false_label;
9887 if_false_label = 0;
9888 code = reverse_condition (code);
9889 }
9890
9891 /* If one operand is constant, make it the second one. Only do this
9892 if the other operand is not constant as well. */
9893
9894 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9895 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9896 {
9897 tem = op0;
9898 op0 = op1;
9899 op1 = tem;
9900 code = swap_condition (code);
9901 }
9902
9903 if (flag_force_mem)
9904 {
9905 op0 = force_not_mem (op0);
9906 op1 = force_not_mem (op1);
9907 }
9908
9909 do_pending_stack_adjust ();
9910
9911 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9912 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9913 {
9914 if (tem == const_true_rtx)
9915 {
9916 if (if_true_label)
9917 emit_jump (if_true_label);
9918 }
9919 else
9920 {
9921 if (if_false_label)
9922 emit_jump (if_false_label);
9923 }
9924 return;
9925 }
9926
9927 #if 0
9928 /* There's no need to do this now that combine.c can eliminate lots of
9929 sign extensions. This can be less efficient in certain cases on other
9930 machines. */
9931
9932 /* If this is a signed equality comparison, we can do it as an
9933 unsigned comparison since zero-extension is cheaper than sign
9934 extension and comparisons with zero are done as unsigned. This is
9935 the case even on machines that can do fast sign extension, since
9936 zero-extension is easier to combine with other operations than
9937 sign-extension is. If we are comparing against a constant, we must
9938 convert it to what it would look like unsigned. */
9939 if ((code == EQ || code == NE) && ! unsignedp
9940 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9941 {
9942 if (GET_CODE (op1) == CONST_INT
9943 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9944 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9945 unsignedp = 1;
9946 }
9947 #endif
9948
9949 if (! if_true_label)
9950 {
9951 dummy_true_label = 1;
9952 if_true_label = gen_label_rtx ();
9953 }
9954
9955 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9956 if_true_label);
9957
9958 if (if_false_label)
9959 emit_jump (if_false_label);
9960 if (dummy_true_label)
9961 emit_label (if_true_label);
9962 }
9963
9964 /* Generate code for a comparison expression EXP (including code to compute
9965 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9966 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9967 generated code will drop through.
9968 SIGNED_CODE should be the rtx operation for this comparison for
9969 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9970
9971 We force a stack adjustment unless there are currently
9972 things pushed on the stack that aren't yet used. */
9973
9974 static void
9975 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9976 if_true_label)
9977 register tree exp;
9978 enum rtx_code signed_code, unsigned_code;
9979 rtx if_false_label, if_true_label;
9980 {
9981 unsigned int align0, align1;
9982 register rtx op0, op1;
9983 register tree type;
9984 register enum machine_mode mode;
9985 int unsignedp;
9986 enum rtx_code code;
9987
9988 /* Don't crash if the comparison was erroneous. */
9989 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9990 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9991 return;
9992
9993 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9994 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9995 mode = TYPE_MODE (type);
9996 unsignedp = TREE_UNSIGNED (type);
9997 code = unsignedp ? unsigned_code : signed_code;
9998
9999 #ifdef HAVE_canonicalize_funcptr_for_compare
10000 /* If function pointers need to be "canonicalized" before they can
10001 be reliably compared, then canonicalize them. */
10002 if (HAVE_canonicalize_funcptr_for_compare
10003 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10004 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10005 == FUNCTION_TYPE))
10006 {
10007 rtx new_op0 = gen_reg_rtx (mode);
10008
10009 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10010 op0 = new_op0;
10011 }
10012
10013 if (HAVE_canonicalize_funcptr_for_compare
10014 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10015 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10016 == FUNCTION_TYPE))
10017 {
10018 rtx new_op1 = gen_reg_rtx (mode);
10019
10020 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10021 op1 = new_op1;
10022 }
10023 #endif
10024
10025 /* Do any postincrements in the expression that was tested. */
10026 emit_queue ();
10027
10028 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10029 ((mode == BLKmode)
10030 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10031 MIN (align0, align1),
10032 if_false_label, if_true_label);
10033 }
10034 \f
10035 /* Generate code to calculate EXP using a store-flag instruction
10036 and return an rtx for the result. EXP is either a comparison
10037 or a TRUTH_NOT_EXPR whose operand is a comparison.
10038
10039 If TARGET is nonzero, store the result there if convenient.
10040
10041 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10042 cheap.
10043
10044 Return zero if there is no suitable set-flag instruction
10045 available on this machine.
10046
10047 Once expand_expr has been called on the arguments of the comparison,
10048 we are committed to doing the store flag, since it is not safe to
10049 re-evaluate the expression. We emit the store-flag insn by calling
10050 emit_store_flag, but only expand the arguments if we have a reason
10051 to believe that emit_store_flag will be successful. If we think that
10052 it will, but it isn't, we have to simulate the store-flag with a
10053 set/jump/set sequence. */
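/* The set/jump/set fallback mentioned above is, roughly:

	store 1 in TARGET
	branch to LAB if the comparison holds
	store 0 in TARGET
     LAB:

   with the two constants exchanged when the result must be inverted.  */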
10054
10055 static rtx
10056 do_store_flag (exp, target, mode, only_cheap)
10057 tree exp;
10058 rtx target;
10059 enum machine_mode mode;
10060 int only_cheap;
10061 {
10062 enum rtx_code code;
10063 tree arg0, arg1, type;
10064 tree tem;
10065 enum machine_mode operand_mode;
10066 int invert = 0;
10067 int unsignedp;
10068 rtx op0, op1;
10069 enum insn_code icode;
10070 rtx subtarget = target;
10071 rtx result, label;
10072
10073 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10074 result at the end. We can't simply invert the test since it would
10075 have already been inverted if it were valid. This case occurs for
10076 some floating-point comparisons. */
10077
10078 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10079 invert = 1, exp = TREE_OPERAND (exp, 0);
10080
10081 arg0 = TREE_OPERAND (exp, 0);
10082 arg1 = TREE_OPERAND (exp, 1);
10083 type = TREE_TYPE (arg0);
10084 operand_mode = TYPE_MODE (type);
10085 unsignedp = TREE_UNSIGNED (type);
10086
10087 /* We won't bother with BLKmode store-flag operations because it would mean
10088 passing a lot of information to emit_store_flag. */
10089 if (operand_mode == BLKmode)
10090 return 0;
10091
10092 /* We won't bother with store-flag operations involving function pointers
10093 when function pointers must be canonicalized before comparisons. */
10094 #ifdef HAVE_canonicalize_funcptr_for_compare
10095 if (HAVE_canonicalize_funcptr_for_compare
10096 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10097 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10098 == FUNCTION_TYPE))
10099 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10100 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10101 == FUNCTION_TYPE))))
10102 return 0;
10103 #endif
10104
10105 STRIP_NOPS (arg0);
10106 STRIP_NOPS (arg1);
10107
10108 /* Get the rtx comparison code to use. We know that EXP is a comparison
10109 operation of some type. Some comparisons against 1 and -1 can be
10110 converted to comparisons with zero. Do so here so that the tests
10111 below will be aware that we have a comparison with zero. These
10112 tests will not catch constants in the first operand, but constants
10113 are rarely passed as the first operand. */
10114
10115 switch (TREE_CODE (exp))
10116 {
10117 case EQ_EXPR:
10118 code = EQ;
10119 break;
10120 case NE_EXPR:
10121 code = NE;
10122 break;
10123 case LT_EXPR:
10124 if (integer_onep (arg1))
10125 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10126 else
10127 code = unsignedp ? LTU : LT;
10128 break;
10129 case LE_EXPR:
10130 if (! unsignedp && integer_all_onesp (arg1))
10131 arg1 = integer_zero_node, code = LT;
10132 else
10133 code = unsignedp ? LEU : LE;
10134 break;
10135 case GT_EXPR:
10136 if (! unsignedp && integer_all_onesp (arg1))
10137 arg1 = integer_zero_node, code = GE;
10138 else
10139 code = unsignedp ? GTU : GT;
10140 break;
10141 case GE_EXPR:
10142 if (integer_onep (arg1))
10143 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10144 else
10145 code = unsignedp ? GEU : GE;
10146 break;
10147
10148 case UNORDERED_EXPR:
10149 code = UNORDERED;
10150 break;
10151 case ORDERED_EXPR:
10152 code = ORDERED;
10153 break;
10154 case UNLT_EXPR:
10155 code = UNLT;
10156 break;
10157 case UNLE_EXPR:
10158 code = UNLE;
10159 break;
10160 case UNGT_EXPR:
10161 code = UNGT;
10162 break;
10163 case UNGE_EXPR:
10164 code = UNGE;
10165 break;
10166 case UNEQ_EXPR:
10167 code = UNEQ;
10168 break;
10169
10170 default:
10171 abort ();
10172 }
10173
10174 /* Put a constant second. */
10175 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10176 {
10177 tem = arg0; arg0 = arg1; arg1 = tem;
10178 code = swap_condition (code);
10179 }
10180
10181 /* If this is an equality or inequality test of a single bit, we can
10182 do this by shifting the bit being tested to the low-order bit and
10183 masking the result with the constant 1. If the condition was EQ,
10184 we xor it with 1. This does not require an scc insn and is faster
10185 than an scc insn even if we have it. */
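  /* For example, `(x & 8) != 0' becomes `(x >> 3) & 1', and the EQ form
     `(x & 8) == 0' gets that result XORed with 1 as well.  */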
10186
10187 if ((code == NE || code == EQ)
10188 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10189 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10190 {
10191 tree inner = TREE_OPERAND (arg0, 0);
10192 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10193 int ops_unsignedp;
10194
10195 /* If INNER is a right shift of a constant and it plus BITNUM does
10196 not overflow, adjust BITNUM and INNER. */
10197
10198 if (TREE_CODE (inner) == RSHIFT_EXPR
10199 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10200 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10201 && bitnum < TYPE_PRECISION (type)
10202 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10203 bitnum - TYPE_PRECISION (type)))
10204 {
10205 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10206 inner = TREE_OPERAND (inner, 0);
10207 }
10208
10209 /* If we are going to be able to omit the AND below, we must do our
10210 operations as unsigned. If we must use the AND, we have a choice.
10211 Normally unsigned is faster, but for some machines signed is. */
10212 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10213 #ifdef LOAD_EXTEND_OP
10214 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10215 #else
10216 : 1
10217 #endif
10218 );
10219
10220 if (subtarget == 0 || GET_CODE (subtarget) != REG
10221 || GET_MODE (subtarget) != operand_mode
10222 || ! safe_from_p (subtarget, inner, 1))
10223 subtarget = 0;
10224
10225 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10226
10227 if (bitnum != 0)
10228 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10229 size_int (bitnum), subtarget, ops_unsignedp);
10230
10231 if (GET_MODE (op0) != mode)
10232 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10233
10234 if ((code == EQ && ! invert) || (code == NE && invert))
10235 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10236 ops_unsignedp, OPTAB_LIB_WIDEN);
10237
10238 /* Put the AND last so it can combine with more things. */
10239 if (bitnum != TYPE_PRECISION (type) - 1)
10240 op0 = expand_and (op0, const1_rtx, subtarget);
10241
10242 return op0;
10243 }
10244
10245 /* Now see if we are likely to be able to do this. Return if not. */
10246 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10247 return 0;
10248
10249 icode = setcc_gen_code[(int) code];
10250 if (icode == CODE_FOR_nothing
10251 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10252 {
10253 /* We can only do this if it is one of the special cases that
10254 can be handled without an scc insn. */
10255 if ((code == LT && integer_zerop (arg1))
10256 || (! only_cheap && code == GE && integer_zerop (arg1)))
10257 ;
10258 else if (BRANCH_COST >= 0
10259 && ! only_cheap && (code == NE || code == EQ)
10260 && TREE_CODE (type) != REAL_TYPE
10261 && ((abs_optab->handlers[(int) operand_mode].insn_code
10262 != CODE_FOR_nothing)
10263 || (ffs_optab->handlers[(int) operand_mode].insn_code
10264 != CODE_FOR_nothing)))
10265 ;
10266 else
10267 return 0;
10268 }
10269
10270 preexpand_calls (exp);
10271 if (subtarget == 0 || GET_CODE (subtarget) != REG
10272 || GET_MODE (subtarget) != operand_mode
10273 || ! safe_from_p (subtarget, arg1, 1))
10274 subtarget = 0;
10275
10276 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10277 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10278
10279 if (target == 0)
10280 target = gen_reg_rtx (mode);
10281
10282 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10283 because, if emit_store_flag does anything, it will succeed and
10284 OP0 and OP1 will not be used subsequently. */
10285
10286 result = emit_store_flag (target, code,
10287 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10288 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10289 operand_mode, unsignedp, 1);
10290
10291 if (result)
10292 {
10293 if (invert)
10294 result = expand_binop (mode, xor_optab, result, const1_rtx,
10295 result, 0, OPTAB_LIB_WIDEN);
10296 return result;
10297 }
10298
10299 /* If this failed, we have to do this with set/compare/jump/set code. */
10300 if (GET_CODE (target) != REG
10301 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10302 target = gen_reg_rtx (GET_MODE (target));
10303
10304 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10305 result = compare_from_rtx (op0, op1, code, unsignedp,
10306 operand_mode, NULL_RTX, 0);
10307 if (GET_CODE (result) == CONST_INT)
10308 return (((result == const0_rtx && ! invert)
10309 || (result != const0_rtx && invert))
10310 ? const0_rtx : const1_rtx);
10311
10312 label = gen_label_rtx ();
10313 if (bcc_gen_fctn[(int) code] == 0)
10314 abort ();
10315
10316 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10317 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10318 emit_label (label);
10319
10320 return target;
10321 }
10322 \f
10323 /* Generate a tablejump instruction (used for switch statements). */
10324
10325 #ifdef HAVE_tablejump
10326
10327 /* INDEX is the value being switched on, with the lowest value
10328 in the table already subtracted.
10329 MODE is its expected mode (needed if INDEX is constant).
10330 RANGE is the length of the jump table.
10331 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10332
10333 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10334 index value is out of range. */
10335
10336 void
10337 do_tablejump (index, mode, range, table_label, default_label)
10338 rtx index, range, table_label, default_label;
10339 enum machine_mode mode;
10340 {
10341 register rtx temp, vector;
10342
10343 /* Do an unsigned comparison (in the proper mode) between the index
10344 expression and the value which represents the length of the range.
10345 Since we just finished subtracting the lower bound of the range
10346 from the index expression, this comparison allows us to simultaneously
10347 check that the original index expression value is both greater than
10348 or equal to the minimum value of the range and less than or equal to
10349 the maximum value of the range. */
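  /* (For example, with a case range of 10..20 the lower bound 10 has
     already been subtracted; an original index of 5 then appears as the
     very large unsigned value -5, so the single GTU test against the
     table length also rejects indexes below the minimum.)  */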
10350
10351 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10352 0, default_label);
10353
10354 /* If index is in range, it must fit in Pmode.
10355 Convert to Pmode so we can index with it. */
10356 if (mode != Pmode)
10357 index = convert_to_mode (Pmode, index, 1);
10358
10359 /* Don't let a MEM slip through, because then INDEX that comes
10360 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10361 and break_out_memory_refs will go to work on it and mess it up. */
10362 #ifdef PIC_CASE_VECTOR_ADDRESS
10363 if (flag_pic && GET_CODE (index) != REG)
10364 index = copy_to_mode_reg (Pmode, index);
10365 #endif
10366
10367 /* If flag_force_addr were to affect this address
10368 it could interfere with the tricky assumptions made
10369 about addresses that contain label-refs,
10370 which may be valid only very near the tablejump itself. */
10371 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10372 GET_MODE_SIZE, because this indicates how large insns are. The other
10373 uses should all be Pmode, because they are addresses. This code
10374 could fail if addresses and insns are not the same size. */
10375 index = gen_rtx_PLUS (Pmode,
10376 gen_rtx_MULT (Pmode, index,
10377 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10378 gen_rtx_LABEL_REF (Pmode, table_label));
10379 #ifdef PIC_CASE_VECTOR_ADDRESS
10380 if (flag_pic)
10381 index = PIC_CASE_VECTOR_ADDRESS (index);
10382 else
10383 #endif
10384 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10385 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10386 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10387 RTX_UNCHANGING_P (vector) = 1;
10388 convert_move (temp, vector, 0);
10389
10390 emit_jump_insn (gen_tablejump (temp, table_label));
10391
10392 /* If we are generating PIC code or if the table is PC-relative, the
10393 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10394 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10395 emit_barrier ();
10396 }
10397
10398 #endif /* HAVE_tablejump */