rtl.h (MEM_COPY_ATTRIBUTES): Also copy RTX_UNCHANGING_P and MEM_ALIAS_SET.
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "defaults.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 #ifndef ACCUMULATE_OUTGOING_ARGS
50 #define ACCUMULATE_OUTGOING_ARGS 0
51 #endif
52
53 /* Supply a default definition for PUSH_ARGS. */
54 #ifndef PUSH_ARGS
55 #ifdef PUSH_ROUNDING
56 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
57 #else
58 #define PUSH_ARGS 0
59 #endif
60 #endif
61
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
64
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
67
68 #ifdef PUSH_ROUNDING
69
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first */
72 #endif
73
74 #endif
75
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
83
84 /* Assume that case vectors are not pc-relative. */
85 #ifndef CASE_VECTOR_PC_RELATIVE
86 #define CASE_VECTOR_PC_RELATIVE 0
87 #endif
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Nonzero to generate code for all the subroutines within an
98 expression before generating the upper levels of the expression.
99 Nowadays this is never zero. */
100 int do_preexpand_calls = 1;
101
 102 /* Don't check memory usage, since code is being emitted to check memory
103 usage. Used when current_function_check_memory_usage is true, to avoid
104 infinite recursion. */
105 static int in_check_memory_usage;
106
107 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
108 static tree placeholder_list = 0;
109
110 /* This structure is used by move_by_pieces to describe the move to
111 be performed. */
112 struct move_by_pieces
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 int to_struct;
119 int to_readonly;
120 rtx from;
121 rtx from_addr;
122 int autinc_from;
123 int explicit_inc_from;
124 int from_struct;
125 int from_readonly;
126 int len;
127 int offset;
128 int reverse;
129 };
130
131 /* This structure is used by clear_by_pieces to describe the clear to
132 be performed. */
133
134 struct clear_by_pieces
135 {
136 rtx to;
137 rtx to_addr;
138 int autinc_to;
139 int explicit_inc_to;
140 int to_struct;
141 int len;
142 int offset;
143 int reverse;
144 };
145
146 extern struct obstack permanent_obstack;
147
148 static rtx get_push_address PARAMS ((int));
149
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
152 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
153 struct move_by_pieces *));
154 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
155 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
156 enum machine_mode,
157 struct clear_by_pieces *));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, unsigned int, int));
163 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
164 HOST_WIDE_INT));
165 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
166 HOST_WIDE_INT, enum machine_mode,
167 tree, enum machine_mode, int,
168 unsigned int, HOST_WIDE_INT, int));
169 static enum memory_use_mode
170 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
171 static tree save_noncopied_parts PARAMS ((tree, tree));
172 static tree init_noncopied_parts PARAMS ((tree, tree));
173 static int safe_from_p PARAMS ((rtx, tree, int));
174 static int fixed_type_p PARAMS ((tree));
175 static rtx var_rtx PARAMS ((tree));
176 static int readonly_fields_p PARAMS ((tree));
177 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
178 static rtx expand_increment PARAMS ((tree, int, int));
179 static void preexpand_calls PARAMS ((tree));
180 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
181 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
182 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
183 rtx, rtx));
184 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
185
186 /* Record for each mode whether we can move a register directly to or
187 from an object of that mode in memory. If we can't, we won't try
188 to use that mode directly when accessing a field of that mode. */
189
190 static char direct_load[NUM_MACHINE_MODES];
191 static char direct_store[NUM_MACHINE_MODES];
192
193 /* If a memory-to-memory move would take MOVE_RATIO or more simple
194 move-instruction sequences, we will do a movstr or libcall instead. */
195
196 #ifndef MOVE_RATIO
197 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
198 #define MOVE_RATIO 2
199 #else
 200 /* If we are optimizing for space (-Os), cut down the default move ratio. */
201 #define MOVE_RATIO (optimize_size ? 3 : 15)
202 #endif
203 #endif
204
205 /* This macro is used to determine whether move_by_pieces should be called
206 to perform a structure copy. */
207 #ifndef MOVE_BY_PIECES_P
208 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
209 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
210 #endif
211
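/* Worked example (illustrative, with assumed target values): on a
   32-bit target with no movstr patterns, MOVE_MAX == 4 and
   optimize_size == 0, MOVE_RATIO is 15.  A word-aligned copy of 32
   bytes takes 32 / 4 = 8 SImode moves, so MOVE_BY_PIECES_P is true and
   the copy is expanded inline by move_by_pieces.  A 256-byte copy
   would take 64 such moves and instead falls back to a library call in
   emit_block_move, below.  */
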
212 /* This array records the insn_code of insns to perform block moves. */
213 enum insn_code movstr_optab[NUM_MACHINE_MODES];
214
215 /* This array records the insn_code of insns to perform block clears. */
216 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
217
218 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
219
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
222 #endif
223 \f
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
226
227 void
228 init_expr_once ()
229 {
230 rtx insn, pat;
231 enum machine_mode mode;
232 int num_clobbers;
233 rtx mem, mem1;
234 char *free_point;
235
236 start_sequence ();
237
238 /* Since we are on the permanent obstack, we must be sure we save this
239 spot AFTER we call start_sequence, since it will reuse the rtl it
240 makes. */
241 free_point = (char *) oballoc (0);
242
243 /* Try indexing by frame ptr and try by stack ptr.
244 It is known that on the Convex the stack ptr isn't a valid index.
245 With luck, one or the other is valid on any machine. */
246 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
247 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
248
249 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
250 pat = PATTERN (insn);
251
252 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
253 mode = (enum machine_mode) ((int) mode + 1))
254 {
255 int regno;
256 rtx reg;
257
258 direct_load[(int) mode] = direct_store[(int) mode] = 0;
259 PUT_MODE (mem, mode);
260 PUT_MODE (mem1, mode);
261
262 /* See if there is some register that can be used in this mode and
263 directly loaded or stored from memory. */
264
265 if (mode != VOIDmode && mode != BLKmode)
266 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
267 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
268 regno++)
269 {
270 if (! HARD_REGNO_MODE_OK (regno, mode))
271 continue;
272
273 reg = gen_rtx_REG (mode, regno);
274
275 SET_SRC (pat) = mem;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
279
280 SET_SRC (pat) = mem1;
281 SET_DEST (pat) = reg;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_load[(int) mode] = 1;
284
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
289
290 SET_SRC (pat) = reg;
291 SET_DEST (pat) = mem1;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_store[(int) mode] = 1;
294 }
295 }
296
297 end_sequence ();
298 obfree (free_point);
299 }
300
301 /* This is run at the start of compiling a function. */
302
303 void
304 init_expr ()
305 {
306 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
307
308 pending_chain = 0;
309 pending_stack_adjust = 0;
310 stack_pointer_delta = 0;
311 inhibit_defer_pop = 0;
312 saveregs_value = 0;
313 apply_args_value = 0;
314 forced_labels = 0;
315 }
316
317 void
318 mark_expr_status (p)
319 struct expr_status *p;
320 {
321 if (p == NULL)
322 return;
323
324 ggc_mark_rtx (p->x_saveregs_value);
325 ggc_mark_rtx (p->x_apply_args_value);
326 ggc_mark_rtx (p->x_forced_labels);
327 }
328
329 void
330 free_expr_status (f)
331 struct function *f;
332 {
333 free (f->expr);
334 f->expr = NULL;
335 }
336
337 /* Small sanity check that the queue is empty at the end of a function. */
338 void
339 finish_expr_for_function ()
340 {
341 if (pending_chain)
342 abort ();
343 }
344 \f
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
347
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
351
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
354
355 static rtx
356 enqueue_insn (var, body)
357 rtx var, body;
358 {
359 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
360 body, pending_chain);
361 return pending_chain;
362 }
363
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
370
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
374
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
378
379 rtx
380 protect_from_queue (x, modify)
381 register rtx x;
382 int modify;
383 {
384 register RTX_CODE code = GET_CODE (x);
385
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain == 0)
389 return x;
390 #endif
391
392 if (code != QUEUED)
393 {
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
398 shared. */
399 if (code == MEM && GET_MODE (x) != BLKmode
400 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
401 {
402 register rtx y = XEXP (x, 0);
403 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
404
405 MEM_COPY_ATTRIBUTES (new, x);
406
407 if (QUEUED_INSN (y))
408 {
409 register rtx temp = gen_reg_rtx (GET_MODE (new));
410 emit_insn_before (gen_move_insn (temp, new),
411 QUEUED_INSN (y));
412 return temp;
413 }
414 return new;
415 }
416 /* Otherwise, recursively protect the subexpressions of all
417 the kinds of rtx's that can contain a QUEUED. */
418 if (code == MEM)
419 {
420 rtx tem = protect_from_queue (XEXP (x, 0), 0);
421 if (tem != XEXP (x, 0))
422 {
423 x = copy_rtx (x);
424 XEXP (x, 0) = tem;
425 }
426 }
427 else if (code == PLUS || code == MULT)
428 {
429 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
430 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
431 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
432 {
433 x = copy_rtx (x);
434 XEXP (x, 0) = new0;
435 XEXP (x, 1) = new1;
436 }
437 }
438 return x;
439 }
440 /* If the increment has not happened, use the variable itself. */
441 if (QUEUED_INSN (x) == 0)
442 return QUEUED_VAR (x);
443 /* If the increment has happened and a pre-increment copy exists,
444 use that copy. */
445 if (QUEUED_COPY (x) != 0)
446 return QUEUED_COPY (x);
447 /* The increment has happened but we haven't set up a pre-increment copy.
448 Set one up now, and use it. */
449 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
450 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
451 QUEUED_INSN (x));
452 return QUEUED_COPY (x);
453 }
454
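/* Illustrative sketch of the calling discipline described above; the
   helper below is hypothetical, not part of GNU CC, and mirrors what
   convert_move and emit_block_move do further down in this file.  */
#if 0
static void
example_protected_move (to, from)
     rtx to, from;
{
  /* TO will be written; FROM is only read.  */
  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  /* Use the protected rtxs right away; holding them across a later
     emit_queue could leave stale QUEUED references around.  */
  emit_move_insn (to, from);

  /* Perform any queued post-increments.  */
  emit_queue ();
}
#endif
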
455 /* Return nonzero if X contains a QUEUED expression:
456 if it contains anything that will be altered by a queued increment.
457 We handle only combinations of MEM, PLUS, MINUS and MULT operators
458 since memory addresses generally contain only those. */
459
460 int
461 queued_subexp_p (x)
462 rtx x;
463 {
464 register enum rtx_code code = GET_CODE (x);
465 switch (code)
466 {
467 case QUEUED:
468 return 1;
469 case MEM:
470 return queued_subexp_p (XEXP (x, 0));
471 case MULT:
472 case PLUS:
473 case MINUS:
474 return (queued_subexp_p (XEXP (x, 0))
475 || queued_subexp_p (XEXP (x, 1)));
476 default:
477 return 0;
478 }
479 }
480
481 /* Perform all the pending incrementations. */
482
483 void
484 emit_queue ()
485 {
486 register rtx p;
487 while ((p = pending_chain))
488 {
489 rtx body = QUEUED_BODY (p);
490
491 if (GET_CODE (body) == SEQUENCE)
492 {
493 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
494 emit_insn (QUEUED_BODY (p));
495 }
496 else
497 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
498 pending_chain = QUEUED_NEXT (p);
499 }
500 }
501 \f
502 /* Copy data from FROM to TO, where the machine modes are not the same.
503 Both modes may be integer, or both may be floating.
504 UNSIGNEDP should be nonzero if FROM is an unsigned type.
505 This causes zero-extension instead of sign-extension. */
506
507 void
508 convert_move (to, from, unsignedp)
509 register rtx to, from;
510 int unsignedp;
511 {
512 enum machine_mode to_mode = GET_MODE (to);
513 enum machine_mode from_mode = GET_MODE (from);
514 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
515 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
516 enum insn_code code;
517 rtx libcall;
518
519 /* rtx code for making an equivalent value. */
520 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
521
522 to = protect_from_queue (to, 1);
523 from = protect_from_queue (from, 0);
524
525 if (to_real != from_real)
526 abort ();
527
528 /* If FROM is a SUBREG that indicates that we have already done at least
529 the required extension, strip it. We don't handle such SUBREGs as
530 TO here. */
531
532 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
533 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
534 >= GET_MODE_SIZE (to_mode))
535 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
536 from = gen_lowpart (to_mode, from), from_mode = to_mode;
537
538 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
539 abort ();
540
541 if (to_mode == from_mode
542 || (from_mode == VOIDmode && CONSTANT_P (from)))
543 {
544 emit_move_insn (to, from);
545 return;
546 }
547
548 if (to_real)
549 {
550 rtx value;
551
552 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
553 {
554 /* Try converting directly if the insn is supported. */
555 if ((code = can_extend_p (to_mode, from_mode, 0))
556 != CODE_FOR_nothing)
557 {
558 emit_unop_insn (code, to, from, UNKNOWN);
559 return;
560 }
561 }
562
563 #ifdef HAVE_trunchfqf2
564 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
565 {
566 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
567 return;
568 }
569 #endif
570 #ifdef HAVE_trunctqfqf2
571 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
572 {
573 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
574 return;
575 }
576 #endif
577 #ifdef HAVE_truncsfqf2
578 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
579 {
580 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
581 return;
582 }
583 #endif
584 #ifdef HAVE_truncdfqf2
585 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
586 {
587 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
588 return;
589 }
590 #endif
591 #ifdef HAVE_truncxfqf2
592 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
593 {
594 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
595 return;
596 }
597 #endif
598 #ifdef HAVE_trunctfqf2
599 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
600 {
601 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
602 return;
603 }
604 #endif
605
606 #ifdef HAVE_trunctqfhf2
607 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
608 {
609 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
610 return;
611 }
612 #endif
613 #ifdef HAVE_truncsfhf2
614 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
615 {
616 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
617 return;
618 }
619 #endif
620 #ifdef HAVE_truncdfhf2
621 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
622 {
623 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
624 return;
625 }
626 #endif
627 #ifdef HAVE_truncxfhf2
628 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
629 {
630 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
631 return;
632 }
633 #endif
634 #ifdef HAVE_trunctfhf2
635 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
636 {
637 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641
642 #ifdef HAVE_truncsftqf2
643 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
644 {
645 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
646 return;
647 }
648 #endif
649 #ifdef HAVE_truncdftqf2
650 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
651 {
652 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
653 return;
654 }
655 #endif
656 #ifdef HAVE_truncxftqf2
657 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
658 {
659 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
660 return;
661 }
662 #endif
663 #ifdef HAVE_trunctftqf2
664 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
665 {
666 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
667 return;
668 }
669 #endif
670
671 #ifdef HAVE_truncdfsf2
672 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
673 {
674 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
675 return;
676 }
677 #endif
678 #ifdef HAVE_truncxfsf2
679 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
680 {
681 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
682 return;
683 }
684 #endif
685 #ifdef HAVE_trunctfsf2
686 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
687 {
688 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
689 return;
690 }
691 #endif
692 #ifdef HAVE_truncxfdf2
693 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
694 {
695 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
696 return;
697 }
698 #endif
699 #ifdef HAVE_trunctfdf2
700 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
701 {
702 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
703 return;
704 }
705 #endif
706
707 libcall = (rtx) 0;
708 switch (from_mode)
709 {
710 case SFmode:
711 switch (to_mode)
712 {
713 case DFmode:
714 libcall = extendsfdf2_libfunc;
715 break;
716
717 case XFmode:
718 libcall = extendsfxf2_libfunc;
719 break;
720
721 case TFmode:
722 libcall = extendsftf2_libfunc;
723 break;
724
725 default:
726 break;
727 }
728 break;
729
730 case DFmode:
731 switch (to_mode)
732 {
733 case SFmode:
734 libcall = truncdfsf2_libfunc;
735 break;
736
737 case XFmode:
738 libcall = extenddfxf2_libfunc;
739 break;
740
741 case TFmode:
742 libcall = extenddftf2_libfunc;
743 break;
744
745 default:
746 break;
747 }
748 break;
749
750 case XFmode:
751 switch (to_mode)
752 {
753 case SFmode:
754 libcall = truncxfsf2_libfunc;
755 break;
756
757 case DFmode:
758 libcall = truncxfdf2_libfunc;
759 break;
760
761 default:
762 break;
763 }
764 break;
765
766 case TFmode:
767 switch (to_mode)
768 {
769 case SFmode:
770 libcall = trunctfsf2_libfunc;
771 break;
772
773 case DFmode:
774 libcall = trunctfdf2_libfunc;
775 break;
776
777 default:
778 break;
779 }
780 break;
781
782 default:
783 break;
784 }
785
786 if (libcall == (rtx) 0)
787 /* This conversion is not implemented yet. */
788 abort ();
789
790 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
791 1, from, from_mode);
792 emit_move_insn (to, value);
793 return;
794 }
795
796 /* Now both modes are integers. */
797
798 /* Handle expanding beyond a word. */
799 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
800 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
801 {
802 rtx insns;
803 rtx lowpart;
804 rtx fill_value;
805 rtx lowfrom;
806 int i;
807 enum machine_mode lowpart_mode;
808 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
809
810 /* Try converting directly if the insn is supported. */
811 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
812 != CODE_FOR_nothing)
813 {
814 /* If FROM is a SUBREG, put it into a register. Do this
815 so that we always generate the same set of insns for
816 better cse'ing; if an intermediate assignment occurred,
817 we won't be doing the operation directly on the SUBREG. */
818 if (optimize > 0 && GET_CODE (from) == SUBREG)
819 from = force_reg (from_mode, from);
820 emit_unop_insn (code, to, from, equiv_code);
821 return;
822 }
823 /* Next, try converting via full word. */
824 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
825 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
826 != CODE_FOR_nothing))
827 {
828 if (GET_CODE (to) == REG)
829 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
830 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
831 emit_unop_insn (code, to,
832 gen_lowpart (word_mode, to), equiv_code);
833 return;
834 }
835
836 /* No special multiword conversion insn; do it by hand. */
837 start_sequence ();
838
839 /* Since we will turn this into a no conflict block, we must ensure
840 that the source does not overlap the target. */
841
842 if (reg_overlap_mentioned_p (to, from))
843 from = force_reg (from_mode, from);
844
845 /* Get a copy of FROM widened to a word, if necessary. */
846 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
847 lowpart_mode = word_mode;
848 else
849 lowpart_mode = from_mode;
850
851 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
852
853 lowpart = gen_lowpart (lowpart_mode, to);
854 emit_move_insn (lowpart, lowfrom);
855
856 /* Compute the value to put in each remaining word. */
857 if (unsignedp)
858 fill_value = const0_rtx;
859 else
860 {
861 #ifdef HAVE_slt
862 if (HAVE_slt
863 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
864 && STORE_FLAG_VALUE == -1)
865 {
866 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
867 lowpart_mode, 0, 0);
868 fill_value = gen_reg_rtx (word_mode);
869 emit_insn (gen_slt (fill_value));
870 }
871 else
872 #endif
873 {
874 fill_value
875 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
876 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
877 NULL_RTX, 0);
878 fill_value = convert_to_mode (word_mode, fill_value, 1);
879 }
880 }
881
882 /* Fill the remaining words. */
883 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
884 {
885 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
886 rtx subword = operand_subword (to, index, 1, to_mode);
887
888 if (subword == 0)
889 abort ();
890
891 if (fill_value != subword)
892 emit_move_insn (subword, fill_value);
893 }
894
895 insns = get_insns ();
896 end_sequence ();
897
898 emit_no_conflict_block (insns, to, from, NULL_RTX,
899 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
900 return;
901 }
902
903 /* Truncating multi-word to a word or less. */
904 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
905 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
906 {
907 if (!((GET_CODE (from) == MEM
908 && ! MEM_VOLATILE_P (from)
909 && direct_load[(int) to_mode]
910 && ! mode_dependent_address_p (XEXP (from, 0)))
911 || GET_CODE (from) == REG
912 || GET_CODE (from) == SUBREG))
913 from = force_reg (from_mode, from);
914 convert_move (to, gen_lowpart (word_mode, from), 0);
915 return;
916 }
917
918 /* Handle pointer conversion */ /* SPEE 900220 */
919 if (to_mode == PQImode)
920 {
921 if (from_mode != QImode)
922 from = convert_to_mode (QImode, from, unsignedp);
923
924 #ifdef HAVE_truncqipqi2
925 if (HAVE_truncqipqi2)
926 {
927 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
928 return;
929 }
930 #endif /* HAVE_truncqipqi2 */
931 abort ();
932 }
933
934 if (from_mode == PQImode)
935 {
936 if (to_mode != QImode)
937 {
938 from = convert_to_mode (QImode, from, unsignedp);
939 from_mode = QImode;
940 }
941 else
942 {
943 #ifdef HAVE_extendpqiqi2
944 if (HAVE_extendpqiqi2)
945 {
946 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
947 return;
948 }
949 #endif /* HAVE_extendpqiqi2 */
950 abort ();
951 }
952 }
953
954 if (to_mode == PSImode)
955 {
956 if (from_mode != SImode)
957 from = convert_to_mode (SImode, from, unsignedp);
958
959 #ifdef HAVE_truncsipsi2
960 if (HAVE_truncsipsi2)
961 {
962 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
963 return;
964 }
965 #endif /* HAVE_truncsipsi2 */
966 abort ();
967 }
968
969 if (from_mode == PSImode)
970 {
971 if (to_mode != SImode)
972 {
973 from = convert_to_mode (SImode, from, unsignedp);
974 from_mode = SImode;
975 }
976 else
977 {
978 #ifdef HAVE_extendpsisi2
979 if (HAVE_extendpsisi2)
980 {
981 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
982 return;
983 }
984 #endif /* HAVE_extendpsisi2 */
985 abort ();
986 }
987 }
988
989 if (to_mode == PDImode)
990 {
991 if (from_mode != DImode)
992 from = convert_to_mode (DImode, from, unsignedp);
993
994 #ifdef HAVE_truncdipdi2
995 if (HAVE_truncdipdi2)
996 {
997 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
998 return;
999 }
1000 #endif /* HAVE_truncdipdi2 */
1001 abort ();
1002 }
1003
1004 if (from_mode == PDImode)
1005 {
1006 if (to_mode != DImode)
1007 {
1008 from = convert_to_mode (DImode, from, unsignedp);
1009 from_mode = DImode;
1010 }
1011 else
1012 {
1013 #ifdef HAVE_extendpdidi2
1014 if (HAVE_extendpdidi2)
1015 {
1016 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1017 return;
1018 }
1019 #endif /* HAVE_extendpdidi2 */
1020 abort ();
1021 }
1022 }
1023
1024 /* Now follow all the conversions between integers
1025 no more than a word long. */
1026
1027 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1028 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1029 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1030 GET_MODE_BITSIZE (from_mode)))
1031 {
1032 if (!((GET_CODE (from) == MEM
1033 && ! MEM_VOLATILE_P (from)
1034 && direct_load[(int) to_mode]
1035 && ! mode_dependent_address_p (XEXP (from, 0)))
1036 || GET_CODE (from) == REG
1037 || GET_CODE (from) == SUBREG))
1038 from = force_reg (from_mode, from);
1039 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1040 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1041 from = copy_to_reg (from);
1042 emit_move_insn (to, gen_lowpart (to_mode, from));
1043 return;
1044 }
1045
1046 /* Handle extension. */
1047 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1048 {
1049 /* Convert directly if that works. */
1050 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1051 != CODE_FOR_nothing)
1052 {
1053 emit_unop_insn (code, to, from, equiv_code);
1054 return;
1055 }
1056 else
1057 {
1058 enum machine_mode intermediate;
1059 rtx tmp;
1060 tree shift_amount;
1061
1062 /* Search for a mode to convert via. */
1063 for (intermediate = from_mode; intermediate != VOIDmode;
1064 intermediate = GET_MODE_WIDER_MODE (intermediate))
1065 if (((can_extend_p (to_mode, intermediate, unsignedp)
1066 != CODE_FOR_nothing)
1067 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1068 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1069 GET_MODE_BITSIZE (intermediate))))
1070 && (can_extend_p (intermediate, from_mode, unsignedp)
1071 != CODE_FOR_nothing))
1072 {
1073 convert_move (to, convert_to_mode (intermediate, from,
1074 unsignedp), unsignedp);
1075 return;
1076 }
1077
1078 /* No suitable intermediate mode.
1079 Generate what we need with shifts. */
1080 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1081 - GET_MODE_BITSIZE (from_mode), 0);
1082 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1083 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1084 to, unsignedp);
1085 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1086 to, unsignedp);
1087 if (tmp != to)
1088 emit_move_insn (to, tmp);
1089 return;
1090 }
1091 }
1092
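  /* Illustrative aside (assuming a 32-bit SImode): sign-extending a
     QImode value to SImode when there is no extendqisi2 pattern and no
     usable intermediate mode uses the shift fallback above with
     shift_amount = 32 - 8 = 24; the value is shifted left 24 bits and
     then arithmetically shifted right 24 bits.  With UNSIGNEDP nonzero
     the right shift is logical, giving zero extension instead.  */
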
1093 /* Support special truncate insns for certain modes. */
1094
1095 if (from_mode == DImode && to_mode == SImode)
1096 {
1097 #ifdef HAVE_truncdisi2
1098 if (HAVE_truncdisi2)
1099 {
1100 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1101 return;
1102 }
1103 #endif
1104 convert_move (to, force_reg (from_mode, from), unsignedp);
1105 return;
1106 }
1107
1108 if (from_mode == DImode && to_mode == HImode)
1109 {
1110 #ifdef HAVE_truncdihi2
1111 if (HAVE_truncdihi2)
1112 {
1113 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1114 return;
1115 }
1116 #endif
1117 convert_move (to, force_reg (from_mode, from), unsignedp);
1118 return;
1119 }
1120
1121 if (from_mode == DImode && to_mode == QImode)
1122 {
1123 #ifdef HAVE_truncdiqi2
1124 if (HAVE_truncdiqi2)
1125 {
1126 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1127 return;
1128 }
1129 #endif
1130 convert_move (to, force_reg (from_mode, from), unsignedp);
1131 return;
1132 }
1133
1134 if (from_mode == SImode && to_mode == HImode)
1135 {
1136 #ifdef HAVE_truncsihi2
1137 if (HAVE_truncsihi2)
1138 {
1139 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1140 return;
1141 }
1142 #endif
1143 convert_move (to, force_reg (from_mode, from), unsignedp);
1144 return;
1145 }
1146
1147 if (from_mode == SImode && to_mode == QImode)
1148 {
1149 #ifdef HAVE_truncsiqi2
1150 if (HAVE_truncsiqi2)
1151 {
1152 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1153 return;
1154 }
1155 #endif
1156 convert_move (to, force_reg (from_mode, from), unsignedp);
1157 return;
1158 }
1159
1160 if (from_mode == HImode && to_mode == QImode)
1161 {
1162 #ifdef HAVE_trunchiqi2
1163 if (HAVE_trunchiqi2)
1164 {
1165 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1166 return;
1167 }
1168 #endif
1169 convert_move (to, force_reg (from_mode, from), unsignedp);
1170 return;
1171 }
1172
1173 if (from_mode == TImode && to_mode == DImode)
1174 {
1175 #ifdef HAVE_trunctidi2
1176 if (HAVE_trunctidi2)
1177 {
1178 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1179 return;
1180 }
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1184 }
1185
1186 if (from_mode == TImode && to_mode == SImode)
1187 {
1188 #ifdef HAVE_trunctisi2
1189 if (HAVE_trunctisi2)
1190 {
1191 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1192 return;
1193 }
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1197 }
1198
1199 if (from_mode == TImode && to_mode == HImode)
1200 {
1201 #ifdef HAVE_trunctihi2
1202 if (HAVE_trunctihi2)
1203 {
1204 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1205 return;
1206 }
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1210 }
1211
1212 if (from_mode == TImode && to_mode == QImode)
1213 {
1214 #ifdef HAVE_trunctiqi2
1215 if (HAVE_trunctiqi2)
1216 {
1217 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1218 return;
1219 }
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1223 }
1224
1225 /* Handle truncation of volatile memrefs, and so on;
1226 the things that couldn't be truncated directly,
1227 and for which there was no special instruction. */
1228 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1229 {
1230 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1231 emit_move_insn (to, temp);
1232 return;
1233 }
1234
1235 /* Mode combination is not recognized. */
1236 abort ();
1237 }
1238
1239 /* Return an rtx for a value that would result
1240 from converting X to mode MODE.
1241 Both X and MODE may be floating, or both integer.
1242 UNSIGNEDP is nonzero if X is an unsigned value.
1243 This can be done by referring to a part of X in place
1244 or by copying to a new temporary with conversion.
1245
1246 This function *must not* call protect_from_queue
1247 except when putting X into an insn (in which case convert_move does it). */
1248
1249 rtx
1250 convert_to_mode (mode, x, unsignedp)
1251 enum machine_mode mode;
1252 rtx x;
1253 int unsignedp;
1254 {
1255 return convert_modes (mode, VOIDmode, x, unsignedp);
1256 }
1257
1258 /* Return an rtx for a value that would result
1259 from converting X from mode OLDMODE to mode MODE.
1260 Both modes may be floating, or both integer.
1261 UNSIGNEDP is nonzero if X is an unsigned value.
1262
1263 This can be done by referring to a part of X in place
1264 or by copying to a new temporary with conversion.
1265
1266 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1267
1268 This function *must not* call protect_from_queue
1269 except when putting X into an insn (in which case convert_move does it). */
1270
1271 rtx
1272 convert_modes (mode, oldmode, x, unsignedp)
1273 enum machine_mode mode, oldmode;
1274 rtx x;
1275 int unsignedp;
1276 {
1277 register rtx temp;
1278
1279 /* If FROM is a SUBREG that indicates that we have already done at least
1280 the required extension, strip it. */
1281
1282 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1283 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1284 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1285 x = gen_lowpart (mode, x);
1286
1287 if (GET_MODE (x) != VOIDmode)
1288 oldmode = GET_MODE (x);
1289
1290 if (mode == oldmode)
1291 return x;
1292
1293 /* There is one case that we must handle specially: If we are converting
1294 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1295 we are to interpret the constant as unsigned, gen_lowpart will do
1296 the wrong thing if the constant appears negative. What we want to do is
1297 make the high-order word of the constant zero, not all ones. */
1298
1299 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1300 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1301 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1302 {
1303 HOST_WIDE_INT val = INTVAL (x);
1304
1305 if (oldmode != VOIDmode
1306 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1307 {
1308 int width = GET_MODE_BITSIZE (oldmode);
1309
1310 /* We need to zero extend VAL. */
1311 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1312 }
1313
1314 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1315 }
1316
1317 /* We can do this with a gen_lowpart if both desired and current modes
1318 are integer, and this is either a constant integer, a register, or a
1319 non-volatile MEM. Except for the constant case where MODE is no
1320 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1321
1322 if ((GET_CODE (x) == CONST_INT
1323 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1324 || (GET_MODE_CLASS (mode) == MODE_INT
1325 && GET_MODE_CLASS (oldmode) == MODE_INT
1326 && (GET_CODE (x) == CONST_DOUBLE
1327 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1328 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1329 && direct_load[(int) mode])
1330 || (GET_CODE (x) == REG
1331 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1332 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1333 {
1334 /* ?? If we don't know OLDMODE, we have to assume here that
1335 X does not need sign- or zero-extension. This may not be
1336 the case, but it's the best we can do. */
1337 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1338 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1339 {
1340 HOST_WIDE_INT val = INTVAL (x);
1341 int width = GET_MODE_BITSIZE (oldmode);
1342
1343 /* We must sign or zero-extend in this case. Start by
1344 zero-extending, then sign extend if we need to. */
1345 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1346 if (! unsignedp
1347 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1348 val |= (HOST_WIDE_INT) (-1) << width;
1349
1350 return GEN_INT (val);
1351 }
1352
1353 return gen_lowpart (mode, x);
1354 }
1355
1356 temp = gen_reg_rtx (mode);
1357 convert_move (temp, x, unsignedp);
1358 return temp;
1359 }
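
/* Illustrative sketch of how the two conversion entry points above are
   typically called; the helper and variable names are hypothetical,
   not part of GNU CC.  */
#if 0
static rtx
example_conversions (x)
     rtx x;		/* assume an SImode value */
{
  /* Zero-extend X to DImode.  For a non-constant value convert_to_mode
     allocates a DImode pseudo and emits the extension via convert_move;
     constants can be returned directly.  */
  rtx wide = convert_to_mode (DImode, x, 1);

  /* Narrow back down to QImode; when OLDMODE is known it can be given
     explicitly to convert_modes.  */
  return convert_modes (QImode, DImode, wide, 1);
}
#endif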
1360 \f
1361
1362 /* This macro is used to determine the largest unit size that
1363 move_by_pieces can use. */
1364
1365 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1366 move efficiently, as opposed to MOVE_MAX which is the maximum
1367 number of bytes we can move with a single instruction. */
1368
1369 #ifndef MOVE_MAX_PIECES
1370 #define MOVE_MAX_PIECES MOVE_MAX
1371 #endif
1372
1373 /* Generate several move instructions to copy LEN bytes
1374 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1375 The caller must pass FROM and TO
1376 through protect_from_queue before calling.
1377 ALIGN is maximum alignment we can assume. */
1378
1379 void
1380 move_by_pieces (to, from, len, align)
1381 rtx to, from;
1382 int len;
1383 unsigned int align;
1384 {
1385 struct move_by_pieces data;
1386 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1387 unsigned int max_size = MOVE_MAX_PIECES + 1;
1388 enum machine_mode mode = VOIDmode, tmode;
1389 enum insn_code icode;
1390
1391 data.offset = 0;
1392 data.to_addr = to_addr;
1393 data.from_addr = from_addr;
1394 data.to = to;
1395 data.from = from;
1396 data.autinc_to
1397 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1398 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1399 data.autinc_from
1400 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1401 || GET_CODE (from_addr) == POST_INC
1402 || GET_CODE (from_addr) == POST_DEC);
1403
1404 data.explicit_inc_from = 0;
1405 data.explicit_inc_to = 0;
1406 data.reverse
1407 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1408 if (data.reverse) data.offset = len;
1409 data.len = len;
1410
1411 data.to_struct = MEM_IN_STRUCT_P (to);
1412 data.from_struct = MEM_IN_STRUCT_P (from);
1413 data.to_readonly = RTX_UNCHANGING_P (to);
1414 data.from_readonly = RTX_UNCHANGING_P (from);
1415
1416 /* If copying requires more than two move insns,
1417 copy addresses to registers (to make displacements shorter)
1418 and use post-increment if available. */
1419 if (!(data.autinc_from && data.autinc_to)
1420 && move_by_pieces_ninsns (len, align) > 2)
1421 {
1422 /* Find the mode of the largest move... */
1423 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1424 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1425 if (GET_MODE_SIZE (tmode) < max_size)
1426 mode = tmode;
1427
1428 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1429 {
1430 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1431 data.autinc_from = 1;
1432 data.explicit_inc_from = -1;
1433 }
1434 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1435 {
1436 data.from_addr = copy_addr_to_reg (from_addr);
1437 data.autinc_from = 1;
1438 data.explicit_inc_from = 1;
1439 }
1440 if (!data.autinc_from && CONSTANT_P (from_addr))
1441 data.from_addr = copy_addr_to_reg (from_addr);
1442 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1443 {
1444 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1445 data.autinc_to = 1;
1446 data.explicit_inc_to = -1;
1447 }
1448 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1449 {
1450 data.to_addr = copy_addr_to_reg (to_addr);
1451 data.autinc_to = 1;
1452 data.explicit_inc_to = 1;
1453 }
1454 if (!data.autinc_to && CONSTANT_P (to_addr))
1455 data.to_addr = copy_addr_to_reg (to_addr);
1456 }
1457
1458 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1459 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1460 align = MOVE_MAX * BITS_PER_UNIT;
1461
1462 /* First move what we can in the largest integer mode, then go to
1463 successively smaller modes. */
1464
1465 while (max_size > 1)
1466 {
1467 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1468 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1469 if (GET_MODE_SIZE (tmode) < max_size)
1470 mode = tmode;
1471
1472 if (mode == VOIDmode)
1473 break;
1474
1475 icode = mov_optab->handlers[(int) mode].insn_code;
1476 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1477 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1478
1479 max_size = GET_MODE_SIZE (mode);
1480 }
1481
1482 /* The code above should have handled everything. */
1483 if (data.len > 0)
1484 abort ();
1485 }
1486
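/* Worked example (illustrative, with assumed target values): copying
   LEN == 7 bytes with word alignment on a 32-bit target where
   MOVE_MAX_PIECES == 4 expands to one SImode move (4 bytes), one
   HImode move (2 bytes) and one QImode move (1 byte): each pass of the
   loop above picks the widest integer mode smaller than max_size, and
   move_by_pieces_1 emits moves until fewer than that many bytes
   remain.  */
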
1487 /* Return number of insns required to move L bytes by pieces.
1488 ALIGN (in bits) is the maximum alignment we can assume. */
1489
1490 static int
1491 move_by_pieces_ninsns (l, align)
1492 unsigned int l;
1493 unsigned int align;
1494 {
1495 register int n_insns = 0;
1496 unsigned int max_size = MOVE_MAX + 1;
1497
1498 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1499 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1500 align = MOVE_MAX * BITS_PER_UNIT;
1501
1502 while (max_size > 1)
1503 {
1504 enum machine_mode mode = VOIDmode, tmode;
1505 enum insn_code icode;
1506
1507 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1508 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1509 if (GET_MODE_SIZE (tmode) < max_size)
1510 mode = tmode;
1511
1512 if (mode == VOIDmode)
1513 break;
1514
1515 icode = mov_optab->handlers[(int) mode].insn_code;
1516 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1517 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1518
1519 max_size = GET_MODE_SIZE (mode);
1520 }
1521
1522 return n_insns;
1523 }
1524
1525 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1526 with move instructions for mode MODE. GENFUN is the gen_... function
1527 to make a move insn for that mode. DATA has all the other info. */
1528
1529 static void
1530 move_by_pieces_1 (genfun, mode, data)
1531 rtx (*genfun) PARAMS ((rtx, ...));
1532 enum machine_mode mode;
1533 struct move_by_pieces *data;
1534 {
1535 register int size = GET_MODE_SIZE (mode);
1536 register rtx to1, from1;
1537
1538 while (data->len >= size)
1539 {
1540 if (data->reverse) data->offset -= size;
1541
1542 to1 = (data->autinc_to
1543 ? gen_rtx_MEM (mode, data->to_addr)
1544 : copy_rtx (change_address (data->to, mode,
1545 plus_constant (data->to_addr,
1546 data->offset))));
1547 MEM_IN_STRUCT_P (to1) = data->to_struct;
1548 RTX_UNCHANGING_P (to1) = data->to_readonly;
1549
1550 from1
1551 = (data->autinc_from
1552 ? gen_rtx_MEM (mode, data->from_addr)
1553 : copy_rtx (change_address (data->from, mode,
1554 plus_constant (data->from_addr,
1555 data->offset))));
1556 MEM_IN_STRUCT_P (from1) = data->from_struct;
1557 RTX_UNCHANGING_P (from1) = data->from_readonly;
1558
1559 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1560 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1561 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1562 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1563
1564 emit_insn ((*genfun) (to1, from1));
1565 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1566 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1567 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1568 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1569
1570 if (! data->reverse) data->offset += size;
1571
1572 data->len -= size;
1573 }
1574 }
1575 \f
1576 /* Emit code to move a block Y to a block X.
1577 This may be done with string-move instructions,
1578 with multiple scalar move instructions, or with a library call.
1579
1580 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1581 with mode BLKmode.
1582 SIZE is an rtx that says how long they are.
1583 ALIGN is the maximum alignment we can assume they have.
1584
1585 Return the address of the new block, if memcpy is called and returns it,
1586 0 otherwise. */
1587
1588 rtx
1589 emit_block_move (x, y, size, align)
1590 rtx x, y;
1591 rtx size;
1592 unsigned int align;
1593 {
1594 rtx retval = 0;
1595 #ifdef TARGET_MEM_FUNCTIONS
1596 static tree fn;
1597 tree call_expr, arg_list;
1598 #endif
1599
1600 if (GET_MODE (x) != BLKmode)
1601 abort ();
1602
1603 if (GET_MODE (y) != BLKmode)
1604 abort ();
1605
1606 x = protect_from_queue (x, 1);
1607 y = protect_from_queue (y, 0);
1608 size = protect_from_queue (size, 0);
1609
1610 if (GET_CODE (x) != MEM)
1611 abort ();
1612 if (GET_CODE (y) != MEM)
1613 abort ();
1614 if (size == 0)
1615 abort ();
1616
1617 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1618 move_by_pieces (x, y, INTVAL (size), align);
1619 else
1620 {
1621 /* Try the most limited insn first, because there's no point
1622 including more than one in the machine description unless
1623 the more limited one has some advantage. */
1624
1625 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1626 enum machine_mode mode;
1627
1628 /* Since this is a move insn, we don't care about volatility. */
1629 volatile_ok = 1;
1630
1631 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1632 mode = GET_MODE_WIDER_MODE (mode))
1633 {
1634 enum insn_code code = movstr_optab[(int) mode];
1635 insn_operand_predicate_fn pred;
1636
1637 if (code != CODE_FOR_nothing
1638 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1639 here because if SIZE is less than the mode mask, as it is
1640 returned by the macro, it will definitely be less than the
1641 actual mode mask. */
1642 && ((GET_CODE (size) == CONST_INT
1643 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1644 <= (GET_MODE_MASK (mode) >> 1)))
1645 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1646 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1647 || (*pred) (x, BLKmode))
1648 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1649 || (*pred) (y, BLKmode))
1650 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1651 || (*pred) (opalign, VOIDmode)))
1652 {
1653 rtx op2;
1654 rtx last = get_last_insn ();
1655 rtx pat;
1656
1657 op2 = convert_to_mode (mode, size, 1);
1658 pred = insn_data[(int) code].operand[2].predicate;
1659 if (pred != 0 && ! (*pred) (op2, mode))
1660 op2 = copy_to_mode_reg (mode, op2);
1661
1662 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1663 if (pat)
1664 {
1665 emit_insn (pat);
1666 volatile_ok = 0;
1667 return 0;
1668 }
1669 else
1670 delete_insns_since (last);
1671 }
1672 }
1673
1674 volatile_ok = 0;
1675
1676 /* X, Y, or SIZE may have been passed through protect_from_queue.
1677
1678 It is unsafe to save the value generated by protect_from_queue
1679 and reuse it later. Consider what happens if emit_queue is
1680 called before the return value from protect_from_queue is used.
1681
1682 Expansion of the CALL_EXPR below will call emit_queue before
1683 we are finished emitting RTL for argument setup. So if we are
1684 not careful we could get the wrong value for an argument.
1685
1686 To avoid this problem we go ahead and emit code to copy X, Y &
1687 SIZE into new pseudos. We can then place those new pseudos
1688 into an RTL_EXPR and use them later, even after a call to
1689 emit_queue.
1690
1691 Note this is not strictly needed for library calls since they
1692 do not call emit_queue before loading their arguments. However,
1693 we may need to have library calls call emit_queue in the future
1694 since failing to do so could cause problems for targets which
1695 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1696 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1697 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1698
1699 #ifdef TARGET_MEM_FUNCTIONS
1700 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1701 #else
1702 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1703 TREE_UNSIGNED (integer_type_node));
1704 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1705 #endif
1706
1707 #ifdef TARGET_MEM_FUNCTIONS
1708 /* It is incorrect to use the libcall calling conventions to call
1709 memcpy in this context.
1710
1711 This could be a user call to memcpy and the user may wish to
1712 examine the return value from memcpy.
1713
1714 For targets where libcalls and normal calls have different conventions
1715 for returning pointers, we could end up generating incorrect code.
1716
1717 So instead of using a libcall sequence we build up a suitable
1718 CALL_EXPR and expand the call in the normal fashion. */
1719 if (fn == NULL_TREE)
1720 {
1721 tree fntype;
1722
1723 /* This was copied from except.c; I don't know whether all of this is
1724 necessary in this context or not. */
1725 fn = get_identifier ("memcpy");
1726 push_obstacks_nochange ();
1727 end_temporary_allocation ();
1728 fntype = build_pointer_type (void_type_node);
1729 fntype = build_function_type (fntype, NULL_TREE);
1730 fn = build_decl (FUNCTION_DECL, fn, fntype);
1731 ggc_add_tree_root (&fn, 1);
1732 DECL_EXTERNAL (fn) = 1;
1733 TREE_PUBLIC (fn) = 1;
1734 DECL_ARTIFICIAL (fn) = 1;
1735 make_decl_rtl (fn, NULL_PTR, 1);
1736 assemble_external (fn);
1737 pop_obstacks ();
1738 }
1739
1740 /* We need to make an argument list for the function call.
1741
1742 memcpy has three arguments, the first two are void * addresses and
1743 the last is a size_t byte count for the copy. */
1744 arg_list
1745 = build_tree_list (NULL_TREE,
1746 make_tree (build_pointer_type (void_type_node), x));
1747 TREE_CHAIN (arg_list)
1748 = build_tree_list (NULL_TREE,
1749 make_tree (build_pointer_type (void_type_node), y));
1750 TREE_CHAIN (TREE_CHAIN (arg_list))
1751 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1752 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1753
1754 /* Now we have to build up the CALL_EXPR itself. */
1755 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1756 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1757 call_expr, arg_list, NULL_TREE);
1758 TREE_SIDE_EFFECTS (call_expr) = 1;
1759
1760 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1761 #else
1762 emit_library_call (bcopy_libfunc, 0,
1763 VOIDmode, 3, y, Pmode, x, Pmode,
1764 convert_to_mode (TYPE_MODE (integer_type_node), size,
1765 TREE_UNSIGNED (integer_type_node)),
1766 TYPE_MODE (integer_type_node));
1767 #endif
1768 }
1769
1770 return retval;
1771 }
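
/* Illustrative sketch of handing a BLKmode structure copy to
   emit_block_move; the helper and the 64-byte size are hypothetical,
   not part of GNU CC.  */
#if 0
static void
example_block_copy (dst, src)
     rtx dst, src;		/* BLKmode MEMs */
{
  /* Copy 64 bytes, promising only byte alignment (ALIGN is in bits).
     Depending on size, alignment and target this becomes inline moves,
     a movstr pattern, or a call to memcpy/bcopy.  */
  emit_block_move (dst, src, GEN_INT (64), BITS_PER_UNIT);
}
#endif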
1772 \f
1773 /* Copy all or part of a value X into registers starting at REGNO.
1774 The number of registers to be filled is NREGS. */
1775
1776 void
1777 move_block_to_reg (regno, x, nregs, mode)
1778 int regno;
1779 rtx x;
1780 int nregs;
1781 enum machine_mode mode;
1782 {
1783 int i;
1784 #ifdef HAVE_load_multiple
1785 rtx pat;
1786 rtx last;
1787 #endif
1788
1789 if (nregs == 0)
1790 return;
1791
1792 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1793 x = validize_mem (force_const_mem (mode, x));
1794
1795 /* See if the machine can do this with a load multiple insn. */
1796 #ifdef HAVE_load_multiple
1797 if (HAVE_load_multiple)
1798 {
1799 last = get_last_insn ();
1800 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1801 GEN_INT (nregs));
1802 if (pat)
1803 {
1804 emit_insn (pat);
1805 return;
1806 }
1807 else
1808 delete_insns_since (last);
1809 }
1810 #endif
1811
1812 for (i = 0; i < nregs; i++)
1813 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1814 operand_subword_force (x, i, mode));
1815 }
1816
1817 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1818 The number of registers to be filled is NREGS. SIZE indicates the number
1819 of bytes in the object X. */
1820
1821
1822 void
1823 move_block_from_reg (regno, x, nregs, size)
1824 int regno;
1825 rtx x;
1826 int nregs;
1827 int size;
1828 {
1829 int i;
1830 #ifdef HAVE_store_multiple
1831 rtx pat;
1832 rtx last;
1833 #endif
1834 enum machine_mode mode;
1835
1836 /* If SIZE is that of a mode no bigger than a word, just use that
1837 mode's store operation. */
1838 if (size <= UNITS_PER_WORD
1839 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1840 {
1841 emit_move_insn (change_address (x, mode, NULL),
1842 gen_rtx_REG (mode, regno));
1843 return;
1844 }
1845
1846 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1847 to the left before storing to memory. Note that the previous test
1848 doesn't handle all cases (e.g. SIZE == 3). */
1849 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1850 {
1851 rtx tem = operand_subword (x, 0, 1, BLKmode);
1852 rtx shift;
1853
1854 if (tem == 0)
1855 abort ();
1856
1857 shift = expand_shift (LSHIFT_EXPR, word_mode,
1858 gen_rtx_REG (word_mode, regno),
1859 build_int_2 ((UNITS_PER_WORD - size)
1860 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1861 emit_move_insn (tem, shift);
1862 return;
1863 }
1864
1865 /* See if the machine can do this with a store multiple insn. */
1866 #ifdef HAVE_store_multiple
1867 if (HAVE_store_multiple)
1868 {
1869 last = get_last_insn ();
1870 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1871 GEN_INT (nregs));
1872 if (pat)
1873 {
1874 emit_insn (pat);
1875 return;
1876 }
1877 else
1878 delete_insns_since (last);
1879 }
1880 #endif
1881
1882 for (i = 0; i < nregs; i++)
1883 {
1884 rtx tem = operand_subword (x, i, 1, BLKmode);
1885
1886 if (tem == 0)
1887 abort ();
1888
1889 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1890 }
1891 }
1892
1893 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1894 registers represented by a PARALLEL. SSIZE represents the total size of
1895 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1896 SRC in bits. */
1897 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1898 the balance will be in what would be the low-order memory addresses, i.e.
1899 left justified for big endian, right justified for little endian. This
1900 happens to be true for the targets currently using this support. If this
1901 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1902 would be needed. */
1903
1904 void
1905 emit_group_load (dst, orig_src, ssize, align)
1906 rtx dst, orig_src;
1907 unsigned int align;
1908 int ssize;
1909 {
1910 rtx *tmps, src;
1911 int start, i;
1912
1913 if (GET_CODE (dst) != PARALLEL)
1914 abort ();
1915
1916 /* Check for a NULL entry, used to indicate that the parameter goes
1917 both on the stack and in registers. */
1918 if (XEXP (XVECEXP (dst, 0, 0), 0))
1919 start = 0;
1920 else
1921 start = 1;
1922
1923 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1924
1925 /* If we won't be loading directly from memory, protect the real source
1926 from strange tricks we might play. */
1927 src = orig_src;
1928 if (GET_CODE (src) != MEM)
1929 {
1930 if (GET_MODE (src) == VOIDmode)
1931 src = gen_reg_rtx (GET_MODE (dst));
1932 else
1933 src = gen_reg_rtx (GET_MODE (orig_src));
1934 emit_move_insn (src, orig_src);
1935 }
1936
1937 /* Process the pieces. */
1938 for (i = start; i < XVECLEN (dst, 0); i++)
1939 {
1940 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1941 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1942 unsigned int bytelen = GET_MODE_SIZE (mode);
1943 int shift = 0;
1944
1945 /* Handle trailing fragments that run over the size of the struct. */
1946 if (ssize >= 0 && bytepos + bytelen > ssize)
1947 {
1948 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1949 bytelen = ssize - bytepos;
1950 if (bytelen <= 0)
1951 abort ();
1952 }
1953
1954 /* Optimize the access just a bit. */
1955 if (GET_CODE (src) == MEM
1956 && align >= GET_MODE_ALIGNMENT (mode)
1957 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1958 && bytelen == GET_MODE_SIZE (mode))
1959 {
1960 tmps[i] = gen_reg_rtx (mode);
1961 emit_move_insn (tmps[i],
1962 change_address (src, mode,
1963 plus_constant (XEXP (src, 0),
1964 bytepos)));
1965 }
1966 else if (GET_CODE (src) == CONCAT)
1967 {
1968 if (bytepos == 0
1969 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1970 tmps[i] = XEXP (src, 0);
1971 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1972 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1973 tmps[i] = XEXP (src, 1);
1974 else
1975 abort ();
1976 }
1977 else
1978 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1979 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1980 mode, mode, align, ssize);
1981
1982 if (BYTES_BIG_ENDIAN && shift)
1983 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1984 tmps[i], 0, OPTAB_WIDEN);
1985 }
1986
1987 emit_queue();
1988
1989 /* Copy the extracted pieces into the proper (probable) hard regs. */
1990 for (i = start; i < XVECLEN (dst, 0); i++)
1991 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1992 }
1993
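/* Illustrative sketch, not part of the original file: building the kind of
   PARALLEL destination that emit_group_load expects -- each element is an
   EXPR_LIST pairing a register with a CONST_INT byte offset -- and loading
   a two-word block into it.  The register numbers, the source MEM and the
   word alignment are assumptions made only for the example.  */
#if 0
static void
example_group_load (src_mem, regno0, regno1)
     rtx src_mem;
     int regno0, regno1;
{
  /* First word goes in REGNO0 (byte offset 0), second word in REGNO1
     (byte offset UNITS_PER_WORD).  */
  rtx dst
    = gen_rtx_PARALLEL (VOIDmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (word_mode,
								   regno0),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (word_mode,
								   regno1),
						      GEN_INT (UNITS_PER_WORD))));

  emit_group_load (dst, src_mem, 2 * UNITS_PER_WORD, BITS_PER_WORD);
}
#endif
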
1994 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1995 registers represented by a PARALLEL. SSIZE represents the total size of
1996 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1997
1998 void
1999 emit_group_store (orig_dst, src, ssize, align)
2000 rtx orig_dst, src;
2001 int ssize;
2002 unsigned int align;
2003 {
2004 rtx *tmps, dst;
2005 int start, i;
2006
2007 if (GET_CODE (src) != PARALLEL)
2008 abort ();
2009
2010 /* Check for a NULL entry, used to indicate that the parameter goes
2011 both on the stack and in registers. */
2012 if (XEXP (XVECEXP (src, 0, 0), 0))
2013 start = 0;
2014 else
2015 start = 1;
2016
2017 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2018
2019 /* Copy the (probable) hard regs into pseudos. */
2020 for (i = start; i < XVECLEN (src, 0); i++)
2021 {
2022 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2023 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2024 emit_move_insn (tmps[i], reg);
2025 }
2026 emit_queue();
2027
2028 /* If we won't be storing directly into memory, protect the real destination
2029 from strange tricks we might play. */
2030 dst = orig_dst;
2031 if (GET_CODE (dst) == PARALLEL)
2032 {
2033 rtx temp;
2034
2035 /* We can get a PARALLEL dst if there is a conditional expression in
2036 a return statement. In that case, the dst and src are the same,
2037 so no action is necessary. */
2038 if (rtx_equal_p (dst, src))
2039 return;
2040
2041 /* It is unclear if we can ever reach here, but we may as well handle
2042 it. Allocate a temporary, and split this into a store/load to/from
2043 the temporary. */
2044
2045 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2046 emit_group_store (temp, src, ssize, align);
2047 emit_group_load (dst, temp, ssize, align);
2048 return;
2049 }
2050 else if (GET_CODE (dst) != MEM)
2051 {
2052 dst = gen_reg_rtx (GET_MODE (orig_dst));
2053 /* Make life a bit easier for combine. */
2054 emit_move_insn (dst, const0_rtx);
2055 }
2056 else if (! MEM_IN_STRUCT_P (dst))
2057 {
2058 /* store_bit_field requires that memory operations have
2059 mem_in_struct_p set; we might not. */
2060
2061 dst = copy_rtx (orig_dst);
2062 MEM_SET_IN_STRUCT_P (dst, 1);
2063 }
2064
2065 /* Process the pieces. */
2066 for (i = start; i < XVECLEN (src, 0); i++)
2067 {
2068 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2069 enum machine_mode mode = GET_MODE (tmps[i]);
2070 unsigned int bytelen = GET_MODE_SIZE (mode);
2071
2072 /* Handle trailing fragments that run over the size of the struct. */
2073 if (ssize >= 0 && bytepos + bytelen > ssize)
2074 {
2075 if (BYTES_BIG_ENDIAN)
2076 {
2077 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2078 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2079 tmps[i], 0, OPTAB_WIDEN);
2080 }
2081 bytelen = ssize - bytepos;
2082 }
2083
2084 /* Optimize the access just a bit. */
2085 if (GET_CODE (dst) == MEM
2086 && align >= GET_MODE_ALIGNMENT (mode)
2087 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2088 && bytelen == GET_MODE_SIZE (mode))
2089 emit_move_insn (change_address (dst, mode,
2090 plus_constant (XEXP (dst, 0),
2091 bytepos)),
2092 tmps[i]);
2093 else
2094 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2095 mode, tmps[i], align, ssize);
2096 }
2097
2098 emit_queue();
2099
2100 /* Copy from the pseudo into the (probable) hard reg. */
2101 if (GET_CODE (dst) == REG)
2102 emit_move_insn (orig_dst, dst);
2103 }
2104
2105 /* Generate code to copy a BLKmode object of TYPE out of a
2106 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2107 is null, a stack temporary is created. TGTBLK is returned.
2108
2109 The primary purpose of this routine is to handle functions
2110 that return BLKmode structures in registers. Some machines
2111 (the PA for example) want to return all small structures
2112 in registers regardless of the structure's alignment. */
2113
2114 rtx
2115 copy_blkmode_from_reg (tgtblk, srcreg, type)
2116 rtx tgtblk;
2117 rtx srcreg;
2118 tree type;
2119 {
2120 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2121 rtx src = NULL, dst = NULL;
2122 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2123 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2124
2125 if (tgtblk == 0)
2126 {
2127 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2128 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2129 preserve_temp_slots (tgtblk);
2130 }
2131
2132 /* This code assumes srcreg is at least a full word. If it isn't,
2133 copy it into a new pseudo which is a full word. */
2134 if (GET_MODE (srcreg) != BLKmode
2135 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2136 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2137
2138 /* Structures whose size is not a multiple of a word are aligned
2139 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2140 machine, this means we must skip the empty high order bytes when
2141 calculating the bit offset. */
2142 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2143 big_endian_correction
2144 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2145
2146 /* Copy the structure BITSIZE bits at a time.
2147
2148 We could probably emit more efficient code for machines which do not use
2149 strict alignment, but it doesn't seem worth the effort at the current
2150 time. */
2151 for (bitpos = 0, xbitpos = big_endian_correction;
2152 bitpos < bytes * BITS_PER_UNIT;
2153 bitpos += bitsize, xbitpos += bitsize)
2154 {
2155 /* We need a new source operand each time xbitpos is on a
2156 word boundary and when xbitpos == big_endian_correction
2157 (the first time through). */
2158 if (xbitpos % BITS_PER_WORD == 0
2159 || xbitpos == big_endian_correction)
2160 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2161
2162 /* We need a new destination operand each time bitpos is on
2163 a word boundary. */
2164 if (bitpos % BITS_PER_WORD == 0)
2165 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2166
2167 /* Use xbitpos for the source extraction (right justified) and
2168 bitpos for the destination store (left justified). */
2169 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2170 extract_bit_field (src, bitsize,
2171 xbitpos % BITS_PER_WORD, 1,
2172 NULL_RTX, word_mode, word_mode,
2173 bitsize, BITS_PER_WORD),
2174 bitsize, BITS_PER_WORD);
2175 }
2176
2177 return tgtblk;
2178 }
2179
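/* Illustrative sketch, not part of the original file: how a caller that is
   expanding a call to a function returning a small BLKmode structure in a
   register might use copy_blkmode_from_reg.  RETREG stands for the hard
   register (or pseudo) holding the returned value and is an assumption of
   the example.  */
#if 0
static rtx
example_copy_blk_return (retreg, type)
     rtx retreg;
     tree type;
{
  /* Passing a null target makes copy_blkmode_from_reg allocate a stack
     temporary of the right size and return it.  */
  return copy_blkmode_from_reg (NULL_RTX, retreg, type);
}
#endif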
2180
2181 /* Add a USE expression for REG to the (possibly empty) list pointed
2182 to by CALL_FUSAGE. REG must denote a hard register. */
2183
2184 void
2185 use_reg (call_fusage, reg)
2186 rtx *call_fusage, reg;
2187 {
2188 if (GET_CODE (reg) != REG
2189 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2190 abort();
2191
2192 *call_fusage
2193 = gen_rtx_EXPR_LIST (VOIDmode,
2194 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2195 }
2196
2197 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2198 starting at REGNO. All of these registers must be hard registers. */
2199
2200 void
2201 use_regs (call_fusage, regno, nregs)
2202 rtx *call_fusage;
2203 int regno;
2204 int nregs;
2205 {
2206 int i;
2207
2208 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2209 abort ();
2210
2211 for (i = 0; i < nregs; i++)
2212 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2213 }
2214
2215 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2216 PARALLEL REGS. This is for calls that pass values in multiple
2217 non-contiguous locations. The Irix 6 ABI has examples of this. */
2218
2219 void
2220 use_group_regs (call_fusage, regs)
2221 rtx *call_fusage;
2222 rtx regs;
2223 {
2224 int i;
2225
2226 for (i = 0; i < XVECLEN (regs, 0); i++)
2227 {
2228 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2229
2230 /* A NULL entry means the parameter goes both on the stack and in
2231 registers. This can also be a MEM for targets that pass values
2232 partially on the stack and partially in registers. */
2233 if (reg != 0 && GET_CODE (reg) == REG)
2234 use_reg (call_fusage, reg);
2235 }
2236 }
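/* Illustrative sketch, not part of the original file: recording the
   registers that carry an argument in CALL_FUSAGE so that the eventual
   CALL_INSN shows them as used.  This mirrors the way callers such as the
   argument-loading code in calls.c use these routines.  */
#if 0
static void
example_note_arg_registers (call_fusage, reg)
     rtx *call_fusage;
     rtx reg;
{
  if (GET_CODE (reg) == PARALLEL)
    use_group_regs (call_fusage, reg);
  else if (GET_CODE (reg) == REG)
    use_reg (call_fusage, reg);
}
#endif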
2237 \f
2238 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2239 rtx with BLKmode). The caller must pass TO through protect_from_queue
2240 before calling. ALIGN is maximum alignment we can assume. */
2241
2242 static void
2243 clear_by_pieces (to, len, align)
2244 rtx to;
2245 int len;
2246 unsigned int align;
2247 {
2248 struct clear_by_pieces data;
2249 rtx to_addr = XEXP (to, 0);
2250 unsigned int max_size = MOVE_MAX_PIECES + 1;
2251 enum machine_mode mode = VOIDmode, tmode;
2252 enum insn_code icode;
2253
2254 data.offset = 0;
2255 data.to_addr = to_addr;
2256 data.to = to;
2257 data.autinc_to
2258 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2259 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2260
2261 data.explicit_inc_to = 0;
2262 data.reverse
2263 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2264 if (data.reverse) data.offset = len;
2265 data.len = len;
2266
2267 data.to_struct = MEM_IN_STRUCT_P (to);
2268
2269 /* If copying requires more than two move insns,
2270 copy addresses to registers (to make displacements shorter)
2271 and use post-increment if available. */
2272 if (!data.autinc_to
2273 && move_by_pieces_ninsns (len, align) > 2)
2274 {
2275 /* Determine the main mode we'll be using.  */
2276 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2277 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2278 if (GET_MODE_SIZE (tmode) < max_size)
2279 mode = tmode;
2280
2281 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2282 {
2283 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2284 data.autinc_to = 1;
2285 data.explicit_inc_to = -1;
2286 }
2287 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2288 {
2289 data.to_addr = copy_addr_to_reg (to_addr);
2290 data.autinc_to = 1;
2291 data.explicit_inc_to = 1;
2292 }
2293 if (!data.autinc_to && CONSTANT_P (to_addr))
2294 data.to_addr = copy_addr_to_reg (to_addr);
2295 }
2296
2297 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2298 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2299 align = MOVE_MAX * BITS_PER_UNIT;
2300
2301 /* First move what we can in the largest integer mode, then go to
2302 successively smaller modes. */
2303
2304 while (max_size > 1)
2305 {
2306 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2307 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2308 if (GET_MODE_SIZE (tmode) < max_size)
2309 mode = tmode;
2310
2311 if (mode == VOIDmode)
2312 break;
2313
2314 icode = mov_optab->handlers[(int) mode].insn_code;
2315 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2316 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2317
2318 max_size = GET_MODE_SIZE (mode);
2319 }
2320
2321 /* The code above should have handled everything. */
2322 if (data.len != 0)
2323 abort ();
2324 }
2325
2326 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2327 with move instructions for mode MODE. GENFUN is the gen_... function
2328 to make a move insn for that mode. DATA has all the other info. */
2329
2330 static void
2331 clear_by_pieces_1 (genfun, mode, data)
2332 rtx (*genfun) PARAMS ((rtx, ...));
2333 enum machine_mode mode;
2334 struct clear_by_pieces *data;
2335 {
2336 register int size = GET_MODE_SIZE (mode);
2337 register rtx to1;
2338
2339 while (data->len >= size)
2340 {
2341 if (data->reverse) data->offset -= size;
2342
2343 to1 = (data->autinc_to
2344 ? gen_rtx_MEM (mode, data->to_addr)
2345 : copy_rtx (change_address (data->to, mode,
2346 plus_constant (data->to_addr,
2347 data->offset))));
2348 MEM_IN_STRUCT_P (to1) = data->to_struct;
2349
2350 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2351 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2352
2353 emit_insn ((*genfun) (to1, const0_rtx));
2354 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2355 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2356
2357 if (! data->reverse) data->offset += size;
2358
2359 data->len -= size;
2360 }
2361 }
2362 \f
2363 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2364 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2365
2366 If we call a function that returns the length of the block, return it. */
2367
2368 rtx
2369 clear_storage (object, size, align)
2370 rtx object;
2371 rtx size;
2372 unsigned int align;
2373 {
2374 #ifdef TARGET_MEM_FUNCTIONS
2375 static tree fn;
2376 tree call_expr, arg_list;
2377 #endif
2378 rtx retval = 0;
2379
2380 if (GET_MODE (object) == BLKmode)
2381 {
2382 object = protect_from_queue (object, 1);
2383 size = protect_from_queue (size, 0);
2384
2385 if (GET_CODE (size) == CONST_INT
2386 && MOVE_BY_PIECES_P (INTVAL (size), align))
2387 clear_by_pieces (object, INTVAL (size), align);
2388 else
2389 {
2390 /* Try the most limited insn first, because there's no point
2391 including more than one in the machine description unless
2392 the more limited one has some advantage. */
2393
2394 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2395 enum machine_mode mode;
2396
2397 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2398 mode = GET_MODE_WIDER_MODE (mode))
2399 {
2400 enum insn_code code = clrstr_optab[(int) mode];
2401 insn_operand_predicate_fn pred;
2402
2403 if (code != CODE_FOR_nothing
2404 /* We don't need MODE to be narrower than
2405 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2406 the mode mask, as it is returned by the macro, it will
2407 definitely be less than the actual mode mask. */
2408 && ((GET_CODE (size) == CONST_INT
2409 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2410 <= (GET_MODE_MASK (mode) >> 1)))
2411 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2412 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2413 || (*pred) (object, BLKmode))
2414 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2415 || (*pred) (opalign, VOIDmode)))
2416 {
2417 rtx op1;
2418 rtx last = get_last_insn ();
2419 rtx pat;
2420
2421 op1 = convert_to_mode (mode, size, 1);
2422 pred = insn_data[(int) code].operand[1].predicate;
2423 if (pred != 0 && ! (*pred) (op1, mode))
2424 op1 = copy_to_mode_reg (mode, op1);
2425
2426 pat = GEN_FCN ((int) code) (object, op1, opalign);
2427 if (pat)
2428 {
2429 emit_insn (pat);
2430 return 0;
2431 }
2432 else
2433 delete_insns_since (last);
2434 }
2435 }
2436
2437 /* OBJECT or SIZE may have been passed through protect_from_queue.
2438
2439 It is unsafe to save the value generated by protect_from_queue
2440 and reuse it later. Consider what happens if emit_queue is
2441 called before the return value from protect_from_queue is used.
2442
2443 Expansion of the CALL_EXPR below will call emit_queue before
2444 we are finished emitting RTL for argument setup. So if we are
2445 not careful we could get the wrong value for an argument.
2446
2447 To avoid this problem we go ahead and emit code to copy OBJECT
2448 and SIZE into new pseudos. We can then place those new pseudos
2449 into an RTL_EXPR and use them later, even after a call to
2450 emit_queue.
2451
2452 Note this is not strictly needed for library calls since they
2453 do not call emit_queue before loading their arguments. However,
2454 we may need to have library calls call emit_queue in the future
2455 since failing to do so could cause problems for targets which
2456 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2457 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2458
2459 #ifdef TARGET_MEM_FUNCTIONS
2460 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2461 #else
2462 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2463 TREE_UNSIGNED (integer_type_node));
2464 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2465 #endif
2466
2467
2468 #ifdef TARGET_MEM_FUNCTIONS
2469 /* It is incorrect to use the libcall calling conventions to call
2470 memset in this context.
2471
2472 This could be a user call to memset and the user may wish to
2473 examine the return value from memset.
2474
2475 For targets where libcalls and normal calls have different
2476 conventions for returning pointers, we could end up generating
2477 incorrect code.
2478
2479 So instead of using a libcall sequence we build up a suitable
2480 CALL_EXPR and expand the call in the normal fashion. */
2481 if (fn == NULL_TREE)
2482 {
2483 tree fntype;
2484
2485 /* This was copied from except.c; I don't know if all this is
2486 necessary in this context or not. */
2487 fn = get_identifier ("memset");
2488 push_obstacks_nochange ();
2489 end_temporary_allocation ();
2490 fntype = build_pointer_type (void_type_node);
2491 fntype = build_function_type (fntype, NULL_TREE);
2492 fn = build_decl (FUNCTION_DECL, fn, fntype);
2493 ggc_add_tree_root (&fn, 1);
2494 DECL_EXTERNAL (fn) = 1;
2495 TREE_PUBLIC (fn) = 1;
2496 DECL_ARTIFICIAL (fn) = 1;
2497 make_decl_rtl (fn, NULL_PTR, 1);
2498 assemble_external (fn);
2499 pop_obstacks ();
2500 }
2501
2502 /* We need to make an argument list for the function call.
2503
2504 memset has three arguments: the first is a void * address, the
2505 second an integer with the initialization value, and the last is a
2506 size_t count of bytes to set. */
2507 arg_list
2508 = build_tree_list (NULL_TREE,
2509 make_tree (build_pointer_type (void_type_node),
2510 object));
2511 TREE_CHAIN (arg_list)
2512 = build_tree_list (NULL_TREE,
2513 make_tree (integer_type_node, const0_rtx));
2514 TREE_CHAIN (TREE_CHAIN (arg_list))
2515 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2516 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2517
2518 /* Now we have to build up the CALL_EXPR itself. */
2519 call_expr = build1 (ADDR_EXPR,
2520 build_pointer_type (TREE_TYPE (fn)), fn);
2521 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2522 call_expr, arg_list, NULL_TREE);
2523 TREE_SIDE_EFFECTS (call_expr) = 1;
2524
2525 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2526 #else
2527 emit_library_call (bzero_libfunc, 0,
2528 VOIDmode, 2, object, Pmode, size,
2529 TYPE_MODE (integer_type_node));
2530 #endif
2531 }
2532 }
2533 else
2534 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2535
2536 return retval;
2537 }
2538
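/* Illustrative sketch, not part of the original file: zeroing a 64-byte
   BLKmode stack temporary with clear_storage.  The size and the word
   alignment (expressed in bits, as elsewhere in this file) are assumptions
   of the example.  */
#if 0
static rtx
example_clear_temp ()
{
  rtx slot = assign_stack_temp (BLKmode, 64, 0);

  clear_storage (slot, GEN_INT (64), BITS_PER_WORD);
  return slot;
}
#endif
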
2539 /* Generate code to copy Y into X.
2540 Both Y and X must have the same mode, except that
2541 Y can be a constant with VOIDmode.
2542 This mode cannot be BLKmode; use emit_block_move for that.
2543
2544 Return the last instruction emitted. */
2545
2546 rtx
2547 emit_move_insn (x, y)
2548 rtx x, y;
2549 {
2550 enum machine_mode mode = GET_MODE (x);
2551
2552 x = protect_from_queue (x, 1);
2553 y = protect_from_queue (y, 0);
2554
2555 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2556 abort ();
2557
2558 /* Never force constant_p_rtx to memory. */
2559 if (GET_CODE (y) == CONSTANT_P_RTX)
2560 ;
2561 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2562 y = force_const_mem (mode, y);
2563
2564 /* If X or Y are memory references, verify that their addresses are valid
2565 for the machine. */
2566 if (GET_CODE (x) == MEM
2567 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2568 && ! push_operand (x, GET_MODE (x)))
2569 || (flag_force_addr
2570 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2571 x = change_address (x, VOIDmode, XEXP (x, 0));
2572
2573 if (GET_CODE (y) == MEM
2574 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2575 || (flag_force_addr
2576 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2577 y = change_address (y, VOIDmode, XEXP (y, 0));
2578
2579 if (mode == BLKmode)
2580 abort ();
2581
2582 return emit_move_insn_1 (x, y);
2583 }
2584
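/* Illustrative sketch, not part of the original file: the common
   emit_move_insn idiom of loading a constant into a fresh pseudo.
   Constants that are not LEGITIMATE_CONSTANT_P are forced into memory by
   emit_move_insn itself, so the caller need not worry about that.  */
#if 0
static rtx
example_load_constant (value)
     HOST_WIDE_INT value;
{
  rtx temp = gen_reg_rtx (word_mode);

  emit_move_insn (temp, GEN_INT (value));
  return temp;
}
#endif
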
2585 /* Low level part of emit_move_insn.
2586 Called just like emit_move_insn, but assumes X and Y
2587 are basically valid. */
2588
2589 rtx
2590 emit_move_insn_1 (x, y)
2591 rtx x, y;
2592 {
2593 enum machine_mode mode = GET_MODE (x);
2594 enum machine_mode submode;
2595 enum mode_class class = GET_MODE_CLASS (mode);
2596 unsigned int i;
2597
2598 if (mode >= MAX_MACHINE_MODE)
2599 abort ();
2600
2601 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2602 return
2603 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2604
2605 /* Expand complex moves by moving real part and imag part, if possible. */
2606 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2607 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2608 * BITS_PER_UNIT),
2609 (class == MODE_COMPLEX_INT
2610 ? MODE_INT : MODE_FLOAT),
2611 0))
2612 && (mov_optab->handlers[(int) submode].insn_code
2613 != CODE_FOR_nothing))
2614 {
2615 /* Don't split destination if it is a stack push. */
2616 int stack = push_operand (x, GET_MODE (x));
2617
2618 /* If this is a stack push, push the highpart first, so it
2619 will be in the argument order.
2620
2621 In that case, change_address is used only to convert
2622 the mode, not to change the address. */
2623 if (stack)
2624 {
2625 /* Note that the real part always precedes the imag part in memory
2626 regardless of machine's endianness. */
2627 #ifdef STACK_GROWS_DOWNWARD
2628 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2629 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2630 gen_imagpart (submode, y)));
2631 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2632 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2633 gen_realpart (submode, y)));
2634 #else
2635 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2636 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2637 gen_realpart (submode, y)));
2638 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2639 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2640 gen_imagpart (submode, y)));
2641 #endif
2642 }
2643 else
2644 {
2645 rtx realpart_x, realpart_y;
2646 rtx imagpart_x, imagpart_y;
2647
2648 /* If this is a complex value with each part being smaller than a
2649 word, the usual calling sequence will likely pack the pieces into
2650 a single register. Unfortunately, SUBREG of hard registers only
2651 deals in terms of words, so we have a problem converting input
2652 arguments to the CONCAT of two registers that is used elsewhere
2653 for complex values. If this is before reload, we can copy it into
2654 memory and reload. FIXME, we should see about using extract and
2655 insert on integer registers, but complex short and complex char
2656 variables should be rarely used. */
2657 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2658 && (reload_in_progress | reload_completed) == 0)
2659 {
2660 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2661 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2662
2663 if (packed_dest_p || packed_src_p)
2664 {
2665 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2666 ? MODE_FLOAT : MODE_INT);
2667
2668 enum machine_mode reg_mode =
2669 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2670
2671 if (reg_mode != BLKmode)
2672 {
2673 rtx mem = assign_stack_temp (reg_mode,
2674 GET_MODE_SIZE (mode), 0);
2675
2676 rtx cmem = change_address (mem, mode, NULL_RTX);
2677
2678 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2679
2680 if (packed_dest_p)
2681 {
2682 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2683 emit_move_insn_1 (cmem, y);
2684 return emit_move_insn_1 (sreg, mem);
2685 }
2686 else
2687 {
2688 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2689 emit_move_insn_1 (mem, sreg);
2690 return emit_move_insn_1 (x, cmem);
2691 }
2692 }
2693 }
2694 }
2695
2696 realpart_x = gen_realpart (submode, x);
2697 realpart_y = gen_realpart (submode, y);
2698 imagpart_x = gen_imagpart (submode, x);
2699 imagpart_y = gen_imagpart (submode, y);
2700
2701 /* Show the output dies here. This is necessary for SUBREGs
2702 of pseudos since we cannot track their lifetimes correctly;
2703 hard regs shouldn't appear here except as return values.
2704 We never want to emit such a clobber after reload. */
2705 if (x != y
2706 && ! (reload_in_progress || reload_completed)
2707 && (GET_CODE (realpart_x) == SUBREG
2708 || GET_CODE (imagpart_x) == SUBREG))
2709 {
2710 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2711 }
2712
2713 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2714 (realpart_x, realpart_y));
2715 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2716 (imagpart_x, imagpart_y));
2717 }
2718
2719 return get_last_insn ();
2720 }
2721
2722 /* This will handle any multi-word mode that lacks a move_insn pattern.
2723 However, you will get better code if you define such patterns,
2724 even if they must turn into multiple assembler instructions. */
2725 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2726 {
2727 rtx last_insn = 0;
2728 rtx seq, inner;
2729 int need_clobber;
2730
2731 #ifdef PUSH_ROUNDING
2732
2733 /* If X is a push on the stack, do the push now and replace
2734 X with a reference to the stack pointer. */
2735 if (push_operand (x, GET_MODE (x)))
2736 {
2737 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2738 x = change_address (x, VOIDmode, stack_pointer_rtx);
2739 }
2740 #endif
2741
2742 /* If we are in reload, see if either operand is a MEM whose address
2743 is scheduled for replacement. */
2744 if (reload_in_progress && GET_CODE (x) == MEM
2745 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2746 {
2747 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2748
2749 MEM_COPY_ATTRIBUTES (new, x);
2750 x = new;
2751 }
2752 if (reload_in_progress && GET_CODE (y) == MEM
2753 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2754 {
2755 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2756
2757 MEM_COPY_ATTRIBUTES (new, y);
2758 y = new;
2759 }
2760
2761 start_sequence ();
2762
2763 need_clobber = 0;
2764 for (i = 0;
2765 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2766 i++)
2767 {
2768 rtx xpart = operand_subword (x, i, 1, mode);
2769 rtx ypart = operand_subword (y, i, 1, mode);
2770
2771 /* If we can't get a part of Y, put Y into memory if it is a
2772 constant. Otherwise, force it into a register. If we still
2773 can't get a part of Y, abort. */
2774 if (ypart == 0 && CONSTANT_P (y))
2775 {
2776 y = force_const_mem (mode, y);
2777 ypart = operand_subword (y, i, 1, mode);
2778 }
2779 else if (ypart == 0)
2780 ypart = operand_subword_force (y, i, mode);
2781
2782 if (xpart == 0 || ypart == 0)
2783 abort ();
2784
2785 need_clobber |= (GET_CODE (xpart) == SUBREG);
2786
2787 last_insn = emit_move_insn (xpart, ypart);
2788 }
2789
2790 seq = gen_sequence ();
2791 end_sequence ();
2792
2793 /* Show the output dies here. This is necessary for SUBREGs
2794 of pseudos since we cannot track their lifetimes correctly;
2795 hard regs shouldn't appear here except as return values.
2796 We never want to emit such a clobber after reload. */
2797 if (x != y
2798 && ! (reload_in_progress || reload_completed)
2799 && need_clobber != 0)
2800 {
2801 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2802 }
2803
2804 emit_insn (seq);
2805
2806 return last_insn;
2807 }
2808 else
2809 abort ();
2810 }
2811 \f
2812 /* Pushing data onto the stack. */
2813
2814 /* Push a block of length SIZE (perhaps variable)
2815 and return an rtx to address the beginning of the block.
2816 Note that it is not possible for the value returned to be a QUEUED.
2817 The value may be virtual_outgoing_args_rtx.
2818
2819 EXTRA is the number of bytes of padding to push in addition to SIZE.
2820 BELOW nonzero means this padding comes at low addresses;
2821 otherwise, the padding comes at high addresses. */
2822
2823 rtx
2824 push_block (size, extra, below)
2825 rtx size;
2826 int extra, below;
2827 {
2828 register rtx temp;
2829
2830 size = convert_modes (Pmode, ptr_mode, size, 1);
2831 if (CONSTANT_P (size))
2832 anti_adjust_stack (plus_constant (size, extra));
2833 else if (GET_CODE (size) == REG && extra == 0)
2834 anti_adjust_stack (size);
2835 else
2836 {
2837 temp = copy_to_mode_reg (Pmode, size);
2838 if (extra != 0)
2839 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2840 temp, 0, OPTAB_LIB_WIDEN);
2841 anti_adjust_stack (temp);
2842 }
2843
2844 #ifndef STACK_GROWS_DOWNWARD
2845 #ifdef ARGS_GROW_DOWNWARD
2846 if (!ACCUMULATE_OUTGOING_ARGS)
2847 #else
2848 if (0)
2849 #endif
2850 #else
2851 if (1)
2852 #endif
2853 {
2854 /* Return the lowest stack address when STACK or ARGS grow downward and
2855 we are not accumulating outgoing arguments (the c4x port uses such
2856 conventions). */
2857 temp = virtual_outgoing_args_rtx;
2858 if (extra != 0 && below)
2859 temp = plus_constant (temp, extra);
2860 }
2861 else
2862 {
2863 if (GET_CODE (size) == CONST_INT)
2864 temp = plus_constant (virtual_outgoing_args_rtx,
2865 - INTVAL (size) - (below ? 0 : extra));
2866 else if (extra != 0 && !below)
2867 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2868 negate_rtx (Pmode, plus_constant (size, extra)));
2869 else
2870 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2871 negate_rtx (Pmode, size));
2872 }
2873
2874 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2875 }
2876
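/* Illustrative sketch, not part of the original file: reserving a 16-byte
   block of outgoing stack space with push_block and then clearing it.  The
   size and the word alignment are assumptions of the example.  */
#if 0
static void
example_push_and_clear ()
{
  /* push_block adjusts the stack and returns an rtx addressing the start
     of the new block.  */
  rtx addr = push_block (GEN_INT (16), 0, 0);
  rtx blk = gen_rtx_MEM (BLKmode, addr);

  clear_storage (blk, GEN_INT (16), BITS_PER_WORD);
}
#endif
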
2877 rtx
2878 gen_push_operand ()
2879 {
2880 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2881 }
2882
2883 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2884 block of SIZE bytes. */
2885
2886 static rtx
2887 get_push_address (size)
2888 int size;
2889 {
2890 register rtx temp;
2891
2892 if (STACK_PUSH_CODE == POST_DEC)
2893 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2894 else if (STACK_PUSH_CODE == POST_INC)
2895 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2896 else
2897 temp = stack_pointer_rtx;
2898
2899 return copy_to_reg (temp);
2900 }
2901
2902 /* Generate code to push X onto the stack, assuming it has mode MODE and
2903 type TYPE.
2904 MODE is redundant except when X is a CONST_INT (since they don't
2905 carry mode info).
2906 SIZE is an rtx for the size of data to be copied (in bytes),
2907 needed only if X is BLKmode.
2908
2909 ALIGN is maximum alignment we can assume.
2910
2911 If PARTIAL and REG are both nonzero, then copy that many of the first
2912 words of X into registers starting with REG, and push the rest of X.
2913 The amount of space pushed is decreased by PARTIAL words,
2914 rounded *down* to a multiple of PARM_BOUNDARY.
2915 REG must be a hard register in this case.
2916 If REG is zero but PARTIAL is not, take all other actions for an
2917 argument partially in registers, but do not actually load any
2918 registers.
2919
2920 EXTRA is the amount in bytes of extra space to leave next to this arg.
2921 This is ignored if an argument block has already been allocated.
2922
2923 On a machine that lacks real push insns, ARGS_ADDR is the address of
2924 the bottom of the argument block for this call. We use indexing off there
2925 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2926 argument block has not been preallocated.
2927
2928 ARGS_SO_FAR is the size of args previously pushed for this call.
2929
2930 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2931 for arguments passed in registers. If nonzero, it will be the number
2932 of bytes required. */
2933
2934 void
2935 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2936 args_addr, args_so_far, reg_parm_stack_space,
2937 alignment_pad)
2938 register rtx x;
2939 enum machine_mode mode;
2940 tree type;
2941 rtx size;
2942 unsigned int align;
2943 int partial;
2944 rtx reg;
2945 int extra;
2946 rtx args_addr;
2947 rtx args_so_far;
2948 int reg_parm_stack_space;
2949 rtx alignment_pad;
2950 {
2951 rtx xinner;
2952 enum direction stack_direction
2953 #ifdef STACK_GROWS_DOWNWARD
2954 = downward;
2955 #else
2956 = upward;
2957 #endif
2958
2959 /* Decide where to pad the argument: `downward' for below,
2960 `upward' for above, or `none' for don't pad it.
2961 Default is below for small data on big-endian machines; else above. */
2962 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2963
2964 /* Invert direction if stack is post-update. */
2965 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2966 if (where_pad != none)
2967 where_pad = (where_pad == downward ? upward : downward);
2968
2969 xinner = x = protect_from_queue (x, 0);
2970
2971 if (mode == BLKmode)
2972 {
2973 /* Copy a block into the stack, entirely or partially. */
2974
2975 register rtx temp;
2976 int used = partial * UNITS_PER_WORD;
2977 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2978 int skip;
2979
2980 if (size == 0)
2981 abort ();
2982
2983 used -= offset;
2984
2985 /* USED is now the # of bytes we need not copy to the stack
2986 because registers will take care of them. */
2987
2988 if (partial != 0)
2989 xinner = change_address (xinner, BLKmode,
2990 plus_constant (XEXP (xinner, 0), used));
2991
2992 /* If the partial register-part of the arg counts in its stack size,
2993 skip the part of stack space corresponding to the registers.
2994 Otherwise, start copying to the beginning of the stack space,
2995 by setting SKIP to 0. */
2996 skip = (reg_parm_stack_space == 0) ? 0 : used;
2997
2998 #ifdef PUSH_ROUNDING
2999 /* Do it with several push insns if that doesn't take lots of insns
3000 and if there is no difficulty with push insns that skip bytes
3001 on the stack for alignment purposes. */
3002 if (args_addr == 0
3003 && PUSH_ARGS
3004 && GET_CODE (size) == CONST_INT
3005 && skip == 0
3006 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3007 /* Here we avoid the case of a structure whose weak alignment
3008 forces many pushes of a small amount of data,
3009 and such small pushes do rounding that causes trouble. */
3010 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3011 || align >= BIGGEST_ALIGNMENT
3012 || PUSH_ROUNDING (align) == align)
3013 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3014 {
3015 /* Push padding now if padding above and stack grows down,
3016 or if padding below and stack grows up.
3017 But if space already allocated, this has already been done. */
3018 if (extra && args_addr == 0
3019 && where_pad != none && where_pad != stack_direction)
3020 anti_adjust_stack (GEN_INT (extra));
3021
3022 stack_pointer_delta += INTVAL (size) - used;
3023 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3024 INTVAL (size) - used, align);
3025
3026 if (current_function_check_memory_usage && ! in_check_memory_usage)
3027 {
3028 rtx temp;
3029
3030 in_check_memory_usage = 1;
3031 temp = get_push_address (INTVAL(size) - used);
3032 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3033 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3034 temp, Pmode,
3035 XEXP (xinner, 0), Pmode,
3036 GEN_INT (INTVAL(size) - used),
3037 TYPE_MODE (sizetype));
3038 else
3039 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3040 temp, Pmode,
3041 GEN_INT (INTVAL(size) - used),
3042 TYPE_MODE (sizetype),
3043 GEN_INT (MEMORY_USE_RW),
3044 TYPE_MODE (integer_type_node));
3045 in_check_memory_usage = 0;
3046 }
3047 }
3048 else
3049 #endif /* PUSH_ROUNDING */
3050 {
3051 /* Otherwise make space on the stack and copy the data
3052 to the address of that space. */
3053
3054 /* Deduct words put into registers from the size we must copy. */
3055 if (partial != 0)
3056 {
3057 if (GET_CODE (size) == CONST_INT)
3058 size = GEN_INT (INTVAL (size) - used);
3059 else
3060 size = expand_binop (GET_MODE (size), sub_optab, size,
3061 GEN_INT (used), NULL_RTX, 0,
3062 OPTAB_LIB_WIDEN);
3063 }
3064
3065 /* Get the address of the stack space.
3066 In this case, we do not deal with EXTRA separately.
3067 A single stack adjust will do. */
3068 if (! args_addr)
3069 {
3070 temp = push_block (size, extra, where_pad == downward);
3071 extra = 0;
3072 }
3073 else if (GET_CODE (args_so_far) == CONST_INT)
3074 temp = memory_address (BLKmode,
3075 plus_constant (args_addr,
3076 skip + INTVAL (args_so_far)));
3077 else
3078 temp = memory_address (BLKmode,
3079 plus_constant (gen_rtx_PLUS (Pmode,
3080 args_addr,
3081 args_so_far),
3082 skip));
3083 if (current_function_check_memory_usage && ! in_check_memory_usage)
3084 {
3085 rtx target;
3086
3087 in_check_memory_usage = 1;
3088 target = copy_to_reg (temp);
3089 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3090 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3091 target, Pmode,
3092 XEXP (xinner, 0), Pmode,
3093 size, TYPE_MODE (sizetype));
3094 else
3095 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3096 target, Pmode,
3097 size, TYPE_MODE (sizetype),
3098 GEN_INT (MEMORY_USE_RW),
3099 TYPE_MODE (integer_type_node));
3100 in_check_memory_usage = 0;
3101 }
3102
3103 /* TEMP is the address of the block. Copy the data there. */
3104 if (GET_CODE (size) == CONST_INT
3105 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3106 {
3107 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3108 INTVAL (size), align);
3109 goto ret;
3110 }
3111 else
3112 {
3113 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3114 enum machine_mode mode;
3115 rtx target = gen_rtx_MEM (BLKmode, temp);
3116
3117 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3118 mode != VOIDmode;
3119 mode = GET_MODE_WIDER_MODE (mode))
3120 {
3121 enum insn_code code = movstr_optab[(int) mode];
3122 insn_operand_predicate_fn pred;
3123
3124 if (code != CODE_FOR_nothing
3125 && ((GET_CODE (size) == CONST_INT
3126 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3127 <= (GET_MODE_MASK (mode) >> 1)))
3128 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3129 && (!(pred = insn_data[(int) code].operand[0].predicate)
3130 || ((*pred) (target, BLKmode)))
3131 && (!(pred = insn_data[(int) code].operand[1].predicate)
3132 || ((*pred) (xinner, BLKmode)))
3133 && (!(pred = insn_data[(int) code].operand[3].predicate)
3134 || ((*pred) (opalign, VOIDmode))))
3135 {
3136 rtx op2 = convert_to_mode (mode, size, 1);
3137 rtx last = get_last_insn ();
3138 rtx pat;
3139
3140 pred = insn_data[(int) code].operand[2].predicate;
3141 if (pred != 0 && ! (*pred) (op2, mode))
3142 op2 = copy_to_mode_reg (mode, op2);
3143
3144 pat = GEN_FCN ((int) code) (target, xinner,
3145 op2, opalign);
3146 if (pat)
3147 {
3148 emit_insn (pat);
3149 goto ret;
3150 }
3151 else
3152 delete_insns_since (last);
3153 }
3154 }
3155 }
3156
3157 if (!ACCUMULATE_OUTGOING_ARGS)
3158 {
3159 /* If the source is referenced relative to the stack pointer,
3160 copy it to another register to stabilize it. We do not need
3161 to do this if we know that we won't be changing sp. */
3162
3163 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3164 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3165 temp = copy_to_reg (temp);
3166 }
3167
3168 /* Make inhibit_defer_pop nonzero around the library call
3169 to force it to pop the bcopy-arguments right away. */
3170 NO_DEFER_POP;
3171 #ifdef TARGET_MEM_FUNCTIONS
3172 emit_library_call (memcpy_libfunc, 0,
3173 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3174 convert_to_mode (TYPE_MODE (sizetype),
3175 size, TREE_UNSIGNED (sizetype)),
3176 TYPE_MODE (sizetype));
3177 #else
3178 emit_library_call (bcopy_libfunc, 0,
3179 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3180 convert_to_mode (TYPE_MODE (integer_type_node),
3181 size,
3182 TREE_UNSIGNED (integer_type_node)),
3183 TYPE_MODE (integer_type_node));
3184 #endif
3185 OK_DEFER_POP;
3186 }
3187 }
3188 else if (partial > 0)
3189 {
3190 /* Scalar partly in registers. */
3191
3192 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3193 int i;
3194 int not_stack;
3195 /* # words of start of argument
3196 that we must make space for but need not store. */
3197 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3198 int args_offset = INTVAL (args_so_far);
3199 int skip;
3200
3201 /* Push padding now if padding above and stack grows down,
3202 or if padding below and stack grows up.
3203 But if space already allocated, this has already been done. */
3204 if (extra && args_addr == 0
3205 && where_pad != none && where_pad != stack_direction)
3206 anti_adjust_stack (GEN_INT (extra));
3207
3208 /* If we make space by pushing it, we might as well push
3209 the real data. Otherwise, we can leave OFFSET nonzero
3210 and leave the space uninitialized. */
3211 if (args_addr == 0)
3212 offset = 0;
3213
3214 /* Now NOT_STACK gets the number of words that we don't need to
3215 allocate on the stack. */
3216 not_stack = partial - offset;
3217
3218 /* If the partial register-part of the arg counts in its stack size,
3219 skip the part of stack space corresponding to the registers.
3220 Otherwise, start copying to the beginning of the stack space,
3221 by setting SKIP to 0. */
3222 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3223
3224 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3225 x = validize_mem (force_const_mem (mode, x));
3226
3227 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3228 SUBREGs of such registers are not allowed. */
3229 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3230 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3231 x = copy_to_reg (x);
3232
3233 /* Loop over all the words allocated on the stack for this arg. */
3234 /* We can do it by words, because any scalar bigger than a word
3235 has a size that is a multiple of a word. */
3236 #ifndef PUSH_ARGS_REVERSED
3237 for (i = not_stack; i < size; i++)
3238 #else
3239 for (i = size - 1; i >= not_stack; i--)
3240 #endif
3241 if (i >= not_stack + offset)
3242 emit_push_insn (operand_subword_force (x, i, mode),
3243 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3244 0, args_addr,
3245 GEN_INT (args_offset + ((i - not_stack + skip)
3246 * UNITS_PER_WORD)),
3247 reg_parm_stack_space, alignment_pad);
3248 }
3249 else
3250 {
3251 rtx addr;
3252 rtx target = NULL_RTX;
3253
3254 /* Push padding now if padding above and stack grows down,
3255 or if padding below and stack grows up.
3256 But if space already allocated, this has already been done. */
3257 if (extra && args_addr == 0
3258 && where_pad != none && where_pad != stack_direction)
3259 anti_adjust_stack (GEN_INT (extra));
3260
3261 #ifdef PUSH_ROUNDING
3262 if (args_addr == 0 && PUSH_ARGS)
3263 {
3264 addr = gen_push_operand ();
3265 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3266 }
3267 else
3268 #endif
3269 {
3270 if (GET_CODE (args_so_far) == CONST_INT)
3271 addr
3272 = memory_address (mode,
3273 plus_constant (args_addr,
3274 INTVAL (args_so_far)));
3275 else
3276 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3277 args_so_far));
3278 target = addr;
3279 }
3280
3281 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3282
3283 if (current_function_check_memory_usage && ! in_check_memory_usage)
3284 {
3285 in_check_memory_usage = 1;
3286 if (target == 0)
3287 target = get_push_address (GET_MODE_SIZE (mode));
3288
3289 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3290 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3291 target, Pmode,
3292 XEXP (x, 0), Pmode,
3293 GEN_INT (GET_MODE_SIZE (mode)),
3294 TYPE_MODE (sizetype));
3295 else
3296 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3297 target, Pmode,
3298 GEN_INT (GET_MODE_SIZE (mode)),
3299 TYPE_MODE (sizetype),
3300 GEN_INT (MEMORY_USE_RW),
3301 TYPE_MODE (integer_type_node));
3302 in_check_memory_usage = 0;
3303 }
3304 }
3305
3306 ret:
3307 /* If part should go in registers, copy that part
3308 into the appropriate registers. Do this now, at the end,
3309 since mem-to-mem copies above may do function calls. */
3310 if (partial > 0 && reg != 0)
3311 {
3312 /* Handle calls that pass values in multiple non-contiguous locations.
3313 The Irix 6 ABI has examples of this. */
3314 if (GET_CODE (reg) == PARALLEL)
3315 emit_group_load (reg, x, -1, align); /* ??? size? */
3316 else
3317 move_block_to_reg (REGNO (reg), x, partial, mode);
3318 }
3319
3320 if (extra && args_addr == 0 && where_pad == stack_direction)
3321 anti_adjust_stack (GEN_INT (extra));
3322
3323 if (alignment_pad)
3324 anti_adjust_stack (alignment_pad);
3325 }
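
/* Illustrative sketch, not part of the original file: pushing a single
   word-sized argument with emit_push_insn on a target with push insns
   (ARGS_ADDR == 0) when no part of the argument goes in registers.  The
   alignment and the zero ARGS_SO_FAR are assumptions of the example; the
   argument list mirrors the recursive call made above for partial
   arguments.  */
#if 0
static void
example_push_scalar_arg (x)
     rtx x;
{
  emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX, BITS_PER_WORD,
		  0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);
}
#endif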
3326 \f
3327 /* Expand an assignment that stores the value of FROM into TO.
3328 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3329 (This may contain a QUEUED rtx;
3330 if the value is constant, this rtx is a constant.)
3331 Otherwise, the returned value is NULL_RTX.
3332
3333 SUGGEST_REG is no longer actually used.
3334 It used to mean, copy the value through a register
3335 and return that register, if that is possible.
3336 We now use WANT_VALUE to decide whether to do this. */
3337
3338 rtx
3339 expand_assignment (to, from, want_value, suggest_reg)
3340 tree to, from;
3341 int want_value;
3342 int suggest_reg ATTRIBUTE_UNUSED;
3343 {
3344 register rtx to_rtx = 0;
3345 rtx result;
3346
3347 /* Don't crash if the lhs of the assignment was erroneous. */
3348
3349 if (TREE_CODE (to) == ERROR_MARK)
3350 {
3351 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3352 return want_value ? result : NULL_RTX;
3353 }
3354
3355 /* Assignment of a structure component needs special treatment
3356 if the structure component's rtx is not simply a MEM.
3357 Assignment of an array element at a constant index, and assignment of
3358 an array element in an unaligned packed structure field, have the same
3359 problem. */
3360
3361 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3362 || TREE_CODE (to) == ARRAY_REF)
3363 {
3364 enum machine_mode mode1;
3365 HOST_WIDE_INT bitsize, bitpos;
3366 tree offset;
3367 int unsignedp;
3368 int volatilep = 0;
3369 tree tem;
3370 unsigned int alignment;
3371
3372 push_temp_slots ();
3373 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3374 &unsignedp, &volatilep, &alignment);
3375
3376 /* If we are going to use store_bit_field and extract_bit_field,
3377 make sure to_rtx will be safe for multiple use. */
3378
3379 if (mode1 == VOIDmode && want_value)
3380 tem = stabilize_reference (tem);
3381
3382 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3383 if (offset != 0)
3384 {
3385 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3386
3387 if (GET_CODE (to_rtx) != MEM)
3388 abort ();
3389
3390 if (GET_MODE (offset_rtx) != ptr_mode)
3391 {
3392 #ifdef POINTERS_EXTEND_UNSIGNED
3393 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3394 #else
3395 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3396 #endif
3397 }
3398
3399 /* A constant address in TO_RTX can have VOIDmode; we must not try
3400 to call force_reg in that case, so avoid it. */
3401 if (GET_CODE (to_rtx) == MEM
3402 && GET_MODE (to_rtx) == BLKmode
3403 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3404 && bitsize
3405 && (bitpos % bitsize) == 0
3406 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3407 && alignment == GET_MODE_ALIGNMENT (mode1))
3408 {
3409 rtx temp = change_address (to_rtx, mode1,
3410 plus_constant (XEXP (to_rtx, 0),
3411 (bitpos /
3412 BITS_PER_UNIT)));
3413 if (GET_CODE (XEXP (temp, 0)) == REG)
3414 to_rtx = temp;
3415 else
3416 to_rtx = change_address (to_rtx, mode1,
3417 force_reg (GET_MODE (XEXP (temp, 0)),
3418 XEXP (temp, 0)));
3419 bitpos = 0;
3420 }
3421
3422 to_rtx = change_address (to_rtx, VOIDmode,
3423 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3424 force_reg (ptr_mode,
3425 offset_rtx)));
3426 }
3427
3428 if (volatilep)
3429 {
3430 if (GET_CODE (to_rtx) == MEM)
3431 {
3432 /* When the offset is zero, to_rtx is the address of the
3433 structure we are storing into, and hence may be shared.
3434 We must make a new MEM before setting the volatile bit. */
3435 if (offset == 0)
3436 to_rtx = copy_rtx (to_rtx);
3437
3438 MEM_VOLATILE_P (to_rtx) = 1;
3439 }
3440 #if 0 /* This was turned off because, when a field is volatile
3441 in an object which is not volatile, the object may be in a register,
3442 and then we would abort over here. */
3443 else
3444 abort ();
3445 #endif
3446 }
3447
3448 if (TREE_CODE (to) == COMPONENT_REF
3449 && TREE_READONLY (TREE_OPERAND (to, 1)))
3450 {
3451 if (offset == 0)
3452 to_rtx = copy_rtx (to_rtx);
3453
3454 RTX_UNCHANGING_P (to_rtx) = 1;
3455 }
3456
3457 /* Check the access. */
3458 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3459 {
3460 rtx to_addr;
3461 int size;
3462 int best_mode_size;
3463 enum machine_mode best_mode;
3464
3465 best_mode = get_best_mode (bitsize, bitpos,
3466 TYPE_ALIGN (TREE_TYPE (tem)),
3467 mode1, volatilep);
3468 if (best_mode == VOIDmode)
3469 best_mode = QImode;
3470
3471 best_mode_size = GET_MODE_BITSIZE (best_mode);
3472 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3473 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3474 size *= GET_MODE_SIZE (best_mode);
3475
3476 /* Check the access right of the pointer. */
3477 if (size)
3478 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3479 to_addr, Pmode,
3480 GEN_INT (size), TYPE_MODE (sizetype),
3481 GEN_INT (MEMORY_USE_WO),
3482 TYPE_MODE (integer_type_node));
3483 }
3484
3485 /* If this is a varying-length object, we must get the address of
3486 the source and do an explicit block move. */
3487 if (bitsize < 0)
3488 {
3489 unsigned int from_align;
3490 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3491 rtx inner_to_rtx
3492 = change_address (to_rtx, VOIDmode,
3493 plus_constant (XEXP (to_rtx, 0),
3494 bitpos / BITS_PER_UNIT));
3495
3496 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3497 MIN (alignment, from_align));
3498 free_temp_slots ();
3499 pop_temp_slots ();
3500 return to_rtx;
3501 }
3502 else
3503 {
3504 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3505 (want_value
3506 /* Spurious cast for HPUX compiler. */
3507 ? ((enum machine_mode)
3508 TYPE_MODE (TREE_TYPE (to)))
3509 : VOIDmode),
3510 unsignedp,
3511 alignment,
3512 int_size_in_bytes (TREE_TYPE (tem)),
3513 get_alias_set (to));
3514
3515 preserve_temp_slots (result);
3516 free_temp_slots ();
3517 pop_temp_slots ();
3518
3519 /* If the value is meaningful, convert RESULT to the proper mode.
3520 Otherwise, return nothing. */
3521 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3522 TYPE_MODE (TREE_TYPE (from)),
3523 result,
3524 TREE_UNSIGNED (TREE_TYPE (to)))
3525 : NULL_RTX);
3526 }
3527 }
3528
3529 /* If the rhs is a function call and its value is not an aggregate,
3530 call the function before we start to compute the lhs.
3531 This is needed for correct code for cases such as
3532 val = setjmp (buf) on machines where reference to val
3533 requires loading up part of an address in a separate insn.
3534
3535 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3536 since it might be a promoted variable where the zero- or sign- extension
3537 needs to be done. Handling this in the normal way is safe because no
3538 computation is done before the call. */
3539 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3540 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3541 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3542 && GET_CODE (DECL_RTL (to)) == REG))
3543 {
3544 rtx value;
3545
3546 push_temp_slots ();
3547 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3548 if (to_rtx == 0)
3549 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3550
3551 /* Handle calls that return values in multiple non-contiguous locations.
3552 The Irix 6 ABI has examples of this. */
3553 if (GET_CODE (to_rtx) == PARALLEL)
3554 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3555 TYPE_ALIGN (TREE_TYPE (from)));
3556 else if (GET_MODE (to_rtx) == BLKmode)
3557 emit_block_move (to_rtx, value, expr_size (from),
3558 TYPE_ALIGN (TREE_TYPE (from)));
3559 else
3560 {
3561 #ifdef POINTERS_EXTEND_UNSIGNED
3562 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3563 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3564 value = convert_memory_address (GET_MODE (to_rtx), value);
3565 #endif
3566 emit_move_insn (to_rtx, value);
3567 }
3568 preserve_temp_slots (to_rtx);
3569 free_temp_slots ();
3570 pop_temp_slots ();
3571 return want_value ? to_rtx : NULL_RTX;
3572 }
3573
3574 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3575 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3576
3577 if (to_rtx == 0)
3578 {
3579 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3580 if (GET_CODE (to_rtx) == MEM)
3581 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3582 }
3583
3584 /* Don't move directly into a return register. */
3585 if (TREE_CODE (to) == RESULT_DECL
3586 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3587 {
3588 rtx temp;
3589
3590 push_temp_slots ();
3591 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3592
3593 if (GET_CODE (to_rtx) == PARALLEL)
3594 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3595 TYPE_ALIGN (TREE_TYPE (from)));
3596 else
3597 emit_move_insn (to_rtx, temp);
3598
3599 preserve_temp_slots (to_rtx);
3600 free_temp_slots ();
3601 pop_temp_slots ();
3602 return want_value ? to_rtx : NULL_RTX;
3603 }
3604
3605 /* In case we are returning the contents of an object which overlaps
3606 the place the value is being stored, use a safe function when copying
3607 a value through a pointer into a structure value return block. */
3608 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3609 && current_function_returns_struct
3610 && !current_function_returns_pcc_struct)
3611 {
3612 rtx from_rtx, size;
3613
3614 push_temp_slots ();
3615 size = expr_size (from);
3616 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3617 EXPAND_MEMORY_USE_DONT);
3618
3619 /* Copy the rights of the bitmap. */
3620 if (current_function_check_memory_usage)
3621 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3622 XEXP (to_rtx, 0), Pmode,
3623 XEXP (from_rtx, 0), Pmode,
3624 convert_to_mode (TYPE_MODE (sizetype),
3625 size, TREE_UNSIGNED (sizetype)),
3626 TYPE_MODE (sizetype));
3627
3628 #ifdef TARGET_MEM_FUNCTIONS
3629 emit_library_call (memcpy_libfunc, 0,
3630 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3631 XEXP (from_rtx, 0), Pmode,
3632 convert_to_mode (TYPE_MODE (sizetype),
3633 size, TREE_UNSIGNED (sizetype)),
3634 TYPE_MODE (sizetype));
3635 #else
3636 emit_library_call (bcopy_libfunc, 0,
3637 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3638 XEXP (to_rtx, 0), Pmode,
3639 convert_to_mode (TYPE_MODE (integer_type_node),
3640 size, TREE_UNSIGNED (integer_type_node)),
3641 TYPE_MODE (integer_type_node));
3642 #endif
3643
3644 preserve_temp_slots (to_rtx);
3645 free_temp_slots ();
3646 pop_temp_slots ();
3647 return want_value ? to_rtx : NULL_RTX;
3648 }
3649
3650 /* Compute FROM and store the value in the rtx we got. */
3651
3652 push_temp_slots ();
3653 result = store_expr (from, to_rtx, want_value);
3654 preserve_temp_slots (result);
3655 free_temp_slots ();
3656 pop_temp_slots ();
3657 return want_value ? result : NULL_RTX;
3658 }
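/* Illustrative sketch, kept out of the build with #if 0: the sort of
   source that reaches the struct-return path above, assuming a target
   that returns large aggregates in memory.  The names are arbitrary.  */
#if 0
struct big { int v[32]; };

struct big
copy_through_pointer (struct big *p)
{
  /* *P may overlap the caller-supplied return block, so the assignment
     of the RESULT_DECL from this INDIRECT_REF is emitted as a library
     copy (memcpy or bcopy) instead of an ordinary block move.  */
  return *p;
}
#endif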
3659
3660 /* Generate code for computing expression EXP,
3661 and storing the value into TARGET.
3662 TARGET may contain a QUEUED rtx.
3663
3664 If WANT_VALUE is nonzero, return a copy of the value
3665 not in TARGET, so that we can be sure to use the proper
3666 value in a containing expression even if TARGET has something
3667 else stored in it. If possible, we copy the value through a pseudo
3668 and return that pseudo. Or, if the value is constant, we try to
3669 return the constant. In some cases, we return a pseudo
3670 copied *from* TARGET.
3671
3672 If the mode is BLKmode then we may return TARGET itself.
3673 It turns out that in BLKmode it doesn't cause a problem,
3674 because C has no operators that could combine two different
3675 assignments into the same BLKmode object with different values
3676 with no sequence point. Will other languages need this to
3677 be more thorough?
3678
3679 If WANT_VALUE is 0, we return NULL, to make sure
3680 to catch quickly any cases where the caller uses the value
3681 and fails to set WANT_VALUE. */
3682
3683 rtx
3684 store_expr (exp, target, want_value)
3685 register tree exp;
3686 register rtx target;
3687 int want_value;
3688 {
3689 register rtx temp;
3690 int dont_return_target = 0;
3691
3692 if (TREE_CODE (exp) == COMPOUND_EXPR)
3693 {
3694 /* Perform first part of compound expression, then assign from second
3695 part. */
3696 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3697 emit_queue ();
3698 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3699 }
3700 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3701 {
3702 /* For conditional expression, get safe form of the target. Then
3703 test the condition, doing the appropriate assignment on either
3704 side. This avoids the creation of unnecessary temporaries.
3705 For non-BLKmode, it is more efficient not to do this. */
3706
3707 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3708
3709 emit_queue ();
3710 target = protect_from_queue (target, 1);
3711
3712 do_pending_stack_adjust ();
3713 NO_DEFER_POP;
3714 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3715 start_cleanup_deferral ();
3716 store_expr (TREE_OPERAND (exp, 1), target, 0);
3717 end_cleanup_deferral ();
3718 emit_queue ();
3719 emit_jump_insn (gen_jump (lab2));
3720 emit_barrier ();
3721 emit_label (lab1);
3722 start_cleanup_deferral ();
3723 store_expr (TREE_OPERAND (exp, 2), target, 0);
3724 end_cleanup_deferral ();
3725 emit_queue ();
3726 emit_label (lab2);
3727 OK_DEFER_POP;
3728
3729 return want_value ? target : NULL_RTX;
3730 }
3731 else if (queued_subexp_p (target))
3732 /* If target contains a postincrement, let's not risk
3733 using it as the place to generate the rhs. */
3734 {
3735 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3736 {
3737 /* Expand EXP into a new pseudo. */
3738 temp = gen_reg_rtx (GET_MODE (target));
3739 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3740 }
3741 else
3742 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3743
3744 /* If target is volatile, ANSI requires accessing the value
3745 *from* the target, if it is accessed. So make that happen.
3746 In no case return the target itself. */
3747 if (! MEM_VOLATILE_P (target) && want_value)
3748 dont_return_target = 1;
3749 }
3750 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3751 && GET_MODE (target) != BLKmode)
3752 /* If target is in memory and caller wants value in a register instead,
3753 arrange that. Pass TARGET as target for expand_expr so that,
3754 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3755 We know expand_expr will not use the target in that case.
3756 Don't do this if TARGET is volatile because we are supposed
3757 to write it and then read it. */
3758 {
3759 temp = expand_expr (exp, target, GET_MODE (target), 0);
3760 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3761 temp = copy_to_reg (temp);
3762 dont_return_target = 1;
3763 }
3764 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3765 /* If this is a scalar in a register that is stored in a wider mode
3766 than the declared mode, compute the result into its declared mode
3767 and then convert to the wider mode. Our value is the computed
3768 expression. */
3769 {
3770 /* If we don't want a value, we can do the conversion inside EXP,
3771 which will often result in some optimizations. Do the conversion
3772 in two steps: first change the signedness, if needed, then
3773 the extend. But don't do this if the type of EXP is a subtype
3774 of something else since then the conversion might involve
3775 more than just converting modes. */
3776 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3777 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3778 {
3779 if (TREE_UNSIGNED (TREE_TYPE (exp))
3780 != SUBREG_PROMOTED_UNSIGNED_P (target))
3781 exp
3782 = convert
3783 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3784 TREE_TYPE (exp)),
3785 exp);
3786
3787 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3788 SUBREG_PROMOTED_UNSIGNED_P (target)),
3789 exp);
3790 }
3791
3792 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3793
3794 /* If TEMP is a volatile MEM and we want a result value, make
3795 the access now so it gets done only once. Likewise if
3796 it contains TARGET. */
3797 if (GET_CODE (temp) == MEM && want_value
3798 && (MEM_VOLATILE_P (temp)
3799 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3800 temp = copy_to_reg (temp);
3801
3802 /* If TEMP is a VOIDmode constant, use convert_modes to make
3803 sure that we properly convert it. */
3804 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3805 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3806 TYPE_MODE (TREE_TYPE (exp)), temp,
3807 SUBREG_PROMOTED_UNSIGNED_P (target));
3808
3809 convert_move (SUBREG_REG (target), temp,
3810 SUBREG_PROMOTED_UNSIGNED_P (target));
3811
3812 /* If we promoted a constant, change the mode back down to match
3813 target. Otherwise, the caller might get confused by a result whose
3814 mode is larger than expected. */
3815
3816 if (want_value && GET_MODE (temp) != GET_MODE (target)
3817 && GET_MODE (temp) != VOIDmode)
3818 {
3819 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3820 SUBREG_PROMOTED_VAR_P (temp) = 1;
3821 SUBREG_PROMOTED_UNSIGNED_P (temp)
3822 = SUBREG_PROMOTED_UNSIGNED_P (target);
3823 }
3824
3825 return want_value ? temp : NULL_RTX;
3826 }
3827 else
3828 {
3829 temp = expand_expr (exp, target, GET_MODE (target), 0);
3830 /* Return TARGET if it's a specified hardware register.
3831 If TARGET is a volatile mem ref, either return TARGET
3832 or return a reg copied *from* TARGET; ANSI requires this.
3833
3834 Otherwise, if TEMP is not TARGET, return TEMP
3835 if it is constant (for efficiency),
3836 or if we really want the correct value. */
3837 if (!(target && GET_CODE (target) == REG
3838 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3839 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3840 && ! rtx_equal_p (temp, target)
3841 && (CONSTANT_P (temp) || want_value))
3842 dont_return_target = 1;
3843 }
3844
3845 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3846 the same as that of TARGET, adjust the constant. This is needed, for
3847 example, in case it is a CONST_DOUBLE and we want only a word-sized
3848 value. */
3849 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3850 && TREE_CODE (exp) != ERROR_MARK
3851 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3852 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3853 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3854
3855 if (current_function_check_memory_usage
3856 && GET_CODE (target) == MEM
3857 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3858 {
3859 if (GET_CODE (temp) == MEM)
3860 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3861 XEXP (target, 0), Pmode,
3862 XEXP (temp, 0), Pmode,
3863 expr_size (exp), TYPE_MODE (sizetype));
3864 else
3865 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3866 XEXP (target, 0), Pmode,
3867 expr_size (exp), TYPE_MODE (sizetype),
3868 GEN_INT (MEMORY_USE_WO),
3869 TYPE_MODE (integer_type_node));
3870 }
3871
3872 /* If value was not generated in the target, store it there.
3873 Convert the value to TARGET's type first if necessary. */
3874 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3875 one or both of them are volatile memory refs, we have to distinguish
3876 two cases:
3877 - expand_expr has used TARGET. In this case, we must not generate
3878 another copy. This can be detected by TARGET being equal according
3879 to == .
3880 - expand_expr has not used TARGET - that means that the source just
3881 happens to have the same RTX form. Since temp will have been created
3882 by expand_expr, it will compare unequal according to == .
3883 We must generate a copy in this case, to reach the correct number
3884 of volatile memory references. */
3885
3886 if ((! rtx_equal_p (temp, target)
3887 || (temp != target && (side_effects_p (temp)
3888 || side_effects_p (target))))
3889 && TREE_CODE (exp) != ERROR_MARK)
3890 {
3891 target = protect_from_queue (target, 1);
3892 if (GET_MODE (temp) != GET_MODE (target)
3893 && GET_MODE (temp) != VOIDmode)
3894 {
3895 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3896 if (dont_return_target)
3897 {
3898 /* In this case, we will return TEMP,
3899 so make sure it has the proper mode.
3900 But don't forget to store the value into TARGET. */
3901 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3902 emit_move_insn (target, temp);
3903 }
3904 else
3905 convert_move (target, temp, unsignedp);
3906 }
3907
3908 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3909 {
3910 /* Handle copying a string constant into an array.
3911 The string constant may be shorter than the array.
3912 So copy just the string's actual length, and clear the rest. */
3913 rtx size;
3914 rtx addr;
3915
3916 /* Get the size of the data type of the string,
3917 which is actually the size of the target. */
3918 size = expr_size (exp);
3919 if (GET_CODE (size) == CONST_INT
3920 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3921 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3922 else
3923 {
3924 /* Compute the size of the data to copy from the string. */
3925 tree copy_size
3926 = size_binop (MIN_EXPR,
3927 make_tree (sizetype, size),
3928 size_int (TREE_STRING_LENGTH (exp)));
3929 int align = TYPE_ALIGN (TREE_TYPE (exp));
3930 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3931 VOIDmode, 0);
3932 rtx label = 0;
3933
3934 /* Copy that much. */
3935 emit_block_move (target, temp, copy_size_rtx,
3936 TYPE_ALIGN (TREE_TYPE (exp)));
3937
3938 /* Figure out how much is left in TARGET that we have to clear.
3939 Do all calculations in ptr_mode. */
3940
3941 addr = XEXP (target, 0);
3942 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3943
3944 if (GET_CODE (copy_size_rtx) == CONST_INT)
3945 {
3946 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3947 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3948 align = MIN (align, (BITS_PER_UNIT
3949 * (INTVAL (copy_size_rtx)
3950 & - INTVAL (copy_size_rtx))));
3951 }
3952 else
3953 {
3954 addr = force_reg (ptr_mode, addr);
3955 addr = expand_binop (ptr_mode, add_optab, addr,
3956 copy_size_rtx, NULL_RTX, 0,
3957 OPTAB_LIB_WIDEN);
3958
3959 size = expand_binop (ptr_mode, sub_optab, size,
3960 copy_size_rtx, NULL_RTX, 0,
3961 OPTAB_LIB_WIDEN);
3962
3963 align = BITS_PER_UNIT;
3964 label = gen_label_rtx ();
3965 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3966 GET_MODE (size), 0, 0, label);
3967 }
3968 align = MIN (align, expr_align (copy_size));
3969
3970 if (size != const0_rtx)
3971 {
3972 /* Be sure we can write on ADDR. */
3973 if (current_function_check_memory_usage)
3974 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3975 addr, Pmode,
3976 size, TYPE_MODE (sizetype),
3977 GEN_INT (MEMORY_USE_WO),
3978 TYPE_MODE (integer_type_node));
3979 clear_storage (gen_rtx_MEM (BLKmode, addr), size, align);
3980 }
3981
3982 if (label)
3983 emit_label (label);
3984 }
3985 }
3986 /* Handle calls that return values in multiple non-contiguous locations.
3987 The Irix 6 ABI has examples of this. */
3988 else if (GET_CODE (target) == PARALLEL)
3989 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3990 TYPE_ALIGN (TREE_TYPE (exp)));
3991 else if (GET_MODE (temp) == BLKmode)
3992 emit_block_move (target, temp, expr_size (exp),
3993 TYPE_ALIGN (TREE_TYPE (exp)));
3994 else
3995 emit_move_insn (target, temp);
3996 }
3997
3998 /* If we don't want a value, return NULL_RTX. */
3999 if (! want_value)
4000 return NULL_RTX;
4001
4002 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4003 ??? The latter test doesn't seem to make sense. */
4004 else if (dont_return_target && GET_CODE (temp) != MEM)
4005 return temp;
4006
4007 /* Return TARGET itself if it is a hard register. */
4008 else if (want_value && GET_MODE (target) != BLKmode
4009 && ! (GET_CODE (target) == REG
4010 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4011 return copy_to_reg (target);
4012
4013 else
4014 return target;
4015 }
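/* Illustrative sketch, kept out of the build with #if 0: an initializer
   of the kind the STRING_CST branch of store_expr handles; the array
   size is arbitrary.  */
#if 0
void
string_init_example (void)
{
  /* The string constant occupies 4 bytes including the trailing NUL,
     so store_expr block-copies those 4 bytes into BUF and then clears
     the remaining 12 bytes of the array.  */
  char buf[16] = "abc";
  (void) buf[0];
}
#endif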
4016 \f
4017 /* Return 1 if EXP contains only zeros. */
4018
4019 static int
4020 is_zeros_p (exp)
4021 tree exp;
4022 {
4023 tree elt;
4024
4025 switch (TREE_CODE (exp))
4026 {
4027 case CONVERT_EXPR:
4028 case NOP_EXPR:
4029 case NON_LVALUE_EXPR:
4030 return is_zeros_p (TREE_OPERAND (exp, 0));
4031
4032 case INTEGER_CST:
4033 return integer_zerop (exp);
4034
4035 case COMPLEX_CST:
4036 return
4037 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4038
4039 case REAL_CST:
4040 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4041
4042 case CONSTRUCTOR:
4043 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4044 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4045 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4046 if (! is_zeros_p (TREE_VALUE (elt)))
4047 return 0;
4048
4049 return 1;
4050
4051 default:
4052 return 0;
4053 }
4054 }
4055
4056 /* Return 1 if EXP contains mostly (3/4) zeros. */
4057
4058 static int
4059 mostly_zeros_p (exp)
4060 tree exp;
4061 {
4062 if (TREE_CODE (exp) == CONSTRUCTOR)
4063 {
4064 int elts = 0, zeros = 0;
4065 tree elt = CONSTRUCTOR_ELTS (exp);
4066 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4067 {
4068 /* If there are no ranges of true bits, it is all zero. */
4069 return elt == NULL_TREE;
4070 }
4071 for (; elt; elt = TREE_CHAIN (elt))
4072 {
4073 /* We do not handle the case where the index is a RANGE_EXPR,
4074 so the statistic will be somewhat inaccurate.
4075 We do make a more accurate count in store_constructor itself,
4076 and since this function is only used for nested array elements,
4077 this should be close enough. */
4078 if (mostly_zeros_p (TREE_VALUE (elt)))
4079 zeros++;
4080 elts++;
4081 }
4082
4083 return 4 * zeros >= 3 * elts;
4084 }
4085
4086 return is_zeros_p (exp);
4087 }
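/* Illustrative sketch, kept out of the build with #if 0: the 3/4
   heuristic applied to a small automatic aggregate; the values are
   arbitrary.  */
#if 0
void
mostly_zero_example (void)
{
  /* Six of the eight elements are zero, so 4 * 6 >= 3 * 8 holds:
     the whole array is cleared first and only the two nonzero
     elements are stored individually.  */
  int v[8] = { 0, 0, 5, 0, 0, 0, 7, 0 };
  (void) v[0];
}
#endif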
4088 \f
4089 /* Helper function for store_constructor.
4090 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4091 TYPE is the type of the CONSTRUCTOR, not the element type.
4092 ALIGN and CLEARED are as for store_constructor.
4093
4094 This provides a recursive shortcut back to store_constructor when it isn't
4095 necessary to go through store_field. This is so that we can pass through
4096 the cleared field to let store_constructor know that we may not have to
4097 clear a substructure if the outer structure has already been cleared. */
4098
4099 static void
4100 store_constructor_field (target, bitsize, bitpos,
4101 mode, exp, type, align, cleared)
4102 rtx target;
4103 unsigned HOST_WIDE_INT bitsize;
4104 HOST_WIDE_INT bitpos;
4105 enum machine_mode mode;
4106 tree exp, type;
4107 unsigned int align;
4108 int cleared;
4109 {
4110 if (TREE_CODE (exp) == CONSTRUCTOR
4111 && bitpos % BITS_PER_UNIT == 0
4112 /* If we have a non-zero bitpos for a register target, then we just
4113 let store_field do the bitfield handling. This is unlikely to
4114 generate unnecessary clear instructions anyway. */
4115 && (bitpos == 0 || GET_CODE (target) == MEM))
4116 {
4117 if (bitpos != 0)
4118 target
4119 = change_address (target,
4120 GET_MODE (target) == BLKmode
4121 || 0 != (bitpos
4122 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4123 ? BLKmode : VOIDmode,
4124 plus_constant (XEXP (target, 0),
4125 bitpos / BITS_PER_UNIT));
4126 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4127 }
4128 else
4129 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4130 int_size_in_bytes (type), 0);
4131 }
4132
4133 /* Store the value of constructor EXP into the rtx TARGET.
4134 TARGET is either a REG or a MEM.
4135 ALIGN is the maximum known alignment for TARGET.
4136 CLEARED is true if TARGET is known to have been zero'd.
4137 SIZE is the number of bytes of TARGET we are allowed to modify: this
4138 may not be the same as the size of EXP if we are assigning to a field
4139 which has been packed to exclude padding bits. */
4140
4141 static void
4142 store_constructor (exp, target, align, cleared, size)
4143 tree exp;
4144 rtx target;
4145 unsigned int align;
4146 int cleared;
4147 HOST_WIDE_INT size;
4148 {
4149 tree type = TREE_TYPE (exp);
4150 #ifdef WORD_REGISTER_OPERATIONS
4151 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4152 #endif
4153
4154 /* We know our target cannot conflict, since safe_from_p has been called. */
4155 #if 0
4156 /* Don't try copying piece by piece into a hard register
4157 since that is vulnerable to being clobbered by EXP.
4158 Instead, construct in a pseudo register and then copy it all. */
4159 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4160 {
4161 rtx temp = gen_reg_rtx (GET_MODE (target));
4162 store_constructor (exp, temp, align, cleared, size);
4163 emit_move_insn (target, temp);
4164 return;
4165 }
4166 #endif
4167
4168 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4169 || TREE_CODE (type) == QUAL_UNION_TYPE)
4170 {
4171 register tree elt;
4172
4173 /* Inform later passes that the whole union value is dead. */
4174 if ((TREE_CODE (type) == UNION_TYPE
4175 || TREE_CODE (type) == QUAL_UNION_TYPE)
4176 && ! cleared)
4177 {
4178 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4179
4180 /* If the constructor is empty, clear the union. */
4181 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4182 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4183 }
4184
4185 /* If we are building a static constructor into a register,
4186 set the initial value as zero so we can fold the value into
4187 a constant. But if more than one register is involved,
4188 this probably loses. */
4189 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4190 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4191 {
4192 if (! cleared)
4193 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4194
4195 cleared = 1;
4196 }
4197
4198 /* If the constructor has fewer fields than the structure
4199 or if we are initializing the structure to mostly zeros,
4200 clear the whole structure first. */
4201 else if (size > 0
4202 && ((list_length (CONSTRUCTOR_ELTS (exp))
4203 != fields_length (type))
4204 || mostly_zeros_p (exp)))
4205 {
4206 if (! cleared)
4207 clear_storage (target, GEN_INT (size), align);
4208
4209 cleared = 1;
4210 }
4211 else if (! cleared)
4212 /* Inform later passes that the old value is dead. */
4213 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4214
4215 /* Store each element of the constructor into
4216 the corresponding field of TARGET. */
4217
4218 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4219 {
4220 register tree field = TREE_PURPOSE (elt);
4221 #ifdef WORD_REGISTER_OPERATIONS
4222 tree value = TREE_VALUE (elt);
4223 #endif
4224 register enum machine_mode mode;
4225 HOST_WIDE_INT bitsize;
4226 HOST_WIDE_INT bitpos = 0;
4227 int unsignedp;
4228 tree offset;
4229 rtx to_rtx = target;
4230
4231 /* Just ignore missing fields.
4232 We cleared the whole structure, above,
4233 if any fields are missing. */
4234 if (field == 0)
4235 continue;
4236
4237 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4238 continue;
4239
4240 if (host_integerp (DECL_SIZE (field), 1))
4241 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4242 else
4243 bitsize = -1;
4244
4245 unsignedp = TREE_UNSIGNED (field);
4246 mode = DECL_MODE (field);
4247 if (DECL_BIT_FIELD (field))
4248 mode = VOIDmode;
4249
4250 offset = DECL_FIELD_OFFSET (field);
4251 if (host_integerp (offset, 0)
4252 && host_integerp (bit_position (field), 0))
4253 {
4254 bitpos = int_bit_position (field);
4255 offset = 0;
4256 }
4257 else
4258 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4259
4260 if (offset)
4261 {
4262 rtx offset_rtx;
4263
4264 if (contains_placeholder_p (offset))
4265 offset = build (WITH_RECORD_EXPR, sizetype,
4266 offset, make_tree (TREE_TYPE (exp), target));
4267
4268 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4269 if (GET_CODE (to_rtx) != MEM)
4270 abort ();
4271
4272 if (GET_MODE (offset_rtx) != ptr_mode)
4273 {
4274 #ifdef POINTERS_EXTEND_UNSIGNED
4275 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4276 #else
4277 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4278 #endif
4279 }
4280
4281 to_rtx
4282 = change_address (to_rtx, VOIDmode,
4283 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4284 force_reg (ptr_mode,
4285 offset_rtx)));
4286 align = DECL_OFFSET_ALIGN (field);
4287 }
4288
4289 if (TREE_READONLY (field))
4290 {
4291 if (GET_CODE (to_rtx) == MEM)
4292 to_rtx = copy_rtx (to_rtx);
4293
4294 RTX_UNCHANGING_P (to_rtx) = 1;
4295 }
4296
4297 #ifdef WORD_REGISTER_OPERATIONS
4298 /* If this initializes a field that is smaller than a word, at the
4299 start of a word, try to widen it to a full word.
4300 This special case allows us to output C++ member function
4301 initializations in a form that the optimizers can understand. */
4302 if (GET_CODE (target) == REG
4303 && bitsize < BITS_PER_WORD
4304 && bitpos % BITS_PER_WORD == 0
4305 && GET_MODE_CLASS (mode) == MODE_INT
4306 && TREE_CODE (value) == INTEGER_CST
4307 && exp_size >= 0
4308 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4309 {
4310 tree type = TREE_TYPE (value);
4311 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4312 {
4313 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4314 value = convert (type, value);
4315 }
4316 if (BYTES_BIG_ENDIAN)
4317 value
4318 = fold (build (LSHIFT_EXPR, type, value,
4319 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4320 bitsize = BITS_PER_WORD;
4321 mode = word_mode;
4322 }
4323 #endif
4324 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4325 TREE_VALUE (elt), type, align, cleared);
4326 }
4327 }
4328 else if (TREE_CODE (type) == ARRAY_TYPE)
4329 {
4330 register tree elt;
4331 register int i;
4332 int need_to_clear;
4333 tree domain = TYPE_DOMAIN (type);
4334 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4335 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4336 tree elttype = TREE_TYPE (type);
4337
4338 /* If the constructor has fewer elements than the array,
4339 clear the whole array first. Similarly if this is
4340 a static constructor of a non-BLKmode object. */
4341 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4342 need_to_clear = 1;
4343 else
4344 {
4345 HOST_WIDE_INT count = 0, zero_count = 0;
4346 need_to_clear = 0;
4347 /* This loop is a more accurate version of the loop in
4348 mostly_zeros_p (it handles RANGE_EXPR in an index).
4349 It is also needed to check for missing elements. */
4350 for (elt = CONSTRUCTOR_ELTS (exp);
4351 elt != NULL_TREE;
4352 elt = TREE_CHAIN (elt))
4353 {
4354 tree index = TREE_PURPOSE (elt);
4355 HOST_WIDE_INT this_node_count;
4356
4357 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4358 {
4359 tree lo_index = TREE_OPERAND (index, 0);
4360 tree hi_index = TREE_OPERAND (index, 1);
4361
4362 if (! host_integerp (lo_index, 1)
4363 || ! host_integerp (hi_index, 1))
4364 {
4365 need_to_clear = 1;
4366 break;
4367 }
4368
4369 this_node_count = (tree_low_cst (hi_index, 1)
4370 - tree_low_cst (lo_index, 1) + 1);
4371 }
4372 else
4373 this_node_count = 1;
4374 count += this_node_count;
4375 if (mostly_zeros_p (TREE_VALUE (elt)))
4376 zero_count += this_node_count;
4377 }
4378 /* Clear the entire array first if there are any missing elements,
4379 or if the incidence of zero elements is >= 75%. */
4380 if (count < maxelt - minelt + 1
4381 || 4 * zero_count >= 3 * count)
4382 need_to_clear = 1;
4383 }
4384 if (need_to_clear && size > 0)
4385 {
4386 if (! cleared)
4387 clear_storage (target, GEN_INT (size), align);
4388 cleared = 1;
4389 }
4390 else
4391 /* Inform later passes that the old value is dead. */
4392 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4393
4394 /* Store each element of the constructor into
4395 the corresponding element of TARGET, determined
4396 by counting the elements. */
4397 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4398 elt;
4399 elt = TREE_CHAIN (elt), i++)
4400 {
4401 register enum machine_mode mode;
4402 HOST_WIDE_INT bitsize;
4403 HOST_WIDE_INT bitpos;
4404 int unsignedp;
4405 tree value = TREE_VALUE (elt);
4406 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4407 tree index = TREE_PURPOSE (elt);
4408 rtx xtarget = target;
4409
4410 if (cleared && is_zeros_p (value))
4411 continue;
4412
4413 unsignedp = TREE_UNSIGNED (elttype);
4414 mode = TYPE_MODE (elttype);
4415 if (mode == BLKmode)
4416 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4417 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4418 : -1);
4419 else
4420 bitsize = GET_MODE_BITSIZE (mode);
4421
4422 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4423 {
4424 tree lo_index = TREE_OPERAND (index, 0);
4425 tree hi_index = TREE_OPERAND (index, 1);
4426 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4427 struct nesting *loop;
4428 HOST_WIDE_INT lo, hi, count;
4429 tree position;
4430
4431 /* If the range is constant and "small", unroll the loop. */
4432 if (host_integerp (lo_index, 0)
4433 && host_integerp (hi_index, 0)
4434 && (lo = tree_low_cst (lo_index, 0),
4435 hi = tree_low_cst (hi_index, 0),
4436 count = hi - lo + 1,
4437 (GET_CODE (target) != MEM
4438 || count <= 2
4439 || (host_integerp (TYPE_SIZE (elttype), 1)
4440 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4441 <= 40 * 8)))))
4442 {
4443 lo -= minelt; hi -= minelt;
4444 for (; lo <= hi; lo++)
4445 {
4446 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4447 store_constructor_field (target, bitsize, bitpos, mode,
4448 value, type, align, cleared);
4449 }
4450 }
4451 else
4452 {
4453 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4454 loop_top = gen_label_rtx ();
4455 loop_end = gen_label_rtx ();
4456
4457 unsignedp = TREE_UNSIGNED (domain);
4458
4459 index = build_decl (VAR_DECL, NULL_TREE, domain);
4460
4461 DECL_RTL (index) = index_r
4462 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4463 &unsignedp, 0));
4464
4465 if (TREE_CODE (value) == SAVE_EXPR
4466 && SAVE_EXPR_RTL (value) == 0)
4467 {
4468 /* Make sure value gets expanded once before the
4469 loop. */
4470 expand_expr (value, const0_rtx, VOIDmode, 0);
4471 emit_queue ();
4472 }
4473 store_expr (lo_index, index_r, 0);
4474 loop = expand_start_loop (0);
4475
4476 /* Assign value to element index. */
4477 position
4478 = convert (ssizetype,
4479 fold (build (MINUS_EXPR, TREE_TYPE (index),
4480 index, TYPE_MIN_VALUE (domain))));
4481 position = size_binop (MULT_EXPR, position,
4482 convert (ssizetype,
4483 TYPE_SIZE_UNIT (elttype)));
4484
4485 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4486 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4487 xtarget = change_address (target, mode, addr);
4488 if (TREE_CODE (value) == CONSTRUCTOR)
4489 store_constructor (value, xtarget, align, cleared,
4490 bitsize / BITS_PER_UNIT);
4491 else
4492 store_expr (value, xtarget, 0);
4493
4494 expand_exit_loop_if_false (loop,
4495 build (LT_EXPR, integer_type_node,
4496 index, hi_index));
4497
4498 expand_increment (build (PREINCREMENT_EXPR,
4499 TREE_TYPE (index),
4500 index, integer_one_node), 0, 0);
4501 expand_end_loop ();
4502 emit_label (loop_end);
4503 }
4504 }
4505 else if ((index != 0 && ! host_integerp (index, 0))
4506 || ! host_integerp (TYPE_SIZE (elttype), 1))
4507 {
4508 rtx pos_rtx, addr;
4509 tree position;
4510
4511 if (index == 0)
4512 index = ssize_int (1);
4513
4514 if (minelt)
4515 index = convert (ssizetype,
4516 fold (build (MINUS_EXPR, index,
4517 TYPE_MIN_VALUE (domain))));
4518
4519 position = size_binop (MULT_EXPR, index,
4520 convert (ssizetype,
4521 TYPE_SIZE_UNIT (elttype)));
4522 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4523 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4524 xtarget = change_address (target, mode, addr);
4525 store_expr (value, xtarget, 0);
4526 }
4527 else
4528 {
4529 if (index != 0)
4530 bitpos = ((tree_low_cst (index, 0) - minelt)
4531 * tree_low_cst (TYPE_SIZE (elttype), 1));
4532 else
4533 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4534
4535 store_constructor_field (target, bitsize, bitpos, mode, value,
4536 type, align, cleared);
4537 }
4538 }
4539 }
4540
4541 /* Set constructor assignments. */
4542 else if (TREE_CODE (type) == SET_TYPE)
4543 {
4544 tree elt = CONSTRUCTOR_ELTS (exp);
4545 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4546 tree domain = TYPE_DOMAIN (type);
4547 tree domain_min, domain_max, bitlength;
4548
4549 /* The default implementation strategy is to extract the constant
4550 parts of the constructor, use that to initialize the target,
4551 and then "or" in whatever non-constant ranges we need in addition.
4552
4553 If a large set is all zero or all ones, it is
4554 probably better to set it using memset (if available) or bzero.
4555 Also, if a large set has just a single range, it may also be
4556 better to first clear the set (using
4557 bzero/memset), and then set the bits we want. */
4558
4559 /* Check for all zeros. */
4560 if (elt == NULL_TREE && size > 0)
4561 {
4562 if (!cleared)
4563 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4564 return;
4565 }
4566
4567 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4568 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4569 bitlength = size_binop (PLUS_EXPR,
4570 size_diffop (domain_max, domain_min),
4571 ssize_int (1));
4572
4573 nbits = tree_low_cst (bitlength, 1);
4574
4575 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4576 are "complicated" (more than one range), initialize (the
4577 constant parts) by copying from a constant. */
4578 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4579 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4580 {
4581 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4582 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4583 char *bit_buffer = (char *) alloca (nbits);
4584 HOST_WIDE_INT word = 0;
4585 unsigned int bit_pos = 0;
4586 unsigned int ibit = 0;
4587 unsigned int offset = 0; /* In bytes from beginning of set. */
4588
4589 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4590 for (;;)
4591 {
4592 if (bit_buffer[ibit])
4593 {
4594 if (BYTES_BIG_ENDIAN)
4595 word |= (1 << (set_word_size - 1 - bit_pos));
4596 else
4597 word |= 1 << bit_pos;
4598 }
4599
4600 bit_pos++; ibit++;
4601 if (bit_pos >= set_word_size || ibit == nbits)
4602 {
4603 if (word != 0 || ! cleared)
4604 {
4605 rtx datum = GEN_INT (word);
4606 rtx to_rtx;
4607
4608 /* The assumption here is that it is safe to use
4609 XEXP if the set is multi-word, but not if
4610 it's single-word. */
4611 if (GET_CODE (target) == MEM)
4612 {
4613 to_rtx = plus_constant (XEXP (target, 0), offset);
4614 to_rtx = change_address (target, mode, to_rtx);
4615 }
4616 else if (offset == 0)
4617 to_rtx = target;
4618 else
4619 abort ();
4620 emit_move_insn (to_rtx, datum);
4621 }
4622
4623 if (ibit == nbits)
4624 break;
4625 word = 0;
4626 bit_pos = 0;
4627 offset += set_word_size / BITS_PER_UNIT;
4628 }
4629 }
4630 }
4631 else if (!cleared)
4632 /* Don't bother clearing storage if the set is all ones. */
4633 if (TREE_CHAIN (elt) != NULL_TREE
4634 || (TREE_PURPOSE (elt) == NULL_TREE
4635 ? nbits != 1
4636 : ( ! host_integerp (TREE_VALUE (elt), 0)
4637 || ! host_integerp (TREE_PURPOSE (elt), 0)
4638 || (tree_low_cst (TREE_VALUE (elt), 0)
4639 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4640 != (HOST_WIDE_INT) nbits))))
4641 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4642
4643 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4644 {
4645 /* Start of range of element, or NULL. */
4646 tree startbit = TREE_PURPOSE (elt);
4647 /* End of range of element, or element value. */
4648 tree endbit = TREE_VALUE (elt);
4649 #ifdef TARGET_MEM_FUNCTIONS
4650 HOST_WIDE_INT startb, endb;
4651 #endif
4652 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4653
4654 bitlength_rtx = expand_expr (bitlength,
4655 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4656
4657 /* Handle a non-range tuple element like [ expr ]. */
4658 if (startbit == NULL_TREE)
4659 {
4660 startbit = save_expr (endbit);
4661 endbit = startbit;
4662 }
4663
4664 startbit = convert (sizetype, startbit);
4665 endbit = convert (sizetype, endbit);
4666 if (! integer_zerop (domain_min))
4667 {
4668 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4669 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4670 }
4671 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4672 EXPAND_CONST_ADDRESS);
4673 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4674 EXPAND_CONST_ADDRESS);
4675
4676 if (REG_P (target))
4677 {
4678 targetx = assign_stack_temp (GET_MODE (target),
4679 GET_MODE_SIZE (GET_MODE (target)),
4680 0);
4681 emit_move_insn (targetx, target);
4682 }
4683
4684 else if (GET_CODE (target) == MEM)
4685 targetx = target;
4686 else
4687 abort ();
4688
4689 #ifdef TARGET_MEM_FUNCTIONS
4690 /* Optimization: If startbit and endbit are
4691 constants divisible by BITS_PER_UNIT,
4692 call memset instead. */
4693 if (TREE_CODE (startbit) == INTEGER_CST
4694 && TREE_CODE (endbit) == INTEGER_CST
4695 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4696 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4697 {
4698 emit_library_call (memset_libfunc, 0,
4699 VOIDmode, 3,
4700 plus_constant (XEXP (targetx, 0),
4701 startb / BITS_PER_UNIT),
4702 Pmode,
4703 constm1_rtx, TYPE_MODE (integer_type_node),
4704 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4705 TYPE_MODE (sizetype));
4706 }
4707 else
4708 #endif
4709 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4710 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4711 bitlength_rtx, TYPE_MODE (sizetype),
4712 startbit_rtx, TYPE_MODE (sizetype),
4713 endbit_rtx, TYPE_MODE (sizetype));
4714
4715 if (REG_P (target))
4716 emit_move_insn (target, targetx);
4717 }
4718 }
4719
4720 else
4721 abort ();
4722 }
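/* Illustrative sketch, kept out of the build with #if 0: GNU C range
   designators of the kind the RANGE_EXPR code above dispatches on; the
   array sizes and the thresholds described are only approximate.  */
#if 0
void
range_init_example (void)
{
  /* A small constant range is unrolled into individual element
     stores.  */
  int unrolled[8] = { [2 ... 4] = 9 };

  /* A large constant range exceeds the unrolling limit, so an explicit
     loop over an index pseudo is generated instead; the missing
     elements also force the whole array to be cleared first.  */
  int looped[4096] = { [16 ... 4000] = 1 };

  (void) unrolled[0];
  (void) looped[0];
}
#endif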
4723
4724 /* Store the value of EXP (an expression tree)
4725 into a subfield of TARGET which has mode MODE and occupies
4726 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4727 If MODE is VOIDmode, it means that we are storing into a bit-field.
4728
4729 If VALUE_MODE is VOIDmode, return nothing in particular.
4730 UNSIGNEDP is not used in this case.
4731
4732 Otherwise, return an rtx for the value stored. This rtx
4733 has mode VALUE_MODE if that is convenient to do.
4734 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4735
4736 ALIGN is the alignment that TARGET is known to have.
4737 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4738
4739 ALIAS_SET is the alias set for the destination. This value will
4740 (in general) be different from that for TARGET, since TARGET is a
4741 reference to the containing structure. */
4742
4743 static rtx
4744 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4745 unsignedp, align, total_size, alias_set)
4746 rtx target;
4747 HOST_WIDE_INT bitsize;
4748 HOST_WIDE_INT bitpos;
4749 enum machine_mode mode;
4750 tree exp;
4751 enum machine_mode value_mode;
4752 int unsignedp;
4753 unsigned int align;
4754 HOST_WIDE_INT total_size;
4755 int alias_set;
4756 {
4757 HOST_WIDE_INT width_mask = 0;
4758
4759 if (TREE_CODE (exp) == ERROR_MARK)
4760 return const0_rtx;
4761
4762 if (bitsize < HOST_BITS_PER_WIDE_INT)
4763 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4764
4765 /* If we are storing into an unaligned field of an aligned union that is
4766 in a register, we may have the mode of TARGET being an integer mode but
4767 MODE == BLKmode. In that case, get an aligned object whose size and
4768 alignment are the same as TARGET and store TARGET into it (we can avoid
4769 the store if the field being stored is the entire width of TARGET). Then
4770 call ourselves recursively to store the field into a BLKmode version of
4771 that object. Finally, load from the object into TARGET. This is not
4772 very efficient in general, but should only be slightly more expensive
4773 than the otherwise-required unaligned accesses. Perhaps this can be
4774 cleaned up later. */
4775
4776 if (mode == BLKmode
4777 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4778 {
4779 rtx object = assign_stack_temp (GET_MODE (target),
4780 GET_MODE_SIZE (GET_MODE (target)), 0);
4781 rtx blk_object = copy_rtx (object);
4782
4783 MEM_SET_IN_STRUCT_P (object, 1);
4784 MEM_SET_IN_STRUCT_P (blk_object, 1);
4785 PUT_MODE (blk_object, BLKmode);
4786
4787 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4788 emit_move_insn (object, target);
4789
4790 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4791 align, total_size, alias_set);
4792
4793 /* Even though we aren't returning target, we need to
4794 give it the updated value. */
4795 emit_move_insn (target, object);
4796
4797 return blk_object;
4798 }
4799
4800 if (GET_CODE (target) == CONCAT)
4801 {
4802 /* We're storing into a struct containing a single __complex. */
4803
4804 if (bitpos != 0)
4805 abort ();
4806 return store_expr (exp, target, 0);
4807 }
4808
4809 /* If the structure is in a register or if the component
4810 is a bit field, we cannot use addressing to access it.
4811 Use bit-field techniques or SUBREG to store in it. */
4812
4813 if (mode == VOIDmode
4814 || (mode != BLKmode && ! direct_store[(int) mode]
4815 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4816 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4817 || GET_CODE (target) == REG
4818 || GET_CODE (target) == SUBREG
4819 /* If the field isn't aligned enough to store as an ordinary memref,
4820 store it as a bit field. */
4821 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4822 && (align < GET_MODE_ALIGNMENT (mode)
4823 || bitpos % GET_MODE_ALIGNMENT (mode)))
4824 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4825 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4826 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4827 /* If the RHS and field are a constant size and the size of the
4828 RHS isn't the same size as the bitfield, we must use bitfield
4829 operations. */
4830 || (bitsize >= 0
4831 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4832 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4833 {
4834 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4835
4836 /* If BITSIZE is narrower than the size of the type of EXP
4837 we will be narrowing TEMP. Normally, what's wanted are the
4838 low-order bits. However, if EXP's type is a record and this is
4839 a big-endian machine, we want the upper BITSIZE bits. */
4840 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4841 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4842 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4843 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4844 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4845 - bitsize),
4846 temp, 1);
4847
4848 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4849 MODE. */
4850 if (mode != VOIDmode && mode != BLKmode
4851 && mode != TYPE_MODE (TREE_TYPE (exp)))
4852 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4853
4854 /* If the modes of TARGET and TEMP are both BLKmode, both
4855 must be in memory and BITPOS must be aligned on a byte
4856 boundary. If so, we simply do a block copy. */
4857 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4858 {
4859 unsigned int exp_align = expr_align (exp);
4860
4861 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4862 || bitpos % BITS_PER_UNIT != 0)
4863 abort ();
4864
4865 target = change_address (target, VOIDmode,
4866 plus_constant (XEXP (target, 0),
4867 bitpos / BITS_PER_UNIT));
4868
4869 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4870 align = MIN (exp_align, align);
4871
4872 /* Find an alignment that is consistent with the bit position. */
4873 while ((bitpos % align) != 0)
4874 align >>= 1;
4875
4876 emit_block_move (target, temp,
4877 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4878 / BITS_PER_UNIT),
4879 align);
4880
4881 return value_mode == VOIDmode ? const0_rtx : target;
4882 }
4883
4884 /* Store the value in the bitfield. */
4885 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4886 if (value_mode != VOIDmode)
4887 {
4888 /* The caller wants an rtx for the value. */
4889 /* If possible, avoid refetching from the bitfield itself. */
4890 if (width_mask != 0
4891 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4892 {
4893 tree count;
4894 enum machine_mode tmode;
4895
4896 if (unsignedp)
4897 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4898 tmode = GET_MODE (temp);
4899 if (tmode == VOIDmode)
4900 tmode = value_mode;
4901 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4902 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4903 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4904 }
4905 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4906 NULL_RTX, value_mode, 0, align,
4907 total_size);
4908 }
4909 return const0_rtx;
4910 }
4911 else
4912 {
4913 rtx addr = XEXP (target, 0);
4914 rtx to_rtx;
4915
4916 /* If a value is wanted, it must be the lhs;
4917 so make the address stable for multiple use. */
4918
4919 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4920 && ! CONSTANT_ADDRESS_P (addr)
4921 /* A frame-pointer reference is already stable. */
4922 && ! (GET_CODE (addr) == PLUS
4923 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4924 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4925 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4926 addr = copy_to_reg (addr);
4927
4928 /* Now build a reference to just the desired component. */
4929
4930 to_rtx = copy_rtx (change_address (target, mode,
4931 plus_constant (addr,
4932 (bitpos
4933 / BITS_PER_UNIT))));
4934 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4935 MEM_ALIAS_SET (to_rtx) = alias_set;
4936
4937 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4938 }
4939 }
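/* Illustrative sketch, kept out of the build with #if 0: a bit-field
   store of the kind store_field lowers through store_bit_field; the
   layout is arbitrary.  */
#if 0
struct flags { unsigned int mode : 3; unsigned int dirty : 1; };

void
set_mode (struct flags *f, unsigned int m)
{
  /* MODE is VOIDmode for the 3-bit member, so the new value is
     inserted with bit-field operations rather than a plain move.  */
  f->mode = m;
}
#endif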
4940 \f
4941 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4942 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4943 ARRAY_REFs and find the ultimate containing object, which we return.
4944
4945 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4946 bit position, and *PUNSIGNEDP to the signedness of the field.
4947 If the position of the field is variable, we store a tree
4948 giving the variable offset (in units) in *POFFSET.
4949 This offset is in addition to the bit position.
4950 If the position is not variable, we store 0 in *POFFSET.
4951 We set *PALIGNMENT to the alignment of the address that will be
4952 computed. This is the alignment of the thing we return if *POFFSET
4953 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4954
4955 If any of the extraction expressions is volatile,
4956 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4957
4958 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4959 is a mode that can be used to access the field. In that case, *PBITSIZE
4960 is redundant.
4961
4962 If the field describes a variable-sized object, *PMODE is set to
4963 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4964 this case, but the address of the object can be found. */
4965
4966 tree
4967 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4968 punsignedp, pvolatilep, palignment)
4969 tree exp;
4970 HOST_WIDE_INT *pbitsize;
4971 HOST_WIDE_INT *pbitpos;
4972 tree *poffset;
4973 enum machine_mode *pmode;
4974 int *punsignedp;
4975 int *pvolatilep;
4976 unsigned int *palignment;
4977 {
4978 tree size_tree = 0;
4979 enum machine_mode mode = VOIDmode;
4980 tree offset = size_zero_node;
4981 tree bit_offset = bitsize_zero_node;
4982 unsigned int alignment = BIGGEST_ALIGNMENT;
4983 tree tem;
4984
4985 /* First get the mode, signedness, and size. We do this from just the
4986 outermost expression. */
4987 if (TREE_CODE (exp) == COMPONENT_REF)
4988 {
4989 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4990 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4991 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4992
4993 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4994 }
4995 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4996 {
4997 size_tree = TREE_OPERAND (exp, 1);
4998 *punsignedp = TREE_UNSIGNED (exp);
4999 }
5000 else
5001 {
5002 mode = TYPE_MODE (TREE_TYPE (exp));
5003 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5004
5005 if (mode == BLKmode)
5006 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5007 else
5008 *pbitsize = GET_MODE_BITSIZE (mode);
5009 }
5010
5011 if (size_tree != 0)
5012 {
5013 if (! host_integerp (size_tree, 1))
5014 mode = BLKmode, *pbitsize = -1;
5015 else
5016 *pbitsize = tree_low_cst (size_tree, 1);
5017 }
5018
5019 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5020 and find the ultimate containing object. */
5021 while (1)
5022 {
5023 if (TREE_CODE (exp) == BIT_FIELD_REF)
5024 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5025 else if (TREE_CODE (exp) == COMPONENT_REF)
5026 {
5027 tree field = TREE_OPERAND (exp, 1);
5028 tree this_offset = DECL_FIELD_OFFSET (field);
5029
5030 /* If this field hasn't been filled in yet, don't go
5031 past it. This should only happen when folding expressions
5032 made during type construction. */
5033 if (this_offset == 0)
5034 break;
5035 else if (! TREE_CONSTANT (this_offset)
5036 && contains_placeholder_p (this_offset))
5037 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5038
5039 offset = size_binop (PLUS_EXPR, offset, this_offset);
5040 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5041 DECL_FIELD_BIT_OFFSET (field));
5042
5043 if (! host_integerp (offset, 0))
5044 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5045 }
5046
5047 else if (TREE_CODE (exp) == ARRAY_REF)
5048 {
5049 tree index = TREE_OPERAND (exp, 1);
5050 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5051 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5052 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5053
5054 /* We assume all arrays have sizes that are a multiple of a byte.
5055 First subtract the lower bound, if any, in the type of the
5056 index, then convert to sizetype and multiply by the size of the
5057 array element. */
5058 if (low_bound != 0 && ! integer_zerop (low_bound))
5059 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5060 index, low_bound));
5061
5062 /* If the index has a self-referential type, pass it to a
5063 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5064 component to one. */
5065 if (! TREE_CONSTANT (index)
5066 && contains_placeholder_p (index))
5067 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5068 if (! TREE_CONSTANT (unit_size)
5069 && contains_placeholder_p (unit_size))
5070 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5071 TREE_OPERAND (exp, 0));
5072
5073 offset = size_binop (PLUS_EXPR, offset,
5074 size_binop (MULT_EXPR,
5075 convert (sizetype, index),
5076 unit_size));
5077 }
5078
5079 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5080 && ! ((TREE_CODE (exp) == NOP_EXPR
5081 || TREE_CODE (exp) == CONVERT_EXPR)
5082 && (TYPE_MODE (TREE_TYPE (exp))
5083 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5084 break;
5085
5086 /* If any reference in the chain is volatile, the effect is volatile. */
5087 if (TREE_THIS_VOLATILE (exp))
5088 *pvolatilep = 1;
5089
5090 /* If the offset is non-constant already, then we can't assume any
5091 alignment more than the alignment here. */
5092 if (! TREE_CONSTANT (offset))
5093 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5094
5095 exp = TREE_OPERAND (exp, 0);
5096 }
5097
5098 if (DECL_P (exp))
5099 alignment = MIN (alignment, DECL_ALIGN (exp));
5100 else if (TREE_TYPE (exp) != 0)
5101 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5102
5103 /* If OFFSET is constant, see if we can return the whole thing as a
5104 constant bit position. Otherwise, split it up. */
5105 if (host_integerp (offset, 0)
5106 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5107 bitsize_unit_node))
5108 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5109 && host_integerp (tem, 0))
5110 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5111 else
5112 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5113
5114 *pmode = mode;
5115 *palignment = alignment;
5116 return exp;
5117 }
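/* Illustrative sketch, kept out of the build with #if 0: a reference of
   the kind get_inner_reference decomposes; the layout is arbitrary.  */
#if 0
struct rec { int lo; int hi; };
static struct rec table[16];

int
read_hi_example (int i)
{
  /* Peeling the COMPONENT_REF and ARRAY_REF yields TABLE as the
     containing object; the part of the position that depends on I,
     i * sizeof (struct rec), comes back in *POFFSET as a tree, and
     the remaining constant bits come back in *PBITPOS.  */
  return table[i].hi;
}
#endif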
5118
5119 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5120
5121 static enum memory_use_mode
5122 get_memory_usage_from_modifier (modifier)
5123 enum expand_modifier modifier;
5124 {
5125 switch (modifier)
5126 {
5127 case EXPAND_NORMAL:
5128 case EXPAND_SUM:
5129 return MEMORY_USE_RO;
5130 break;
5131 case EXPAND_MEMORY_USE_WO:
5132 return MEMORY_USE_WO;
5133 break;
5134 case EXPAND_MEMORY_USE_RW:
5135 return MEMORY_USE_RW;
5136 break;
5137 case EXPAND_MEMORY_USE_DONT:
5138 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5139 MEMORY_USE_DONT, because they are modifiers to a call of
5140 expand_expr in the ADDR_EXPR case of expand_expr. */
5141 case EXPAND_CONST_ADDRESS:
5142 case EXPAND_INITIALIZER:
5143 return MEMORY_USE_DONT;
5144 case EXPAND_MEMORY_USE_BAD:
5145 default:
5146 abort ();
5147 }
5148 }
5149 \f
5150 /* Given an rtx VALUE that may contain additions and multiplications,
5151 return an equivalent value that just refers to a register or memory.
5152 This is done by generating instructions to perform the arithmetic
5153 and returning a pseudo-register containing the value.
5154
5155 The returned value may be a REG, SUBREG, MEM or constant. */
5156
5157 rtx
5158 force_operand (value, target)
5159 rtx value, target;
5160 {
5161 register optab binoptab = 0;
5162 /* Use a temporary to force order of execution of calls to
5163 `force_operand'. */
5164 rtx tmp;
5165 register rtx op2;
5166 /* Use subtarget as the target for operand 0 of a binary operation. */
5167 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5168
5169 /* Check for a PIC address load. */
5170 if (flag_pic
5171 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5172 && XEXP (value, 0) == pic_offset_table_rtx
5173 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5174 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5175 || GET_CODE (XEXP (value, 1)) == CONST))
5176 {
5177 if (!subtarget)
5178 subtarget = gen_reg_rtx (GET_MODE (value));
5179 emit_move_insn (subtarget, value);
5180 return subtarget;
5181 }
5182
5183 if (GET_CODE (value) == PLUS)
5184 binoptab = add_optab;
5185 else if (GET_CODE (value) == MINUS)
5186 binoptab = sub_optab;
5187 else if (GET_CODE (value) == MULT)
5188 {
5189 op2 = XEXP (value, 1);
5190 if (!CONSTANT_P (op2)
5191 && !(GET_CODE (op2) == REG && op2 != subtarget))
5192 subtarget = 0;
5193 tmp = force_operand (XEXP (value, 0), subtarget);
5194 return expand_mult (GET_MODE (value), tmp,
5195 force_operand (op2, NULL_RTX),
5196 target, 0);
5197 }
5198
5199 if (binoptab)
5200 {
5201 op2 = XEXP (value, 1);
5202 if (!CONSTANT_P (op2)
5203 && !(GET_CODE (op2) == REG && op2 != subtarget))
5204 subtarget = 0;
5205 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5206 {
5207 binoptab = add_optab;
5208 op2 = negate_rtx (GET_MODE (value), op2);
5209 }
5210
5211 /* Check for an addition with OP2 a constant integer and our first
5212 operand a PLUS of a virtual register and something else. In that
5213 case, we want to emit the sum of the virtual register and the
5214 constant first and then add the other value. This allows virtual
5215 register instantiation to simply modify the constant rather than
5216 creating another one around this addition. */
5217 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5218 && GET_CODE (XEXP (value, 0)) == PLUS
5219 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5220 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5221 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5222 {
5223 rtx temp = expand_binop (GET_MODE (value), binoptab,
5224 XEXP (XEXP (value, 0), 0), op2,
5225 subtarget, 0, OPTAB_LIB_WIDEN);
5226 return expand_binop (GET_MODE (value), binoptab, temp,
5227 force_operand (XEXP (XEXP (value, 0), 1), 0),
5228 target, 0, OPTAB_LIB_WIDEN);
5229 }
5230
5231 tmp = force_operand (XEXP (value, 0), subtarget);
5232 return expand_binop (GET_MODE (value), binoptab, tmp,
5233 force_operand (op2, NULL_RTX),
5234 target, 0, OPTAB_LIB_WIDEN);
5235 /* We give UNSIGNEDP = 0 to expand_binop
5236 because the only operations we are expanding here are signed ones. */
5237 }
5238 return value;
5239 }
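/* Illustrative sketch, kept out of the build with #if 0: a typical use
   of force_operand by a hypothetical caller holding a scaled-index
   address; the helper below is not part of this file.  */
#if 0
static rtx
load_scaled_address (rtx base, rtx index)
{
  /* (plus base (mult index 4)) is not a general operand, so
     force_operand emits the multiply and the add and returns a
     pseudo holding the finished address.  */
  rtx addr = gen_rtx_PLUS (Pmode, base,
                           gen_rtx_MULT (Pmode, index, GEN_INT (4)));

  return force_operand (addr, NULL_RTX);
}
#endif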
5240 \f
5241 /* Subroutine of expand_expr:
5242 save the non-copied parts (LIST) of an expr (LHS), and return a list
5243 which can restore these values to their previous values,
5244 should something modify their storage. */
5245
5246 static tree
5247 save_noncopied_parts (lhs, list)
5248 tree lhs;
5249 tree list;
5250 {
5251 tree tail;
5252 tree parts = 0;
5253
5254 for (tail = list; tail; tail = TREE_CHAIN (tail))
5255 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5256 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5257 else
5258 {
5259 tree part = TREE_VALUE (tail);
5260 tree part_type = TREE_TYPE (part);
5261 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5262 rtx target = assign_temp (part_type, 0, 1, 1);
5263 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5264 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5265 parts = tree_cons (to_be_saved,
5266 build (RTL_EXPR, part_type, NULL_TREE,
5267 (tree) target),
5268 parts);
5269 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5270 }
5271 return parts;
5272 }
5273
5274 /* Subroutine of expand_expr:
5275 record the non-copied parts (LIST) of an expr (LHS), and return a list
5276 which specifies the initial values of these parts. */
5277
5278 static tree
5279 init_noncopied_parts (lhs, list)
5280 tree lhs;
5281 tree list;
5282 {
5283 tree tail;
5284 tree parts = 0;
5285
5286 for (tail = list; tail; tail = TREE_CHAIN (tail))
5287 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5288 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5289 else if (TREE_PURPOSE (tail))
5290 {
5291 tree part = TREE_VALUE (tail);
5292 tree part_type = TREE_TYPE (part);
5293 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5294 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5295 }
5296 return parts;
5297 }
5298
5299 /* Subroutine of expand_expr: return nonzero iff there is no way that
5300 EXP can reference X, which is being modified. TOP_P is nonzero if this
5301 call is going to be used to determine whether we need a temporary
5302 for EXP, as opposed to a recursive call to this function.
5303
5304 It is always safe for this routine to return zero since it merely
5305 searches for optimization opportunities. */
5306
5307 static int
5308 safe_from_p (x, exp, top_p)
5309 rtx x;
5310 tree exp;
5311 int top_p;
5312 {
5313 rtx exp_rtl = 0;
5314 int i, nops;
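  /* Bookkeeping for the SAVE_EXPR trick used below: SAVE_EXPRs that have
     already been scanned are temporarily turned into ERROR_MARKs and are
     recorded here so that the top-level call can turn them back when the
     scan is finished.  */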
5315 static int save_expr_count;
5316 static int save_expr_size = 0;
5317 static tree *save_expr_rewritten;
5318 static tree save_expr_trees[256];
5319
5320 if (x == 0
5321 /* If EXP has varying size, we MUST use a target since we currently
5322 have no way of allocating temporaries of variable size
5323 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5324 So we assume here that something at a higher level has prevented a
5325 clash. This is somewhat bogus, but the best we can do. Only
5326 do this when X is BLKmode and when we are at the top level. */
5327 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5328 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5329 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5330 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5331 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5332 != INTEGER_CST)
5333 && GET_MODE (x) == BLKmode))
5334 return 1;
5335
5336 if (top_p && save_expr_size == 0)
5337 {
5338 int rtn;
5339
5340 save_expr_count = 0;
5341 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5342 save_expr_rewritten = &save_expr_trees[0];
5343
5344 rtn = safe_from_p (x, exp, 1);
5345
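      /* Restore every SAVE_EXPR that the scan rewrote to ERROR_MARK.  */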
5346 for (i = 0; i < save_expr_count; ++i)
5347 {
5348 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5349 abort ();
5350 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5351 }
5352
5353 save_expr_size = 0;
5354
5355 return rtn;
5356 }
5357
5358 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5359 find the underlying pseudo. */
5360 if (GET_CODE (x) == SUBREG)
5361 {
5362 x = SUBREG_REG (x);
5363 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5364 return 0;
5365 }
5366
5367 /* If X is a location in the outgoing argument area, it is always safe. */
5368 if (GET_CODE (x) == MEM
5369 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5370 || (GET_CODE (XEXP (x, 0)) == PLUS
5371 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5372 return 1;
5373
5374 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5375 {
5376 case 'd':
5377 exp_rtl = DECL_RTL (exp);
5378 break;
5379
5380 case 'c':
5381 return 1;
5382
5383 case 'x':
5384 if (TREE_CODE (exp) == TREE_LIST)
5385 return ((TREE_VALUE (exp) == 0
5386 || safe_from_p (x, TREE_VALUE (exp), 0))
5387 && (TREE_CHAIN (exp) == 0
5388 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5389 else if (TREE_CODE (exp) == ERROR_MARK)
5390 return 1; /* An already-visited SAVE_EXPR? */
5391 else
5392 return 0;
5393
5394 case '1':
5395 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5396
5397 case '2':
5398 case '<':
5399 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5400 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5401
5402 case 'e':
5403 case 'r':
5404 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5405 the expression. If it is set, we conflict iff we are that rtx or
5406 both are in memory. Otherwise, we check all operands of the
5407 expression recursively. */
5408
5409 switch (TREE_CODE (exp))
5410 {
5411 case ADDR_EXPR:
5412 return (staticp (TREE_OPERAND (exp, 0))
5413 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5414 || TREE_STATIC (exp));
5415
5416 case INDIRECT_REF:
5417 if (GET_CODE (x) == MEM)
5418 return 0;
5419 break;
5420
5421 case CALL_EXPR:
5422 exp_rtl = CALL_EXPR_RTL (exp);
5423 if (exp_rtl == 0)
5424 {
5425 /* Assume that the call will clobber all hard registers and
5426 all of memory. */
5427 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5428 || GET_CODE (x) == MEM)
5429 return 0;
5430 }
5431
5432 break;
5433
5434 case RTL_EXPR:
5435 /* If a sequence exists, we would have to scan every instruction
5436 in the sequence to see if it was safe. This is probably not
5437 worthwhile. */
5438 if (RTL_EXPR_SEQUENCE (exp))
5439 return 0;
5440
5441 exp_rtl = RTL_EXPR_RTL (exp);
5442 break;
5443
5444 case WITH_CLEANUP_EXPR:
5445 exp_rtl = RTL_EXPR_RTL (exp);
5446 break;
5447
5448 case CLEANUP_POINT_EXPR:
5449 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5450
5451 case SAVE_EXPR:
5452 exp_rtl = SAVE_EXPR_RTL (exp);
5453 if (exp_rtl)
5454 break;
5455
5456 /* This SAVE_EXPR might appear many times in the top-level
5457 safe_from_p() expression, and if it has a complex
5458 subexpression, examining it multiple times could result
5459 in a combinatorial explosion. E.g. on an Alpha
5460 running at least 200MHz, a Fortran test case compiled with
5461 optimization took about 28 minutes to compile -- even though
5462 it was only a few lines long, and the complicated line causing
5463 so much time to be spent in the earlier version of safe_from_p()
5464 had only 293 or so unique nodes.
5465
5466 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5467 where it is so we can turn it back in the top-level safe_from_p()
5468 when we're done. */
5469
5470 /* For now, don't bother re-sizing the array. */
5471 if (save_expr_count >= save_expr_size)
5472 return 0;
5473 save_expr_rewritten[save_expr_count++] = exp;
5474
5475 nops = tree_code_length[(int) SAVE_EXPR];
5476 for (i = 0; i < nops; i++)
5477 {
5478 tree operand = TREE_OPERAND (exp, i);
5479 if (operand == NULL_TREE)
5480 continue;
5481 TREE_SET_CODE (exp, ERROR_MARK);
5482 if (!safe_from_p (x, operand, 0))
5483 return 0;
5484 TREE_SET_CODE (exp, SAVE_EXPR);
5485 }
5486 TREE_SET_CODE (exp, ERROR_MARK);
5487 return 1;
5488
5489 case BIND_EXPR:
5490 /* The only operand we look at is operand 1. The rest aren't
5491 part of the expression. */
5492 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5493
5494 case METHOD_CALL_EXPR:
5495 /* This takes a rtx argument, but shouldn't appear here. */
5496 abort ();
5497
5498 default:
5499 break;
5500 }
5501
5502 /* If we have an rtx, we do not need to scan our operands. */
5503 if (exp_rtl)
5504 break;
5505
5506 nops = tree_code_length[(int) TREE_CODE (exp)];
5507 for (i = 0; i < nops; i++)
5508 if (TREE_OPERAND (exp, i) != 0
5509 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5510 return 0;
5511 }
5512
5513 /* If we have an rtl, find any enclosed object. Then see if we conflict
5514 with it. */
5515 if (exp_rtl)
5516 {
5517 if (GET_CODE (exp_rtl) == SUBREG)
5518 {
5519 exp_rtl = SUBREG_REG (exp_rtl);
5520 if (GET_CODE (exp_rtl) == REG
5521 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5522 return 0;
5523 }
5524
5525 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless
5526 both are in memory and EXP is not readonly. */
5527 return ! (rtx_equal_p (x, exp_rtl)
5528 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5529 && ! TREE_READONLY (exp)));
5530 }
5531
5532 /* If we reach here, it is safe. */
5533 return 1;
5534 }
5535
5536 /* Subroutine of expand_expr: return nonzero iff EXP is an
5537 expression whose type is statically determinable. */
5538
5539 static int
5540 fixed_type_p (exp)
5541 tree exp;
5542 {
5543 if (TREE_CODE (exp) == PARM_DECL
5544 || TREE_CODE (exp) == VAR_DECL
5545 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5546 || TREE_CODE (exp) == COMPONENT_REF
5547 || TREE_CODE (exp) == ARRAY_REF)
5548 return 1;
5549 return 0;
5550 }
5551
5552 /* Subroutine of expand_expr: return rtx if EXP is a
5553 variable or parameter; else return 0. */
5554
5555 static rtx
5556 var_rtx (exp)
5557 tree exp;
5558 {
5559 STRIP_NOPS (exp);
5560 switch (TREE_CODE (exp))
5561 {
5562 case PARM_DECL:
5563 case VAR_DECL:
5564 return DECL_RTL (exp);
5565 default:
5566 return 0;
5567 }
5568 }
5569
5570 #ifdef MAX_INTEGER_COMPUTATION_MODE
5571 void
5572 check_max_integer_computation_mode (exp)
5573 tree exp;
5574 {
5575 enum tree_code code;
5576 enum machine_mode mode;
5577
5578 /* Strip any NOPs that don't change the mode. */
5579 STRIP_NOPS (exp);
5580 code = TREE_CODE (exp);
5581
5582 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5583 if (code == NOP_EXPR
5584 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5585 return;
5586
5587 /* First check the type of the overall operation. We need only look at
5588 unary, binary and relational operations. */
5589 if (TREE_CODE_CLASS (code) == '1'
5590 || TREE_CODE_CLASS (code) == '2'
5591 || TREE_CODE_CLASS (code) == '<')
5592 {
5593 mode = TYPE_MODE (TREE_TYPE (exp));
5594 if (GET_MODE_CLASS (mode) == MODE_INT
5595 && mode > MAX_INTEGER_COMPUTATION_MODE)
5596 fatal ("unsupported wide integer operation");
5597 }
5598
5599 /* Check operand of a unary op. */
5600 if (TREE_CODE_CLASS (code) == '1')
5601 {
5602 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5603 if (GET_MODE_CLASS (mode) == MODE_INT
5604 && mode > MAX_INTEGER_COMPUTATION_MODE)
5605 fatal ("unsupported wide integer operation");
5606 }
5607
5608 /* Check operands of a binary/comparison op. */
5609 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5610 {
5611 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5612 if (GET_MODE_CLASS (mode) == MODE_INT
5613 && mode > MAX_INTEGER_COMPUTATION_MODE)
5614 fatal ("unsupported wide integer operation");
5615
5616 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5617 if (GET_MODE_CLASS (mode) == MODE_INT
5618 && mode > MAX_INTEGER_COMPUTATION_MODE)
5619 fatal ("unsupported wide integer operation");
5620 }
5621 }
5622 #endif
5623
5624 \f
5625 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5626 has any readonly fields. If any of the fields have types that
5627 contain readonly fields, return true as well. */
5628
5629 static int
5630 readonly_fields_p (type)
5631 tree type;
5632 {
5633 tree field;
5634
5635 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5636 if (TREE_CODE (field) == FIELD_DECL
5637 && (TREE_READONLY (field)
5638 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5639 && readonly_fields_p (TREE_TYPE (field)))))
5640 return 1;
5641
5642 return 0;
5643 }
5644 \f
5645 /* expand_expr: generate code for computing expression EXP.
5646 An rtx for the computed value is returned. The value is never null.
5647 In the case of a void EXP, const0_rtx is returned.
5648
5649 The value may be stored in TARGET if TARGET is nonzero.
5650 TARGET is just a suggestion; callers must assume that
5651 the rtx returned may not be the same as TARGET.
5652
5653 If TARGET is CONST0_RTX, it means that the value will be ignored.
5654
5655 If TMODE is not VOIDmode, it suggests generating the
5656 result in mode TMODE. But this is done only when convenient.
5657 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5658 TMODE is just a suggestion; callers must assume that
5659 the rtx returned may not have mode TMODE.
5660
5661 Note that TARGET may have neither TMODE nor MODE. In that case, it
5662 probably will not be used.
5663
5664 If MODIFIER is EXPAND_SUM then when EXP is an addition
5665 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5666 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5667 products as above, or REG or MEM, or constant.
5668 Ordinarily in such cases we would output mul or add instructions
5669 and then return a pseudo reg containing the sum.
5670
5671 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5672 it also marks a label as absolutely required (it can't be dead).
5673 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5674 This is used for outputting expressions used in initializers.
5675
5676 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5677 with a constant address even if that address is not normally legitimate.
5678 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
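/* As an illustration of EXPAND_SUM: an expression such as `a + b * 4' may be
   returned as (plus (reg A) (mult (reg B) (const_int 4))) rather than being
   reduced to a single pseudo register holding the sum.  */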
5679
5680 rtx
5681 expand_expr (exp, target, tmode, modifier)
5682 register tree exp;
5683 rtx target;
5684 enum machine_mode tmode;
5685 enum expand_modifier modifier;
5686 {
5687 register rtx op0, op1, temp;
5688 tree type = TREE_TYPE (exp);
5689 int unsignedp = TREE_UNSIGNED (type);
5690 register enum machine_mode mode;
5691 register enum tree_code code = TREE_CODE (exp);
5692 optab this_optab;
5693 rtx subtarget, original_target;
5694 int ignore;
5695 tree context;
5696 /* Used by check-memory-usage to make modifier read only. */
5697 enum expand_modifier ro_modifier;
5698
5699 /* Handle ERROR_MARK before anybody tries to access its type. */
5700 if (TREE_CODE (exp) == ERROR_MARK)
5701 {
5702 op0 = CONST0_RTX (tmode);
5703 if (op0 != 0)
5704 return op0;
5705 return const0_rtx;
5706 }
5707
5708 mode = TYPE_MODE (type);
5709 /* Use subtarget as the target for operand 0 of a binary operation. */
5710 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5711 original_target = target;
5712 ignore = (target == const0_rtx
5713 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5714 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5715 || code == COND_EXPR)
5716 && TREE_CODE (type) == VOID_TYPE));
5717
5718 /* Make a read-only version of the modifier. */
5719 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5720 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5721 ro_modifier = modifier;
5722 else
5723 ro_modifier = EXPAND_NORMAL;
5724
5725 /* Don't use hard regs as subtargets, because the combiner
5726 can only handle pseudo regs. */
5727 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5728 subtarget = 0;
5729 /* Avoid subtargets inside loops,
5730 since they hide some invariant expressions. */
5731 if (preserve_subexpressions_p ())
5732 subtarget = 0;
5733
5734 /* If we are going to ignore this result, we need only do something
5735 if there is a side-effect somewhere in the expression. If there
5736 is, short-circuit the most common cases here. Note that we must
5737 not call expand_expr with anything but const0_rtx in case this
5738 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
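/* For example, an expression statement whose value is unused, such as
   `x + foo ();', falls into the '2' case below: each operand is expanded
   with const0_rtx as its target and const0_rtx itself is returned.  */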
5739
5740 if (ignore)
5741 {
5742 if (! TREE_SIDE_EFFECTS (exp))
5743 return const0_rtx;
5744
5745 /* Ensure we reference a volatile object even if value is ignored, but
5746 don't do this if all we are doing is taking its address. */
5747 if (TREE_THIS_VOLATILE (exp)
5748 && TREE_CODE (exp) != FUNCTION_DECL
5749 && mode != VOIDmode && mode != BLKmode
5750 && modifier != EXPAND_CONST_ADDRESS)
5751 {
5752 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5753 if (GET_CODE (temp) == MEM)
5754 temp = copy_to_reg (temp);
5755 return const0_rtx;
5756 }
5757
5758 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5759 || code == INDIRECT_REF || code == BUFFER_REF)
5760 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5761 VOIDmode, ro_modifier);
5762 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5763 || code == ARRAY_REF)
5764 {
5765 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5766 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5767 return const0_rtx;
5768 }
5769 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5770 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5771 /* If the second operand has no side effects, just evaluate
5772 the first. */
5773 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5774 VOIDmode, ro_modifier);
5775 else if (code == BIT_FIELD_REF)
5776 {
5777 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5778 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5779 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5780 return const0_rtx;
5781 }
5782 ;
5783 target = 0;
5784 }
5785
5786 #ifdef MAX_INTEGER_COMPUTATION_MODE
5787 /* Only check stuff here if the mode we want is different from the mode
5788 of the expression; if it's the same, check_max_integer_computation_mode
5789 will handle it. Do we really need to check this stuff at all? */
5790
5791 if (target
5792 && GET_MODE (target) != mode
5793 && TREE_CODE (exp) != INTEGER_CST
5794 && TREE_CODE (exp) != PARM_DECL
5795 && TREE_CODE (exp) != ARRAY_REF
5796 && TREE_CODE (exp) != COMPONENT_REF
5797 && TREE_CODE (exp) != BIT_FIELD_REF
5798 && TREE_CODE (exp) != INDIRECT_REF
5799 && TREE_CODE (exp) != CALL_EXPR
5800 && TREE_CODE (exp) != VAR_DECL
5801 && TREE_CODE (exp) != RTL_EXPR)
5802 {
5803 enum machine_mode mode = GET_MODE (target);
5804
5805 if (GET_MODE_CLASS (mode) == MODE_INT
5806 && mode > MAX_INTEGER_COMPUTATION_MODE)
5807 fatal ("unsupported wide integer operation");
5808 }
5809
5810 if (tmode != mode
5811 && TREE_CODE (exp) != INTEGER_CST
5812 && TREE_CODE (exp) != PARM_DECL
5813 && TREE_CODE (exp) != ARRAY_REF
5814 && TREE_CODE (exp) != COMPONENT_REF
5815 && TREE_CODE (exp) != BIT_FIELD_REF
5816 && TREE_CODE (exp) != INDIRECT_REF
5817 && TREE_CODE (exp) != VAR_DECL
5818 && TREE_CODE (exp) != CALL_EXPR
5819 && TREE_CODE (exp) != RTL_EXPR
5820 && GET_MODE_CLASS (tmode) == MODE_INT
5821 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5822 fatal ("unsupported wide integer operation");
5823
5824 check_max_integer_computation_mode (exp);
5825 #endif
5826
5827 /* If we will do cse, generate all results into pseudo registers
5828 since 1) that allows cse to find more things
5829 and 2) otherwise cse could produce an insn the machine
5830 cannot support. */
5831
5832 if (! cse_not_expected && mode != BLKmode && target
5833 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5834 target = subtarget;
5835
5836 switch (code)
5837 {
5838 case LABEL_DECL:
5839 {
5840 tree function = decl_function_context (exp);
5841 /* Handle using a label in a containing function. */
5842 if (function != current_function_decl
5843 && function != inline_function_decl && function != 0)
5844 {
5845 struct function *p = find_function_data (function);
5846 /* Allocate in the memory associated with the function
5847 that the label is in. */
5848 push_obstacks (p->function_obstack,
5849 p->function_maybepermanent_obstack);
5850
5851 p->expr->x_forced_labels
5852 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5853 p->expr->x_forced_labels);
5854 pop_obstacks ();
5855 }
5856 else
5857 {
5858 if (modifier == EXPAND_INITIALIZER)
5859 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5860 label_rtx (exp),
5861 forced_labels);
5862 }
5863
5864 temp = gen_rtx_MEM (FUNCTION_MODE,
5865 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5866 if (function != current_function_decl
5867 && function != inline_function_decl && function != 0)
5868 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5869 return temp;
5870 }
5871
5872 case PARM_DECL:
5873 if (DECL_RTL (exp) == 0)
5874 {
5875 error_with_decl (exp, "prior parameter's size depends on `%s'");
5876 return CONST0_RTX (mode);
5877 }
5878
5879 /* ... fall through ... */
5880
5881 case VAR_DECL:
5882 /* If a static var's type was incomplete when the decl was written,
5883 but the type is complete now, lay out the decl now. */
5884 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5885 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5886 {
5887 push_obstacks_nochange ();
5888 end_temporary_allocation ();
5889 layout_decl (exp, 0);
5890 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5891 pop_obstacks ();
5892 }
5893
5894 /* Although static-storage variables start off initialized, according to
5895 ANSI C, a memcpy could overwrite them with uninitialized values. So
5896 we check them too. This also lets us check for read-only variables
5897 accessed via a non-const declaration, in case it won't be detected
5898 any other way (e.g., in an embedded system or OS kernel without
5899 memory protection).
5900
5901 Aggregates are not checked here; they're handled elsewhere. */
5902 if (cfun && current_function_check_memory_usage
5903 && code == VAR_DECL
5904 && GET_CODE (DECL_RTL (exp)) == MEM
5905 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5906 {
5907 enum memory_use_mode memory_usage;
5908 memory_usage = get_memory_usage_from_modifier (modifier);
5909
5910 if (memory_usage != MEMORY_USE_DONT)
5911 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5912 XEXP (DECL_RTL (exp), 0), Pmode,
5913 GEN_INT (int_size_in_bytes (type)),
5914 TYPE_MODE (sizetype),
5915 GEN_INT (memory_usage),
5916 TYPE_MODE (integer_type_node));
5917 }
5918
5919 /* ... fall through ... */
5920
5921 case FUNCTION_DECL:
5922 case RESULT_DECL:
5923 if (DECL_RTL (exp) == 0)
5924 abort ();
5925
5926 /* Ensure the variable is marked as used even if it doesn't go through
5927 a parser. If it hasn't been used yet, write out an external
5928 definition. */
5929 if (! TREE_USED (exp))
5930 {
5931 assemble_external (exp);
5932 TREE_USED (exp) = 1;
5933 }
5934
5935 /* Show we haven't gotten RTL for this yet. */
5936 temp = 0;
5937
5938 /* Handle variables inherited from containing functions. */
5939 context = decl_function_context (exp);
5940
5941 /* We treat inline_function_decl as an alias for the current function
5942 because that is the inline function whose vars, types, etc.
5943 are being merged into the current function.
5944 See expand_inline_function. */
5945
5946 if (context != 0 && context != current_function_decl
5947 && context != inline_function_decl
5948 /* If var is static, we don't need a static chain to access it. */
5949 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5950 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5951 {
5952 rtx addr;
5953
5954 /* Mark as non-local and addressable. */
5955 DECL_NONLOCAL (exp) = 1;
5956 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5957 abort ();
5958 mark_addressable (exp);
5959 if (GET_CODE (DECL_RTL (exp)) != MEM)
5960 abort ();
5961 addr = XEXP (DECL_RTL (exp), 0);
5962 if (GET_CODE (addr) == MEM)
5963 addr = gen_rtx_MEM (Pmode,
5964 fix_lexical_addr (XEXP (addr, 0), exp));
5965 else
5966 addr = fix_lexical_addr (addr, exp);
5967 temp = change_address (DECL_RTL (exp), mode, addr);
5968 }
5969
5970 /* This is the case of an array whose size is to be determined
5971 from its initializer, while the initializer is still being parsed.
5972 See expand_decl. */
5973
5974 else if (GET_CODE (DECL_RTL (exp)) == MEM
5975 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5976 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5977 XEXP (DECL_RTL (exp), 0));
5978
5979 /* If DECL_RTL is memory, we are in the normal case and either
5980 the address is not valid or it is not a register and -fforce-addr
5981 is specified, get the address into a register. */
5982
5983 else if (GET_CODE (DECL_RTL (exp)) == MEM
5984 && modifier != EXPAND_CONST_ADDRESS
5985 && modifier != EXPAND_SUM
5986 && modifier != EXPAND_INITIALIZER
5987 && (! memory_address_p (DECL_MODE (exp),
5988 XEXP (DECL_RTL (exp), 0))
5989 || (flag_force_addr
5990 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5991 temp = change_address (DECL_RTL (exp), VOIDmode,
5992 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5993
5994 /* If we got something, return it. But first, set the alignment
5995 if the address is a register. */
5996 if (temp != 0)
5997 {
5998 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5999 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6000
6001 return temp;
6002 }
6003
6004 /* If the mode of DECL_RTL does not match that of the decl, it
6005 must be a promoted value. We return a SUBREG of the wanted mode,
6006 but mark it so that we know that it was already extended. */
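/* For instance, on a target whose PROMOTE_MODE widens HImode variables to
   SImode, DECL_RTL is an SImode register and the code below hands back
   (subreg:HI (reg:SI ...) 0) with SUBREG_PROMOTED_VAR_P set.  */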
6007
6008 if (GET_CODE (DECL_RTL (exp)) == REG
6009 && GET_MODE (DECL_RTL (exp)) != mode)
6010 {
6011 /* Get the signedness used for this variable. Ensure we get the
6012 same mode we got when the variable was declared. */
6013 if (GET_MODE (DECL_RTL (exp))
6014 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6015 abort ();
6016
6017 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6018 SUBREG_PROMOTED_VAR_P (temp) = 1;
6019 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6020 return temp;
6021 }
6022
6023 return DECL_RTL (exp);
6024
6025 case INTEGER_CST:
6026 return immed_double_const (TREE_INT_CST_LOW (exp),
6027 TREE_INT_CST_HIGH (exp), mode);
6028
6029 case CONST_DECL:
6030 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6031 EXPAND_MEMORY_USE_BAD);
6032
6033 case REAL_CST:
6034 /* If optimized, generate immediate CONST_DOUBLE
6035 which will be turned into memory by reload if necessary.
6036
6037 We used to force a register so that loop.c could see it. But
6038 this does not allow gen_* patterns to perform optimizations with
6039 the constants. It also produces two insns in cases like "x = 1.0;".
6040 On most machines, floating-point constants are not permitted in
6041 many insns, so we'd end up copying it to a register in any case.
6042
6043 Now, we do the copying in expand_binop, if appropriate. */
6044 return immed_real_const (exp);
6045
6046 case COMPLEX_CST:
6047 case STRING_CST:
6048 if (! TREE_CST_RTL (exp))
6049 output_constant_def (exp);
6050
6051 /* TREE_CST_RTL probably contains a constant address.
6052 On RISC machines where a constant address isn't valid,
6053 make some insns to get that address into a register. */
6054 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6055 && modifier != EXPAND_CONST_ADDRESS
6056 && modifier != EXPAND_INITIALIZER
6057 && modifier != EXPAND_SUM
6058 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6059 || (flag_force_addr
6060 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6061 return change_address (TREE_CST_RTL (exp), VOIDmode,
6062 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6063 return TREE_CST_RTL (exp);
6064
6065 case EXPR_WITH_FILE_LOCATION:
6066 {
6067 rtx to_return;
6068 char *saved_input_filename = input_filename;
6069 int saved_lineno = lineno;
6070 input_filename = EXPR_WFL_FILENAME (exp);
6071 lineno = EXPR_WFL_LINENO (exp);
6072 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6073 emit_line_note (input_filename, lineno);
6074 /* Possibly avoid switching back and forth here. */
6075 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6076 input_filename = saved_input_filename;
6077 lineno = saved_lineno;
6078 return to_return;
6079 }
6080
6081 case SAVE_EXPR:
6082 context = decl_function_context (exp);
6083
6084 /* If this SAVE_EXPR was at global context, assume we are an
6085 initialization function and move it into our context. */
6086 if (context == 0)
6087 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6088
6089 /* We treat inline_function_decl as an alias for the current function
6090 because that is the inline function whose vars, types, etc.
6091 are being merged into the current function.
6092 See expand_inline_function. */
6093 if (context == current_function_decl || context == inline_function_decl)
6094 context = 0;
6095
6096 /* If this is non-local, handle it. */
6097 if (context)
6098 {
6099 /* The following call just exists to abort if the context is
6100 not of a containing function. */
6101 find_function_data (context);
6102
6103 temp = SAVE_EXPR_RTL (exp);
6104 if (temp && GET_CODE (temp) == REG)
6105 {
6106 put_var_into_stack (exp);
6107 temp = SAVE_EXPR_RTL (exp);
6108 }
6109 if (temp == 0 || GET_CODE (temp) != MEM)
6110 abort ();
6111 return change_address (temp, mode,
6112 fix_lexical_addr (XEXP (temp, 0), exp));
6113 }
6114 if (SAVE_EXPR_RTL (exp) == 0)
6115 {
6116 if (mode == VOIDmode)
6117 temp = const0_rtx;
6118 else
6119 temp = assign_temp (type, 3, 0, 0);
6120
6121 SAVE_EXPR_RTL (exp) = temp;
6122 if (!optimize && GET_CODE (temp) == REG)
6123 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6124 save_expr_regs);
6125
6126 /* If the mode of TEMP does not match that of the expression, it
6127 must be a promoted value. We pass store_expr a SUBREG of the
6128 wanted mode but mark it so that we know that it was already
6129 extended. Note that `unsignedp' was modified above in
6130 this case. */
6131
6132 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6133 {
6134 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6135 SUBREG_PROMOTED_VAR_P (temp) = 1;
6136 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6137 }
6138
6139 if (temp == const0_rtx)
6140 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6141 EXPAND_MEMORY_USE_BAD);
6142 else
6143 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6144
6145 TREE_USED (exp) = 1;
6146 }
6147
6148 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6149 must be a promoted value. We return a SUBREG of the wanted mode,
6150 but mark it so that we know that it was already extended. */
6151
6152 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6153 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6154 {
6155 /* Compute the signedness and make the proper SUBREG. */
6156 promote_mode (type, mode, &unsignedp, 0);
6157 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6158 SUBREG_PROMOTED_VAR_P (temp) = 1;
6159 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6160 return temp;
6161 }
6162
6163 return SAVE_EXPR_RTL (exp);
6164
6165 case UNSAVE_EXPR:
6166 {
6167 rtx temp;
6168 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6169 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6170 return temp;
6171 }
6172
6173 case PLACEHOLDER_EXPR:
6174 {
6175 tree placeholder_expr;
6176
6177 /* If there is an object on the head of the placeholder list,
6178 see if any object in it is of type TYPE or a pointer to it. For
6179 further information, see tree.def. */
6180 for (placeholder_expr = placeholder_list;
6181 placeholder_expr != 0;
6182 placeholder_expr = TREE_CHAIN (placeholder_expr))
6183 {
6184 tree need_type = TYPE_MAIN_VARIANT (type);
6185 tree object = 0;
6186 tree old_list = placeholder_list;
6187 tree elt;
6188
6189 /* Find the outermost reference that is of the type we want.
6190 If none, see if any object has a type that is a pointer to
6191 the type we want. */
6192 for (elt = TREE_PURPOSE (placeholder_expr);
6193 elt != 0 && object == 0;
6194 elt
6195 = ((TREE_CODE (elt) == COMPOUND_EXPR
6196 || TREE_CODE (elt) == COND_EXPR)
6197 ? TREE_OPERAND (elt, 1)
6198 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6199 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6200 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6201 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6202 ? TREE_OPERAND (elt, 0) : 0))
6203 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6204 object = elt;
6205
6206 for (elt = TREE_PURPOSE (placeholder_expr);
6207 elt != 0 && object == 0;
6208 elt
6209 = ((TREE_CODE (elt) == COMPOUND_EXPR
6210 || TREE_CODE (elt) == COND_EXPR)
6211 ? TREE_OPERAND (elt, 1)
6212 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6213 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6214 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6215 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6216 ? TREE_OPERAND (elt, 0) : 0))
6217 if (POINTER_TYPE_P (TREE_TYPE (elt))
6218 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6219 == need_type))
6220 object = build1 (INDIRECT_REF, need_type, elt);
6221
6222 if (object != 0)
6223 {
6224 /* Expand this object skipping the list entries before
6225 it was found in case it is also a PLACEHOLDER_EXPR.
6226 In that case, we want to translate it using subsequent
6227 entries. */
6228 placeholder_list = TREE_CHAIN (placeholder_expr);
6229 temp = expand_expr (object, original_target, tmode,
6230 ro_modifier);
6231 placeholder_list = old_list;
6232 return temp;
6233 }
6234 }
6235 }
6236
6237 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6238 abort ();
6239
6240 case WITH_RECORD_EXPR:
6241 /* Put the object on the placeholder list, expand our first operand,
6242 and pop the list. */
6243 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6244 placeholder_list);
6245 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6246 tmode, ro_modifier);
6247 placeholder_list = TREE_CHAIN (placeholder_list);
6248 return target;
6249
6250 case GOTO_EXPR:
6251 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6252 expand_goto (TREE_OPERAND (exp, 0));
6253 else
6254 expand_computed_goto (TREE_OPERAND (exp, 0));
6255 return const0_rtx;
6256
6257 case EXIT_EXPR:
6258 expand_exit_loop_if_false (NULL_PTR,
6259 invert_truthvalue (TREE_OPERAND (exp, 0)));
6260 return const0_rtx;
6261
6262 case LABELED_BLOCK_EXPR:
6263 if (LABELED_BLOCK_BODY (exp))
6264 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6265 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6266 return const0_rtx;
6267
6268 case EXIT_BLOCK_EXPR:
6269 if (EXIT_BLOCK_RETURN (exp))
6270 sorry ("returned value in block_exit_expr");
6271 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6272 return const0_rtx;
6273
6274 case LOOP_EXPR:
6275 push_temp_slots ();
6276 expand_start_loop (1);
6277 expand_expr_stmt (TREE_OPERAND (exp, 0));
6278 expand_end_loop ();
6279 pop_temp_slots ();
6280
6281 return const0_rtx;
6282
6283 case BIND_EXPR:
6284 {
6285 tree vars = TREE_OPERAND (exp, 0);
6286 int vars_need_expansion = 0;
6287
6288 /* Need to open a binding contour here because
6289 if there are any cleanups they must be contained here. */
6290 expand_start_bindings (2);
6291
6292 /* Mark the corresponding BLOCK for output in its proper place. */
6293 if (TREE_OPERAND (exp, 2) != 0
6294 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6295 insert_block (TREE_OPERAND (exp, 2));
6296
6297 /* If VARS have not yet been expanded, expand them now. */
6298 while (vars)
6299 {
6300 if (DECL_RTL (vars) == 0)
6301 {
6302 vars_need_expansion = 1;
6303 expand_decl (vars);
6304 }
6305 expand_decl_init (vars);
6306 vars = TREE_CHAIN (vars);
6307 }
6308
6309 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6310
6311 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6312
6313 return temp;
6314 }
6315
6316 case RTL_EXPR:
6317 if (RTL_EXPR_SEQUENCE (exp))
6318 {
6319 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6320 abort ();
6321 emit_insns (RTL_EXPR_SEQUENCE (exp));
6322 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6323 }
6324 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6325 free_temps_for_rtl_expr (exp);
6326 return RTL_EXPR_RTL (exp);
6327
6328 case CONSTRUCTOR:
6329 /* If we don't need the result, just ensure we evaluate any
6330 subexpressions. */
6331 if (ignore)
6332 {
6333 tree elt;
6334 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6335 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6336 EXPAND_MEMORY_USE_BAD);
6337 return const0_rtx;
6338 }
6339
6340 /* All elts simple constants => refer to a constant in memory. But
6341 if this is a non-BLKmode mode, let it store a field at a time
6342 since that should make a CONST_INT or CONST_DOUBLE when we
6343 fold. Likewise, if we have a target we can use, it is best to
6344 store directly into the target unless the type is large enough
6345 that memcpy will be used. If we are making an initializer and
6346 all operands are constant, put it in memory as well. */
6347 else if ((TREE_STATIC (exp)
6348 && ((mode == BLKmode
6349 && ! (target != 0 && safe_from_p (target, exp, 1)))
6350 || TREE_ADDRESSABLE (exp)
6351 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6352 && (! MOVE_BY_PIECES_P
6353 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6354 TYPE_ALIGN (type)))
6355 && ! mostly_zeros_p (exp))))
6356 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6357 {
6358 rtx constructor = output_constant_def (exp);
6359
6360 if (modifier != EXPAND_CONST_ADDRESS
6361 && modifier != EXPAND_INITIALIZER
6362 && modifier != EXPAND_SUM
6363 && (! memory_address_p (GET_MODE (constructor),
6364 XEXP (constructor, 0))
6365 || (flag_force_addr
6366 && GET_CODE (XEXP (constructor, 0)) != REG)))
6367 constructor = change_address (constructor, VOIDmode,
6368 XEXP (constructor, 0));
6369 return constructor;
6370 }
6371
6372 else
6373 {
6374 /* Handle calls that pass values in multiple non-contiguous
6375 locations. The Irix 6 ABI has examples of this. */
6376 if (target == 0 || ! safe_from_p (target, exp, 1)
6377 || GET_CODE (target) == PARALLEL)
6378 {
6379 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6380 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6381 else
6382 target = assign_temp (type, 0, 1, 1);
6383 }
6384
6385 if (TREE_READONLY (exp))
6386 {
6387 if (GET_CODE (target) == MEM)
6388 target = copy_rtx (target);
6389
6390 RTX_UNCHANGING_P (target) = 1;
6391 }
6392
6393 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6394 int_size_in_bytes (TREE_TYPE (exp)));
6395 return target;
6396 }
6397
6398 case INDIRECT_REF:
6399 {
6400 tree exp1 = TREE_OPERAND (exp, 0);
6401 tree exp2;
6402 tree index;
6403 tree string = string_constant (exp1, &index);
6404
6405 /* Try to optimize reads from const strings. */
6406 if (string
6407 && TREE_CODE (string) == STRING_CST
6408 && TREE_CODE (index) == INTEGER_CST
6409 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6410 && GET_MODE_CLASS (mode) == MODE_INT
6411 && GET_MODE_SIZE (mode) == 1
6412 && modifier != EXPAND_MEMORY_USE_WO)
6413 return
6414 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6415
6416 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6417 op0 = memory_address (mode, op0);
6418
6419 if (cfun && current_function_check_memory_usage
6420 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6421 {
6422 enum memory_use_mode memory_usage;
6423 memory_usage = get_memory_usage_from_modifier (modifier);
6424
6425 if (memory_usage != MEMORY_USE_DONT)
6426 {
6427 in_check_memory_usage = 1;
6428 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6429 op0, Pmode,
6430 GEN_INT (int_size_in_bytes (type)),
6431 TYPE_MODE (sizetype),
6432 GEN_INT (memory_usage),
6433 TYPE_MODE (integer_type_node));
6434 in_check_memory_usage = 0;
6435 }
6436 }
6437
6438 temp = gen_rtx_MEM (mode, op0);
6439 /* If address was computed by addition,
6440 mark this as an element of an aggregate. */
6441 if (TREE_CODE (exp1) == PLUS_EXPR
6442 || (TREE_CODE (exp1) == SAVE_EXPR
6443 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6444 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6445 || (TREE_CODE (exp1) == ADDR_EXPR
6446 && (exp2 = TREE_OPERAND (exp1, 0))
6447 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6448 MEM_SET_IN_STRUCT_P (temp, 1);
6449
6450 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6451 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6452
6453 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6454 here, because, in C and C++, the fact that a location is accessed
6455 through a pointer to const does not mean that the value there can
6456 never change. Languages where it can never change should
6457 also set TREE_STATIC. */
6458 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6459
6460 /* If we are writing to this object and its type is a record with
6461 readonly fields, we must mark it as readonly so it will
6462 conflict with readonly references to those fields. */
6463 if (modifier == EXPAND_MEMORY_USE_WO
6464 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6465 RTX_UNCHANGING_P (temp) = 1;
6466
6467 return temp;
6468 }
6469
6470 case ARRAY_REF:
6471 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6472 abort ();
6473
6474 {
6475 tree array = TREE_OPERAND (exp, 0);
6476 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6477 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6478 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6479 HOST_WIDE_INT i;
6480
6481 /* Optimize the special-case of a zero lower bound.
6482
6483 We convert the low_bound to sizetype to avoid some problems
6484 with constant folding. (E.g. suppose the lower bound is 1,
6485 and its mode is QI. Without the conversion, (ARRAY
6486 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6487 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6488
6489 if (! integer_zerop (low_bound))
6490 index = size_diffop (index, convert (sizetype, low_bound));
6491
6492 /* Fold an expression like: "foo"[2].
6493 This is not done in fold so it won't happen inside &.
6494 Don't fold if this is for wide characters since it's too
6495 difficult to do correctly and this is a very rare case. */
6496
6497 if (TREE_CODE (array) == STRING_CST
6498 && TREE_CODE (index) == INTEGER_CST
6499 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6500 && GET_MODE_CLASS (mode) == MODE_INT
6501 && GET_MODE_SIZE (mode) == 1)
6502 return
6503 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6504
6505 /* If this is a constant index into a constant array,
6506 just get the value from the array. Handle both the cases when
6507 we have an explicit constructor and when our operand is a variable
6508 that was declared const. */
6509
6510 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6511 && TREE_CODE (index) == INTEGER_CST
6512 && 0 > compare_tree_int (index,
6513 list_length (CONSTRUCTOR_ELTS
6514 (TREE_OPERAND (exp, 0)))))
6515 {
6516 tree elem;
6517
6518 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6519 i = TREE_INT_CST_LOW (index);
6520 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6521 ;
6522
6523 if (elem)
6524 return expand_expr (fold (TREE_VALUE (elem)), target,
6525 tmode, ro_modifier);
6526 }
6527
6528 else if (optimize >= 1
6529 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6530 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6531 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6532 {
6533 if (TREE_CODE (index) == INTEGER_CST)
6534 {
6535 tree init = DECL_INITIAL (array);
6536
6537 if (TREE_CODE (init) == CONSTRUCTOR)
6538 {
6539 tree elem;
6540
6541 for (elem = CONSTRUCTOR_ELTS (init);
6542 (elem
6543 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6544 elem = TREE_CHAIN (elem))
6545 ;
6546
6547 if (elem)
6548 return expand_expr (fold (TREE_VALUE (elem)), target,
6549 tmode, ro_modifier);
6550 }
6551 else if (TREE_CODE (init) == STRING_CST
6552 && 0 > compare_tree_int (index,
6553 TREE_STRING_LENGTH (init)))
6554 return (GEN_INT
6555 (TREE_STRING_POINTER
6556 (init)[TREE_INT_CST_LOW (index)]));
6557 }
6558 }
6559 }
6560
6561 /* ... fall through ... */
6562
6563 case COMPONENT_REF:
6564 case BIT_FIELD_REF:
6565 /* If the operand is a CONSTRUCTOR, we can just extract the
6566 appropriate field if it is present. Don't do this if we have
6567 already written the data since we want to refer to that copy
6568 and varasm.c assumes that's what we'll do. */
6569 if (code != ARRAY_REF
6570 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6571 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6572 {
6573 tree elt;
6574
6575 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6576 elt = TREE_CHAIN (elt))
6577 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6578 /* We can normally use the value of the field in the
6579 CONSTRUCTOR. However, if this is a bitfield in
6580 an integral mode that we can fit in a HOST_WIDE_INT,
6581 we must mask only the number of bits in the bitfield,
6582 since this is done implicitly by the constructor. If
6583 the bitfield does not meet either of those conditions,
6584 we can't do this optimization. */
6585 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6586 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6587 == MODE_INT)
6588 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6589 <= HOST_BITS_PER_WIDE_INT))))
6590 {
6591 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6592 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6593 {
6594 HOST_WIDE_INT bitsize
6595 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6596
6597 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6598 {
6599 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6600 op0 = expand_and (op0, op1, target);
6601 }
6602 else
6603 {
6604 enum machine_mode imode
6605 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6606 tree count
6607 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6608 0);
6609
6610 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6611 target, 0);
6612 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6613 target, 0);
6614 }
6615 }
6616
6617 return op0;
6618 }
6619 }
6620
6621 {
6622 enum machine_mode mode1;
6623 HOST_WIDE_INT bitsize, bitpos;
6624 tree offset;
6625 int volatilep = 0;
6626 unsigned int alignment;
6627 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6628 &mode1, &unsignedp, &volatilep,
6629 &alignment);
6630
6631 /* If we got back the original object, something is wrong. Perhaps
6632 we are evaluating an expression too early. In any event, don't
6633 infinitely recurse. */
6634 if (tem == exp)
6635 abort ();
6636
6637 /* If TEM's type is a union of variable size, pass TARGET to the inner
6638 computation, since it will need a temporary and TARGET is known
6639 to suffice. This occurs in unchecked conversion in Ada. */
6640
6641 op0 = expand_expr (tem,
6642 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6643 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6644 != INTEGER_CST)
6645 ? target : NULL_RTX),
6646 VOIDmode,
6647 (modifier == EXPAND_INITIALIZER
6648 || modifier == EXPAND_CONST_ADDRESS)
6649 ? modifier : EXPAND_NORMAL);
6650
6651 /* If this is a constant, put it into a register if it is a
6652 legitimate constant and OFFSET is 0 and memory if it isn't. */
6653 if (CONSTANT_P (op0))
6654 {
6655 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6656 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6657 && offset == 0)
6658 op0 = force_reg (mode, op0);
6659 else
6660 op0 = validize_mem (force_const_mem (mode, op0));
6661 }
6662
6663 if (offset != 0)
6664 {
6665 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6666
6667 /* If this object is in memory, put it into a register.
6668 This case can't occur in C, but can in Ada if we have
6669 unchecked conversion of an expression from a scalar type to
6670 an array or record type. */
6671 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6672 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6673 {
6674 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6675
6676 mark_temp_addr_taken (memloc);
6677 emit_move_insn (memloc, op0);
6678 op0 = memloc;
6679 }
6680
6681 if (GET_CODE (op0) != MEM)
6682 abort ();
6683
6684 if (GET_MODE (offset_rtx) != ptr_mode)
6685 {
6686 #ifdef POINTERS_EXTEND_UNSIGNED
6687 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6688 #else
6689 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6690 #endif
6691 }
6692
6693 /* A constant address in OP0 can have VOIDmode; we must not try
6694 to call force_reg in that case, so avoid it. */
6695 if (GET_CODE (op0) == MEM
6696 && GET_MODE (op0) == BLKmode
6697 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6698 && bitsize != 0
6699 && (bitpos % bitsize) == 0
6700 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6701 && alignment == GET_MODE_ALIGNMENT (mode1))
6702 {
6703 rtx temp = change_address (op0, mode1,
6704 plus_constant (XEXP (op0, 0),
6705 (bitpos /
6706 BITS_PER_UNIT)));
6707 if (GET_CODE (XEXP (temp, 0)) == REG)
6708 op0 = temp;
6709 else
6710 op0 = change_address (op0, mode1,
6711 force_reg (GET_MODE (XEXP (temp, 0)),
6712 XEXP (temp, 0)));
6713 bitpos = 0;
6714 }
6715
6716
6717 op0 = change_address (op0, VOIDmode,
6718 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6719 force_reg (ptr_mode,
6720 offset_rtx)));
6721 }
6722
6723 /* Don't forget about volatility even if this is a bitfield. */
6724 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6725 {
6726 op0 = copy_rtx (op0);
6727 MEM_VOLATILE_P (op0) = 1;
6728 }
6729
6730 /* Check the access. */
6731 if (cfun != 0 && current_function_check_memory_usage
6732 && GET_CODE (op0) == MEM)
6733 {
6734 enum memory_use_mode memory_usage;
6735 memory_usage = get_memory_usage_from_modifier (modifier);
6736
6737 if (memory_usage != MEMORY_USE_DONT)
6738 {
6739 rtx to;
6740 int size;
6741
6742 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6743 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6744
6745 /* Check the access right of the pointer. */
6746 if (size > BITS_PER_UNIT)
6747 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6748 to, Pmode,
6749 GEN_INT (size / BITS_PER_UNIT),
6750 TYPE_MODE (sizetype),
6751 GEN_INT (memory_usage),
6752 TYPE_MODE (integer_type_node));
6753 }
6754 }
6755
6756 /* In cases where an aligned union has an unaligned object
6757 as a field, we might be extracting a BLKmode value from
6758 an integer-mode (e.g., SImode) object. Handle this case
6759 by doing the extract into an object as wide as the field
6760 (which we know to be the width of a basic mode), then
6761 storing into memory, and changing the mode to BLKmode.
6762 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6763 EXPAND_INITIALIZER), then we must not copy to a temporary. */
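	  /* When the value is wanted in BLKmode but can be fetched in an
	     integer mode, the code below extracts it with extract_bit_field
	     and then spills it to a stack temporary whose mode is switched
	     to BLKmode, so the caller sees an ordinary memory reference.  */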
6764 if (mode1 == VOIDmode
6765 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6766 || (modifier != EXPAND_CONST_ADDRESS
6767 && modifier != EXPAND_INITIALIZER
6768 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6769 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6770 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6771 /* If the field isn't aligned enough to fetch as a memref,
6772 fetch it as a bit field. */
6773 || (mode1 != BLKmode
6774 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6775 && ((TYPE_ALIGN (TREE_TYPE (tem))
6776 < GET_MODE_ALIGNMENT (mode))
6777 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6778 /* If the type and the field are a constant size and the
6779 size of the type isn't the same size as the bitfield,
6780 we must use bitfield operations. */
6781 || ((bitsize >= 0
6782 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6783 == INTEGER_CST)
6784 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6785 bitsize)))))
6786 || (modifier != EXPAND_CONST_ADDRESS
6787 && modifier != EXPAND_INITIALIZER
6788 && mode == BLKmode
6789 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6790 && (TYPE_ALIGN (type) > alignment
6791 || bitpos % TYPE_ALIGN (type) != 0)))
6792 {
6793 enum machine_mode ext_mode = mode;
6794
6795 if (ext_mode == BLKmode
6796 && ! (target != 0 && GET_CODE (op0) == MEM
6797 && GET_CODE (target) == MEM
6798 && bitpos % BITS_PER_UNIT == 0))
6799 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6800
6801 if (ext_mode == BLKmode)
6802 {
6803 /* In this case, BITPOS must start at a byte boundary and
6804 TARGET, if specified, must be a MEM. */
6805 if (GET_CODE (op0) != MEM
6806 || (target != 0 && GET_CODE (target) != MEM)
6807 || bitpos % BITS_PER_UNIT != 0)
6808 abort ();
6809
6810 op0 = change_address (op0, VOIDmode,
6811 plus_constant (XEXP (op0, 0),
6812 bitpos / BITS_PER_UNIT));
6813 if (target == 0)
6814 target = assign_temp (type, 0, 1, 1);
6815
6816 emit_block_move (target, op0,
6817 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6818 / BITS_PER_UNIT),
6819 BITS_PER_UNIT);
6820
6821 return target;
6822 }
6823
6824 op0 = validize_mem (op0);
6825
6826 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6827 mark_reg_pointer (XEXP (op0, 0), alignment);
6828
6829 op0 = extract_bit_field (op0, bitsize, bitpos,
6830 unsignedp, target, ext_mode, ext_mode,
6831 alignment,
6832 int_size_in_bytes (TREE_TYPE (tem)));
6833
6834 /* If the result is a record type and BITSIZE is narrower than
6835 the mode of OP0, an integral mode, and this is a big endian
6836 machine, we must put the field into the high-order bits. */
6837 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6838 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6839 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6840 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6841 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6842 - bitsize),
6843 op0, 1);
6844
6845 if (mode == BLKmode)
6846 {
6847 rtx new = assign_stack_temp (ext_mode,
6848 bitsize / BITS_PER_UNIT, 0);
6849
6850 emit_move_insn (new, op0);
6851 op0 = copy_rtx (new);
6852 PUT_MODE (op0, BLKmode);
6853 MEM_SET_IN_STRUCT_P (op0, 1);
6854 }
6855
6856 return op0;
6857 }
6858
6859 /* If the result is BLKmode, use that to access the object
6860 now as well. */
6861 if (mode == BLKmode)
6862 mode1 = BLKmode;
6863
6864 /* Get a reference to just this component. */
6865 if (modifier == EXPAND_CONST_ADDRESS
6866 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6867 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6868 (bitpos / BITS_PER_UNIT)));
6869 else
6870 op0 = change_address (op0, mode1,
6871 plus_constant (XEXP (op0, 0),
6872 (bitpos / BITS_PER_UNIT)));
6873
6874 if (GET_CODE (op0) == MEM)
6875 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6876
6877 if (GET_CODE (XEXP (op0, 0)) == REG)
6878 mark_reg_pointer (XEXP (op0, 0), alignment);
6879
6880 MEM_SET_IN_STRUCT_P (op0, 1);
6881 MEM_VOLATILE_P (op0) |= volatilep;
6882 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6883 || modifier == EXPAND_CONST_ADDRESS
6884 || modifier == EXPAND_INITIALIZER)
6885 return op0;
6886 else if (target == 0)
6887 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6888
6889 convert_move (target, op0, unsignedp);
6890 return target;
6891 }
6892
6893 /* Intended for a reference to a buffer of a file-object in Pascal.
6894 But it's not certain that a special tree code will really be
6895 necessary for these. INDIRECT_REF might work for them. */
6896 case BUFFER_REF:
6897 abort ();
6898
6899 case IN_EXPR:
6900 {
6901 /* Pascal set IN expression.
6902
6903 Algorithm:
6904 rlo = set_low - (set_low%bits_per_word);
6905 the_word = set [ (index - rlo)/bits_per_word ];
6906 bit_index = index % bits_per_word;
6907 bitmask = 1 << bit_index;
6908 return !!(the_word & bitmask); */
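	/* For instance, with bits_per_word == 8, set_low == 3 and index == 11:
	   rlo = 0, the_word = set[1], bit_index = 3, bitmask = 0x08, and the
	   result is nonzero exactly when that bit of the set is set.  */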
6909
6910 tree set = TREE_OPERAND (exp, 0);
6911 tree index = TREE_OPERAND (exp, 1);
6912 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6913 tree set_type = TREE_TYPE (set);
6914 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6915 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6916 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6917 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6918 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6919 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6920 rtx setaddr = XEXP (setval, 0);
6921 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6922 rtx rlow;
6923 rtx diff, quo, rem, addr, bit, result;
6924
6925 preexpand_calls (exp);
6926
6927 /* If domain is empty, answer is no. Likewise if index is constant
6928 and out of bounds. */
6929 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6930 && TREE_CODE (set_low_bound) == INTEGER_CST
6931 && tree_int_cst_lt (set_high_bound, set_low_bound))
6932 || (TREE_CODE (index) == INTEGER_CST
6933 && TREE_CODE (set_low_bound) == INTEGER_CST
6934 && tree_int_cst_lt (index, set_low_bound))
6935 || (TREE_CODE (set_high_bound) == INTEGER_CST
6936 && TREE_CODE (index) == INTEGER_CST
6937 && tree_int_cst_lt (set_high_bound, index))))
6938 return const0_rtx;
6939
6940 if (target == 0)
6941 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6942
6943 /* If we get here, we have to generate the code for both cases
6944 (in range and out of range). */
6945
6946 op0 = gen_label_rtx ();
6947 op1 = gen_label_rtx ();
6948
6949 if (! (GET_CODE (index_val) == CONST_INT
6950 && GET_CODE (lo_r) == CONST_INT))
6951 {
6952 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6953 GET_MODE (index_val), iunsignedp, 0, op1);
6954 }
6955
6956 if (! (GET_CODE (index_val) == CONST_INT
6957 && GET_CODE (hi_r) == CONST_INT))
6958 {
6959 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6960 GET_MODE (index_val), iunsignedp, 0, op1);
6961 }
6962
6963 /* Calculate the element number of bit zero in the first word
6964 of the set. */
6965 if (GET_CODE (lo_r) == CONST_INT)
6966 rlow = GEN_INT (INTVAL (lo_r)
6967 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6968 else
6969 rlow = expand_binop (index_mode, and_optab, lo_r,
6970 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6971 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6972
6973 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6974 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6975
6976 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6977 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6978 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6979 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6980
6981 addr = memory_address (byte_mode,
6982 expand_binop (index_mode, add_optab, diff,
6983 setaddr, NULL_RTX, iunsignedp,
6984 OPTAB_LIB_WIDEN));
6985
6986 /* Extract the bit we want to examine */
6987 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6988 gen_rtx_MEM (byte_mode, addr),
6989 make_tree (TREE_TYPE (index), rem),
6990 NULL_RTX, 1);
6991 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6992 GET_MODE (target) == byte_mode ? target : 0,
6993 1, OPTAB_LIB_WIDEN);
6994
6995 if (result != target)
6996 convert_move (target, result, 1);
6997
6998 /* Output the code to handle the out-of-range case. */
6999 emit_jump (op0);
7000 emit_label (op1);
7001 emit_move_insn (target, const0_rtx);
7002 emit_label (op0);
7003 return target;
7004 }
7005
7006 case WITH_CLEANUP_EXPR:
7007 if (RTL_EXPR_RTL (exp) == 0)
7008 {
7009 RTL_EXPR_RTL (exp)
7010 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7011 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7012
7013 /* That's it for this cleanup. */
7014 TREE_OPERAND (exp, 2) = 0;
7015 }
7016 return RTL_EXPR_RTL (exp);
7017
7018 case CLEANUP_POINT_EXPR:
7019 {
7020 /* Start a new binding layer that will keep track of all cleanup
7021 actions to be performed. */
7022 expand_start_bindings (2);
7023
7024 target_temp_slot_level = temp_slot_level;
7025
7026 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7027 /* If we're going to use this value, load it up now. */
7028 if (! ignore)
7029 op0 = force_not_mem (op0);
7030 preserve_temp_slots (op0);
7031 expand_end_bindings (NULL_TREE, 0, 0);
7032 }
7033 return op0;
7034
7035 case CALL_EXPR:
7036 /* Check for a built-in function. */
7037 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7038 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7039 == FUNCTION_DECL)
7040 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7041 return expand_builtin (exp, target, subtarget, tmode, ignore);
7042
7043 /* If this call was expanded already by preexpand_calls,
7044 just return the result we got. */
7045 if (CALL_EXPR_RTL (exp) != 0)
7046 return CALL_EXPR_RTL (exp);
7047
7048 return expand_call (exp, target, ignore);
7049
7050 case NON_LVALUE_EXPR:
7051 case NOP_EXPR:
7052 case CONVERT_EXPR:
7053 case REFERENCE_EXPR:
7054 if (TREE_CODE (type) == UNION_TYPE)
7055 {
7056 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7057
7058 /* If both input and output are BLKmode, this conversion
7059 isn't actually doing anything unless we need to make the
7060 alignment stricter. */
7061 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7062 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7063 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7064 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7065 modifier);
7066
7067 if (target == 0)
7068 {
7069 if (mode != BLKmode)
7070 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7071 else
7072 target = assign_temp (type, 0, 1, 1);
7073 }
7074
7075 if (GET_CODE (target) == MEM)
7076 /* Store data into beginning of memory target. */
7077 store_expr (TREE_OPERAND (exp, 0),
7078 change_address (target, TYPE_MODE (valtype), 0), 0);
7079
7080 else if (GET_CODE (target) == REG)
7081 /* Store this field into a union of the proper type. */
7082 store_field (target,
7083 MIN ((int_size_in_bytes (TREE_TYPE
7084 (TREE_OPERAND (exp, 0)))
7085 * BITS_PER_UNIT),
7086 GET_MODE_BITSIZE (mode)),
7087 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7088 VOIDmode, 0, BITS_PER_UNIT,
7089 int_size_in_bytes (type), 0);
7090 else
7091 abort ();
7092
7093 /* Return the entire union. */
7094 return target;
7095 }
7096
7097 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7098 {
7099 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7100 ro_modifier);
7101
7102 /* If the signedness of the conversion differs and OP0 is
7103 a promoted SUBREG, clear that indication since we now
7104 have to do the proper extension. */
7105 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7106 && GET_CODE (op0) == SUBREG)
7107 SUBREG_PROMOTED_VAR_P (op0) = 0;
7108
7109 return op0;
7110 }
7111
7112 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7113 if (GET_MODE (op0) == mode)
7114 return op0;
7115
7116 /* If OP0 is a constant, just convert it into the proper mode. */
7117 if (CONSTANT_P (op0))
7118 return
7119 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7120 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7121
7122 if (modifier == EXPAND_INITIALIZER)
7123 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7124
7125 if (target == 0)
7126 return
7127 convert_to_mode (mode, op0,
7128 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7129 else
7130 convert_move (target, op0,
7131 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7132 return target;
7133
7134 case PLUS_EXPR:
7135 /* We come here from MINUS_EXPR when the second operand is a
7136 constant. */
7137 plus_expr:
7138 this_optab = add_optab;
7139
7140 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7141 something else, make sure we add the register to the constant and
7142 then to the other thing. This case can occur during strength
7143 reduction and doing it this way will produce better code if the
7144 frame pointer or argument pointer is eliminated.
7145
7146 fold-const.c will ensure that the constant is always in the inner
7147 PLUS_EXPR, so the only case we need to do anything about is if
7148 sp, ap, or fp is our second argument, in which case we must swap
7149 the innermost first argument and our second argument. */
7150
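/* For example, (A + 4) + FP is rearranged into (FP + 4) + A here, so
   that the register and the constant end up adjacent.  */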
7151 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7152 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7153 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7154 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7155 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7156 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7157 {
7158 tree t = TREE_OPERAND (exp, 1);
7159
7160 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7161 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7162 }
7163
7164 /* If the result is to be ptr_mode and we are adding an integer to
7165 something, we might be forming a constant. So try to use
7166 plus_constant. If it produces a sum and we can't accept it,
7167 use force_operand. This allows P = &ARR[const] to generate
7168 efficient code on machines where a SYMBOL_REF is not a valid
7169 address.
7170
7171 If this is an EXPAND_SUM call, always return the sum. */
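/* For instance, for P = &ARR[2] with 4-byte elements, plus_constant
   typically yields (const (plus (symbol_ref "ARR") (const_int 8))),
   which force_operand can then legitimize if that is not a valid
   address on the target.  */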
7172 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7173 || mode == ptr_mode)
7174 {
7175 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7176 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7177 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7178 {
7179 rtx constant_part;
7180
7181 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7182 EXPAND_SUM);
7183 /* Use immed_double_const to ensure that the constant is
7184 truncated according to the mode of OP1, then sign extended
7185 to a HOST_WIDE_INT. Using the constant directly can result
7186 in non-canonical RTL in a 64x32 cross compile. */
7187 constant_part
7188 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7189 (HOST_WIDE_INT) 0,
7190 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7191 op1 = plus_constant (op1, INTVAL (constant_part));
7192 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7193 op1 = force_operand (op1, target);
7194 return op1;
7195 }
7196
7197 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7198 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7199 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7200 {
7201 rtx constant_part;
7202
7203 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7204 EXPAND_SUM);
7205 if (! CONSTANT_P (op0))
7206 {
7207 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7208 VOIDmode, modifier);
7209 /* Don't go to both_summands if modifier
7210 says it's not right to return a PLUS. */
7211 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7212 goto binop2;
7213 goto both_summands;
7214 }
7215 /* Use immed_double_const to ensure that the constant is
7216 truncated according to the mode of OP0, then sign extended
7217 to a HOST_WIDE_INT. Using the constant directly can result
7218 in non-canonical RTL in a 64x32 cross compile. */
7219 constant_part
7220 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7221 (HOST_WIDE_INT) 0,
7222 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7223 op0 = plus_constant (op0, INTVAL (constant_part));
7224 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7225 op0 = force_operand (op0, target);
7226 return op0;
7227 }
7228 }
7229
7230 /* No sense saving up arithmetic to be done
7231 if it's all in the wrong mode to form part of an address.
7232 And force_operand won't know whether to sign-extend or
7233 zero-extend. */
7234 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7235 || mode != ptr_mode)
7236 goto binop;
7237
7238 preexpand_calls (exp);
7239 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7240 subtarget = 0;
7241
7242 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7243 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7244
7245 both_summands:
7246 /* Make sure any term that's a sum with a constant comes last. */
7247 if (GET_CODE (op0) == PLUS
7248 && CONSTANT_P (XEXP (op0, 1)))
7249 {
7250 temp = op0;
7251 op0 = op1;
7252 op1 = temp;
7253 }
7254 /* If adding to a sum including a constant,
7255 associate it to put the constant outside. */
7256 if (GET_CODE (op1) == PLUS
7257 && CONSTANT_P (XEXP (op1, 1)))
7258 {
7259 rtx constant_term = const0_rtx;
7260
7261 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7262 if (temp != 0)
7263 op0 = temp;
7264 /* Ensure that MULT comes first if there is one. */
7265 else if (GET_CODE (op0) == MULT)
7266 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7267 else
7268 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7269
7270 /* Let's also eliminate constants from op0 if possible. */
7271 op0 = eliminate_constant_term (op0, &constant_term);
7272
7273 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7274 their sum should be a constant. Form it into OP1, since the
7275 result we want will then be OP0 + OP1. */
7276
7277 temp = simplify_binary_operation (PLUS, mode, constant_term,
7278 XEXP (op1, 1));
7279 if (temp != 0)
7280 op1 = temp;
7281 else
7282 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7283 }
7284
7285 /* Put a constant term last and put a multiplication first. */
7286 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7287 temp = op1, op1 = op0, op0 = temp;
7288
7289 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7290 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7291
7292 case MINUS_EXPR:
7293 /* For initializers, we are allowed to return a MINUS of two
7294 symbolic constants. Here we handle all cases when both operands
7295 are constant. */
7296 /* Handle difference of two symbolic constants,
7297 for the sake of an initializer. */
7298 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7299 && really_constant_p (TREE_OPERAND (exp, 0))
7300 && really_constant_p (TREE_OPERAND (exp, 1)))
7301 {
7302 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7303 VOIDmode, ro_modifier);
7304 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7305 VOIDmode, ro_modifier);
7306
7307 /* If the last operand is a CONST_INT, use plus_constant of
7308 the negated constant. Else make the MINUS. */
7309 if (GET_CODE (op1) == CONST_INT)
7310 return plus_constant (op0, - INTVAL (op1));
7311 else
7312 return gen_rtx_MINUS (mode, op0, op1);
7313 }
7314 /* Convert A - const to A + (-const). */
7315 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7316 {
7317 tree negated = fold (build1 (NEGATE_EXPR, type,
7318 TREE_OPERAND (exp, 1)));
7319
7320 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7321 /* If we can't negate the constant in TYPE, leave it alone and
7322 expand_binop will negate it for us. We used to try to do it
7323 here in the signed version of TYPE, but that doesn't work
7324 on POINTER_TYPEs. */;
7325 else
7326 {
7327 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7328 goto plus_expr;
7329 }
7330 }
7331 this_optab = sub_optab;
7332 goto binop;
7333
7334 case MULT_EXPR:
7335 preexpand_calls (exp);
7336 /* If first operand is constant, swap them.
7337 Thus the following special case checks need only
7338 check the second operand. */
7339 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7340 {
7341 register tree t1 = TREE_OPERAND (exp, 0);
7342 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7343 TREE_OPERAND (exp, 1) = t1;
7344 }
7345
7346 /* Attempt to return something suitable for generating an
7347 indexed address, for machines that support that. */
7348
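/* For example, when OP0 expands to (plus X (const_int 4)) and the
   multiplier is 3, the distributive-law case below returns
   (plus (mult X (const_int 3)) (const_int 12)).  */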
7349 if (modifier == EXPAND_SUM && mode == ptr_mode
7350 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7351 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7352 {
7353 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7354 EXPAND_SUM);
7355
7356 /* Apply distributive law if OP0 is x+c. */
7357 if (GET_CODE (op0) == PLUS
7358 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7359 return
7360 gen_rtx_PLUS
7361 (mode,
7362 gen_rtx_MULT
7363 (mode, XEXP (op0, 0),
7364 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7365 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7366 * INTVAL (XEXP (op0, 1))));
7367
7368 if (GET_CODE (op0) != REG)
7369 op0 = force_operand (op0, NULL_RTX);
7370 if (GET_CODE (op0) != REG)
7371 op0 = copy_to_mode_reg (mode, op0);
7372
7373 return
7374 gen_rtx_MULT (mode, op0,
7375 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7376 }
7377
7378 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7379 subtarget = 0;
7380
7381 /* Check for multiplying things that have been extended
7382 from a narrower type. If this machine supports multiplying
7383 in that narrower type with a result in the desired type,
7384 do it that way, and avoid the explicit type-conversion. */
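/* For example, with 16-bit short and 32-bit int, (int) s1 * (int) s2
   can use a HImode-to-SImode widening multiply directly, rather than
   extending both operands to SImode first, when the target provides
   such an instruction.  */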
7385 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7386 && TREE_CODE (type) == INTEGER_TYPE
7387 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7388 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7389 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7390 && int_fits_type_p (TREE_OPERAND (exp, 1),
7391 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7392 /* Don't use a widening multiply if a shift will do. */
7393 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7394 > HOST_BITS_PER_WIDE_INT)
7395 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7396 ||
7397 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7398 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7399 ==
7400 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7401 /* If both operands are extended, they must either both
7402 be zero-extended or both be sign-extended. */
7403 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7404 ==
7405 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7406 {
7407 enum machine_mode innermode
7408 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7409 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7410 ? smul_widen_optab : umul_widen_optab);
7411 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7412 ? umul_widen_optab : smul_widen_optab);
7413 if (mode == GET_MODE_WIDER_MODE (innermode))
7414 {
7415 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7416 {
7417 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7418 NULL_RTX, VOIDmode, 0);
7419 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7420 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7421 VOIDmode, 0);
7422 else
7423 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7424 NULL_RTX, VOIDmode, 0);
7425 goto binop2;
7426 }
7427 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7428 && innermode == word_mode)
7429 {
7430 rtx htem;
7431 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7432 NULL_RTX, VOIDmode, 0);
7433 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7434 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7435 VOIDmode, 0);
7436 else
7437 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7438 NULL_RTX, VOIDmode, 0);
7439 temp = expand_binop (mode, other_optab, op0, op1, target,
7440 unsignedp, OPTAB_LIB_WIDEN);
7441 htem = expand_mult_highpart_adjust (innermode,
7442 gen_highpart (innermode, temp),
7443 op0, op1,
7444 gen_highpart (innermode, temp),
7445 unsignedp);
7446 emit_move_insn (gen_highpart (innermode, temp), htem);
7447 return temp;
7448 }
7449 }
7450 }
7451 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7452 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7453 return expand_mult (mode, op0, op1, target, unsignedp);
7454
7455 case TRUNC_DIV_EXPR:
7456 case FLOOR_DIV_EXPR:
7457 case CEIL_DIV_EXPR:
7458 case ROUND_DIV_EXPR:
7459 case EXACT_DIV_EXPR:
7460 preexpand_calls (exp);
7461 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7462 subtarget = 0;
7463 /* Possible optimization: compute the dividend with EXPAND_SUM
7464 then if the divisor is constant can optimize the case
7465 where some terms of the dividend have coeffs divisible by it. */
7466 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7467 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7468 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7469
7470 case RDIV_EXPR:
7471 this_optab = flodiv_optab;
7472 goto binop;
7473
7474 case TRUNC_MOD_EXPR:
7475 case FLOOR_MOD_EXPR:
7476 case CEIL_MOD_EXPR:
7477 case ROUND_MOD_EXPR:
7478 preexpand_calls (exp);
7479 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7480 subtarget = 0;
7481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7482 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7483 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7484
7485 case FIX_ROUND_EXPR:
7486 case FIX_FLOOR_EXPR:
7487 case FIX_CEIL_EXPR:
7488 abort (); /* Not used for C. */
7489
7490 case FIX_TRUNC_EXPR:
7491 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7492 if (target == 0)
7493 target = gen_reg_rtx (mode);
7494 expand_fix (target, op0, unsignedp);
7495 return target;
7496
7497 case FLOAT_EXPR:
7498 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7499 if (target == 0)
7500 target = gen_reg_rtx (mode);
7501 /* expand_float can't figure out what to do if FROM has VOIDmode.
7502 So give it the correct mode. With -O, cse will optimize this. */
7503 if (GET_MODE (op0) == VOIDmode)
7504 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7505 op0);
7506 expand_float (target, op0,
7507 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7508 return target;
7509
7510 case NEGATE_EXPR:
7511 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7512 temp = expand_unop (mode, neg_optab, op0, target, 0);
7513 if (temp == 0)
7514 abort ();
7515 return temp;
7516
7517 case ABS_EXPR:
7518 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7519
7520 /* Handle complex values specially. */
7521 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7522 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7523 return expand_complex_abs (mode, op0, target, unsignedp);
7524
7525 /* Unsigned abs is simply the operand. Testing here means we don't
7526 risk generating incorrect code below. */
7527 if (TREE_UNSIGNED (type))
7528 return op0;
7529
7530 return expand_abs (mode, op0, target,
7531 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7532
7533 case MAX_EXPR:
7534 case MIN_EXPR:
7535 target = original_target;
7536 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7537 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7538 || GET_MODE (target) != mode
7539 || (GET_CODE (target) == REG
7540 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7541 target = gen_reg_rtx (mode);
7542 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7543 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7544
7545 /* First try to do it with a special MIN or MAX instruction.
7546 If that does not win, use a conditional jump to select the proper
7547 value. */
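/* Roughly, the conditional-jump fallback below does
     target = op0;
     if (target >= op1) goto done;  (>= becomes <= for MIN_EXPR)
     target = op1;
   done:  */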
7548 this_optab = (TREE_UNSIGNED (type)
7549 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7550 : (code == MIN_EXPR ? smin_optab : smax_optab));
7551
7552 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7553 OPTAB_WIDEN);
7554 if (temp != 0)
7555 return temp;
7556
7557 /* At this point, a MEM target is no longer useful; we will get better
7558 code without it. */
7559
7560 if (GET_CODE (target) == MEM)
7561 target = gen_reg_rtx (mode);
7562
7563 if (target != op0)
7564 emit_move_insn (target, op0);
7565
7566 op0 = gen_label_rtx ();
7567
7568 /* If this mode is an integer too wide to compare properly,
7569 compare word by word. Rely on cse to optimize constant cases. */
7570 if (GET_MODE_CLASS (mode) == MODE_INT
7571 && ! can_compare_p (GE, mode, ccp_jump))
7572 {
7573 if (code == MAX_EXPR)
7574 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7575 target, op1, NULL_RTX, op0);
7576 else
7577 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7578 op1, target, NULL_RTX, op0);
7579 }
7580 else
7581 {
7582 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7583 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7584 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7585 op0);
7586 }
7587 emit_move_insn (target, op1);
7588 emit_label (op0);
7589 return target;
7590
7591 case BIT_NOT_EXPR:
7592 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7593 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7594 if (temp == 0)
7595 abort ();
7596 return temp;
7597
7598 case FFS_EXPR:
7599 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7600 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7601 if (temp == 0)
7602 abort ();
7603 return temp;
7604
7605 /* ??? Can optimize bitwise operations with one arg constant.
7606 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7607 and (a bitwise1 b) bitwise2 b (etc)
7608 but that is probably not worthwhile. */
7609
7610 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7611 boolean values when we want in all cases to compute both of them. In
7612 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7613 as actual zero-or-1 values and then bitwise anding. In cases where
7614 there cannot be any side effects, better code would be made by
7615 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7616 how to recognize those cases. */
7617
7618 case TRUTH_AND_EXPR:
7619 case BIT_AND_EXPR:
7620 this_optab = and_optab;
7621 goto binop;
7622
7623 case TRUTH_OR_EXPR:
7624 case BIT_IOR_EXPR:
7625 this_optab = ior_optab;
7626 goto binop;
7627
7628 case TRUTH_XOR_EXPR:
7629 case BIT_XOR_EXPR:
7630 this_optab = xor_optab;
7631 goto binop;
7632
7633 case LSHIFT_EXPR:
7634 case RSHIFT_EXPR:
7635 case LROTATE_EXPR:
7636 case RROTATE_EXPR:
7637 preexpand_calls (exp);
7638 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7639 subtarget = 0;
7640 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7641 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7642 unsignedp);
7643
7644 /* Could determine the answer when only additive constants differ. Also,
7645 the addition of one can be handled by changing the condition. */
7646 case LT_EXPR:
7647 case LE_EXPR:
7648 case GT_EXPR:
7649 case GE_EXPR:
7650 case EQ_EXPR:
7651 case NE_EXPR:
7652 case UNORDERED_EXPR:
7653 case ORDERED_EXPR:
7654 case UNLT_EXPR:
7655 case UNLE_EXPR:
7656 case UNGT_EXPR:
7657 case UNGE_EXPR:
7658 case UNEQ_EXPR:
7659 preexpand_calls (exp);
7660 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7661 if (temp != 0)
7662 return temp;
7663
7664 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
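/* That is, roughly:
     temp = foo;
     if (temp == 0) goto lab;
     temp = 1;
   lab:  */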
7665 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7666 && original_target
7667 && GET_CODE (original_target) == REG
7668 && (GET_MODE (original_target)
7669 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7670 {
7671 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7672 VOIDmode, 0);
7673
7674 if (temp != original_target)
7675 temp = copy_to_reg (temp);
7676
7677 op1 = gen_label_rtx ();
7678 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7679 GET_MODE (temp), unsignedp, 0, op1);
7680 emit_move_insn (temp, const1_rtx);
7681 emit_label (op1);
7682 return temp;
7683 }
7684
7685 /* If no set-flag instruction, must generate a conditional
7686 store into a temporary variable. Drop through
7687 and handle this like && and ||. */
7688
7689 case TRUTH_ANDIF_EXPR:
7690 case TRUTH_ORIF_EXPR:
7691 if (! ignore
7692 && (target == 0 || ! safe_from_p (target, exp, 1)
7693 /* Make sure we don't have a hard reg (such as function's return
7694 value) live across basic blocks, if not optimizing. */
7695 || (!optimize && GET_CODE (target) == REG
7696 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7697 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7698
7699 if (target)
7700 emit_clr_insn (target);
7701
7702 op1 = gen_label_rtx ();
7703 jumpifnot (exp, op1);
7704
7705 if (target)
7706 emit_0_to_1_insn (target);
7707
7708 emit_label (op1);
7709 return ignore ? const0_rtx : target;
7710
7711 case TRUTH_NOT_EXPR:
7712 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7713 /* The parser is careful to generate TRUTH_NOT_EXPR
7714 only with operands that are always zero or one. */
7715 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7716 target, 1, OPTAB_LIB_WIDEN);
7717 if (temp == 0)
7718 abort ();
7719 return temp;
7720
7721 case COMPOUND_EXPR:
7722 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7723 emit_queue ();
7724 return expand_expr (TREE_OPERAND (exp, 1),
7725 (ignore ? const0_rtx : target),
7726 VOIDmode, 0);
7727
7728 case COND_EXPR:
7729 /* If we would have a "singleton" (see below) were it not for a
7730 conversion in each arm, bring that conversion back out. */
7731 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7732 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7733 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7734 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7735 {
7736 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7737 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7738
7739 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7740 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7741 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7742 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7743 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7744 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7745 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7746 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7747 return expand_expr (build1 (NOP_EXPR, type,
7748 build (COND_EXPR, TREE_TYPE (true),
7749 TREE_OPERAND (exp, 0),
7750 true, false)),
7751 target, tmode, modifier);
7752 }
7753
7754 {
7755 /* Note that COND_EXPRs whose type is a structure or union
7756 are required to be constructed to contain assignments of
7757 a temporary variable, so that we can evaluate them here
7758 for side effect only. If type is void, we must do likewise. */
7759
7760 /* If an arm of the branch requires a cleanup,
7761 only that cleanup is performed. */
7762
7763 tree singleton = 0;
7764 tree binary_op = 0, unary_op = 0;
7765
7766 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7767 convert it to our mode, if necessary. */
7768 if (integer_onep (TREE_OPERAND (exp, 1))
7769 && integer_zerop (TREE_OPERAND (exp, 2))
7770 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7771 {
7772 if (ignore)
7773 {
7774 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7775 ro_modifier);
7776 return const0_rtx;
7777 }
7778
7779 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7780 if (GET_MODE (op0) == mode)
7781 return op0;
7782
7783 if (target == 0)
7784 target = gen_reg_rtx (mode);
7785 convert_move (target, op0, unsignedp);
7786 return target;
7787 }
7788
7789 /* Check for X ? A + B : A. If we have this, we can copy A to the
7790 output and conditionally add B. Similarly for unary operations.
7791 Don't do this if X has side-effects because those side effects
7792 might affect A or B and the "?" operation is a sequence point in
7793 ANSI. (operand_equal_p tests for side effects.) */
7794
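/* For example, b ? a + 8 : a qualifies: A is the singleton and A + 8
   the binary operation, so A can be copied to the output and 8 added
   to it only when B is true.  */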
7795 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7796 && operand_equal_p (TREE_OPERAND (exp, 2),
7797 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7798 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7799 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7800 && operand_equal_p (TREE_OPERAND (exp, 1),
7801 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7802 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7803 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7804 && operand_equal_p (TREE_OPERAND (exp, 2),
7805 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7806 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7807 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7808 && operand_equal_p (TREE_OPERAND (exp, 1),
7809 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7810 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7811
7812 /* If we are not to produce a result, we have no target. Otherwise,
7813 if a target was specified use it; it will not be used as an
7814 intermediate target unless it is safe. If no target, use a
7815 temporary. */
7816
7817 if (ignore)
7818 temp = 0;
7819 else if (original_target
7820 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7821 || (singleton && GET_CODE (original_target) == REG
7822 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7823 && original_target == var_rtx (singleton)))
7824 && GET_MODE (original_target) == mode
7825 #ifdef HAVE_conditional_move
7826 && (! can_conditionally_move_p (mode)
7827 || GET_CODE (original_target) == REG
7828 || TREE_ADDRESSABLE (type))
7829 #endif
7830 && ! (GET_CODE (original_target) == MEM
7831 && MEM_VOLATILE_P (original_target)))
7832 temp = original_target;
7833 else if (TREE_ADDRESSABLE (type))
7834 abort ();
7835 else
7836 temp = assign_temp (type, 0, 0, 1);
7837
7838 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7839 do the test of X as a store-flag operation, do this as
7840 A + ((X != 0) << log C). Similarly for other simple binary
7841 operators. Only do this for C == 1 if BRANCH_COST is low. */
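/* E.g. X ? A + 4 : A can become A + ((X != 0) << 2) when the
   store-flag expansion of X succeeds, avoiding a branch entirely.  */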
7842 if (temp && singleton && binary_op
7843 && (TREE_CODE (binary_op) == PLUS_EXPR
7844 || TREE_CODE (binary_op) == MINUS_EXPR
7845 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7846 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7847 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7848 : integer_onep (TREE_OPERAND (binary_op, 1)))
7849 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7850 {
7851 rtx result;
7852 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7853 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7854 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7855 : xor_optab);
7856
7857 /* If we had X ? A : A + 1, do this as A + (X == 0).
7858
7859 We have to invert the truth value here and then put it
7860 back later if do_store_flag fails. We cannot simply copy
7861 TREE_OPERAND (exp, 0) to another variable and modify that
7862 because invert_truthvalue can modify the tree pointed to
7863 by its argument. */
7864 if (singleton == TREE_OPERAND (exp, 1))
7865 TREE_OPERAND (exp, 0)
7866 = invert_truthvalue (TREE_OPERAND (exp, 0));
7867
7868 result = do_store_flag (TREE_OPERAND (exp, 0),
7869 (safe_from_p (temp, singleton, 1)
7870 ? temp : NULL_RTX),
7871 mode, BRANCH_COST <= 1);
7872
7873 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7874 result = expand_shift (LSHIFT_EXPR, mode, result,
7875 build_int_2 (tree_log2
7876 (TREE_OPERAND
7877 (binary_op, 1)),
7878 0),
7879 (safe_from_p (temp, singleton, 1)
7880 ? temp : NULL_RTX), 0);
7881
7882 if (result)
7883 {
7884 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7885 return expand_binop (mode, boptab, op1, result, temp,
7886 unsignedp, OPTAB_LIB_WIDEN);
7887 }
7888 else if (singleton == TREE_OPERAND (exp, 1))
7889 TREE_OPERAND (exp, 0)
7890 = invert_truthvalue (TREE_OPERAND (exp, 0));
7891 }
7892
7893 do_pending_stack_adjust ();
7894 NO_DEFER_POP;
7895 op0 = gen_label_rtx ();
7896
7897 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7898 {
7899 if (temp != 0)
7900 {
7901 /* If the target conflicts with the other operand of the
7902 binary op, we can't use it. Also, we can't use the target
7903 if it is a hard register, because evaluating the condition
7904 might clobber it. */
7905 if ((binary_op
7906 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7907 || (GET_CODE (temp) == REG
7908 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7909 temp = gen_reg_rtx (mode);
7910 store_expr (singleton, temp, 0);
7911 }
7912 else
7913 expand_expr (singleton,
7914 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7915 if (singleton == TREE_OPERAND (exp, 1))
7916 jumpif (TREE_OPERAND (exp, 0), op0);
7917 else
7918 jumpifnot (TREE_OPERAND (exp, 0), op0);
7919
7920 start_cleanup_deferral ();
7921 if (binary_op && temp == 0)
7922 /* Just touch the other operand. */
7923 expand_expr (TREE_OPERAND (binary_op, 1),
7924 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7925 else if (binary_op)
7926 store_expr (build (TREE_CODE (binary_op), type,
7927 make_tree (type, temp),
7928 TREE_OPERAND (binary_op, 1)),
7929 temp, 0);
7930 else
7931 store_expr (build1 (TREE_CODE (unary_op), type,
7932 make_tree (type, temp)),
7933 temp, 0);
7934 op1 = op0;
7935 }
7936 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7937 comparison operator. If we have one of these cases, set the
7938 output to A, branch on A (cse will merge these two references),
7939 then set the output to FOO. */
7940 else if (temp
7941 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7942 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7943 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7944 TREE_OPERAND (exp, 1), 0)
7945 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7946 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7947 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7948 {
7949 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7950 temp = gen_reg_rtx (mode);
7951 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7952 jumpif (TREE_OPERAND (exp, 0), op0);
7953
7954 start_cleanup_deferral ();
7955 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7956 op1 = op0;
7957 }
7958 else if (temp
7959 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7960 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7961 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7962 TREE_OPERAND (exp, 2), 0)
7963 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7964 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7965 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7966 {
7967 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7968 temp = gen_reg_rtx (mode);
7969 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7970 jumpifnot (TREE_OPERAND (exp, 0), op0);
7971
7972 start_cleanup_deferral ();
7973 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7974 op1 = op0;
7975 }
7976 else
7977 {
7978 op1 = gen_label_rtx ();
7979 jumpifnot (TREE_OPERAND (exp, 0), op0);
7980
7981 start_cleanup_deferral ();
7982
7983 /* One branch of the cond can be void if it never returns. For
7984 example, A ? throw : E.  */
7985 if (temp != 0
7986 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7987 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7988 else
7989 expand_expr (TREE_OPERAND (exp, 1),
7990 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7991 end_cleanup_deferral ();
7992 emit_queue ();
7993 emit_jump_insn (gen_jump (op1));
7994 emit_barrier ();
7995 emit_label (op0);
7996 start_cleanup_deferral ();
7997 if (temp != 0
7998 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7999 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8000 else
8001 expand_expr (TREE_OPERAND (exp, 2),
8002 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8003 }
8004
8005 end_cleanup_deferral ();
8006
8007 emit_queue ();
8008 emit_label (op1);
8009 OK_DEFER_POP;
8010
8011 return temp;
8012 }
8013
8014 case TARGET_EXPR:
8015 {
8016 /* Something needs to be initialized, but we didn't know
8017 where that thing was when building the tree. For example,
8018 it could be the return value of a function, or a parameter
8019 to a function which must be laid out on the stack, or a temporary
8020 variable which must be passed by reference.
8021
8022 We guarantee that the expression will either be constructed
8023 or copied into our original target. */
8024
8025 tree slot = TREE_OPERAND (exp, 0);
8026 tree cleanups = NULL_TREE;
8027 tree exp1;
8028
8029 if (TREE_CODE (slot) != VAR_DECL)
8030 abort ();
8031
8032 if (! ignore)
8033 target = original_target;
8034
8035 /* Set this here so that if we get a target that refers to a
8036 register variable that's already been used, put_reg_into_stack
8037 knows that it should fix up those uses. */
8038 TREE_USED (slot) = 1;
8039
8040 if (target == 0)
8041 {
8042 if (DECL_RTL (slot) != 0)
8043 {
8044 target = DECL_RTL (slot);
8045 /* If we have already expanded the slot, don't do
8046 it again. (mrs) */
8047 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8048 return target;
8049 }
8050 else
8051 {
8052 target = assign_temp (type, 2, 0, 1);
8053 /* All temp slots at this level must not conflict. */
8054 preserve_temp_slots (target);
8055 DECL_RTL (slot) = target;
8056 if (TREE_ADDRESSABLE (slot))
8057 {
8058 TREE_ADDRESSABLE (slot) = 0;
8059 mark_addressable (slot);
8060 }
8061
8062 /* Since SLOT is not known to the called function
8063 to belong to its stack frame, we must build an explicit
8064 cleanup. This case occurs when we must build up a reference
8065 to pass the reference as an argument. In this case,
8066 it is very likely that such a reference need not be
8067 built here. */
8068
8069 if (TREE_OPERAND (exp, 2) == 0)
8070 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8071 cleanups = TREE_OPERAND (exp, 2);
8072 }
8073 }
8074 else
8075 {
8076 /* This case does occur when expanding a parameter which
8077 needs to be constructed on the stack. The target
8078 is the actual stack address that we want to initialize.
8079 The function we call will perform the cleanup in this case. */
8080
8081 /* If we have already assigned it space, use that space,
8082 not the target that we were passed, as our target
8083 parameter is only a hint. */
8084 if (DECL_RTL (slot) != 0)
8085 {
8086 target = DECL_RTL (slot);
8087 /* If we have already expanded the slot, don't do
8088 it again. (mrs) */
8089 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8090 return target;
8091 }
8092 else
8093 {
8094 DECL_RTL (slot) = target;
8095 /* If we must have an addressable slot, then make sure that
8096 the RTL that we just stored in slot is OK. */
8097 if (TREE_ADDRESSABLE (slot))
8098 {
8099 TREE_ADDRESSABLE (slot) = 0;
8100 mark_addressable (slot);
8101 }
8102 }
8103 }
8104
8105 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8106 /* Mark it as expanded. */
8107 TREE_OPERAND (exp, 1) = NULL_TREE;
8108
8109 store_expr (exp1, target, 0);
8110
8111 expand_decl_cleanup (NULL_TREE, cleanups);
8112
8113 return target;
8114 }
8115
8116 case INIT_EXPR:
8117 {
8118 tree lhs = TREE_OPERAND (exp, 0);
8119 tree rhs = TREE_OPERAND (exp, 1);
8120 tree noncopied_parts = 0;
8121 tree lhs_type = TREE_TYPE (lhs);
8122
8123 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8124 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8125 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8126 TYPE_NONCOPIED_PARTS (lhs_type));
8127 while (noncopied_parts != 0)
8128 {
8129 expand_assignment (TREE_VALUE (noncopied_parts),
8130 TREE_PURPOSE (noncopied_parts), 0, 0);
8131 noncopied_parts = TREE_CHAIN (noncopied_parts);
8132 }
8133 return temp;
8134 }
8135
8136 case MODIFY_EXPR:
8137 {
8138 /* If lhs is complex, expand calls in rhs before computing it.
8139 That's so we don't compute a pointer and save it over a call.
8140 If lhs is simple, compute it first so we can give it as a
8141 target if the rhs is just a call. This avoids an extra temp and copy
8142 and that prevents a partial-subsumption which makes bad code.
8143 Actually we could treat component_ref's of vars like vars. */
8144
8145 tree lhs = TREE_OPERAND (exp, 0);
8146 tree rhs = TREE_OPERAND (exp, 1);
8147 tree noncopied_parts = 0;
8148 tree lhs_type = TREE_TYPE (lhs);
8149
8150 temp = 0;
8151
8152 if (TREE_CODE (lhs) != VAR_DECL
8153 && TREE_CODE (lhs) != RESULT_DECL
8154 && TREE_CODE (lhs) != PARM_DECL
8155 && ! (TREE_CODE (lhs) == INDIRECT_REF
8156 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8157 preexpand_calls (exp);
8158
8159 /* Check for |= or &= of a bitfield of size one into another bitfield
8160 of size 1. In this case, (unless we need the result of the
8161 assignment) we can do this more efficiently with a
8162 test followed by an assignment, if necessary.
8163
8164 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8165 things change so we do, this code should be enhanced to
8166 support it. */
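/* For instance, with one-bit fields a and b, the statement s.a |= s.b
   (when its value is not needed) becomes roughly "if (s.b) s.a = 1;",
   and s.a &= s.b becomes "if (! s.b) s.a = 0;".  */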
8167 if (ignore
8168 && TREE_CODE (lhs) == COMPONENT_REF
8169 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8170 || TREE_CODE (rhs) == BIT_AND_EXPR)
8171 && TREE_OPERAND (rhs, 0) == lhs
8172 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8173 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8174 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8175 {
8176 rtx label = gen_label_rtx ();
8177
8178 do_jump (TREE_OPERAND (rhs, 1),
8179 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8180 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8181 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8182 (TREE_CODE (rhs) == BIT_IOR_EXPR
8183 ? integer_one_node
8184 : integer_zero_node)),
8185 0, 0);
8186 do_pending_stack_adjust ();
8187 emit_label (label);
8188 return const0_rtx;
8189 }
8190
8191 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8192 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8193 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8194 TYPE_NONCOPIED_PARTS (lhs_type));
8195
8196 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8197 while (noncopied_parts != 0)
8198 {
8199 expand_assignment (TREE_PURPOSE (noncopied_parts),
8200 TREE_VALUE (noncopied_parts), 0, 0);
8201 noncopied_parts = TREE_CHAIN (noncopied_parts);
8202 }
8203 return temp;
8204 }
8205
8206 case RETURN_EXPR:
8207 if (!TREE_OPERAND (exp, 0))
8208 expand_null_return ();
8209 else
8210 expand_return (TREE_OPERAND (exp, 0));
8211 return const0_rtx;
8212
8213 case PREINCREMENT_EXPR:
8214 case PREDECREMENT_EXPR:
8215 return expand_increment (exp, 0, ignore);
8216
8217 case POSTINCREMENT_EXPR:
8218 case POSTDECREMENT_EXPR:
8219 /* Faster to treat as pre-increment if result is not used. */
8220 return expand_increment (exp, ! ignore, ignore);
8221
8222 case ADDR_EXPR:
8223 /* If nonzero, TEMP will be set to the address of something that might
8224 be a MEM corresponding to a stack slot. */
8225 temp = 0;
8226
8227 /* Are we taking the address of a nested function? */
8228 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8229 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8230 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8231 && ! TREE_STATIC (exp))
8232 {
8233 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8234 op0 = force_operand (op0, target);
8235 }
8236 /* If we are taking the address of something erroneous, just
8237 return a zero. */
8238 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8239 return const0_rtx;
8240 else
8241 {
8242 /* We make sure to pass const0_rtx down if we came in with
8243 ignore set, to avoid doing the cleanups twice for the same expression. */
8244 op0 = expand_expr (TREE_OPERAND (exp, 0),
8245 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8246 (modifier == EXPAND_INITIALIZER
8247 ? modifier : EXPAND_CONST_ADDRESS));
8248
8249 /* If we are going to ignore the result, OP0 will have been set
8250 to const0_rtx, so just return it. Don't get confused and
8251 think we are taking the address of the constant. */
8252 if (ignore)
8253 return op0;
8254
8255 op0 = protect_from_queue (op0, 0);
8256
8257 /* We would like the object in memory. If it is a constant, we can
8258 have it be statically allocated into memory. For a non-constant,
8259 we need to allocate some memory and store the value into it. */
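/* For example, taking the address of a value currently held in a
   register forces it into a fresh stack temporary below, and the
   address of that temporary is what is returned.  */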
8260
8261 if (CONSTANT_P (op0))
8262 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8263 op0);
8264 else if (GET_CODE (op0) == MEM)
8265 {
8266 mark_temp_addr_taken (op0);
8267 temp = XEXP (op0, 0);
8268 }
8269
8270 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8271 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8272 {
8273 /* If this object is in a register, it must not
8274 be BLKmode. */
8275 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8276 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8277
8278 mark_temp_addr_taken (memloc);
8279 emit_move_insn (memloc, op0);
8280 op0 = memloc;
8281 }
8282
8283 if (GET_CODE (op0) != MEM)
8284 abort ();
8285
8286 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8287 {
8288 temp = XEXP (op0, 0);
8289 #ifdef POINTERS_EXTEND_UNSIGNED
8290 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8291 && mode == ptr_mode)
8292 temp = convert_memory_address (ptr_mode, temp);
8293 #endif
8294 return temp;
8295 }
8296
8297 op0 = force_operand (XEXP (op0, 0), target);
8298 }
8299
8300 if (flag_force_addr && GET_CODE (op0) != REG)
8301 op0 = force_reg (Pmode, op0);
8302
8303 if (GET_CODE (op0) == REG
8304 && ! REG_USERVAR_P (op0))
8305 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8306
8307 /* If we might have had a temp slot, add an equivalent address
8308 for it. */
8309 if (temp != 0)
8310 update_temp_slot_address (temp, op0);
8311
8312 #ifdef POINTERS_EXTEND_UNSIGNED
8313 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8314 && mode == ptr_mode)
8315 op0 = convert_memory_address (ptr_mode, op0);
8316 #endif
8317
8318 return op0;
8319
8320 case ENTRY_VALUE_EXPR:
8321 abort ();
8322
8323 /* COMPLEX type for Extended Pascal & Fortran */
8324 case COMPLEX_EXPR:
8325 {
8326 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8327 rtx insns;
8328
8329 /* Get the rtx code of the operands. */
8330 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8331 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8332
8333 if (! target)
8334 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8335
8336 start_sequence ();
8337
8338 /* Move the real (op0) and imaginary (op1) parts to their location. */
8339 emit_move_insn (gen_realpart (mode, target), op0);
8340 emit_move_insn (gen_imagpart (mode, target), op1);
8341
8342 insns = get_insns ();
8343 end_sequence ();
8344
8345 /* Complex construction should appear as a single unit. */
8346 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8347 each with a separate pseudo as destination.
8348 It's not correct for flow to treat them as a unit. */
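/* A CONCAT target means the complex value is kept as separate real and
   imaginary pseudos, so the two moves are emitted as ordinary
   independent insns instead.  */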
8349 if (GET_CODE (target) != CONCAT)
8350 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8351 else
8352 emit_insns (insns);
8353
8354 return target;
8355 }
8356
8357 case REALPART_EXPR:
8358 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8359 return gen_realpart (mode, op0);
8360
8361 case IMAGPART_EXPR:
8362 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8363 return gen_imagpart (mode, op0);
8364
8365 case CONJ_EXPR:
8366 {
8367 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8368 rtx imag_t;
8369 rtx insns;
8370
8371 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8372
8373 if (! target)
8374 target = gen_reg_rtx (mode);
8375
8376 start_sequence ();
8377
8378 /* Store the realpart and the negated imagpart to target. */
8379 emit_move_insn (gen_realpart (partmode, target),
8380 gen_realpart (partmode, op0));
8381
8382 imag_t = gen_imagpart (partmode, target);
8383 temp = expand_unop (partmode, neg_optab,
8384 gen_imagpart (partmode, op0), imag_t, 0);
8385 if (temp != imag_t)
8386 emit_move_insn (imag_t, temp);
8387
8388 insns = get_insns ();
8389 end_sequence ();
8390
8391 /* Conjugate should appear as a single unit.
8392 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8393 each with a separate pseudo as destination.
8394 It's not correct for flow to treat them as a unit. */
8395 if (GET_CODE (target) != CONCAT)
8396 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8397 else
8398 emit_insns (insns);
8399
8400 return target;
8401 }
8402
8403 case TRY_CATCH_EXPR:
8404 {
8405 tree handler = TREE_OPERAND (exp, 1);
8406
8407 expand_eh_region_start ();
8408
8409 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8410
8411 expand_eh_region_end (handler);
8412
8413 return op0;
8414 }
8415
8416 case TRY_FINALLY_EXPR:
8417 {
8418 tree try_block = TREE_OPERAND (exp, 0);
8419 tree finally_block = TREE_OPERAND (exp, 1);
8420 rtx finally_label = gen_label_rtx ();
8421 rtx done_label = gen_label_rtx ();
8422 rtx return_link = gen_reg_rtx (Pmode);
8423 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8424 (tree) finally_label, (tree) return_link);
8425 TREE_SIDE_EFFECTS (cleanup) = 1;
8426
8427 /* Start a new binding layer that will keep track of all cleanup
8428 actions to be performed. */
8429 expand_start_bindings (2);
8430
8431 target_temp_slot_level = temp_slot_level;
8432
8433 expand_decl_cleanup (NULL_TREE, cleanup);
8434 op0 = expand_expr (try_block, target, tmode, modifier);
8435
8436 preserve_temp_slots (op0);
8437 expand_end_bindings (NULL_TREE, 0, 0);
8438 emit_jump (done_label);
8439 emit_label (finally_label);
8440 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8441 emit_indirect_jump (return_link);
8442 emit_label (done_label);
8443 return op0;
8444 }
8445
8446 case GOTO_SUBROUTINE_EXPR:
8447 {
8448 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8449 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8450 rtx return_address = gen_label_rtx ();
8451 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8452 emit_jump (subr);
8453 emit_label (return_address);
8454 return const0_rtx;
8455 }
8456
8457 case POPDCC_EXPR:
8458 {
8459 rtx dcc = get_dynamic_cleanup_chain ();
8460 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8461 return const0_rtx;
8462 }
8463
8464 case POPDHC_EXPR:
8465 {
8466 rtx dhc = get_dynamic_handler_chain ();
8467 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8468 return const0_rtx;
8469 }
8470
8471 case VA_ARG_EXPR:
8472 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8473
8474 default:
8475 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8476 }
8477
8478 /* Here to do an ordinary binary operator, generating an instruction
8479 from the optab already placed in `this_optab'. */
8480 binop:
8481 preexpand_calls (exp);
8482 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8483 subtarget = 0;
8484 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8485 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8486 binop2:
8487 temp = expand_binop (mode, this_optab, op0, op1, target,
8488 unsignedp, OPTAB_LIB_WIDEN);
8489 if (temp == 0)
8490 abort ();
8491 return temp;
8492 }
8493 \f
8494 /* Similar to expand_expr, except that we don't specify a target, target
8495 mode, or modifier and we return the alignment of the inner type. This is
8496 used in cases where it is not necessary to align the result to the
8497 alignment of its type as long as we know the alignment of the result, for
8498 example for comparisons of BLKmode values. */
8499
8500 static rtx
8501 expand_expr_unaligned (exp, palign)
8502 register tree exp;
8503 unsigned int *palign;
8504 {
8505 register rtx op0;
8506 tree type = TREE_TYPE (exp);
8507 register enum machine_mode mode = TYPE_MODE (type);
8508
8509 /* Default the alignment we return to that of the type. */
8510 *palign = TYPE_ALIGN (type);
8511
8512 /* The only cases in which we do anything special is if the resulting mode
8513 is BLKmode. */
8514 if (mode != BLKmode)
8515 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8516
8517 switch (TREE_CODE (exp))
8518 {
8519 case CONVERT_EXPR:
8520 case NOP_EXPR:
8521 case NON_LVALUE_EXPR:
8522 /* Conversions between BLKmode values don't change the underlying
8523 alignment or value. */
8524 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8525 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8526 break;
8527
8528 case ARRAY_REF:
8529 /* Much of the code for this case is copied directly from expand_expr.
8530 We need to duplicate it here because we will do something different
8531 in the fall-through case, so we need to handle the same exceptions
8532 it does. */
8533 {
8534 tree array = TREE_OPERAND (exp, 0);
8535 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8536 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8537 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8538 HOST_WIDE_INT i;
8539
8540 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8541 abort ();
8542
8543 /* Optimize the special case of a zero lower bound.
8544
8545 We convert the low_bound to sizetype to avoid some problems
8546 with constant folding. (E.g. suppose the lower bound is 1,
8547 and its mode is QI. Without the conversion, (ARRAY
8548 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8549 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8550
8551 if (! integer_zerop (low_bound))
8552 index = size_diffop (index, convert (sizetype, low_bound));
8553
8554 /* If this is a constant index into a constant array,
8555 just get the value from the array. Handle both the cases when
8556 we have an explicit constructor and when our operand is a variable
8557 that was declared const. */
8558
8559 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8560 && 0 > compare_tree_int (index,
8561 list_length (CONSTRUCTOR_ELTS
8562 (TREE_OPERAND (exp, 0)))))
8563 {
8564 tree elem;
8565
8566 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8567 i = TREE_INT_CST_LOW (index);
8568 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8569 ;
8570
8571 if (elem)
8572 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8573 }
8574
8575 else if (optimize >= 1
8576 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8577 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8578 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8579 {
8580 if (TREE_CODE (index) == INTEGER_CST)
8581 {
8582 tree init = DECL_INITIAL (array);
8583
8584 if (TREE_CODE (init) == CONSTRUCTOR)
8585 {
8586 tree elem;
8587
8588 for (elem = CONSTRUCTOR_ELTS (init);
8589 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8590 elem = TREE_CHAIN (elem))
8591 ;
8592
8593 if (elem)
8594 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8595 palign);
8596 }
8597 }
8598 }
8599 }
8600
8601 /* ... fall through ... */
8602
8603 case COMPONENT_REF:
8604 case BIT_FIELD_REF:
8605 /* If the operand is a CONSTRUCTOR, we can just extract the
8606 appropriate field if it is present. Don't do this if we have
8607 already written the data since we want to refer to that copy
8608 and varasm.c assumes that's what we'll do. */
8609 if (TREE_CODE (exp) != ARRAY_REF
8610 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8611 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8612 {
8613 tree elt;
8614
8615 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8616 elt = TREE_CHAIN (elt))
8617 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8618 /* Note that unlike the case in expand_expr, we know this is
8619 BLKmode and hence not an integer. */
8620 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8621 }
8622
8623 {
8624 enum machine_mode mode1;
8625 HOST_WIDE_INT bitsize, bitpos;
8626 tree offset;
8627 int volatilep = 0;
8628 unsigned int alignment;
8629 int unsignedp;
8630 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8631 &mode1, &unsignedp, &volatilep,
8632 &alignment);
8633
8634 /* If we got back the original object, something is wrong. Perhaps
8635 we are evaluating an expression too early. In any event, don't
8636 infinitely recurse. */
8637 if (tem == exp)
8638 abort ();
8639
8640 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8641
8642 /* If this is a constant, put it into a register if it is a
8643 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8644 if (CONSTANT_P (op0))
8645 {
8646 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8647
8648 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8649 && offset == 0)
8650 op0 = force_reg (inner_mode, op0);
8651 else
8652 op0 = validize_mem (force_const_mem (inner_mode, op0));
8653 }
8654
8655 if (offset != 0)
8656 {
8657 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8658
8659 /* If this object is in a register, put it into memory.
8660 This case can't occur in C, but can in Ada if we have
8661 unchecked conversion of an expression from a scalar type to
8662 an array or record type. */
8663 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8664 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8665 {
8666 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8667
8668 mark_temp_addr_taken (memloc);
8669 emit_move_insn (memloc, op0);
8670 op0 = memloc;
8671 }
8672
8673 if (GET_CODE (op0) != MEM)
8674 abort ();
8675
8676 if (GET_MODE (offset_rtx) != ptr_mode)
8677 {
8678 #ifdef POINTERS_EXTEND_UNSIGNED
8679 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8680 #else
8681 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8682 #endif
8683 }
8684
8685 op0 = change_address (op0, VOIDmode,
8686 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8687 force_reg (ptr_mode,
8688 offset_rtx)));
8689 }
8690
8691 /* Don't forget about volatility even if this is a bitfield. */
8692 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8693 {
8694 op0 = copy_rtx (op0);
8695 MEM_VOLATILE_P (op0) = 1;
8696 }
8697
8698 /* Check the access. */
8699 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8700 {
8701 rtx to;
8702 int size;
8703
8704 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8705 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8706
8707 /* Check the access right of the pointer. */
8708 if (size > BITS_PER_UNIT)
8709 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8710 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8711 TYPE_MODE (sizetype),
8712 GEN_INT (MEMORY_USE_RO),
8713 TYPE_MODE (integer_type_node));
8714 }
8715
8716 /* In cases where an aligned union has an unaligned object
8717 as a field, we might be extracting a BLKmode value from
8718 an integer-mode (e.g., SImode) object. Handle this case
8719 by doing the extract into an object as wide as the field
8720 (which we know to be the width of a basic mode), then
8721 storing into memory, and changing the mode to BLKmode.
8722 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8723 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8724 if (mode1 == VOIDmode
8725 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8726 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8727 && (TYPE_ALIGN (type) > alignment
8728 || bitpos % TYPE_ALIGN (type) != 0)))
8729 {
8730 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8731
8732 if (ext_mode == BLKmode)
8733 {
8734 /* In this case, BITPOS must start at a byte boundary. */
8735 if (GET_CODE (op0) != MEM
8736 || bitpos % BITS_PER_UNIT != 0)
8737 abort ();
8738
8739 op0 = change_address (op0, VOIDmode,
8740 plus_constant (XEXP (op0, 0),
8741 bitpos / BITS_PER_UNIT));
8742 }
8743 else
8744 {
8745 rtx new = assign_stack_temp (ext_mode,
8746 bitsize / BITS_PER_UNIT, 0);
8747
8748 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8749 unsignedp, NULL_RTX, ext_mode,
8750 ext_mode, alignment,
8751 int_size_in_bytes (TREE_TYPE (tem)));
8752
8753 /* If the result is a record type and BITSIZE is narrower than
8754 the mode of OP0, an integral mode, and this is a big endian
8755 machine, we must put the field into the high-order bits. */
8756 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8757 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8758 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8759 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8760 size_int (GET_MODE_BITSIZE
8761 (GET_MODE (op0))
8762 - bitsize),
8763 op0, 1);
8764
8765
8766 emit_move_insn (new, op0);
8767 op0 = copy_rtx (new);
8768 PUT_MODE (op0, BLKmode);
8769 }
8770 }
8771 else
8772 /* Get a reference to just this component. */
8773 op0 = change_address (op0, mode1,
8774 plus_constant (XEXP (op0, 0),
8775 (bitpos / BITS_PER_UNIT)));
8776
8777 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8778
8779 /* Adjust the alignment in case the bit position is not
8780 a multiple of the alignment of the inner object. */
8781 while (bitpos % alignment != 0)
8782 alignment >>= 1;
8783
8784 if (GET_CODE (XEXP (op0, 0)) == REG)
8785 mark_reg_pointer (XEXP (op0, 0), alignment);
8786
8787 MEM_IN_STRUCT_P (op0) = 1;
8788 MEM_VOLATILE_P (op0) |= volatilep;
8789
8790 *palign = alignment;
8791 return op0;
8792 }
8793
8794 default:
8795 break;
8796
8797 }
8798
8799 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8800 }
8801 \f
8802 /* Return the tree node if ARG corresponds to a string constant or zero
8803 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8804 in bytes within the string that ARG is accessing. The type of the
8805 offset will be `sizetype'. */
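/* For example, if ARG is the tree for "hello" + 3 -- a PLUS_EXPR whose
   first operand is the ADDR_EXPR of a STRING_CST -- we return that
   STRING_CST and set *PTR_OFFSET to (sizetype) 3.  These are the two
   forms handled below; anything else returns zero.  */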
8806
8807 tree
8808 string_constant (arg, ptr_offset)
8809 tree arg;
8810 tree *ptr_offset;
8811 {
8812 STRIP_NOPS (arg);
8813
8814 if (TREE_CODE (arg) == ADDR_EXPR
8815 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8816 {
8817 *ptr_offset = size_zero_node;
8818 return TREE_OPERAND (arg, 0);
8819 }
8820 else if (TREE_CODE (arg) == PLUS_EXPR)
8821 {
8822 tree arg0 = TREE_OPERAND (arg, 0);
8823 tree arg1 = TREE_OPERAND (arg, 1);
8824
8825 STRIP_NOPS (arg0);
8826 STRIP_NOPS (arg1);
8827
8828 if (TREE_CODE (arg0) == ADDR_EXPR
8829 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8830 {
8831 *ptr_offset = convert (sizetype, arg1);
8832 return TREE_OPERAND (arg0, 0);
8833 }
8834 else if (TREE_CODE (arg1) == ADDR_EXPR
8835 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8836 {
8837 *ptr_offset = convert (sizetype, arg0);
8838 return TREE_OPERAND (arg1, 0);
8839 }
8840 }
8841
8842 return 0;
8843 }
8844 \f
8845 /* Expand code for a post- or pre-increment or decrement
8846 and return the RTX for the result.
8847 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8848
8849 static rtx
8850 expand_increment (exp, post, ignore)
8851 register tree exp;
8852 int post, ignore;
8853 {
8854 register rtx op0, op1;
8855 register rtx temp, value;
8856 register tree incremented = TREE_OPERAND (exp, 0);
8857 optab this_optab = add_optab;
8858 int icode;
8859 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8860 int op0_is_copy = 0;
8861 int single_insn = 0;
8862 /* 1 means we can't store into OP0 directly,
8863 because it is a subreg narrower than a word,
8864 and we don't dare clobber the rest of the word. */
8865 int bad_subreg = 0;
8866
8867 /* Stabilize any component ref that might need to be
8868 evaluated more than once below. */
8869 if (!post
8870 || TREE_CODE (incremented) == BIT_FIELD_REF
8871 || (TREE_CODE (incremented) == COMPONENT_REF
8872 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8873 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8874 incremented = stabilize_reference (incremented);
8875 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8876 ones into save exprs so that they don't accidentally get evaluated
8877 more than once by the code below. */
8878 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8879 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8880 incremented = save_expr (incremented);
8881
8882 /* Compute the operands as RTX.
8883 Note whether OP0 is the actual lvalue or a copy of it:
8884 I believe it is a copy iff it is a register or subreg
8885 and insns were generated in computing it. */
8886
8887 temp = get_last_insn ();
8888 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8889
8890 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8891 in place but instead must do sign- or zero-extension during assignment,
8892 so we copy it into a new register and let the code below use it as
8893 a copy.
8894
8895 Note that we can safely modify this SUBREG since it is known not to be
8896 shared (it was made by the expand_expr call above). */
8897
8898 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8899 {
8900 if (post)
8901 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8902 else
8903 bad_subreg = 1;
8904 }
8905 else if (GET_CODE (op0) == SUBREG
8906 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8907 {
8908 /* We cannot increment this SUBREG in place. If we are
8909 post-incrementing, get a copy of the old value. Otherwise,
8910 just mark that we cannot increment in place. */
8911 if (post)
8912 op0 = copy_to_reg (op0);
8913 else
8914 bad_subreg = 1;
8915 }
8916
8917 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8918 && temp != get_last_insn ());
8919 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8920 EXPAND_MEMORY_USE_BAD);
8921
8922 /* Decide whether incrementing or decrementing. */
8923 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8924 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8925 this_optab = sub_optab;
8926
8927 /* Convert decrement by a constant into a negative increment. */
8928 if (this_optab == sub_optab
8929 && GET_CODE (op1) == CONST_INT)
8930 {
8931 op1 = GEN_INT (- INTVAL (op1));
8932 this_optab = add_optab;
8933 }
8934
8935 /* For a preincrement, see if we can do this with a single instruction. */
8936 if (!post)
8937 {
8938 icode = (int) this_optab->handlers[(int) mode].insn_code;
8939 if (icode != (int) CODE_FOR_nothing
8940 /* Make sure that OP0 is valid for operands 0 and 1
8941 of the insn we want to queue. */
8942 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8943 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8944 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8945 single_insn = 1;
8946 }
8947
8948 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8949 then we cannot just increment OP0. We must therefore contrive to
8950 increment the original value. Then, for postincrement, we can return
8951 OP0 since it is a copy of the old value. For preincrement, expand here
8952 unless we can do it with a single insn.
8953
8954 Likewise if storing directly into OP0 would clobber high bits
8955 we need to preserve (bad_subreg). */
8956 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8957 {
8958 /* This is the easiest way to increment the value wherever it is.
8959 Problems with multiple evaluation of INCREMENTED are prevented
8960 because either (1) it is a component_ref or preincrement,
8961 in which case it was stabilized above, or (2) it is an array_ref
8962 with constant index in an array in a register, which is
8963 safe to reevaluate. */
8964 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8965 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8966 ? MINUS_EXPR : PLUS_EXPR),
8967 TREE_TYPE (exp),
8968 incremented,
8969 TREE_OPERAND (exp, 1));
8970
8971 while (TREE_CODE (incremented) == NOP_EXPR
8972 || TREE_CODE (incremented) == CONVERT_EXPR)
8973 {
8974 newexp = convert (TREE_TYPE (incremented), newexp);
8975 incremented = TREE_OPERAND (incremented, 0);
8976 }
8977
8978 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8979 return post ? op0 : temp;
8980 }
8981
8982 if (post)
8983 {
8984 /* We have a true reference to the value in OP0.
8985 If there is an insn to add or subtract in this mode, queue it.
8986 Queueing the increment insn avoids the register shuffling
8987 that often results if we must increment now and first save
8988 the old value for subsequent use. */
8989
8990 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8991 op0 = stabilize (op0);
8992 #endif
8993
8994 icode = (int) this_optab->handlers[(int) mode].insn_code;
8995 if (icode != (int) CODE_FOR_nothing
8996 /* Make sure that OP0 is valid for operands 0 and 1
8997 of the insn we want to queue. */
8998 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8999 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9000 {
9001 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9002 op1 = force_reg (mode, op1);
9003
9004 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9005 }
9006 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9007 {
9008 rtx addr = (general_operand (XEXP (op0, 0), mode)
9009 ? force_reg (Pmode, XEXP (op0, 0))
9010 : copy_to_reg (XEXP (op0, 0)));
9011 rtx temp, result;
9012
9013 op0 = change_address (op0, VOIDmode, addr);
9014 temp = force_reg (GET_MODE (op0), op0);
9015 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9016 op1 = force_reg (mode, op1);
9017
9018 /* The increment queue is LIFO, thus we have to `queue'
9019 the instructions in reverse order. */
9020 enqueue_insn (op0, gen_move_insn (op0, temp));
9021 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9022 return result;
9023 }
9024 }
9025
9026 /* Preincrement, or we can't increment with one simple insn. */
9027 if (post)
9028 /* Save a copy of the value before inc or dec, to return it later. */
9029 temp = value = copy_to_reg (op0);
9030 else
9031 /* Arrange to return the incremented value. */
9032 /* Copy the rtx because expand_binop will protect from the queue,
9033 and the results of that would be invalid for us to return
9034 if our caller does emit_queue before using our result. */
9035 temp = copy_rtx (value = op0);
9036
9037 /* Increment however we can. */
9038 op1 = expand_binop (mode, this_optab, value, op1,
9039 current_function_check_memory_usage ? NULL_RTX : op0,
9040 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9041 /* Make sure the value is stored into OP0. */
9042 if (op1 != op0)
9043 emit_move_insn (op0, op1);
9044
9045 return temp;
9046 }
9047 \f
9048 /* Expand all function calls contained within EXP, innermost ones first.
9049 But don't look within expressions that have sequence points.
9050 For each CALL_EXPR, record the rtx for its value
9051 in the CALL_EXPR_RTL field. */
9052
9053 static void
9054 preexpand_calls (exp)
9055 tree exp;
9056 {
9057 register int nops, i;
9058 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9059
9060 if (! do_preexpand_calls)
9061 return;
9062
9063 /* Only expressions and references can contain calls. */
9064
9065 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9066 return;
9067
9068 switch (TREE_CODE (exp))
9069 {
9070 case CALL_EXPR:
9071 /* Do nothing if already expanded. */
9072 if (CALL_EXPR_RTL (exp) != 0
9073 /* Do nothing if the call returns a variable-sized object. */
9074 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9075 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9076 /* Do nothing to built-in functions. */
9077 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9078 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9079 == FUNCTION_DECL)
9080 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9081 return;
9082
9083 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9084 return;
9085
9086 case COMPOUND_EXPR:
9087 case COND_EXPR:
9088 case TRUTH_ANDIF_EXPR:
9089 case TRUTH_ORIF_EXPR:
9090 /* If we find one of these, then we can be sure
9091 the adjust will be done for it (since it makes jumps).
9092 Do it now, so that if this is inside an argument
9093 of a function, we don't get the stack adjustment
9094 after some other args have already been pushed. */
9095 do_pending_stack_adjust ();
9096 return;
9097
9098 case BLOCK:
9099 case RTL_EXPR:
9100 case WITH_CLEANUP_EXPR:
9101 case CLEANUP_POINT_EXPR:
9102 case TRY_CATCH_EXPR:
9103 return;
9104
9105 case SAVE_EXPR:
9106 if (SAVE_EXPR_RTL (exp) != 0)
9107 return;
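/* ... fall through ... */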
9108
9109 default:
9110 break;
9111 }
9112
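/* Scan the operands of EXP, pre-expanding any calls they contain.  */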
9113 nops = tree_code_length[(int) TREE_CODE (exp)];
9114 for (i = 0; i < nops; i++)
9115 if (TREE_OPERAND (exp, i) != 0)
9116 {
9117 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9118 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9119 It doesn't happen before the call is made. */
9120 ;
9121 else
9122 {
9123 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9124 if (type == 'e' || type == '<' || type == '1' || type == '2'
9125 || type == 'r')
9126 preexpand_calls (TREE_OPERAND (exp, i));
9127 }
9128 }
9129 }
9130 \f
9131 /* At the start of a function, record that we have no previously-pushed
9132 arguments waiting to be popped. */
9133
9134 void
9135 init_pending_stack_adjust ()
9136 {
9137 pending_stack_adjust = 0;
9138 }
9139
9140 /* When exiting from function, if safe, clear out any pending stack adjust
9141 so the adjustment won't get done.
9142
9143 Note, if the current function calls alloca, then it must have a
9144 frame pointer regardless of the value of flag_omit_frame_pointer. */
9145
9146 void
9147 clear_pending_stack_adjust ()
9148 {
9149 #ifdef EXIT_IGNORE_STACK
9150 if (optimize > 0
9151 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9152 && EXIT_IGNORE_STACK
9153 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9154 && ! flag_inline_functions)
9155 {
9156 stack_pointer_delta -= pending_stack_adjust;
9157 pending_stack_adjust = 0;
9158 }
9159 #endif
9160 }
9161
9162 /* Pop any previously-pushed arguments that have not been popped yet. */
9163
9164 void
9165 do_pending_stack_adjust ()
9166 {
9167 if (inhibit_defer_pop == 0)
9168 {
9169 if (pending_stack_adjust != 0)
9170 adjust_stack (GEN_INT (pending_stack_adjust));
9171 pending_stack_adjust = 0;
9172 }
9173 }
9174 \f
9175 /* Expand conditional expressions. */
9176
9177 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9178 LABEL is an rtx of code CODE_LABEL, in this function and all the
9179 functions here. */
9180
9181 void
9182 jumpifnot (exp, label)
9183 tree exp;
9184 rtx label;
9185 {
9186 do_jump (exp, label, NULL_RTX);
9187 }
9188
9189 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9190
9191 void
9192 jumpif (exp, label)
9193 tree exp;
9194 rtx label;
9195 {
9196 do_jump (exp, NULL_RTX, label);
9197 }
9198
9199 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9200 the result is zero, or IF_TRUE_LABEL if the result is one.
9201 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9202 meaning fall through in that case.
9203
9204 do_jump always does any pending stack adjust except when it does not
9205 actually perform a jump. An example where there is no jump
9206 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9207
9208 This function is responsible for optimizing cases such as
9209 &&, || and comparison operators in EXP. */
9210
9211 void
9212 do_jump (exp, if_false_label, if_true_label)
9213 tree exp;
9214 rtx if_false_label, if_true_label;
9215 {
9216 register enum tree_code code = TREE_CODE (exp);
9217 /* Some cases need to create a label to jump to
9218 in order to properly fall through.
9219 These cases set DROP_THROUGH_LABEL nonzero. */
9220 rtx drop_through_label = 0;
9221 rtx temp;
9222 int i;
9223 tree type;
9224 enum machine_mode mode;
9225
9226 #ifdef MAX_INTEGER_COMPUTATION_MODE
9227 check_max_integer_computation_mode (exp);
9228 #endif
9229
9230 emit_queue ();
9231
9232 switch (code)
9233 {
9234 case ERROR_MARK:
9235 break;
9236
9237 case INTEGER_CST:
9238 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9239 if (temp)
9240 emit_jump (temp);
9241 break;
9242
9243 #if 0
9244 /* This is not true with #pragma weak */
9245 case ADDR_EXPR:
9246 /* The address of something can never be zero. */
9247 if (if_true_label)
9248 emit_jump (if_true_label);
9249 break;
9250 #endif
9251
9252 case NOP_EXPR:
9253 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9254 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9255 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9256 goto normal;
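/* ... fall through ... */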
9257 case CONVERT_EXPR:
9258 /* If we are narrowing the operand, we have to do the compare in the
9259 narrower mode. */
9260 if ((TYPE_PRECISION (TREE_TYPE (exp))
9261 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9262 goto normal;
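/* ... fall through ... */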
9263 case NON_LVALUE_EXPR:
9264 case REFERENCE_EXPR:
9265 case ABS_EXPR:
9266 case NEGATE_EXPR:
9267 case LROTATE_EXPR:
9268 case RROTATE_EXPR:
9269 /* These cannot change zero->non-zero or vice versa. */
9270 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9271 break;
9272
9273 case WITH_RECORD_EXPR:
9274 /* Put the object on the placeholder list, recurse through our first
9275 operand, and pop the list. */
9276 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9277 placeholder_list);
9278 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9279 placeholder_list = TREE_CHAIN (placeholder_list);
9280 break;
9281
9282 #if 0
9283 /* This is never less insns than evaluating the PLUS_EXPR followed by
9284 a test and can be longer if the test is eliminated. */
9285 case PLUS_EXPR:
9286 /* Reduce to minus. */
9287 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9288 TREE_OPERAND (exp, 0),
9289 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9290 TREE_OPERAND (exp, 1))));
9291 /* Process as MINUS. */
9292 #endif
9293
9294 case MINUS_EXPR:
9295 /* Non-zero iff operands of minus differ. */
9296 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9297 TREE_OPERAND (exp, 0),
9298 TREE_OPERAND (exp, 1)),
9299 NE, NE, if_false_label, if_true_label);
9300 break;
9301
9302 case BIT_AND_EXPR:
9303 /* If we are AND'ing with a small constant, do this comparison in the
9304 smallest type that fits. If the machine doesn't have comparisons
9305 that small, it will be converted back to the wider comparison.
9306 This helps if we are testing the sign bit of a narrower object.
9307 combine can't do this for us because it can't know whether a
9308 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9309
9310 if (! SLOW_BYTE_ACCESS
9311 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9312 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9313 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9314 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9315 && (type = type_for_mode (mode, 1)) != 0
9316 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9317 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9318 != CODE_FOR_nothing))
9319 {
9320 do_jump (convert (type, exp), if_false_label, if_true_label);
9321 break;
9322 }
9323 goto normal;
9324
9325 case TRUTH_NOT_EXPR:
9326 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9327 break;
9328
9329 case TRUTH_ANDIF_EXPR:
9330 if (if_false_label == 0)
9331 if_false_label = drop_through_label = gen_label_rtx ();
9332 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9333 start_cleanup_deferral ();
9334 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9335 end_cleanup_deferral ();
9336 break;
9337
9338 case TRUTH_ORIF_EXPR:
9339 if (if_true_label == 0)
9340 if_true_label = drop_through_label = gen_label_rtx ();
9341 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9342 start_cleanup_deferral ();
9343 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9344 end_cleanup_deferral ();
9345 break;
9346
9347 case COMPOUND_EXPR:
9348 push_temp_slots ();
9349 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9350 preserve_temp_slots (NULL_RTX);
9351 free_temp_slots ();
9352 pop_temp_slots ();
9353 emit_queue ();
9354 do_pending_stack_adjust ();
9355 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9356 break;
9357
9358 case COMPONENT_REF:
9359 case BIT_FIELD_REF:
9360 case ARRAY_REF:
9361 {
9362 HOST_WIDE_INT bitsize, bitpos;
9363 int unsignedp;
9364 enum machine_mode mode;
9365 tree type;
9366 tree offset;
9367 int volatilep = 0;
9368 unsigned int alignment;
9369
9370 /* Get description of this reference. We don't actually care
9371 about the underlying object here. */
9372 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9373 &unsignedp, &volatilep, &alignment);
9374
9375 type = type_for_size (bitsize, unsignedp);
9376 if (! SLOW_BYTE_ACCESS
9377 && type != 0 && bitsize >= 0
9378 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9379 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9380 != CODE_FOR_nothing))
9381 {
9382 do_jump (convert (type, exp), if_false_label, if_true_label);
9383 break;
9384 }
9385 goto normal;
9386 }
9387
9388 case COND_EXPR:
9389 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9390 if (integer_onep (TREE_OPERAND (exp, 1))
9391 && integer_zerop (TREE_OPERAND (exp, 2)))
9392 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9393
9394 else if (integer_zerop (TREE_OPERAND (exp, 1))
9395 && integer_onep (TREE_OPERAND (exp, 2)))
9396 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9397
9398 else
9399 {
9400 register rtx label1 = gen_label_rtx ();
9401 drop_through_label = gen_label_rtx ();
9402
9403 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9404
9405 start_cleanup_deferral ();
9406 /* Now the THEN-expression. */
9407 do_jump (TREE_OPERAND (exp, 1),
9408 if_false_label ? if_false_label : drop_through_label,
9409 if_true_label ? if_true_label : drop_through_label);
9410 /* In case the do_jump just above never jumps. */
9411 do_pending_stack_adjust ();
9412 emit_label (label1);
9413
9414 /* Now the ELSE-expression. */
9415 do_jump (TREE_OPERAND (exp, 2),
9416 if_false_label ? if_false_label : drop_through_label,
9417 if_true_label ? if_true_label : drop_through_label);
9418 end_cleanup_deferral ();
9419 }
9420 break;
9421
9422 case EQ_EXPR:
9423 {
9424 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9425
9426 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9427 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9428 {
9429 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9430 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9431 do_jump
9432 (fold
9433 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9434 fold (build (EQ_EXPR, TREE_TYPE (exp),
9435 fold (build1 (REALPART_EXPR,
9436 TREE_TYPE (inner_type),
9437 exp0)),
9438 fold (build1 (REALPART_EXPR,
9439 TREE_TYPE (inner_type),
9440 exp1)))),
9441 fold (build (EQ_EXPR, TREE_TYPE (exp),
9442 fold (build1 (IMAGPART_EXPR,
9443 TREE_TYPE (inner_type),
9444 exp0)),
9445 fold (build1 (IMAGPART_EXPR,
9446 TREE_TYPE (inner_type),
9447 exp1)))))),
9448 if_false_label, if_true_label);
9449 }
9450
9451 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9452 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9453
9454 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9455 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9456 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9457 else
9458 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9459 break;
9460 }
9461
9462 case NE_EXPR:
9463 {
9464 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9465
9466 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9467 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9468 {
9469 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9470 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9471 do_jump
9472 (fold
9473 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9474 fold (build (NE_EXPR, TREE_TYPE (exp),
9475 fold (build1 (REALPART_EXPR,
9476 TREE_TYPE (inner_type),
9477 exp0)),
9478 fold (build1 (REALPART_EXPR,
9479 TREE_TYPE (inner_type),
9480 exp1)))),
9481 fold (build (NE_EXPR, TREE_TYPE (exp),
9482 fold (build1 (IMAGPART_EXPR,
9483 TREE_TYPE (inner_type),
9484 exp0)),
9485 fold (build1 (IMAGPART_EXPR,
9486 TREE_TYPE (inner_type),
9487 exp1)))))),
9488 if_false_label, if_true_label);
9489 }
9490
9491 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9492 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9493
9494 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9495 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9496 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9497 else
9498 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9499 break;
9500 }
9501
9502 case LT_EXPR:
9503 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9504 if (GET_MODE_CLASS (mode) == MODE_INT
9505 && ! can_compare_p (LT, mode, ccp_jump))
9506 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9507 else
9508 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9509 break;
9510
9511 case LE_EXPR:
9512 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9513 if (GET_MODE_CLASS (mode) == MODE_INT
9514 && ! can_compare_p (LE, mode, ccp_jump))
9515 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9516 else
9517 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9518 break;
9519
9520 case GT_EXPR:
9521 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9522 if (GET_MODE_CLASS (mode) == MODE_INT
9523 && ! can_compare_p (GT, mode, ccp_jump))
9524 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9525 else
9526 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9527 break;
9528
9529 case GE_EXPR:
9530 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9531 if (GET_MODE_CLASS (mode) == MODE_INT
9532 && ! can_compare_p (GE, mode, ccp_jump))
9533 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9534 else
9535 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9536 break;
9537
9538 case UNORDERED_EXPR:
9539 case ORDERED_EXPR:
9540 {
9541 enum rtx_code cmp, rcmp;
9542 int do_rev;
9543
9544 if (code == UNORDERED_EXPR)
9545 cmp = UNORDERED, rcmp = ORDERED;
9546 else
9547 cmp = ORDERED, rcmp = UNORDERED;
9548 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9549
9550 do_rev = 0;
9551 if (! can_compare_p (cmp, mode, ccp_jump)
9552 && (can_compare_p (rcmp, mode, ccp_jump)
9553 /* If the target doesn't provide either UNORDERED or ORDERED
9554 comparisons, canonicalize on UNORDERED for the library. */
9555 || rcmp == UNORDERED))
9556 do_rev = 1;
9557
9558 if (! do_rev)
9559 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9560 else
9561 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9562 }
9563 break;
9564
9565 {
9566 enum rtx_code rcode1;
9567 enum tree_code tcode2;
9568
9569 case UNLT_EXPR:
9570 rcode1 = UNLT;
9571 tcode2 = LT_EXPR;
9572 goto unordered_bcc;
9573 case UNLE_EXPR:
9574 rcode1 = UNLE;
9575 tcode2 = LE_EXPR;
9576 goto unordered_bcc;
9577 case UNGT_EXPR:
9578 rcode1 = UNGT;
9579 tcode2 = GT_EXPR;
9580 goto unordered_bcc;
9581 case UNGE_EXPR:
9582 rcode1 = UNGE;
9583 tcode2 = GE_EXPR;
9584 goto unordered_bcc;
9585 case UNEQ_EXPR:
9586 rcode1 = UNEQ;
9587 tcode2 = EQ_EXPR;
9588 goto unordered_bcc;
9589
9590 unordered_bcc:
9591 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9592 if (can_compare_p (rcode1, mode, ccp_jump))
9593 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9594 if_true_label);
9595 else
9596 {
9597 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9598 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9599 tree cmp0, cmp1;
9600
9601 /* If the target doesn't support combined unordered
9602 compares, decompose into UNORDERED + comparison. */
9603 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9604 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9605 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9606 do_jump (exp, if_false_label, if_true_label);
9607 }
9608 }
9609 break;
9610
9611 default:
9612 normal:
9613 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9614 #if 0
9615 /* This is not needed any more and causes poor code since it causes
9616 comparisons and tests from non-SI objects to have different code
9617 sequences. */
9618 /* Copy to register to avoid generating bad insns by cse
9619 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9620 if (!cse_not_expected && GET_CODE (temp) == MEM)
9621 temp = copy_to_reg (temp);
9622 #endif
9623 do_pending_stack_adjust ();
9624 /* Do any postincrements in the expression that was tested. */
9625 emit_queue ();
9626
9627 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9628 {
9629 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9630 if (target)
9631 emit_jump (target);
9632 }
9633 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9634 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9635 /* Note swapping the labels gives us not-equal. */
9636 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9637 else if (GET_MODE (temp) != VOIDmode)
9638 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9639 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9640 GET_MODE (temp), NULL_RTX, 0,
9641 if_false_label, if_true_label);
9642 else
9643 abort ();
9644 }
9645
9646 if (drop_through_label)
9647 {
9648 /* If do_jump produces code that might be jumped around,
9649 do any stack adjusts from that code, before the place
9650 where control merges in. */
9651 do_pending_stack_adjust ();
9652 emit_label (drop_through_label);
9653 }
9654 }
9655 \f
9656 /* Given a comparison expression EXP for values too wide to be compared
9657 with one insn, test the comparison and jump to the appropriate label.
9658 The code of EXP is ignored; we always test GT if SWAP is 0,
9659 and LT if SWAP is 1. */
9660
9661 static void
9662 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9663 tree exp;
9664 int swap;
9665 rtx if_false_label, if_true_label;
9666 {
9667 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9668 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9669 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9670 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9671
9672 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9673 }
9674
9675 /* Compare OP0 with OP1, word at a time, in mode MODE.
9676 UNSIGNEDP says to do unsigned comparison.
9677 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9678
9679 void
9680 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9681 enum machine_mode mode;
9682 int unsignedp;
9683 rtx op0, op1;
9684 rtx if_false_label, if_true_label;
9685 {
9686 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9687 rtx drop_through_label = 0;
9688 int i;
9689
9690 if (! if_true_label || ! if_false_label)
9691 drop_through_label = gen_label_rtx ();
9692 if (! if_true_label)
9693 if_true_label = drop_through_label;
9694 if (! if_false_label)
9695 if_false_label = drop_through_label;
9696
9697 /* Compare a word at a time, high order first. */
9698 for (i = 0; i < nwords; i++)
9699 {
9700 rtx op0_word, op1_word;
9701
9702 if (WORDS_BIG_ENDIAN)
9703 {
9704 op0_word = operand_subword_force (op0, i, mode);
9705 op1_word = operand_subword_force (op1, i, mode);
9706 }
9707 else
9708 {
9709 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9710 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9711 }
9712
9713 /* All but the high-order word must be compared as unsigned. */
9714 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9715 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9716 NULL_RTX, if_true_label);
9717
9718 /* Consider lower words only if these are equal. */
9719 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9720 NULL_RTX, 0, NULL_RTX, if_false_label);
9721 }
9722
9723 if (if_false_label)
9724 emit_jump (if_false_label);
9725 if (drop_through_label)
9726 emit_label (drop_through_label);
9727 }
9728
9729 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9730 with one insn, test the comparison and jump to the appropriate label. */
9731
9732 static void
9733 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9734 tree exp;
9735 rtx if_false_label, if_true_label;
9736 {
9737 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9738 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9739 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9740 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9741 int i;
9742 rtx drop_through_label = 0;
9743
9744 if (! if_false_label)
9745 drop_through_label = if_false_label = gen_label_rtx ();
9746
9747 for (i = 0; i < nwords; i++)
9748 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9749 operand_subword_force (op1, i, mode),
9750 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9751 word_mode, NULL_RTX, 0, if_false_label,
9752 NULL_RTX);
9753
9754 if (if_true_label)
9755 emit_jump (if_true_label);
9756 if (drop_through_label)
9757 emit_label (drop_through_label);
9758 }
9759 \f
9760 /* Jump according to whether OP0 is 0.
9761 We assume that OP0 has an integer mode that is too wide
9762 for the available compare insns. */
9763
9764 void
9765 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9766 rtx op0;
9767 rtx if_false_label, if_true_label;
9768 {
9769 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9770 rtx part;
9771 int i;
9772 rtx drop_through_label = 0;
9773
9774 /* The fastest way of doing this comparison on almost any machine is to
9775 "or" all the words and compare the result. If all have to be loaded
9776 from memory and this is a very wide item, it's possible this may
9777 be slower, but that's highly unlikely. */
9778
9779 part = gen_reg_rtx (word_mode);
9780 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9781 for (i = 1; i < nwords && part != 0; i++)
9782 part = expand_binop (word_mode, ior_optab, part,
9783 operand_subword_force (op0, i, GET_MODE (op0)),
9784 part, 1, OPTAB_WIDEN);
9785
9786 if (part != 0)
9787 {
9788 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9789 NULL_RTX, 0, if_false_label, if_true_label);
9790
9791 return;
9792 }
9793
9794 /* If we couldn't do the "or" simply, do this with a series of compares. */
9795 if (! if_false_label)
9796 drop_through_label = if_false_label = gen_label_rtx ();
9797
9798 for (i = 0; i < nwords; i++)
9799 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9800 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9801 if_false_label, NULL_RTX);
9802
9803 if (if_true_label)
9804 emit_jump (if_true_label);
9805
9806 if (drop_through_label)
9807 emit_label (drop_through_label);
9808 }
9809 \f
9810 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9811 (including code to compute the values to be compared)
9812 and set (CC0) according to the result.
9813 The decision as to signed or unsigned comparison must be made by the caller.
9814
9815 We force a stack adjustment unless there are currently
9816 things pushed on the stack that aren't yet used.
9817
9818 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9819 compared.
9820
9821 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9822 size of MODE should be used. */
9823
9824 rtx
9825 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9826 register rtx op0, op1;
9827 enum rtx_code code;
9828 int unsignedp;
9829 enum machine_mode mode;
9830 rtx size;
9831 unsigned int align;
9832 {
9833 rtx tem;
9834
9835 /* If one operand is constant, make it the second one. Only do this
9836 if the other operand is not constant as well. */
9837
9838 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9839 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9840 {
9841 tem = op0;
9842 op0 = op1;
9843 op1 = tem;
9844 code = swap_condition (code);
9845 }
9846
9847 if (flag_force_mem)
9848 {
9849 op0 = force_not_mem (op0);
9850 op1 = force_not_mem (op1);
9851 }
9852
9853 do_pending_stack_adjust ();
9854
9855 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9856 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9857 return tem;
9858
9859 #if 0
9860 /* There's no need to do this now that combine.c can eliminate lots of
9861 sign extensions. This can be less efficient in certain cases on other
9862 machines. */
9863
9864 /* If this is a signed equality comparison, we can do it as an
9865 unsigned comparison since zero-extension is cheaper than sign
9866 extension and comparisons with zero are done as unsigned. This is
9867 the case even on machines that can do fast sign extension, since
9868 zero-extension is easier to combine with other operations than
9869 sign-extension is. If we are comparing against a constant, we must
9870 convert it to what it would look like unsigned. */
9871 if ((code == EQ || code == NE) && ! unsignedp
9872 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9873 {
9874 if (GET_CODE (op1) == CONST_INT
9875 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9876 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9877 unsignedp = 1;
9878 }
9879 #endif
9880
9881 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9882
9883 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9884 }
9885
9886 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9887 The decision as to signed or unsigned comparison must be made by the caller.
9888
9889 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9890 compared.
9891
9892 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9893 size of MODE should be used. */
9894
9895 void
9896 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9897 if_false_label, if_true_label)
9898 register rtx op0, op1;
9899 enum rtx_code code;
9900 int unsignedp;
9901 enum machine_mode mode;
9902 rtx size;
9903 unsigned int align;
9904 rtx if_false_label, if_true_label;
9905 {
9906 rtx tem;
9907 int dummy_true_label = 0;
9908
9909 /* Reverse the comparison if that is safe and we want to jump if it is
9910 false. */
9911 if (! if_true_label && ! FLOAT_MODE_P (mode))
9912 {
9913 if_true_label = if_false_label;
9914 if_false_label = 0;
9915 code = reverse_condition (code);
9916 }
9917
9918 /* If one operand is constant, make it the second one. Only do this
9919 if the other operand is not constant as well. */
9920
9921 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9922 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9923 {
9924 tem = op0;
9925 op0 = op1;
9926 op1 = tem;
9927 code = swap_condition (code);
9928 }
9929
9930 if (flag_force_mem)
9931 {
9932 op0 = force_not_mem (op0);
9933 op1 = force_not_mem (op1);
9934 }
9935
9936 do_pending_stack_adjust ();
9937
9938 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9939 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9940 {
9941 if (tem == const_true_rtx)
9942 {
9943 if (if_true_label)
9944 emit_jump (if_true_label);
9945 }
9946 else
9947 {
9948 if (if_false_label)
9949 emit_jump (if_false_label);
9950 }
9951 return;
9952 }
9953
9954 #if 0
9955 /* There's no need to do this now that combine.c can eliminate lots of
9956 sign extensions. This can be less efficient in certain cases on other
9957 machines. */
9958
9959 /* If this is a signed equality comparison, we can do it as an
9960 unsigned comparison since zero-extension is cheaper than sign
9961 extension and comparisons with zero are done as unsigned. This is
9962 the case even on machines that can do fast sign extension, since
9963 zero-extension is easier to combine with other operations than
9964 sign-extension is. If we are comparing against a constant, we must
9965 convert it to what it would look like unsigned. */
9966 if ((code == EQ || code == NE) && ! unsignedp
9967 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9968 {
9969 if (GET_CODE (op1) == CONST_INT
9970 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9971 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9972 unsignedp = 1;
9973 }
9974 #endif
9975
9976 if (! if_true_label)
9977 {
9978 dummy_true_label = 1;
9979 if_true_label = gen_label_rtx ();
9980 }
9981
9982 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9983 if_true_label);
9984
9985 if (if_false_label)
9986 emit_jump (if_false_label);
9987 if (dummy_true_label)
9988 emit_label (if_true_label);
9989 }
9990
9991 /* Generate code for a comparison expression EXP (including code to compute
9992 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9993 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9994 generated code will drop through.
9995 SIGNED_CODE should be the rtx operation for this comparison for
9996 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9997
9998 We force a stack adjustment unless there are currently
9999 things pushed on the stack that aren't yet used. */
10000
10001 static void
10002 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10003 if_true_label)
10004 register tree exp;
10005 enum rtx_code signed_code, unsigned_code;
10006 rtx if_false_label, if_true_label;
10007 {
10008 unsigned int align0, align1;
10009 register rtx op0, op1;
10010 register tree type;
10011 register enum machine_mode mode;
10012 int unsignedp;
10013 enum rtx_code code;
10014
10015 /* Don't crash if the comparison was erroneous. */
10016 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10017 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10018 return;
10019
10020 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10021 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10022 mode = TYPE_MODE (type);
10023 unsignedp = TREE_UNSIGNED (type);
10024 code = unsignedp ? unsigned_code : signed_code;
10025
10026 #ifdef HAVE_canonicalize_funcptr_for_compare
10027 /* If function pointers need to be "canonicalized" before they can
10028 be reliably compared, then canonicalize them. */
10029 if (HAVE_canonicalize_funcptr_for_compare
10030 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10031 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10032 == FUNCTION_TYPE))
10033 {
10034 rtx new_op0 = gen_reg_rtx (mode);
10035
10036 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10037 op0 = new_op0;
10038 }
10039
10040 if (HAVE_canonicalize_funcptr_for_compare
10041 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10042 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10043 == FUNCTION_TYPE))
10044 {
10045 rtx new_op1 = gen_reg_rtx (mode);
10046
10047 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10048 op1 = new_op1;
10049 }
10050 #endif
10051
10052 /* Do any postincrements in the expression that was tested. */
10053 emit_queue ();
10054
10055 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10056 ((mode == BLKmode)
10057 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10058 MIN (align0, align1),
10059 if_false_label, if_true_label);
10060 }
10061 \f
10062 /* Generate code to calculate EXP using a store-flag instruction
10063 and return an rtx for the result. EXP is either a comparison
10064 or a TRUTH_NOT_EXPR whose operand is a comparison.
10065
10066 If TARGET is nonzero, store the result there if convenient.
10067
10068 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10069 cheap.
10070
10071 Return zero if there is no suitable set-flag instruction
10072 available on this machine.
10073
10074 Once expand_expr has been called on the arguments of the comparison,
10075 we are committed to doing the store flag, since it is not safe to
10076 re-evaluate the expression. We emit the store-flag insn by calling
10077 emit_store_flag, but only expand the arguments if we have a reason
10078 to believe that emit_store_flag will be successful. If we think that
10079 it will, but it isn't, we have to simulate the store-flag with a
10080 set/jump/set sequence. */
10081
10082 static rtx
10083 do_store_flag (exp, target, mode, only_cheap)
10084 tree exp;
10085 rtx target;
10086 enum machine_mode mode;
10087 int only_cheap;
10088 {
10089 enum rtx_code code;
10090 tree arg0, arg1, type;
10091 tree tem;
10092 enum machine_mode operand_mode;
10093 int invert = 0;
10094 int unsignedp;
10095 rtx op0, op1;
10096 enum insn_code icode;
10097 rtx subtarget = target;
10098 rtx result, label;
10099
10100 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10101 result at the end. We can't simply invert the test since it would
10102 have already been inverted if it were valid. This case occurs for
10103 some floating-point comparisons. */
10104
10105 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10106 invert = 1, exp = TREE_OPERAND (exp, 0);
10107
10108 arg0 = TREE_OPERAND (exp, 0);
10109 arg1 = TREE_OPERAND (exp, 1);
10110 type = TREE_TYPE (arg0);
10111 operand_mode = TYPE_MODE (type);
10112 unsignedp = TREE_UNSIGNED (type);
10113
10114 /* We won't bother with BLKmode store-flag operations because it would mean
10115 passing a lot of information to emit_store_flag. */
10116 if (operand_mode == BLKmode)
10117 return 0;
10118
10119 /* We won't bother with store-flag operations involving function pointers
10120 when function pointers must be canonicalized before comparisons. */
10121 #ifdef HAVE_canonicalize_funcptr_for_compare
10122 if (HAVE_canonicalize_funcptr_for_compare
10123 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10124 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10125 == FUNCTION_TYPE))
10126 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10127 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10128 == FUNCTION_TYPE))))
10129 return 0;
10130 #endif
10131
10132 STRIP_NOPS (arg0);
10133 STRIP_NOPS (arg1);
10134
10135 /* Get the rtx comparison code to use. We know that EXP is a comparison
10136 operation of some type. Some comparisons against 1 and -1 can be
10137 converted to comparisons with zero. Do so here so that the tests
10138 below will be aware that we have a comparison with zero. These
10139 tests will not catch constants in the first operand, but constants
10140 are rarely passed as the first operand. */
10141
10142 switch (TREE_CODE (exp))
10143 {
10144 case EQ_EXPR:
10145 code = EQ;
10146 break;
10147 case NE_EXPR:
10148 code = NE;
10149 break;
10150 case LT_EXPR:
10151 if (integer_onep (arg1))
10152 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10153 else
10154 code = unsignedp ? LTU : LT;
10155 break;
10156 case LE_EXPR:
10157 if (! unsignedp && integer_all_onesp (arg1))
10158 arg1 = integer_zero_node, code = LT;
10159 else
10160 code = unsignedp ? LEU : LE;
10161 break;
10162 case GT_EXPR:
10163 if (! unsignedp && integer_all_onesp (arg1))
10164 arg1 = integer_zero_node, code = GE;
10165 else
10166 code = unsignedp ? GTU : GT;
10167 break;
10168 case GE_EXPR:
10169 if (integer_onep (arg1))
10170 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10171 else
10172 code = unsignedp ? GEU : GE;
10173 break;
10174
10175 case UNORDERED_EXPR:
10176 code = UNORDERED;
10177 break;
10178 case ORDERED_EXPR:
10179 code = ORDERED;
10180 break;
10181 case UNLT_EXPR:
10182 code = UNLT;
10183 break;
10184 case UNLE_EXPR:
10185 code = UNLE;
10186 break;
10187 case UNGT_EXPR:
10188 code = UNGT;
10189 break;
10190 case UNGE_EXPR:
10191 code = UNGE;
10192 break;
10193 case UNEQ_EXPR:
10194 code = UNEQ;
10195 break;
10196
10197 default:
10198 abort ();
10199 }
10200
10201 /* Put a constant second. */
10202 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10203 {
10204 tem = arg0; arg0 = arg1; arg1 = tem;
10205 code = swap_condition (code);
10206 }
10207
10208 /* If this is an equality or inequality test of a single bit, we can
10209 do this by shifting the bit being tested to the low-order bit and
10210 masking the result with the constant 1. If the condition was EQ,
10211 we xor it with 1. This does not require an scc insn and is faster
10212 than an scc insn even if we have it. */
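/* For example, (X & 8) != 0 is computed as (X >> 3) & 1; an EQ test
   additionally XORs the low-order bit with 1.  */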
10213
10214 if ((code == NE || code == EQ)
10215 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10216 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10217 {
10218 tree inner = TREE_OPERAND (arg0, 0);
10219 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10220 int ops_unsignedp;
10221
10222 /* If INNER is a right shift by a constant and it plus BITNUM does
10223 not overflow, adjust BITNUM and INNER. */
10224
10225 if (TREE_CODE (inner) == RSHIFT_EXPR
10226 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10227 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10228 && bitnum < TYPE_PRECISION (type)
10229 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10230 TYPE_PRECISION (type) - bitnum))
10231 {
10232 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10233 inner = TREE_OPERAND (inner, 0);
10234 }
10235
10236 /* If we are going to be able to omit the AND below, we must do our
10237 operations as unsigned. If we must use the AND, we have a choice.
10238 Normally unsigned is faster, but for some machines signed is. */
10239 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10240 #ifdef LOAD_EXTEND_OP
10241 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10242 #else
10243 : 1
10244 #endif
10245 );
10246
10247 if (subtarget == 0 || GET_CODE (subtarget) != REG
10248 || GET_MODE (subtarget) != operand_mode
10249 || ! safe_from_p (subtarget, inner, 1))
10250 subtarget = 0;
10251
10252 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10253
10254 if (bitnum != 0)
10255 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10256 size_int (bitnum), subtarget, ops_unsignedp);
10257
10258 if (GET_MODE (op0) != mode)
10259 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10260
10261 if ((code == EQ && ! invert) || (code == NE && invert))
10262 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10263 ops_unsignedp, OPTAB_LIB_WIDEN);
10264
10265 /* Put the AND last so it can combine with more things. */
10266 if (bitnum != TYPE_PRECISION (type) - 1)
10267 op0 = expand_and (op0, const1_rtx, subtarget);
10268
10269 return op0;
10270 }
10271
10272 /* Now see if we are likely to be able to do this. Return if not. */
10273 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10274 return 0;
10275
10276 icode = setcc_gen_code[(int) code];
10277 if (icode == CODE_FOR_nothing
10278 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10279 {
10280 /* We can only do this if it is one of the special cases that
10281 can be handled without an scc insn. */
10282 if ((code == LT && integer_zerop (arg1))
10283 || (! only_cheap && code == GE && integer_zerop (arg1)))
10284 ;
10285 else if (BRANCH_COST >= 0
10286 && ! only_cheap && (code == NE || code == EQ)
10287 && TREE_CODE (type) != REAL_TYPE
10288 && ((abs_optab->handlers[(int) operand_mode].insn_code
10289 != CODE_FOR_nothing)
10290 || (ffs_optab->handlers[(int) operand_mode].insn_code
10291 != CODE_FOR_nothing)))
10292 ;
10293 else
10294 return 0;
10295 }
10296
10297 preexpand_calls (exp);
10298 if (subtarget == 0 || GET_CODE (subtarget) != REG
10299 || GET_MODE (subtarget) != operand_mode
10300 || ! safe_from_p (subtarget, arg1, 1))
10301 subtarget = 0;
10302
10303 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10304 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10305
10306 if (target == 0)
10307 target = gen_reg_rtx (mode);
10308
10309 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10310 because, if emit_store_flag does anything, it will succeed and
10311 OP0 and OP1 will not be used subsequently. */
10312
10313 result = emit_store_flag (target, code,
10314 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10315 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10316 operand_mode, unsignedp, 1);
10317
10318 if (result)
10319 {
10320 if (invert)
10321 result = expand_binop (mode, xor_optab, result, const1_rtx,
10322 result, 0, OPTAB_LIB_WIDEN);
10323 return result;
10324 }
10325
10326 /* If this failed, we have to do this with set/compare/jump/set code. */
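  /* The fallback sequence is, in outline (with INVERT interchanging the
     two constants):

         target = 1;
         if (op0 CODE op1) goto label;
         target = 0;
       label:
  */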
10327 if (GET_CODE (target) != REG
10328 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10329 target = gen_reg_rtx (GET_MODE (target));
10330
10331 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10332 result = compare_from_rtx (op0, op1, code, unsignedp,
10333 operand_mode, NULL_RTX, 0);
10334 if (GET_CODE (result) == CONST_INT)
10335 return (((result == const0_rtx && ! invert)
10336 || (result != const0_rtx && invert))
10337 ? const0_rtx : const1_rtx);
10338
10339 label = gen_label_rtx ();
10340 if (bcc_gen_fctn[(int) code] == 0)
10341 abort ();
10342
10343 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10344 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10345 emit_label (label);
10346
10347 return target;
10348 }
10349 \f
10350 /* Generate a tablejump instruction (used for switch statements). */
10351
10352 #ifdef HAVE_tablejump
10353
10354 /* INDEX is the value being switched on, with the lowest value
10355 in the table already subtracted.
10356 MODE is its expected mode (needed if INDEX is constant).
10357 RANGE is the length of the jump table.
10358 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10359
10360 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10361 index value is out of range. */
10362
10363 void
10364 do_tablejump (index, mode, range, table_label, default_label)
10365 rtx index, range, table_label, default_label;
10366 enum machine_mode mode;
10367 {
10368 register rtx temp, vector;
10369
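  /* In outline, the code emitted below is

         if ((unsigned) index > range) goto default_label;
         temp = *(table_label + index * entry_size);
         goto *temp;                  (a computed jump)

     where entry_size is GET_MODE_SIZE (CASE_VECTOR_MODE).  */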
10370 /* Do an unsigned comparison (in the proper mode) between the index
10371 expression and the value which represents the length of the range.
10372 Since we just finished subtracting the lower bound of the range
10373 from the index expression, this comparison allows us to simultaneously
10374 check that the original index expression value is both greater than
10375 or equal to the minimum value of the range and less than or equal to
10376 the maximum value of the range. */
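  /* For example, with a case range of 3 ... 10, RANGE is 7 and the lower
     bound 3 has already been subtracted from INDEX.  An original value of
     12 gives INDEX == 9 > 7, and a value of 1 gives INDEX == (unsigned) -2,
     which is also greater than 7, so both branch to DEFAULT_LABEL.  */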
10377
10378 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10379 0, default_label);
10380
10381 /* If index is in range, it must fit in Pmode.
10382 Convert to Pmode so we can index with it. */
10383 if (mode != Pmode)
10384 index = convert_to_mode (Pmode, index, 1);
10385
10386 /* Don't let a MEM slip through, because then the INDEX that comes
10387    out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, and
10388    break_out_memory_refs will go to work on it and mess it up.  */
10389 #ifdef PIC_CASE_VECTOR_ADDRESS
10390 if (flag_pic && GET_CODE (index) != REG)
10391 index = copy_to_mode_reg (Pmode, index);
10392 #endif
10393
10394 /* If flag_force_addr were to affect this address
10395 it could interfere with the tricky assumptions made
10396 about addresses that contain label-refs,
10397 which may be valid only very near the tablejump itself. */
10398 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10399 GET_MODE_SIZE, because this indicates how large insns are. The other
10400 uses should all be Pmode, because they are addresses. This code
10401 could fail if addresses and insns are not the same size. */
10402 index = gen_rtx_PLUS (Pmode,
10403 gen_rtx_MULT (Pmode, index,
10404 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10405 gen_rtx_LABEL_REF (Pmode, table_label));
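  /* For example, on a 32-bit target with 4-byte case-vector entries the
     address built above has the form

         (plus:SI (mult:SI (reg) (const_int 4)) (label_ref <table_label>))

     i.e. table_label + index * entry_size.  */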
10406 #ifdef PIC_CASE_VECTOR_ADDRESS
10407 if (flag_pic)
10408 index = PIC_CASE_VECTOR_ADDRESS (index);
10409 else
10410 #endif
10411 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10412 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10413 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10414 RTX_UNCHANGING_P (vector) = 1;
10415 convert_move (temp, vector, 0);
10416
10417 emit_jump_insn (gen_tablejump (temp, table_label));
10418
10419 /* If we are generating PIC code or if the table is PC-relative, the
10420 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10421 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10422 emit_barrier ();
10423 }
10424
10425 #endif /* HAVE_tablejump */