expr.c (expand_expr, [...]): Don't check for checking memory usage if not in a function.
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "defaults.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 #ifndef ACCUMULATE_OUTGOING_ARGS
50 #define ACCUMULATE_OUTGOING_ARGS 0
51 #endif
52
53 /* Supply a default definition for PUSH_ARGS. */
54 #ifndef PUSH_ARGS
55 #ifdef PUSH_ROUNDING
56 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
57 #else
58 #define PUSH_ARGS 0
59 #endif
60 #endif
61
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
64
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
67
68 #ifdef PUSH_ROUNDING
69
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first */
72 #endif
73
74 #endif
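
/* For illustration: on a typical target where the stack grows downward but
   argument offsets grow upward, only STACK_GROWS_DOWNWARD is defined, so
   PUSH_ARGS_REVERSED is defined and arguments are expanded and pushed
   last-to-first; the first argument then ends up at the lowest address,
   nearest the stack pointer.  */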
75
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
83
84 /* Assume that case vectors are not pc-relative. */
85 #ifndef CASE_VECTOR_PC_RELATIVE
86 #define CASE_VECTOR_PC_RELATIVE 0
87 #endif
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Nonzero to generate code for all the subroutines within an
98 expression before generating the upper levels of the expression.
99 Nowadays this is never zero. */
100 int do_preexpand_calls = 1;
101
102 /* Don't check memory usage, since code is being emitted to check memory
103 usage. Used when current_function_check_memory_usage is true, to avoid
104 infinite recursion. */
105 static int in_check_memory_usage;
106
107 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
108 static tree placeholder_list = 0;
109
110 /* This structure is used by move_by_pieces to describe the move to
111 be performed. */
112 struct move_by_pieces
113 {
114 rtx to;                   /* Destination block (a BLKmode MEM).  */
115 rtx to_addr;              /* Address of TO, possibly copied to a register.  */
116 int autinc_to;            /* Nonzero if TO_ADDR is an autoincrement address.  */
117 int explicit_inc_to;      /* <0 / >0 if explicit add insns adjust TO_ADDR.  */
118 int to_struct;            /* MEM_IN_STRUCT_P value for each piece written.  */
119 int to_readonly;          /* RTX_UNCHANGING_P value for each piece written.  */
120 rtx from;                 /* Source block; the FROM fields mirror the TO fields.  */
121 rtx from_addr;
122 int autinc_from;
123 int explicit_inc_from;
124 int from_struct;
125 int from_readonly;
126 int len;                  /* Number of bytes remaining to move.  */
127 int offset;               /* Current byte offset into the blocks.  */
128 int reverse;              /* Nonzero to move from high addresses downward.  */
129 };
130
131 /* This structure is used by clear_by_pieces to describe the clear to
132 be performed. Its fields mirror the `to' fields of struct move_by_pieces. */
133
134 struct clear_by_pieces
135 {
136 rtx to;
137 rtx to_addr;
138 int autinc_to;
139 int explicit_inc_to;
140 int to_struct;
141 int len;
142 int offset;
143 int reverse;
144 };
145
146 extern struct obstack permanent_obstack;
147
148 static rtx get_push_address PARAMS ((int));
149
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
152 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
153 struct move_by_pieces *));
154 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
155 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
156 enum machine_mode,
157 struct clear_by_pieces *));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, unsigned int, int));
163 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
164 HOST_WIDE_INT));
165 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
166 HOST_WIDE_INT, enum machine_mode,
167 tree, enum machine_mode, int,
168 unsigned int, HOST_WIDE_INT, int));
169 static enum memory_use_mode
170 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
171 static tree save_noncopied_parts PARAMS ((tree, tree));
172 static tree init_noncopied_parts PARAMS ((tree, tree));
173 static int safe_from_p PARAMS ((rtx, tree, int));
174 static int fixed_type_p PARAMS ((tree));
175 static rtx var_rtx PARAMS ((tree));
176 static int readonly_fields_p PARAMS ((tree));
177 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
178 static rtx expand_increment PARAMS ((tree, int, int));
179 static void preexpand_calls PARAMS ((tree));
180 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
181 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
182 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
183 rtx, rtx));
184 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
185
186 /* Record for each mode whether we can move a register directly to or
187 from an object of that mode in memory. If we can't, we won't try
188 to use that mode directly when accessing a field of that mode. */
189
190 static char direct_load[NUM_MACHINE_MODES];
191 static char direct_store[NUM_MACHINE_MODES];
192
193 /* If a memory-to-memory move would take MOVE_RATIO or more simple
194 move-instruction sequences, we will do a movstr or libcall instead. */
195
196 #ifndef MOVE_RATIO
197 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
198 #define MOVE_RATIO 2
199 #else
200 /* If we are optimizing for space (-Os), cut down the default move ratio */
201 #define MOVE_RATIO (optimize_size ? 3 : 15)
202 #endif
203 #endif
204
205 /* This macro is used to determine whether move_by_pieces should be called
206 to perform a structure copy. */
207 #ifndef MOVE_BY_PIECES_P
208 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
209 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
210 #endif
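
/* A rough example of this heuristic, assuming a 32-bit target with
   MOVE_MAX == 4 and no movstr patterns: a word-aligned 16-byte copy is
   counted by move_by_pieces_ninsns as four SImode moves, so with the
   default MOVE_RATIO of 15 it is expanded inline, while with -Os
   (MOVE_RATIO == 3) emit_block_move emits a call to memcpy or bcopy
   instead.  */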
211
212 /* This array records the insn_code of insns to perform block moves. */
213 enum insn_code movstr_optab[NUM_MACHINE_MODES];
214
215 /* This array records the insn_code of insns to perform block clears. */
216 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
217
218 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
219
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
222 #endif
223 \f
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
226
227 void
228 init_expr_once ()
229 {
230 rtx insn, pat;
231 enum machine_mode mode;
232 int num_clobbers;
233 rtx mem, mem1;
234 char *free_point;
235
236 start_sequence ();
237
238 /* Since we are on the permanent obstack, we must be sure we save this
239 spot AFTER we call start_sequence, since it will reuse the rtl it
240 makes. */
241 free_point = (char *) oballoc (0);
242
243 /* Try indexing by frame ptr and try by stack ptr.
244 It is known that on the Convex the stack ptr isn't a valid index.
245 With luck, one or the other is valid on any machine. */
246 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
247 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
248
249 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
250 pat = PATTERN (insn);
251
252 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
253 mode = (enum machine_mode) ((int) mode + 1))
254 {
255 int regno;
256 rtx reg;
257
258 direct_load[(int) mode] = direct_store[(int) mode] = 0;
259 PUT_MODE (mem, mode);
260 PUT_MODE (mem1, mode);
261
262 /* See if there is some register that can be used in this mode and
263 directly loaded or stored from memory. */
264
265 if (mode != VOIDmode && mode != BLKmode)
266 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
267 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
268 regno++)
269 {
270 if (! HARD_REGNO_MODE_OK (regno, mode))
271 continue;
272
273 reg = gen_rtx_REG (mode, regno);
274
275 SET_SRC (pat) = mem;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
279
280 SET_SRC (pat) = mem1;
281 SET_DEST (pat) = reg;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_load[(int) mode] = 1;
284
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
289
290 SET_SRC (pat) = reg;
291 SET_DEST (pat) = mem1;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_store[(int) mode] = 1;
294 }
295 }
296
297 end_sequence ();
298 obfree (free_point);
299 }
300
301 /* This is run at the start of compiling a function. */
302
303 void
304 init_expr ()
305 {
306 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
307
308 pending_chain = 0;
309 pending_stack_adjust = 0;
310 stack_pointer_delta = 0;
311 inhibit_defer_pop = 0;
312 saveregs_value = 0;
313 apply_args_value = 0;
314 forced_labels = 0;
315 }
316
317 void
318 mark_expr_status (p)
319 struct expr_status *p;
320 {
321 if (p == NULL)
322 return;
323
324 ggc_mark_rtx (p->x_saveregs_value);
325 ggc_mark_rtx (p->x_apply_args_value);
326 ggc_mark_rtx (p->x_forced_labels);
327 }
328
329 void
330 free_expr_status (f)
331 struct function *f;
332 {
333 free (f->expr);
334 f->expr = NULL;
335 }
336
337 /* Small sanity check that the queue is empty at the end of a function. */
338 void
339 finish_expr_for_function ()
340 {
341 if (pending_chain)
342 abort ();
343 }
344 \f
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
347
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
351
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
354
355 static rtx
356 enqueue_insn (var, body)
357 rtx var, body;
358 {
359 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
360 body, pending_chain);
361 return pending_chain;
362 }
363
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
370
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
374
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
378
379 rtx
380 protect_from_queue (x, modify)
381 register rtx x;
382 int modify;
383 {
384 register RTX_CODE code = GET_CODE (x);
385
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain == 0)
389 return x;
390 #endif
391
392 if (code != QUEUED)
393 {
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
398 shared. */
399 if (code == MEM && GET_MODE (x) != BLKmode
400 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
401 {
402 register rtx y = XEXP (x, 0);
403 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
404
405 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
406 MEM_COPY_ATTRIBUTES (new, x);
407 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
408
409 if (QUEUED_INSN (y))
410 {
411 register rtx temp = gen_reg_rtx (GET_MODE (new));
412 emit_insn_before (gen_move_insn (temp, new),
413 QUEUED_INSN (y));
414 return temp;
415 }
416 return new;
417 }
418 /* Otherwise, recursively protect the subexpressions of all
419 the kinds of rtx's that can contain a QUEUED. */
420 if (code == MEM)
421 {
422 rtx tem = protect_from_queue (XEXP (x, 0), 0);
423 if (tem != XEXP (x, 0))
424 {
425 x = copy_rtx (x);
426 XEXP (x, 0) = tem;
427 }
428 }
429 else if (code == PLUS || code == MULT)
430 {
431 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
432 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
433 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
434 {
435 x = copy_rtx (x);
436 XEXP (x, 0) = new0;
437 XEXP (x, 1) = new1;
438 }
439 }
440 return x;
441 }
442 /* If the increment has not happened, use the variable itself. */
443 if (QUEUED_INSN (x) == 0)
444 return QUEUED_VAR (x);
445 /* If the increment has happened and a pre-increment copy exists,
446 use that copy. */
447 if (QUEUED_COPY (x) != 0)
448 return QUEUED_COPY (x);
449 /* The increment has happened but we haven't set up a pre-increment copy.
450 Set one up now, and use it. */
451 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
452 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
453 QUEUED_INSN (x));
454 return QUEUED_COPY (x);
455 }
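
/* A sketch of the usual calling pattern (convert_move and emit_block_move
   below are real examples):

	to = protect_from_queue (to, 1);	-- TO will be written
	from = protect_from_queue (from, 0);	-- FROM is only read
	... emit insns that use TO and FROM ...

   The queued increments are flushed later by emit_queue; as explained
   above, the values returned by protect_from_queue must not be saved
   across such a flush.  */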
456
457 /* Return nonzero if X contains a QUEUED expression:
458 if it contains anything that will be altered by a queued increment.
459 We handle only combinations of MEM, PLUS, MINUS and MULT operators
460 since memory addresses generally contain only those. */
461
462 int
463 queued_subexp_p (x)
464 rtx x;
465 {
466 register enum rtx_code code = GET_CODE (x);
467 switch (code)
468 {
469 case QUEUED:
470 return 1;
471 case MEM:
472 return queued_subexp_p (XEXP (x, 0));
473 case MULT:
474 case PLUS:
475 case MINUS:
476 return (queued_subexp_p (XEXP (x, 0))
477 || queued_subexp_p (XEXP (x, 1)));
478 default:
479 return 0;
480 }
481 }
482
483 /* Perform all the pending incrementations. */
484
485 void
486 emit_queue ()
487 {
488 register rtx p;
489 while ((p = pending_chain))
490 {
491 rtx body = QUEUED_BODY (p);
492
493 if (GET_CODE (body) == SEQUENCE)
494 {
495 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
496 emit_insn (QUEUED_BODY (p));
497 }
498 else
499 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
500 pending_chain = QUEUED_NEXT (p);
501 }
502 }
503 \f
504 /* Copy data from FROM to TO, where the machine modes are not the same.
505 Both modes may be integer, or both may be floating.
506 UNSIGNEDP should be nonzero if FROM is an unsigned type.
507 This causes zero-extension instead of sign-extension. */
508
509 void
510 convert_move (to, from, unsignedp)
511 register rtx to, from;
512 int unsignedp;
513 {
514 enum machine_mode to_mode = GET_MODE (to);
515 enum machine_mode from_mode = GET_MODE (from);
516 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
517 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
518 enum insn_code code;
519 rtx libcall;
520
521 /* rtx code for making an equivalent value. */
522 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
523
524 to = protect_from_queue (to, 1);
525 from = protect_from_queue (from, 0);
526
527 if (to_real != from_real)
528 abort ();
529
530 /* If FROM is a SUBREG that indicates that we have already done at least
531 the required extension, strip it. We don't handle such SUBREGs as
532 TO here. */
533
534 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
535 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
536 >= GET_MODE_SIZE (to_mode))
537 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
538 from = gen_lowpart (to_mode, from), from_mode = to_mode;
539
540 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
541 abort ();
542
543 if (to_mode == from_mode
544 || (from_mode == VOIDmode && CONSTANT_P (from)))
545 {
546 emit_move_insn (to, from);
547 return;
548 }
549
550 if (to_real)
551 {
552 rtx value;
553
554 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
555 {
556 /* Try converting directly if the insn is supported. */
557 if ((code = can_extend_p (to_mode, from_mode, 0))
558 != CODE_FOR_nothing)
559 {
560 emit_unop_insn (code, to, from, UNKNOWN);
561 return;
562 }
563 }
564
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
567 {
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
569 return;
570 }
571 #endif
572 #ifdef HAVE_trunctqfqf2
573 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
574 {
575 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
576 return;
577 }
578 #endif
579 #ifdef HAVE_truncsfqf2
580 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_truncdfqf2
587 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593 #ifdef HAVE_truncxfqf2
594 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
595 {
596 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
597 return;
598 }
599 #endif
600 #ifdef HAVE_trunctfqf2
601 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
602 {
603 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
604 return;
605 }
606 #endif
607
608 #ifdef HAVE_trunctqfhf2
609 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
610 {
611 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615 #ifdef HAVE_truncsfhf2
616 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
617 {
618 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622 #ifdef HAVE_truncdfhf2
623 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629 #ifdef HAVE_truncxfhf2
630 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
631 {
632 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
633 return;
634 }
635 #endif
636 #ifdef HAVE_trunctfhf2
637 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
638 {
639 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
640 return;
641 }
642 #endif
643
644 #ifdef HAVE_truncsftqf2
645 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
646 {
647 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651 #ifdef HAVE_truncdftqf2
652 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
653 {
654 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658 #ifdef HAVE_truncxftqf2
659 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
660 {
661 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665 #ifdef HAVE_trunctftqf2
666 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
667 {
668 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
669 return;
670 }
671 #endif
672
673 #ifdef HAVE_truncdfsf2
674 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
675 {
676 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_truncxfsf2
681 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_trunctfsf2
688 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
689 {
690 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_truncxfdf2
695 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
696 {
697 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701 #ifdef HAVE_trunctfdf2
702 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
703 {
704 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
705 return;
706 }
707 #endif
708
709 libcall = (rtx) 0;
710 switch (from_mode)
711 {
712 case SFmode:
713 switch (to_mode)
714 {
715 case DFmode:
716 libcall = extendsfdf2_libfunc;
717 break;
718
719 case XFmode:
720 libcall = extendsfxf2_libfunc;
721 break;
722
723 case TFmode:
724 libcall = extendsftf2_libfunc;
725 break;
726
727 default:
728 break;
729 }
730 break;
731
732 case DFmode:
733 switch (to_mode)
734 {
735 case SFmode:
736 libcall = truncdfsf2_libfunc;
737 break;
738
739 case XFmode:
740 libcall = extenddfxf2_libfunc;
741 break;
742
743 case TFmode:
744 libcall = extenddftf2_libfunc;
745 break;
746
747 default:
748 break;
749 }
750 break;
751
752 case XFmode:
753 switch (to_mode)
754 {
755 case SFmode:
756 libcall = truncxfsf2_libfunc;
757 break;
758
759 case DFmode:
760 libcall = truncxfdf2_libfunc;
761 break;
762
763 default:
764 break;
765 }
766 break;
767
768 case TFmode:
769 switch (to_mode)
770 {
771 case SFmode:
772 libcall = trunctfsf2_libfunc;
773 break;
774
775 case DFmode:
776 libcall = trunctfdf2_libfunc;
777 break;
778
779 default:
780 break;
781 }
782 break;
783
784 default:
785 break;
786 }
787
788 if (libcall == (rtx) 0)
789 /* This conversion is not implemented yet. */
790 abort ();
791
792 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
793 1, from, from_mode);
794 emit_move_insn (to, value);
795 return;
796 }
797
798 /* Now both modes are integers. */
799
800 /* Handle expanding beyond a word. */
801 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
802 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
803 {
804 rtx insns;
805 rtx lowpart;
806 rtx fill_value;
807 rtx lowfrom;
808 int i;
809 enum machine_mode lowpart_mode;
810 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
811
812 /* Try converting directly if the insn is supported. */
813 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
814 != CODE_FOR_nothing)
815 {
816 /* If FROM is a SUBREG, put it into a register. Do this
817 so that we always generate the same set of insns for
818 better cse'ing; if an intermediate assignment occurred,
819 we won't be doing the operation directly on the SUBREG. */
820 if (optimize > 0 && GET_CODE (from) == SUBREG)
821 from = force_reg (from_mode, from);
822 emit_unop_insn (code, to, from, equiv_code);
823 return;
824 }
825 /* Next, try converting via full word. */
826 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
827 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
828 != CODE_FOR_nothing))
829 {
830 if (GET_CODE (to) == REG)
831 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
832 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
833 emit_unop_insn (code, to,
834 gen_lowpart (word_mode, to), equiv_code);
835 return;
836 }
837
838 /* No special multiword conversion insn; do it by hand. */
839 start_sequence ();
840
841 /* Since we will turn this into a no conflict block, we must ensure
842 that the source does not overlap the target. */
843
844 if (reg_overlap_mentioned_p (to, from))
845 from = force_reg (from_mode, from);
846
847 /* Get a copy of FROM widened to a word, if necessary. */
848 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
849 lowpart_mode = word_mode;
850 else
851 lowpart_mode = from_mode;
852
853 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
854
855 lowpart = gen_lowpart (lowpart_mode, to);
856 emit_move_insn (lowpart, lowfrom);
857
858 /* Compute the value to put in each remaining word. */
859 if (unsignedp)
860 fill_value = const0_rtx;
861 else
862 {
863 #ifdef HAVE_slt
864 if (HAVE_slt
865 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
866 && STORE_FLAG_VALUE == -1)
867 {
868 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
869 lowpart_mode, 0, 0);
870 fill_value = gen_reg_rtx (word_mode);
871 emit_insn (gen_slt (fill_value));
872 }
873 else
874 #endif
875 {
876 fill_value
877 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
878 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
879 NULL_RTX, 0);
880 fill_value = convert_to_mode (word_mode, fill_value, 1);
881 }
882 }
883
884 /* Fill the remaining words. */
885 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
886 {
887 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
888 rtx subword = operand_subword (to, index, 1, to_mode);
889
890 if (subword == 0)
891 abort ();
892
893 if (fill_value != subword)
894 emit_move_insn (subword, fill_value);
895 }
896
897 insns = get_insns ();
898 end_sequence ();
899
900 emit_no_conflict_block (insns, to, from, NULL_RTX,
901 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
902 return;
903 }
904
905 /* Truncating multi-word to a word or less. */
906 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
907 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
908 {
909 if (!((GET_CODE (from) == MEM
910 && ! MEM_VOLATILE_P (from)
911 && direct_load[(int) to_mode]
912 && ! mode_dependent_address_p (XEXP (from, 0)))
913 || GET_CODE (from) == REG
914 || GET_CODE (from) == SUBREG))
915 from = force_reg (from_mode, from);
916 convert_move (to, gen_lowpart (word_mode, from), 0);
917 return;
918 }
919
920 /* Handle pointer conversion */ /* SPEE 900220 */
921 if (to_mode == PQImode)
922 {
923 if (from_mode != QImode)
924 from = convert_to_mode (QImode, from, unsignedp);
925
926 #ifdef HAVE_truncqipqi2
927 if (HAVE_truncqipqi2)
928 {
929 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
930 return;
931 }
932 #endif /* HAVE_truncqipqi2 */
933 abort ();
934 }
935
936 if (from_mode == PQImode)
937 {
938 if (to_mode != QImode)
939 {
940 from = convert_to_mode (QImode, from, unsignedp);
941 from_mode = QImode;
942 }
943 else
944 {
945 #ifdef HAVE_extendpqiqi2
946 if (HAVE_extendpqiqi2)
947 {
948 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
949 return;
950 }
951 #endif /* HAVE_extendpqiqi2 */
952 abort ();
953 }
954 }
955
956 if (to_mode == PSImode)
957 {
958 if (from_mode != SImode)
959 from = convert_to_mode (SImode, from, unsignedp);
960
961 #ifdef HAVE_truncsipsi2
962 if (HAVE_truncsipsi2)
963 {
964 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
965 return;
966 }
967 #endif /* HAVE_truncsipsi2 */
968 abort ();
969 }
970
971 if (from_mode == PSImode)
972 {
973 if (to_mode != SImode)
974 {
975 from = convert_to_mode (SImode, from, unsignedp);
976 from_mode = SImode;
977 }
978 else
979 {
980 #ifdef HAVE_extendpsisi2
981 if (HAVE_extendpsisi2)
982 {
983 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
984 return;
985 }
986 #endif /* HAVE_extendpsisi2 */
987 abort ();
988 }
989 }
990
991 if (to_mode == PDImode)
992 {
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
995
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
998 {
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_truncdipdi2 */
1003 abort ();
1004 }
1005
1006 if (from_mode == PDImode)
1007 {
1008 if (to_mode != DImode)
1009 {
1010 from = convert_to_mode (DImode, from, unsignedp);
1011 from_mode = DImode;
1012 }
1013 else
1014 {
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1017 {
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019 return;
1020 }
1021 #endif /* HAVE_extendpdidi2 */
1022 abort ();
1023 }
1024 }
1025
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1028
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1033 {
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1045 return;
1046 }
1047
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1050 {
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1054 {
1055 emit_unop_insn (code, to, from, equiv_code);
1056 return;
1057 }
1058 else
1059 {
1060 enum machine_mode intermediate;
1061 rtx tmp;
1062 tree shift_amount;
1063
1064 /* Search for a mode to convert via. */
1065 for (intermediate = from_mode; intermediate != VOIDmode;
1066 intermediate = GET_MODE_WIDER_MODE (intermediate))
1067 if (((can_extend_p (to_mode, intermediate, unsignedp)
1068 != CODE_FOR_nothing)
1069 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1070 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1071 GET_MODE_BITSIZE (intermediate))))
1072 && (can_extend_p (intermediate, from_mode, unsignedp)
1073 != CODE_FOR_nothing))
1074 {
1075 convert_move (to, convert_to_mode (intermediate, from,
1076 unsignedp), unsignedp);
1077 return;
1078 }
1079
1080 /* No suitable intermediate mode.
1081 Generate what we need with shifts. */
1082 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1083 - GET_MODE_BITSIZE (from_mode), 0);
1084 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1085 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1086 to, unsignedp);
1087 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1088 to, unsignedp);
1089 if (tmp != to)
1090 emit_move_insn (to, tmp);
1091 return;
1092 }
1093 }
1094
1095 /* Support special truncate insns for certain modes. */
1096
1097 if (from_mode == DImode && to_mode == SImode)
1098 {
1099 #ifdef HAVE_truncdisi2
1100 if (HAVE_truncdisi2)
1101 {
1102 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1103 return;
1104 }
1105 #endif
1106 convert_move (to, force_reg (from_mode, from), unsignedp);
1107 return;
1108 }
1109
1110 if (from_mode == DImode && to_mode == HImode)
1111 {
1112 #ifdef HAVE_truncdihi2
1113 if (HAVE_truncdihi2)
1114 {
1115 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1116 return;
1117 }
1118 #endif
1119 convert_move (to, force_reg (from_mode, from), unsignedp);
1120 return;
1121 }
1122
1123 if (from_mode == DImode && to_mode == QImode)
1124 {
1125 #ifdef HAVE_truncdiqi2
1126 if (HAVE_truncdiqi2)
1127 {
1128 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1129 return;
1130 }
1131 #endif
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1133 return;
1134 }
1135
1136 if (from_mode == SImode && to_mode == HImode)
1137 {
1138 #ifdef HAVE_truncsihi2
1139 if (HAVE_truncsihi2)
1140 {
1141 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1142 return;
1143 }
1144 #endif
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1146 return;
1147 }
1148
1149 if (from_mode == SImode && to_mode == QImode)
1150 {
1151 #ifdef HAVE_truncsiqi2
1152 if (HAVE_truncsiqi2)
1153 {
1154 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1155 return;
1156 }
1157 #endif
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 return;
1160 }
1161
1162 if (from_mode == HImode && to_mode == QImode)
1163 {
1164 #ifdef HAVE_trunchiqi2
1165 if (HAVE_trunchiqi2)
1166 {
1167 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1168 return;
1169 }
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1173 }
1174
1175 if (from_mode == TImode && to_mode == DImode)
1176 {
1177 #ifdef HAVE_trunctidi2
1178 if (HAVE_trunctidi2)
1179 {
1180 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1181 return;
1182 }
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1186 }
1187
1188 if (from_mode == TImode && to_mode == SImode)
1189 {
1190 #ifdef HAVE_trunctisi2
1191 if (HAVE_trunctisi2)
1192 {
1193 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1194 return;
1195 }
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1199 }
1200
1201 if (from_mode == TImode && to_mode == HImode)
1202 {
1203 #ifdef HAVE_trunctihi2
1204 if (HAVE_trunctihi2)
1205 {
1206 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1207 return;
1208 }
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1212 }
1213
1214 if (from_mode == TImode && to_mode == QImode)
1215 {
1216 #ifdef HAVE_trunctiqi2
1217 if (HAVE_trunctiqi2)
1218 {
1219 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1220 return;
1221 }
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1225 }
1226
1227 /* Handle truncation of volatile memrefs, and so on;
1228 the things that couldn't be truncated directly,
1229 and for which there was no special instruction. */
1230 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1231 {
1232 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1233 emit_move_insn (to, temp);
1234 return;
1235 }
1236
1237 /* Mode combination is not recognized. */
1238 abort ();
1239 }
1240
1241 /* Return an rtx for a value that would result
1242 from converting X to mode MODE.
1243 Both X and MODE may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245 This can be done by referring to a part of X in place
1246 or by copying to a new temporary with conversion.
1247
1248 This function *must not* call protect_from_queue
1249 except when putting X into an insn (in which case convert_move does it). */
1250
1251 rtx
1252 convert_to_mode (mode, x, unsignedp)
1253 enum machine_mode mode;
1254 rtx x;
1255 int unsignedp;
1256 {
1257 return convert_modes (mode, VOIDmode, x, unsignedp);
1258 }
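
/* For example, a caller that wants an SImode register REG widened into a
   fresh DImode pseudo with sign extension can write (REG and WIDE being
   hypothetical names)

	rtx wide = convert_to_mode (DImode, reg, 0);

   convert_modes allocates the pseudo and convert_move then picks a direct
   extend insn, an intermediate mode, or a shift sequence as appropriate.  */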
1259
1260 /* Return an rtx for a value that would result
1261 from converting X from mode OLDMODE to mode MODE.
1262 Both modes may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1264
1265 This can be done by referring to a part of X in place
1266 or by copying to a new temporary with conversion.
1267
1268 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1269
1270 This function *must not* call protect_from_queue
1271 except when putting X into an insn (in which case convert_move does it). */
1272
1273 rtx
1274 convert_modes (mode, oldmode, x, unsignedp)
1275 enum machine_mode mode, oldmode;
1276 rtx x;
1277 int unsignedp;
1278 {
1279 register rtx temp;
1280
1281 /* If FROM is a SUBREG that indicates that we have already done at least
1282 the required extension, strip it. */
1283
1284 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1285 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1286 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1287 x = gen_lowpart (mode, x);
1288
1289 if (GET_MODE (x) != VOIDmode)
1290 oldmode = GET_MODE (x);
1291
1292 if (mode == oldmode)
1293 return x;
1294
1295 /* There is one case that we must handle specially: If we are converting
1296 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1297 we are to interpret the constant as unsigned, gen_lowpart will do
1298 the wrong thing if the constant appears negative. What we want to do is
1299 make the high-order word of the constant zero, not all ones. */
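
/* Concretely, with a 32-bit HOST_WIDE_INT, converting (const_int -1) to an
   unsigned DImode value must yield low word 0xffffffff and high word 0;
   the immed_double_const call below produces that, whereas gen_lowpart
   would sign-extend to all ones.  */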
1300
1301 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1302 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1303 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1304 {
1305 HOST_WIDE_INT val = INTVAL (x);
1306
1307 if (oldmode != VOIDmode
1308 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1309 {
1310 int width = GET_MODE_BITSIZE (oldmode);
1311
1312 /* We need to zero extend VAL. */
1313 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1314 }
1315
1316 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1317 }
1318
1319 /* We can do this with a gen_lowpart if both desired and current modes
1320 are integer, and this is either a constant integer, a register, or a
1321 non-volatile MEM. Except for the constant case where MODE is no
1322 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1323
1324 if ((GET_CODE (x) == CONST_INT
1325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1326 || (GET_MODE_CLASS (mode) == MODE_INT
1327 && GET_MODE_CLASS (oldmode) == MODE_INT
1328 && (GET_CODE (x) == CONST_DOUBLE
1329 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1330 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1331 && direct_load[(int) mode])
1332 || (GET_CODE (x) == REG
1333 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1334 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1335 {
1336 /* ?? If we don't know OLDMODE, we have to assume here that
1337 X does not need sign- or zero-extension. This may not be
1338 the case, but it's the best we can do. */
1339 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1340 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1341 {
1342 HOST_WIDE_INT val = INTVAL (x);
1343 int width = GET_MODE_BITSIZE (oldmode);
1344
1345 /* We must sign or zero-extend in this case. Start by
1346 zero-extending, then sign extend if we need to. */
1347 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1348 if (! unsignedp
1349 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1350 val |= (HOST_WIDE_INT) (-1) << width;
1351
1352 return GEN_INT (val);
1353 }
1354
1355 return gen_lowpart (mode, x);
1356 }
1357
1358 temp = gen_reg_rtx (mode);
1359 convert_move (temp, x, unsignedp);
1360 return temp;
1361 }
1362 \f
1363
1364 /* This macro is used to determine what the largest unit size that
1365 move_by_pieces can use is. */
1366
1367 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1368 move efficiently, as opposed to MOVE_MAX which is the maximum
1369 number of bytes we can move with a single instruction. */
1370
1371 #ifndef MOVE_MAX_PIECES
1372 #define MOVE_MAX_PIECES MOVE_MAX
1373 #endif
1374
1375 /* Generate several move instructions to copy LEN bytes
1376 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1377 The caller must pass FROM and TO
1378 through protect_from_queue before calling.
1379 ALIGN is maximum alignment we can assume. */
1380
1381 void
1382 move_by_pieces (to, from, len, align)
1383 rtx to, from;
1384 int len;
1385 unsigned int align;
1386 {
1387 struct move_by_pieces data;
1388 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1389 unsigned int max_size = MOVE_MAX_PIECES + 1;
1390 enum machine_mode mode = VOIDmode, tmode;
1391 enum insn_code icode;
1392
1393 data.offset = 0;
1394 data.to_addr = to_addr;
1395 data.from_addr = from_addr;
1396 data.to = to;
1397 data.from = from;
1398 data.autinc_to
1399 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1400 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1401 data.autinc_from
1402 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1403 || GET_CODE (from_addr) == POST_INC
1404 || GET_CODE (from_addr) == POST_DEC);
1405
1406 data.explicit_inc_from = 0;
1407 data.explicit_inc_to = 0;
1408 data.reverse
1409 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1410 if (data.reverse) data.offset = len;
1411 data.len = len;
1412
1413 data.to_struct = MEM_IN_STRUCT_P (to);
1414 data.from_struct = MEM_IN_STRUCT_P (from);
1415 data.to_readonly = RTX_UNCHANGING_P (to);
1416 data.from_readonly = RTX_UNCHANGING_P (from);
1417
1418 /* If copying requires more than two move insns,
1419 copy addresses to registers (to make displacements shorter)
1420 and use post-increment if available. */
1421 if (!(data.autinc_from && data.autinc_to)
1422 && move_by_pieces_ninsns (len, align) > 2)
1423 {
1424 /* Find the mode of the largest move... */
1425 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1426 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1427 if (GET_MODE_SIZE (tmode) < max_size)
1428 mode = tmode;
1429
1430 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1431 {
1432 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1433 data.autinc_from = 1;
1434 data.explicit_inc_from = -1;
1435 }
1436 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1437 {
1438 data.from_addr = copy_addr_to_reg (from_addr);
1439 data.autinc_from = 1;
1440 data.explicit_inc_from = 1;
1441 }
1442 if (!data.autinc_from && CONSTANT_P (from_addr))
1443 data.from_addr = copy_addr_to_reg (from_addr);
1444 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1445 {
1446 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1447 data.autinc_to = 1;
1448 data.explicit_inc_to = -1;
1449 }
1450 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1451 {
1452 data.to_addr = copy_addr_to_reg (to_addr);
1453 data.autinc_to = 1;
1454 data.explicit_inc_to = 1;
1455 }
1456 if (!data.autinc_to && CONSTANT_P (to_addr))
1457 data.to_addr = copy_addr_to_reg (to_addr);
1458 }
1459
1460 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1461 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1462 align = MOVE_MAX * BITS_PER_UNIT;
1463
1464 /* First move what we can in the largest integer mode, then go to
1465 successively smaller modes. */
1466
1467 while (max_size > 1)
1468 {
1469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1471 if (GET_MODE_SIZE (tmode) < max_size)
1472 mode = tmode;
1473
1474 if (mode == VOIDmode)
1475 break;
1476
1477 icode = mov_optab->handlers[(int) mode].insn_code;
1478 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1479 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1480
1481 max_size = GET_MODE_SIZE (mode);
1482 }
1483
1484 /* The code above should have handled everything. */
1485 if (data.len > 0)
1486 abort ();
1487 }
1488
1489 /* Return number of insns required to move L bytes by pieces.
1490 ALIGN (in bits) is the maximum alignment we can assume. */
1491
1492 static int
1493 move_by_pieces_ninsns (l, align)
1494 unsigned int l;
1495 unsigned int align;
1496 {
1497 register int n_insns = 0;
1498 unsigned int max_size = MOVE_MAX + 1;
1499
1500 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1501 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1502 align = MOVE_MAX * BITS_PER_UNIT;
1503
1504 while (max_size > 1)
1505 {
1506 enum machine_mode mode = VOIDmode, tmode;
1507 enum insn_code icode;
1508
1509 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1510 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1511 if (GET_MODE_SIZE (tmode) < max_size)
1512 mode = tmode;
1513
1514 if (mode == VOIDmode)
1515 break;
1516
1517 icode = mov_optab->handlers[(int) mode].insn_code;
1518 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1519 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1520
1521 max_size = GET_MODE_SIZE (mode);
1522 }
1523
1524 return n_insns;
1525 }
1526
1527 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1528 with move instructions for mode MODE. GENFUN is the gen_... function
1529 to make a move insn for that mode. DATA has all the other info. */
1530
1531 static void
1532 move_by_pieces_1 (genfun, mode, data)
1533 rtx (*genfun) PARAMS ((rtx, ...));
1534 enum machine_mode mode;
1535 struct move_by_pieces *data;
1536 {
1537 register int size = GET_MODE_SIZE (mode);
1538 register rtx to1, from1;
1539
1540 while (data->len >= size)
1541 {
1542 if (data->reverse) data->offset -= size;
1543
1544 to1 = (data->autinc_to
1545 ? gen_rtx_MEM (mode, data->to_addr)
1546 : copy_rtx (change_address (data->to, mode,
1547 plus_constant (data->to_addr,
1548 data->offset))));
1549 MEM_IN_STRUCT_P (to1) = data->to_struct;
1550 RTX_UNCHANGING_P (to1) = data->to_readonly;
1551
1552 from1
1553 = (data->autinc_from
1554 ? gen_rtx_MEM (mode, data->from_addr)
1555 : copy_rtx (change_address (data->from, mode,
1556 plus_constant (data->from_addr,
1557 data->offset))));
1558 MEM_IN_STRUCT_P (from1) = data->from_struct;
1559 RTX_UNCHANGING_P (from1) = data->from_readonly;
1560
1561 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1562 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1563 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1564 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1565
1566 emit_insn ((*genfun) (to1, from1));
1567 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1568 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1569 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1570 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1571
1572 if (! data->reverse) data->offset += size;
1573
1574 data->len -= size;
1575 }
1576 }
1577 \f
1578 /* Emit code to move a block Y to a block X.
1579 This may be done with string-move instructions,
1580 with multiple scalar move instructions, or with a library call.
1581
1582 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1583 with mode BLKmode.
1584 SIZE is an rtx that says how long they are.
1585 ALIGN is the maximum alignment we can assume they have.
1586
1587 Return the address of the new block, if memcpy is called and returns it,
1588 0 otherwise. */
1589
1590 rtx
1591 emit_block_move (x, y, size, align)
1592 rtx x, y;
1593 rtx size;
1594 unsigned int align;
1595 {
1596 rtx retval = 0;
1597 #ifdef TARGET_MEM_FUNCTIONS
1598 static tree fn;
1599 tree call_expr, arg_list;
1600 #endif
1601
1602 if (GET_MODE (x) != BLKmode)
1603 abort ();
1604
1605 if (GET_MODE (y) != BLKmode)
1606 abort ();
1607
1608 x = protect_from_queue (x, 1);
1609 y = protect_from_queue (y, 0);
1610 size = protect_from_queue (size, 0);
1611
1612 if (GET_CODE (x) != MEM)
1613 abort ();
1614 if (GET_CODE (y) != MEM)
1615 abort ();
1616 if (size == 0)
1617 abort ();
1618
1619 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1620 move_by_pieces (x, y, INTVAL (size), align);
1621 else
1622 {
1623 /* Try the most limited insn first, because there's no point
1624 including more than one in the machine description unless
1625 the more limited one has some advantage. */
1626
1627 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1628 enum machine_mode mode;
1629
1630 /* Since this is a move insn, we don't care about volatility. */
1631 volatile_ok = 1;
1632
1633 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1634 mode = GET_MODE_WIDER_MODE (mode))
1635 {
1636 enum insn_code code = movstr_optab[(int) mode];
1637 insn_operand_predicate_fn pred;
1638
1639 if (code != CODE_FOR_nothing
1640 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1641 here because if SIZE is less than the mode mask, as it is
1642 returned by the macro, it will definitely be less than the
1643 actual mode mask. */
1644 && ((GET_CODE (size) == CONST_INT
1645 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1646 <= (GET_MODE_MASK (mode) >> 1)))
1647 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1648 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1649 || (*pred) (x, BLKmode))
1650 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1651 || (*pred) (y, BLKmode))
1652 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1653 || (*pred) (opalign, VOIDmode)))
1654 {
1655 rtx op2;
1656 rtx last = get_last_insn ();
1657 rtx pat;
1658
1659 op2 = convert_to_mode (mode, size, 1);
1660 pred = insn_data[(int) code].operand[2].predicate;
1661 if (pred != 0 && ! (*pred) (op2, mode))
1662 op2 = copy_to_mode_reg (mode, op2);
1663
1664 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1665 if (pat)
1666 {
1667 emit_insn (pat);
1668 volatile_ok = 0;
1669 return 0;
1670 }
1671 else
1672 delete_insns_since (last);
1673 }
1674 }
1675
1676 volatile_ok = 0;
1677
1678 /* X, Y, or SIZE may have been passed through protect_from_queue.
1679
1680 It is unsafe to save the value generated by protect_from_queue
1681 and reuse it later. Consider what happens if emit_queue is
1682 called before the return value from protect_from_queue is used.
1683
1684 Expansion of the CALL_EXPR below will call emit_queue before
1685 we are finished emitting RTL for argument setup. So if we are
1686 not careful we could get the wrong value for an argument.
1687
1688 To avoid this problem we go ahead and emit code to copy X, Y &
1689 SIZE into new pseudos. We can then place those new pseudos
1690 into an RTL_EXPR and use them later, even after a call to
1691 emit_queue.
1692
1693 Note this is not strictly needed for library calls since they
1694 do not call emit_queue before loading their arguments. However,
1695 we may need to have library calls call emit_queue in the future
1696 since failing to do so could cause problems for targets which
1697 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1698 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1699 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1700
1701 #ifdef TARGET_MEM_FUNCTIONS
1702 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1703 #else
1704 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1705 TREE_UNSIGNED (integer_type_node));
1706 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1707 #endif
1708
1709 #ifdef TARGET_MEM_FUNCTIONS
1710 /* It is incorrect to use the libcall calling conventions to call
1711 memcpy in this context.
1712
1713 This could be a user call to memcpy and the user may wish to
1714 examine the return value from memcpy.
1715
1716 For targets where libcalls and normal calls have different conventions
1717 for returning pointers, we could end up generating incorrect code.
1718
1719 So instead of using a libcall sequence we build up a suitable
1720 CALL_EXPR and expand the call in the normal fashion. */
1721 if (fn == NULL_TREE)
1722 {
1723 tree fntype;
1724
1725 /* This was copied from except.c, I don't know if all this is
1726 necessary in this context or not. */
1727 fn = get_identifier ("memcpy");
1728 push_obstacks_nochange ();
1729 end_temporary_allocation ();
1730 fntype = build_pointer_type (void_type_node);
1731 fntype = build_function_type (fntype, NULL_TREE);
1732 fn = build_decl (FUNCTION_DECL, fn, fntype);
1733 ggc_add_tree_root (&fn, 1);
1734 DECL_EXTERNAL (fn) = 1;
1735 TREE_PUBLIC (fn) = 1;
1736 DECL_ARTIFICIAL (fn) = 1;
1737 make_decl_rtl (fn, NULL_PTR, 1);
1738 assemble_external (fn);
1739 pop_obstacks ();
1740 }
1741
1742 /* We need to make an argument list for the function call.
1743
1744 memcpy has three arguments, the first two are void * addresses and
1745 the last is a size_t byte count for the copy. */
1746 arg_list
1747 = build_tree_list (NULL_TREE,
1748 make_tree (build_pointer_type (void_type_node), x));
1749 TREE_CHAIN (arg_list)
1750 = build_tree_list (NULL_TREE,
1751 make_tree (build_pointer_type (void_type_node), y));
1752 TREE_CHAIN (TREE_CHAIN (arg_list))
1753 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1754 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1755
1756 /* Now we have to build up the CALL_EXPR itself. */
1757 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1758 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1759 call_expr, arg_list, NULL_TREE);
1760 TREE_SIDE_EFFECTS (call_expr) = 1;
1761
1762 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1763 #else
1764 emit_library_call (bcopy_libfunc, 0,
1765 VOIDmode, 3, y, Pmode, x, Pmode,
1766 convert_to_mode (TYPE_MODE (integer_type_node), size,
1767 TREE_UNSIGNED (integer_type_node)),
1768 TYPE_MODE (integer_type_node));
1769 #endif
1770 }
1771
1772 return retval;
1773 }
1774 \f
1775 /* Copy all or part of a value X into registers starting at REGNO.
1776 The number of registers to be filled is NREGS. */
1777
1778 void
1779 move_block_to_reg (regno, x, nregs, mode)
1780 int regno;
1781 rtx x;
1782 int nregs;
1783 enum machine_mode mode;
1784 {
1785 int i;
1786 #ifdef HAVE_load_multiple
1787 rtx pat;
1788 rtx last;
1789 #endif
1790
1791 if (nregs == 0)
1792 return;
1793
1794 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1795 x = validize_mem (force_const_mem (mode, x));
1796
1797 /* See if the machine can do this with a load multiple insn. */
1798 #ifdef HAVE_load_multiple
1799 if (HAVE_load_multiple)
1800 {
1801 last = get_last_insn ();
1802 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1803 GEN_INT (nregs));
1804 if (pat)
1805 {
1806 emit_insn (pat);
1807 return;
1808 }
1809 else
1810 delete_insns_since (last);
1811 }
1812 #endif
1813
1814 for (i = 0; i < nregs; i++)
1815 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1816 operand_subword_force (x, i, mode));
1817 }
1818
1819 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1820 The number of registers to be filled is NREGS. SIZE indicates the number
1821 of bytes in the object X. */
1822
1823
1824 void
1825 move_block_from_reg (regno, x, nregs, size)
1826 int regno;
1827 rtx x;
1828 int nregs;
1829 int size;
1830 {
1831 int i;
1832 #ifdef HAVE_store_multiple
1833 rtx pat;
1834 rtx last;
1835 #endif
1836 enum machine_mode mode;
1837
1838 /* If SIZE is that of a mode no bigger than a word, just use that
1839 mode's store operation. */
1840 if (size <= UNITS_PER_WORD
1841 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1842 {
1843 emit_move_insn (change_address (x, mode, NULL),
1844 gen_rtx_REG (mode, regno));
1845 return;
1846 }
1847
1848 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1849 to the left before storing to memory. Note that the previous test
1850 doesn't handle all cases (e.g. SIZE == 3). */
1851 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1852 {
1853 rtx tem = operand_subword (x, 0, 1, BLKmode);
1854 rtx shift;
1855
1856 if (tem == 0)
1857 abort ();
1858
1859 shift = expand_shift (LSHIFT_EXPR, word_mode,
1860 gen_rtx_REG (word_mode, regno),
1861 build_int_2 ((UNITS_PER_WORD - size)
1862 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1863 emit_move_insn (tem, shift);
1864 return;
1865 }
1866
1867 /* See if the machine can do this with a store multiple insn. */
1868 #ifdef HAVE_store_multiple
1869 if (HAVE_store_multiple)
1870 {
1871 last = get_last_insn ();
1872 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1873 GEN_INT (nregs));
1874 if (pat)
1875 {
1876 emit_insn (pat);
1877 return;
1878 }
1879 else
1880 delete_insns_since (last);
1881 }
1882 #endif
1883
1884 for (i = 0; i < nregs; i++)
1885 {
1886 rtx tem = operand_subword (x, i, 1, BLKmode);
1887
1888 if (tem == 0)
1889 abort ();
1890
1891 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1892 }
1893 }
1894
1895 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1896 registers represented by a PARALLEL. SSIZE represents the total size of
1897 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1898 SRC in bits. */
1899 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1900 the balance will be in what would be the low-order memory addresses, i.e.
1901 left justified for big endian, right justified for little endian. This
1902 happens to be true for the targets currently using this support. If this
1903 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1904 would be needed. */
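
/* As an illustration (with hypothetical register numbers), a 12-byte block
   loaded into two registers might be described by

	(parallel [(expr_list (reg:DI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 8))])

   where each entry pairs a destination register with its byte offset into
   the source; the loop below reads the register from
   XEXP (XVECEXP (dst, 0, i), 0) and the offset from
   XEXP (XVECEXP (dst, 0, i), 1).  */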
1905
1906 void
1907 emit_group_load (dst, orig_src, ssize, align)
1908 rtx dst, orig_src;
1909 unsigned int align;
1910 int ssize;
1911 {
1912 rtx *tmps, src;
1913 int start, i;
1914
1915 if (GET_CODE (dst) != PARALLEL)
1916 abort ();
1917
1918 /* Check for a NULL entry, used to indicate that the parameter goes
1919 both on the stack and in registers. */
1920 if (XEXP (XVECEXP (dst, 0, 0), 0))
1921 start = 0;
1922 else
1923 start = 1;
1924
1925 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1926
1927 /* If we won't be loading directly from memory, protect the real source
1928 from strange tricks we might play. */
1929 src = orig_src;
1930 if (GET_CODE (src) != MEM)
1931 {
1932 if (GET_MODE (src) == VOIDmode)
1933 src = gen_reg_rtx (GET_MODE (dst));
1934 else
1935 src = gen_reg_rtx (GET_MODE (orig_src));
1936 emit_move_insn (src, orig_src);
1937 }
1938
1939 /* Process the pieces. */
1940 for (i = start; i < XVECLEN (dst, 0); i++)
1941 {
1942 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1943 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1944 unsigned int bytelen = GET_MODE_SIZE (mode);
1945 int shift = 0;
1946
1947 /* Handle trailing fragments that run over the size of the struct. */
1948 if (ssize >= 0 && bytepos + bytelen > ssize)
1949 {
1950 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1951 bytelen = ssize - bytepos;
1952 if (bytelen <= 0)
1953 abort ();
1954 }
1955
1956 /* Optimize the access just a bit. */
1957 if (GET_CODE (src) == MEM
1958 && align >= GET_MODE_ALIGNMENT (mode)
1959 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1960 && bytelen == GET_MODE_SIZE (mode))
1961 {
1962 tmps[i] = gen_reg_rtx (mode);
1963 emit_move_insn (tmps[i],
1964 change_address (src, mode,
1965 plus_constant (XEXP (src, 0),
1966 bytepos)));
1967 }
1968 else if (GET_CODE (src) == CONCAT)
1969 {
1970 if (bytepos == 0
1971 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1972 tmps[i] = XEXP (src, 0);
1973 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1974 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1975 tmps[i] = XEXP (src, 1);
1976 else
1977 abort ();
1978 }
1979 else
1980 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1981 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1982 mode, mode, align, ssize);
1983
1984 if (BYTES_BIG_ENDIAN && shift)
1985 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1986 tmps[i], 0, OPTAB_WIDEN);
1987 }
1988
1989 emit_queue();
1990
1991 /* Copy the extracted pieces into the proper (probable) hard regs. */
1992 for (i = start; i < XVECLEN (dst, 0); i++)
1993 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1994 }
1995
1996 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1997 registers represented by a PARALLEL. SSIZE represents the total size of
1998 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1999
2000 void
2001 emit_group_store (orig_dst, src, ssize, align)
2002 rtx orig_dst, src;
2003 int ssize;
2004 unsigned int align;
2005 {
2006 rtx *tmps, dst;
2007 int start, i;
2008
2009 if (GET_CODE (src) != PARALLEL)
2010 abort ();
2011
2012 /* Check for a NULL entry, used to indicate that the parameter goes
2013 both on the stack and in registers. */
2014 if (XEXP (XVECEXP (src, 0, 0), 0))
2015 start = 0;
2016 else
2017 start = 1;
2018
2019 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2020
2021 /* Copy the (probable) hard regs into pseudos. */
2022 for (i = start; i < XVECLEN (src, 0); i++)
2023 {
2024 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2025 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2026 emit_move_insn (tmps[i], reg);
2027 }
2028 emit_queue();
2029
2030 /* If we won't be storing directly into memory, protect the real destination
2031 from strange tricks we might play. */
2032 dst = orig_dst;
2033 if (GET_CODE (dst) == PARALLEL)
2034 {
2035 rtx temp;
2036
2037 /* We can get a PARALLEL dst if there is a conditional expression in
2038 a return statement. In that case, the dst and src are the same,
2039 so no action is necessary. */
2040 if (rtx_equal_p (dst, src))
2041 return;
2042
2043 /* It is unclear if we can ever reach here, but we may as well handle
2044 it. Allocate a temporary, and split this into a store/load to/from
2045 the temporary. */
2046
2047 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2048 emit_group_store (temp, src, ssize, align);
2049 emit_group_load (dst, temp, ssize, align);
2050 return;
2051 }
2052 else if (GET_CODE (dst) != MEM)
2053 {
2054 dst = gen_reg_rtx (GET_MODE (orig_dst));
2055 /* Make life a bit easier for combine. */
2056 emit_move_insn (dst, const0_rtx);
2057 }
2058 else if (! MEM_IN_STRUCT_P (dst))
2059 {
2060 /* store_bit_field requires that memory operations have
2061 mem_in_struct_p set; we might not. */
2062
2063 dst = copy_rtx (orig_dst);
2064 MEM_SET_IN_STRUCT_P (dst, 1);
2065 }
2066
2067 /* Process the pieces. */
2068 for (i = start; i < XVECLEN (src, 0); i++)
2069 {
2070 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2071 enum machine_mode mode = GET_MODE (tmps[i]);
2072 unsigned int bytelen = GET_MODE_SIZE (mode);
2073
2074 /* Handle trailing fragments that run over the size of the struct. */
2075 if (ssize >= 0 && bytepos + bytelen > ssize)
2076 {
2077 if (BYTES_BIG_ENDIAN)
2078 {
2079 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2080 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2081 tmps[i], 0, OPTAB_WIDEN);
2082 }
2083 bytelen = ssize - bytepos;
2084 }
2085
2086 /* Optimize the access just a bit. */
2087 if (GET_CODE (dst) == MEM
2088 && align >= GET_MODE_ALIGNMENT (mode)
2089 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2090 && bytelen == GET_MODE_SIZE (mode))
2091 emit_move_insn (change_address (dst, mode,
2092 plus_constant (XEXP (dst, 0),
2093 bytepos)),
2094 tmps[i]);
2095 else
2096 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2097 mode, tmps[i], align, ssize);
2098 }
2099
2100 emit_queue();
2101
2102 /* Copy from the pseudo into the (probable) hard reg. */
2103 if (GET_CODE (dst) == REG)
2104 emit_move_insn (orig_dst, dst);
2105 }
2106
2107 /* Generate code to copy a BLKmode object of TYPE out of a
2108 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2109 is null, a stack temporary is created. TGTBLK is returned.
2110
2111 The primary purpose of this routine is to handle functions
2112 that return BLKmode structures in registers. Some machines
2113 (the PA for example) want to return all small structures
2114 in registers regardless of the structure's alignment. */
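
/* As a worked (hypothetical) example: for a 6-byte structure on a
   32-bit big-endian target,

     bytes % UNITS_PER_WORD  = 6 % 4 = 2
     big_endian_correction   = BITS_PER_WORD - 2 * BITS_PER_UNIT
			     = 32 - 16 = 16 bits

   so the first extraction starts 16 bits into the source word, skipping
   the unused high-order bytes.  */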
2115
2116 rtx
2117 copy_blkmode_from_reg (tgtblk, srcreg, type)
2118 rtx tgtblk;
2119 rtx srcreg;
2120 tree type;
2121 {
2122 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2123 rtx src = NULL, dst = NULL;
2124 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2125 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2126
2127 if (tgtblk == 0)
2128 {
2129 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2130 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2131 preserve_temp_slots (tgtblk);
2132 }
2133
2134 /* This code assumes srcreg is at least a full word. If it isn't,
2135 copy it into a new pseudo which is a full word. */
2136 if (GET_MODE (srcreg) != BLKmode
2137 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2138 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2139
2140 /* Structures whose size is not a multiple of a word are aligned
2141 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2142 machine, this means we must skip the empty high order bytes when
2143 calculating the bit offset. */
2144 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2145 big_endian_correction
2146 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2147
2148 /* Copy the structure BITSIZE bits at a time.
2149
2150 We could probably emit more efficient code for machines which do not use
2151 strict alignment, but it doesn't seem worth the effort at the current
2152 time. */
2153 for (bitpos = 0, xbitpos = big_endian_correction;
2154 bitpos < bytes * BITS_PER_UNIT;
2155 bitpos += bitsize, xbitpos += bitsize)
2156 {
2157 /* We need a new source operand each time xbitpos is on a
2158 word boundary and when xbitpos == big_endian_correction
2159 (the first time through). */
2160 if (xbitpos % BITS_PER_WORD == 0
2161 || xbitpos == big_endian_correction)
2162 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2163
2164 /* We need a new destination operand each time bitpos is on
2165 a word boundary. */
2166 if (bitpos % BITS_PER_WORD == 0)
2167 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2168
2169 /* Use xbitpos for the source extraction (right justified) and
2170 bitpos for the destination store (left justified). */
2171 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2172 extract_bit_field (src, bitsize,
2173 xbitpos % BITS_PER_WORD, 1,
2174 NULL_RTX, word_mode, word_mode,
2175 bitsize, BITS_PER_WORD),
2176 bitsize, BITS_PER_WORD);
2177 }
2178
2179 return tgtblk;
2180 }
2181
2182
2183 /* Add a USE expression for REG to the (possibly empty) list pointed
2184 to by CALL_FUSAGE. REG must denote a hard register. */
2185
2186 void
2187 use_reg (call_fusage, reg)
2188 rtx *call_fusage, reg;
2189 {
2190 if (GET_CODE (reg) != REG
2191 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2192 abort();
2193
2194 *call_fusage
2195 = gen_rtx_EXPR_LIST (VOIDmode,
2196 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2197 }
2198
2199 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2200 starting at REGNO. All of these registers must be hard registers. */
2201
2202 void
2203 use_regs (call_fusage, regno, nregs)
2204 rtx *call_fusage;
2205 int regno;
2206 int nregs;
2207 {
2208 int i;
2209
2210 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2211 abort ();
2212
2213 for (i = 0; i < nregs; i++)
2214 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2215 }
2216
2217 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2218 PARALLEL REGS. This is for calls that pass values in multiple
2219 non-contiguous locations. The Irix 6 ABI has examples of this. */
2220
2221 void
2222 use_group_regs (call_fusage, regs)
2223 rtx *call_fusage;
2224 rtx regs;
2225 {
2226 int i;
2227
2228 for (i = 0; i < XVECLEN (regs, 0); i++)
2229 {
2230 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2231
2232 /* A NULL entry means the parameter goes both on the stack and in
2233 registers. This can also be a MEM for targets that pass values
2234 partially on the stack and partially in registers. */
2235 if (reg != 0 && GET_CODE (reg) == REG)
2236 use_reg (call_fusage, reg);
2237 }
2238 }
2239 \f
2240 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2241 rtx with BLKmode). The caller must pass TO through protect_from_queue
2242 before calling. ALIGN is maximum alignment we can assume. */
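
/* For instance (an illustrative sketch, assuming SImode, HImode and
   QImode all have move patterns and the block is sufficiently aligned),
   clearing LEN == 7 bytes proceeds mode by mode:

     SImode pass:  one 4-byte store of zero   (data.len 7 -> 3)
     HImode pass:  one 2-byte store of zero   (data.len 3 -> 1)
     QImode pass:  one 1-byte store of zero   (data.len 1 -> 0)

   leaving data.len == 0, which the final sanity check requires.  */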
2243
2244 static void
2245 clear_by_pieces (to, len, align)
2246 rtx to;
2247 int len;
2248 unsigned int align;
2249 {
2250 struct clear_by_pieces data;
2251 rtx to_addr = XEXP (to, 0);
2252 unsigned int max_size = MOVE_MAX_PIECES + 1;
2253 enum machine_mode mode = VOIDmode, tmode;
2254 enum insn_code icode;
2255
2256 data.offset = 0;
2257 data.to_addr = to_addr;
2258 data.to = to;
2259 data.autinc_to
2260 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2261 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2262
2263 data.explicit_inc_to = 0;
2264 data.reverse
2265 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2266 if (data.reverse) data.offset = len;
2267 data.len = len;
2268
2269 data.to_struct = MEM_IN_STRUCT_P (to);
2270
2271 /* If copying requires more than two move insns,
2272 copy addresses to registers (to make displacements shorter)
2273 and use post-increment if available. */
2274 if (!data.autinc_to
2275 && move_by_pieces_ninsns (len, align) > 2)
2276 {
2277 /* Determine the main mode we'll be using */
2278 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2279 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2280 if (GET_MODE_SIZE (tmode) < max_size)
2281 mode = tmode;
2282
2283 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2284 {
2285 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2286 data.autinc_to = 1;
2287 data.explicit_inc_to = -1;
2288 }
2289 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2290 {
2291 data.to_addr = copy_addr_to_reg (to_addr);
2292 data.autinc_to = 1;
2293 data.explicit_inc_to = 1;
2294 }
2295 if (!data.autinc_to && CONSTANT_P (to_addr))
2296 data.to_addr = copy_addr_to_reg (to_addr);
2297 }
2298
2299 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2300 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2301 align = MOVE_MAX * BITS_PER_UNIT;
2302
2303 /* First move what we can in the largest integer mode, then go to
2304 successively smaller modes. */
2305
2306 while (max_size > 1)
2307 {
2308 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2309 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2310 if (GET_MODE_SIZE (tmode) < max_size)
2311 mode = tmode;
2312
2313 if (mode == VOIDmode)
2314 break;
2315
2316 icode = mov_optab->handlers[(int) mode].insn_code;
2317 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2318 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2319
2320 max_size = GET_MODE_SIZE (mode);
2321 }
2322
2323 /* The code above should have handled everything. */
2324 if (data.len != 0)
2325 abort ();
2326 }
2327
2328 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2329 with move instructions for mode MODE. GENFUN is the gen_... function
2330 to make a move insn for that mode. DATA has all the other info. */
2331
2332 static void
2333 clear_by_pieces_1 (genfun, mode, data)
2334 rtx (*genfun) PARAMS ((rtx, ...));
2335 enum machine_mode mode;
2336 struct clear_by_pieces *data;
2337 {
2338 register int size = GET_MODE_SIZE (mode);
2339 register rtx to1;
2340
2341 while (data->len >= size)
2342 {
2343 if (data->reverse) data->offset -= size;
2344
2345 to1 = (data->autinc_to
2346 ? gen_rtx_MEM (mode, data->to_addr)
2347 : copy_rtx (change_address (data->to, mode,
2348 plus_constant (data->to_addr,
2349 data->offset))));
2350 MEM_IN_STRUCT_P (to1) = data->to_struct;
2351
2352 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2353 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2354
2355 emit_insn ((*genfun) (to1, const0_rtx));
2356 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2357 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2358
2359 if (! data->reverse) data->offset += size;
2360
2361 data->len -= size;
2362 }
2363 }
2364 \f
2365 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2366 its length in bytes and ALIGN is the maximum alignment we can assume.
2367
2368 If we call a function that returns the length of the block, return it. */
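
/* A typical use (illustrative sketch): zeroing a 16-byte BLKmode
   temporary with word alignment could be requested as

     clear_storage (target, GEN_INT (16), BITS_PER_WORD);

   For a constant size this small the block is normally cleared inline
   by clear_by_pieces; larger or variable sizes fall back to a clrstr
   pattern or a call to memset/bzero.  */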
2369
2370 rtx
2371 clear_storage (object, size, align)
2372 rtx object;
2373 rtx size;
2374 unsigned int align;
2375 {
2376 #ifdef TARGET_MEM_FUNCTIONS
2377 static tree fn;
2378 tree call_expr, arg_list;
2379 #endif
2380 rtx retval = 0;
2381
2382 if (GET_MODE (object) == BLKmode)
2383 {
2384 object = protect_from_queue (object, 1);
2385 size = protect_from_queue (size, 0);
2386
2387 if (GET_CODE (size) == CONST_INT
2388 && MOVE_BY_PIECES_P (INTVAL (size), align))
2389 clear_by_pieces (object, INTVAL (size), align);
2390 else
2391 {
2392 /* Try the most limited insn first, because there's no point
2393 including more than one in the machine description unless
2394 the more limited one has some advantage. */
2395
2396 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2397 enum machine_mode mode;
2398
2399 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2400 mode = GET_MODE_WIDER_MODE (mode))
2401 {
2402 enum insn_code code = clrstr_optab[(int) mode];
2403 insn_operand_predicate_fn pred;
2404
2405 if (code != CODE_FOR_nothing
2406 /* We don't need MODE to be narrower than
2407 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2408 the mode mask, as it is returned by the macro, it will
2409 definitely be less than the actual mode mask. */
2410 && ((GET_CODE (size) == CONST_INT
2411 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2412 <= (GET_MODE_MASK (mode) >> 1)))
2413 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2414 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2415 || (*pred) (object, BLKmode))
2416 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2417 || (*pred) (opalign, VOIDmode)))
2418 {
2419 rtx op1;
2420 rtx last = get_last_insn ();
2421 rtx pat;
2422
2423 op1 = convert_to_mode (mode, size, 1);
2424 pred = insn_data[(int) code].operand[1].predicate;
2425 if (pred != 0 && ! (*pred) (op1, mode))
2426 op1 = copy_to_mode_reg (mode, op1);
2427
2428 pat = GEN_FCN ((int) code) (object, op1, opalign);
2429 if (pat)
2430 {
2431 emit_insn (pat);
2432 return 0;
2433 }
2434 else
2435 delete_insns_since (last);
2436 }
2437 }
2438
2439 /* OBJECT or SIZE may have been passed through protect_from_queue.
2440
2441 It is unsafe to save the value generated by protect_from_queue
2442 and reuse it later. Consider what happens if emit_queue is
2443 called before the return value from protect_from_queue is used.
2444
2445 Expansion of the CALL_EXPR below will call emit_queue before
2446 we are finished emitting RTL for argument setup. So if we are
2447 not careful we could get the wrong value for an argument.
2448
2449 To avoid this problem we go ahead and emit code to copy OBJECT
2450 and SIZE into new pseudos. We can then place those new pseudos
2451 into an RTL_EXPR and use them later, even after a call to
2452 emit_queue.
2453
2454 Note this is not strictly needed for library calls since they
2455 do not call emit_queue before loading their arguments. However,
2456 we may need to have library calls call emit_queue in the future
2457 since failing to do so could cause problems for targets which
2458 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2459 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2460
2461 #ifdef TARGET_MEM_FUNCTIONS
2462 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2463 #else
2464 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2465 TREE_UNSIGNED (integer_type_node));
2466 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2467 #endif
2468
2469
2470 #ifdef TARGET_MEM_FUNCTIONS
2471 /* It is incorrect to use the libcall calling conventions to call
2472 memset in this context.
2473
2474 This could be a user call to memset and the user may wish to
2475 examine the return value from memset.
2476
2477 For targets where libcalls and normal calls have different
2478 conventions for returning pointers, we could end up generating
2479 incorrect code.
2480
2481 So instead of using a libcall sequence we build up a suitable
2482 CALL_EXPR and expand the call in the normal fashion. */
2483 if (fn == NULL_TREE)
2484 {
2485 tree fntype;
2486
2487 /* This was copied from except.c, I don't know if all this is
2488 necessary in this context or not. */
2489 fn = get_identifier ("memset");
2490 push_obstacks_nochange ();
2491 end_temporary_allocation ();
2492 fntype = build_pointer_type (void_type_node);
2493 fntype = build_function_type (fntype, NULL_TREE);
2494 fn = build_decl (FUNCTION_DECL, fn, fntype);
2495 ggc_add_tree_root (&fn, 1);
2496 DECL_EXTERNAL (fn) = 1;
2497 TREE_PUBLIC (fn) = 1;
2498 DECL_ARTIFICIAL (fn) = 1;
2499 make_decl_rtl (fn, NULL_PTR, 1);
2500 assemble_external (fn);
2501 pop_obstacks ();
2502 }
2503
2504 /* We need to make an argument list for the function call.
2505
2506 memset has three arguments: the first is a void * address, the
2507 second an integer with the initialization value, and the last a
2508 size_t byte count for the copy. */
2509 arg_list
2510 = build_tree_list (NULL_TREE,
2511 make_tree (build_pointer_type (void_type_node),
2512 object));
2513 TREE_CHAIN (arg_list)
2514 = build_tree_list (NULL_TREE,
2515 make_tree (integer_type_node, const0_rtx));
2516 TREE_CHAIN (TREE_CHAIN (arg_list))
2517 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2518 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2519
2520 /* Now we have to build up the CALL_EXPR itself. */
2521 call_expr = build1 (ADDR_EXPR,
2522 build_pointer_type (TREE_TYPE (fn)), fn);
2523 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2524 call_expr, arg_list, NULL_TREE);
2525 TREE_SIDE_EFFECTS (call_expr) = 1;
2526
2527 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2528 #else
2529 emit_library_call (bzero_libfunc, 0,
2530 VOIDmode, 2, object, Pmode, size,
2531 TYPE_MODE (integer_type_node));
2532 #endif
2533 }
2534 }
2535 else
2536 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2537
2538 return retval;
2539 }
2540
2541 /* Generate code to copy Y into X.
2542 Both Y and X must have the same mode, except that
2543 Y can be a constant with VOIDmode.
2544 This mode cannot be BLKmode; use emit_block_move for that.
2545
2546 Return the last instruction emitted. */
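
/* A minimal example (illustrative only): loading the constant 42 into a
   fresh SImode pseudo register,

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   Constants that are not LEGITIMATE_CONSTANT_P for the target are
   forced into the constant pool first, as done below.  */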
2547
2548 rtx
2549 emit_move_insn (x, y)
2550 rtx x, y;
2551 {
2552 enum machine_mode mode = GET_MODE (x);
2553
2554 x = protect_from_queue (x, 1);
2555 y = protect_from_queue (y, 0);
2556
2557 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2558 abort ();
2559
2560 /* Never force constant_p_rtx to memory. */
2561 if (GET_CODE (y) == CONSTANT_P_RTX)
2562 ;
2563 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2564 y = force_const_mem (mode, y);
2565
2566 /* If X or Y are memory references, verify that their addresses are valid
2567 for the machine. */
2568 if (GET_CODE (x) == MEM
2569 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2570 && ! push_operand (x, GET_MODE (x)))
2571 || (flag_force_addr
2572 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2573 x = change_address (x, VOIDmode, XEXP (x, 0));
2574
2575 if (GET_CODE (y) == MEM
2576 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2577 || (flag_force_addr
2578 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2579 y = change_address (y, VOIDmode, XEXP (y, 0));
2580
2581 if (mode == BLKmode)
2582 abort ();
2583
2584 return emit_move_insn_1 (x, y);
2585 }
2586
2587 /* Low level part of emit_move_insn.
2588 Called just like emit_move_insn, but assumes X and Y
2589 are basically valid. */
2590
2591 rtx
2592 emit_move_insn_1 (x, y)
2593 rtx x, y;
2594 {
2595 enum machine_mode mode = GET_MODE (x);
2596 enum machine_mode submode;
2597 enum mode_class class = GET_MODE_CLASS (mode);
2598 unsigned int i;
2599
2600 if (mode >= MAX_MACHINE_MODE)
2601 abort ();
2602
2603 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2604 return
2605 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2606
2607 /* Expand complex moves by moving real part and imag part, if possible. */
2608 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2609 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2610 * BITS_PER_UNIT),
2611 (class == MODE_COMPLEX_INT
2612 ? MODE_INT : MODE_FLOAT),
2613 0))
2614 && (mov_optab->handlers[(int) submode].insn_code
2615 != CODE_FOR_nothing))
2616 {
2617 /* Don't split destination if it is a stack push. */
2618 int stack = push_operand (x, GET_MODE (x));
2619
2620 /* If this is a stack push, push the highpart first, so it
2621 will be in the argument order.
2622
2623 In that case, change_address is used only to convert
2624 the mode, not to change the address. */
2625 if (stack)
2626 {
2627 /* Note that the real part always precedes the imag part in memory
2628 regardless of machine's endianness. */
2629 #ifdef STACK_GROWS_DOWNWARD
2630 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2631 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2632 gen_imagpart (submode, y)));
2633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2634 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2635 gen_realpart (submode, y)));
2636 #else
2637 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2638 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2639 gen_realpart (submode, y)));
2640 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2641 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2642 gen_imagpart (submode, y)));
2643 #endif
2644 }
2645 else
2646 {
2647 rtx realpart_x, realpart_y;
2648 rtx imagpart_x, imagpart_y;
2649
2650 /* If this is a complex value with each part being smaller than a
2651 word, the usual calling sequence will likely pack the pieces into
2652 a single register. Unfortunately, SUBREG of hard registers only
2653 deals in terms of words, so we have a problem converting input
2654 arguments to the CONCAT of two registers that is used elsewhere
2655 for complex values. If this is before reload, we can copy it into
2656 memory and reload. FIXME, we should see about using extract and
2657 insert on integer registers, but complex short and complex char
2658 variables should be rarely used. */
2659 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2660 && (reload_in_progress | reload_completed) == 0)
2661 {
2662 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2663 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2664
2665 if (packed_dest_p || packed_src_p)
2666 {
2667 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2668 ? MODE_FLOAT : MODE_INT);
2669
2670 enum machine_mode reg_mode =
2671 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2672
2673 if (reg_mode != BLKmode)
2674 {
2675 rtx mem = assign_stack_temp (reg_mode,
2676 GET_MODE_SIZE (mode), 0);
2677
2678 rtx cmem = change_address (mem, mode, NULL_RTX);
2679
2680 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2681
2682 if (packed_dest_p)
2683 {
2684 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2685 emit_move_insn_1 (cmem, y);
2686 return emit_move_insn_1 (sreg, mem);
2687 }
2688 else
2689 {
2690 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2691 emit_move_insn_1 (mem, sreg);
2692 return emit_move_insn_1 (x, cmem);
2693 }
2694 }
2695 }
2696 }
2697
2698 realpart_x = gen_realpart (submode, x);
2699 realpart_y = gen_realpart (submode, y);
2700 imagpart_x = gen_imagpart (submode, x);
2701 imagpart_y = gen_imagpart (submode, y);
2702
2703 /* Show the output dies here. This is necessary for SUBREGs
2704 of pseudos since we cannot track their lifetimes correctly;
2705 hard regs shouldn't appear here except as return values.
2706 We never want to emit such a clobber after reload. */
2707 if (x != y
2708 && ! (reload_in_progress || reload_completed)
2709 && (GET_CODE (realpart_x) == SUBREG
2710 || GET_CODE (imagpart_x) == SUBREG))
2711 {
2712 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2713 }
2714
2715 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2716 (realpart_x, realpart_y));
2717 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2718 (imagpart_x, imagpart_y));
2719 }
2720
2721 return get_last_insn ();
2722 }
2723
2724 /* This will handle any multi-word mode that lacks a move_insn pattern.
2725 However, you will get better code if you define such patterns,
2726 even if they must turn into multiple assembler instructions. */
2727 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2728 {
2729 rtx last_insn = 0;
2730 rtx seq, inner;
2731 int need_clobber;
2732
2733 #ifdef PUSH_ROUNDING
2734
2735 /* If X is a push on the stack, do the push now and replace
2736 X with a reference to the stack pointer. */
2737 if (push_operand (x, GET_MODE (x)))
2738 {
2739 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2740 x = change_address (x, VOIDmode, stack_pointer_rtx);
2741 }
2742 #endif
2743
2744 /* If we are in reload, see if either operand is a MEM whose address
2745 is scheduled for replacement. */
2746 if (reload_in_progress && GET_CODE (x) == MEM
2747 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2748 {
2749 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2750
2751 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
2752 MEM_COPY_ATTRIBUTES (new, x);
2753 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
2754 x = new;
2755 }
2756 if (reload_in_progress && GET_CODE (y) == MEM
2757 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2758 {
2759 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2760
2761 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (y);
2762 MEM_COPY_ATTRIBUTES (new, y);
2763 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (y);
2764 y = new;
2765 }
2766
2767 start_sequence ();
2768
2769 need_clobber = 0;
2770 for (i = 0;
2771 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2772 i++)
2773 {
2774 rtx xpart = operand_subword (x, i, 1, mode);
2775 rtx ypart = operand_subword (y, i, 1, mode);
2776
2777 /* If we can't get a part of Y, put Y into memory if it is a
2778 constant. Otherwise, force it into a register. If we still
2779 can't get a part of Y, abort. */
2780 if (ypart == 0 && CONSTANT_P (y))
2781 {
2782 y = force_const_mem (mode, y);
2783 ypart = operand_subword (y, i, 1, mode);
2784 }
2785 else if (ypart == 0)
2786 ypart = operand_subword_force (y, i, mode);
2787
2788 if (xpart == 0 || ypart == 0)
2789 abort ();
2790
2791 need_clobber |= (GET_CODE (xpart) == SUBREG);
2792
2793 last_insn = emit_move_insn (xpart, ypart);
2794 }
2795
2796 seq = gen_sequence ();
2797 end_sequence ();
2798
2799 /* Show the output dies here. This is necessary for SUBREGs
2800 of pseudos since we cannot track their lifetimes correctly;
2801 hard regs shouldn't appear here except as return values.
2802 We never want to emit such a clobber after reload. */
2803 if (x != y
2804 && ! (reload_in_progress || reload_completed)
2805 && need_clobber != 0)
2806 {
2807 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2808 }
2809
2810 emit_insn (seq);
2811
2812 return last_insn;
2813 }
2814 else
2815 abort ();
2816 }
2817 \f
2818 /* Pushing data onto the stack. */
2819
2820 /* Push a block of length SIZE (perhaps variable)
2821 and return an rtx to address the beginning of the block.
2822 Note that it is not possible for the value returned to be a QUEUED.
2823 The value may be virtual_outgoing_args_rtx.
2824
2825 EXTRA is the number of bytes of padding to push in addition to SIZE.
2826 BELOW nonzero means this padding comes at low addresses;
2827 otherwise, the padding comes at high addresses. */
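
/* For example (illustrative only): on a machine where the stack grows
   downward,

     rtx block = push_block (GEN_INT (32), 0, 0);

   adjusts the stack pointer by 32 bytes and returns an address based on
   virtual_outgoing_args_rtx at which the new block begins.  */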
2828
2829 rtx
2830 push_block (size, extra, below)
2831 rtx size;
2832 int extra, below;
2833 {
2834 register rtx temp;
2835
2836 size = convert_modes (Pmode, ptr_mode, size, 1);
2837 if (CONSTANT_P (size))
2838 anti_adjust_stack (plus_constant (size, extra));
2839 else if (GET_CODE (size) == REG && extra == 0)
2840 anti_adjust_stack (size);
2841 else
2842 {
2843 temp = copy_to_mode_reg (Pmode, size);
2844 if (extra != 0)
2845 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2846 temp, 0, OPTAB_LIB_WIDEN);
2847 anti_adjust_stack (temp);
2848 }
2849
2850 #ifndef STACK_GROWS_DOWNWARD
2851 #ifdef ARGS_GROW_DOWNWARD
2852 if (!ACCUMULATE_OUTGOING_ARGS)
2853 #else
2854 if (0)
2855 #endif
2856 #else
2857 if (1)
2858 #endif
2859 {
2860 /* Return the lowest stack address when STACK or ARGS grow downward and
2861 we are not accumulating outgoing arguments (the c4x port uses such
2862 conventions). */
2863 temp = virtual_outgoing_args_rtx;
2864 if (extra != 0 && below)
2865 temp = plus_constant (temp, extra);
2866 }
2867 else
2868 {
2869 if (GET_CODE (size) == CONST_INT)
2870 temp = plus_constant (virtual_outgoing_args_rtx,
2871 - INTVAL (size) - (below ? 0 : extra));
2872 else if (extra != 0 && !below)
2873 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2874 negate_rtx (Pmode, plus_constant (size, extra)));
2875 else
2876 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2877 negate_rtx (Pmode, size));
2878 }
2879
2880 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2881 }
2882
2883 rtx
2884 gen_push_operand ()
2885 {
2886 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2887 }
2888
2889 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2890 block of SIZE bytes. */
2891
2892 static rtx
2893 get_push_address (size)
2894 int size;
2895 {
2896 register rtx temp;
2897
2898 if (STACK_PUSH_CODE == POST_DEC)
2899 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2900 else if (STACK_PUSH_CODE == POST_INC)
2901 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2902 else
2903 temp = stack_pointer_rtx;
2904
2905 return copy_to_reg (temp);
2906 }
2907
2908 /* Generate code to push X onto the stack, assuming it has mode MODE and
2909 type TYPE.
2910 MODE is redundant except when X is a CONST_INT (since they don't
2911 carry mode info).
2912 SIZE is an rtx for the size of data to be copied (in bytes),
2913 needed only if X is BLKmode.
2914
2915 ALIGN is maximum alignment we can assume.
2916
2917 If PARTIAL and REG are both nonzero, then copy that many of the first
2918 words of X into registers starting with REG, and push the rest of X.
2919 The amount of space pushed is decreased by PARTIAL words,
2920 rounded *down* to a multiple of PARM_BOUNDARY.
2921 REG must be a hard register in this case.
2922 If REG is zero but PARTIAL is not, take all other actions for an
2923 argument partially in registers, but do not actually load any
2924 registers.
2925
2926 EXTRA is the amount in bytes of extra space to leave next to this arg.
2927 This is ignored if an argument block has already been allocated.
2928
2929 On a machine that lacks real push insns, ARGS_ADDR is the address of
2930 the bottom of the argument block for this call. We use indexing off there
2931 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2932 argument block has not been preallocated.
2933
2934 ARGS_SO_FAR is the size of args previously pushed for this call.
2935
2936 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2937 for arguments passed in registers. If nonzero, it will be the number
2938 of bytes required. */
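
/* A hypothetical call (illustrative only; the register number and sizes
   are invented): pushing a 12-byte BLKmode argument whose first word is
   passed in hard register 4 might look like

     emit_push_insn (x, BLKmode, type, GEN_INT (12), BITS_PER_WORD,
		     1, gen_rtx_REG (word_mode, 4), 0, NULL_RTX,
		     const0_rtx, 0, NULL_RTX);

   The stack copy then skips the first UNITS_PER_WORD bytes of X, and
   the register part is loaded at the very end, after any mem-to-mem
   copies that might themselves make function calls.  */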
2939
2940 void
2941 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2942 args_addr, args_so_far, reg_parm_stack_space,
2943 alignment_pad)
2944 register rtx x;
2945 enum machine_mode mode;
2946 tree type;
2947 rtx size;
2948 unsigned int align;
2949 int partial;
2950 rtx reg;
2951 int extra;
2952 rtx args_addr;
2953 rtx args_so_far;
2954 int reg_parm_stack_space;
2955 rtx alignment_pad;
2956 {
2957 rtx xinner;
2958 enum direction stack_direction
2959 #ifdef STACK_GROWS_DOWNWARD
2960 = downward;
2961 #else
2962 = upward;
2963 #endif
2964
2965 /* Decide where to pad the argument: `downward' for below,
2966 `upward' for above, or `none' for don't pad it.
2967 Default is below for small data on big-endian machines; else above. */
2968 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2969
2970 /* Invert direction if stack is post-update. */
2971 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2972 if (where_pad != none)
2973 where_pad = (where_pad == downward ? upward : downward);
2974
2975 xinner = x = protect_from_queue (x, 0);
2976
2977 if (mode == BLKmode)
2978 {
2979 /* Copy a block into the stack, entirely or partially. */
2980
2981 register rtx temp;
2982 int used = partial * UNITS_PER_WORD;
2983 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2984 int skip;
2985
2986 if (size == 0)
2987 abort ();
2988
2989 used -= offset;
2990
2991 /* USED is now the # of bytes we need not copy to the stack
2992 because registers will take care of them. */
2993
2994 if (partial != 0)
2995 xinner = change_address (xinner, BLKmode,
2996 plus_constant (XEXP (xinner, 0), used));
2997
2998 /* If the partial register-part of the arg counts in its stack size,
2999 skip the part of stack space corresponding to the registers.
3000 Otherwise, start copying to the beginning of the stack space,
3001 by setting SKIP to 0. */
3002 skip = (reg_parm_stack_space == 0) ? 0 : used;
3003
3004 #ifdef PUSH_ROUNDING
3005 /* Do it with several push insns if that doesn't take lots of insns
3006 and if there is no difficulty with push insns that skip bytes
3007 on the stack for alignment purposes. */
3008 if (args_addr == 0
3009 && PUSH_ARGS
3010 && GET_CODE (size) == CONST_INT
3011 && skip == 0
3012 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3013 /* Here we avoid the case of a structure whose weak alignment
3014 forces many pushes of a small amount of data,
3015 and such small pushes do rounding that causes trouble. */
3016 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3017 || align >= BIGGEST_ALIGNMENT
3018 || PUSH_ROUNDING (align) == align)
3019 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3020 {
3021 /* Push padding now if padding above and stack grows down,
3022 or if padding below and stack grows up.
3023 But if space already allocated, this has already been done. */
3024 if (extra && args_addr == 0
3025 && where_pad != none && where_pad != stack_direction)
3026 anti_adjust_stack (GEN_INT (extra));
3027
3028 stack_pointer_delta += INTVAL (size) - used;
3029 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3030 INTVAL (size) - used, align);
3031
3032 if (current_function_check_memory_usage && ! in_check_memory_usage)
3033 {
3034 rtx temp;
3035
3036 in_check_memory_usage = 1;
3037 temp = get_push_address (INTVAL(size) - used);
3038 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3039 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3040 temp, Pmode,
3041 XEXP (xinner, 0), Pmode,
3042 GEN_INT (INTVAL(size) - used),
3043 TYPE_MODE (sizetype));
3044 else
3045 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3046 temp, Pmode,
3047 GEN_INT (INTVAL(size) - used),
3048 TYPE_MODE (sizetype),
3049 GEN_INT (MEMORY_USE_RW),
3050 TYPE_MODE (integer_type_node));
3051 in_check_memory_usage = 0;
3052 }
3053 }
3054 else
3055 #endif /* PUSH_ROUNDING */
3056 {
3057 /* Otherwise make space on the stack and copy the data
3058 to the address of that space. */
3059
3060 /* Deduct words put into registers from the size we must copy. */
3061 if (partial != 0)
3062 {
3063 if (GET_CODE (size) == CONST_INT)
3064 size = GEN_INT (INTVAL (size) - used);
3065 else
3066 size = expand_binop (GET_MODE (size), sub_optab, size,
3067 GEN_INT (used), NULL_RTX, 0,
3068 OPTAB_LIB_WIDEN);
3069 }
3070
3071 /* Get the address of the stack space.
3072 In this case, we do not deal with EXTRA separately.
3073 A single stack adjust will do. */
3074 if (! args_addr)
3075 {
3076 temp = push_block (size, extra, where_pad == downward);
3077 extra = 0;
3078 }
3079 else if (GET_CODE (args_so_far) == CONST_INT)
3080 temp = memory_address (BLKmode,
3081 plus_constant (args_addr,
3082 skip + INTVAL (args_so_far)));
3083 else
3084 temp = memory_address (BLKmode,
3085 plus_constant (gen_rtx_PLUS (Pmode,
3086 args_addr,
3087 args_so_far),
3088 skip));
3089 if (current_function_check_memory_usage && ! in_check_memory_usage)
3090 {
3091 rtx target;
3092
3093 in_check_memory_usage = 1;
3094 target = copy_to_reg (temp);
3095 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3096 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3097 target, Pmode,
3098 XEXP (xinner, 0), Pmode,
3099 size, TYPE_MODE (sizetype));
3100 else
3101 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3102 target, Pmode,
3103 size, TYPE_MODE (sizetype),
3104 GEN_INT (MEMORY_USE_RW),
3105 TYPE_MODE (integer_type_node));
3106 in_check_memory_usage = 0;
3107 }
3108
3109 /* TEMP is the address of the block. Copy the data there. */
3110 if (GET_CODE (size) == CONST_INT
3111 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3112 {
3113 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3114 INTVAL (size), align);
3115 goto ret;
3116 }
3117 else
3118 {
3119 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3120 enum machine_mode mode;
3121 rtx target = gen_rtx_MEM (BLKmode, temp);
3122
3123 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3124 mode != VOIDmode;
3125 mode = GET_MODE_WIDER_MODE (mode))
3126 {
3127 enum insn_code code = movstr_optab[(int) mode];
3128 insn_operand_predicate_fn pred;
3129
3130 if (code != CODE_FOR_nothing
3131 && ((GET_CODE (size) == CONST_INT
3132 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3133 <= (GET_MODE_MASK (mode) >> 1)))
3134 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3135 && (!(pred = insn_data[(int) code].operand[0].predicate)
3136 || ((*pred) (target, BLKmode)))
3137 && (!(pred = insn_data[(int) code].operand[1].predicate)
3138 || ((*pred) (xinner, BLKmode)))
3139 && (!(pred = insn_data[(int) code].operand[3].predicate)
3140 || ((*pred) (opalign, VOIDmode))))
3141 {
3142 rtx op2 = convert_to_mode (mode, size, 1);
3143 rtx last = get_last_insn ();
3144 rtx pat;
3145
3146 pred = insn_data[(int) code].operand[2].predicate;
3147 if (pred != 0 && ! (*pred) (op2, mode))
3148 op2 = copy_to_mode_reg (mode, op2);
3149
3150 pat = GEN_FCN ((int) code) (target, xinner,
3151 op2, opalign);
3152 if (pat)
3153 {
3154 emit_insn (pat);
3155 goto ret;
3156 }
3157 else
3158 delete_insns_since (last);
3159 }
3160 }
3161 }
3162
3163 if (!ACCUMULATE_OUTGOING_ARGS)
3164 {
3165 /* If the source is referenced relative to the stack pointer,
3166 copy it to another register to stabilize it. We do not need
3167 to do this if we know that we won't be changing sp. */
3168
3169 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3170 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3171 temp = copy_to_reg (temp);
3172 }
3173
3174 /* Make inhibit_defer_pop nonzero around the library call
3175 to force it to pop the bcopy-arguments right away. */
3176 NO_DEFER_POP;
3177 #ifdef TARGET_MEM_FUNCTIONS
3178 emit_library_call (memcpy_libfunc, 0,
3179 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3180 convert_to_mode (TYPE_MODE (sizetype),
3181 size, TREE_UNSIGNED (sizetype)),
3182 TYPE_MODE (sizetype));
3183 #else
3184 emit_library_call (bcopy_libfunc, 0,
3185 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3186 convert_to_mode (TYPE_MODE (integer_type_node),
3187 size,
3188 TREE_UNSIGNED (integer_type_node)),
3189 TYPE_MODE (integer_type_node));
3190 #endif
3191 OK_DEFER_POP;
3192 }
3193 }
3194 else if (partial > 0)
3195 {
3196 /* Scalar partly in registers. */
3197
3198 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3199 int i;
3200 int not_stack;
3201 /* # words of start of argument
3202 that we must make space for but need not store. */
3203 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3204 int args_offset = INTVAL (args_so_far);
3205 int skip;
3206
3207 /* Push padding now if padding above and stack grows down,
3208 or if padding below and stack grows up.
3209 But if space already allocated, this has already been done. */
3210 if (extra && args_addr == 0
3211 && where_pad != none && where_pad != stack_direction)
3212 anti_adjust_stack (GEN_INT (extra));
3213
3214 /* If we make space by pushing it, we might as well push
3215 the real data. Otherwise, we can leave OFFSET nonzero
3216 and leave the space uninitialized. */
3217 if (args_addr == 0)
3218 offset = 0;
3219
3220 /* Now NOT_STACK gets the number of words that we don't need to
3221 allocate on the stack. */
3222 not_stack = partial - offset;
3223
3224 /* If the partial register-part of the arg counts in its stack size,
3225 skip the part of stack space corresponding to the registers.
3226 Otherwise, start copying to the beginning of the stack space,
3227 by setting SKIP to 0. */
3228 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3229
3230 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3231 x = validize_mem (force_const_mem (mode, x));
3232
3233 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3234 SUBREGs of such registers are not allowed. */
3235 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3236 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3237 x = copy_to_reg (x);
3238
3239 /* Loop over all the words allocated on the stack for this arg. */
3240 /* We can do it by words, because any scalar bigger than a word
3241 has a size a multiple of a word. */
3242 #ifndef PUSH_ARGS_REVERSED
3243 for (i = not_stack; i < size; i++)
3244 #else
3245 for (i = size - 1; i >= not_stack; i--)
3246 #endif
3247 if (i >= not_stack + offset)
3248 emit_push_insn (operand_subword_force (x, i, mode),
3249 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3250 0, args_addr,
3251 GEN_INT (args_offset + ((i - not_stack + skip)
3252 * UNITS_PER_WORD)),
3253 reg_parm_stack_space, alignment_pad);
3254 }
3255 else
3256 {
3257 rtx addr;
3258 rtx target = NULL_RTX;
3259
3260 /* Push padding now if padding above and stack grows down,
3261 or if padding below and stack grows up.
3262 But if space already allocated, this has already been done. */
3263 if (extra && args_addr == 0
3264 && where_pad != none && where_pad != stack_direction)
3265 anti_adjust_stack (GEN_INT (extra));
3266
3267 #ifdef PUSH_ROUNDING
3268 if (args_addr == 0 && PUSH_ARGS)
3269 {
3270 addr = gen_push_operand ();
3271 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3272 }
3273 else
3274 #endif
3275 {
3276 if (GET_CODE (args_so_far) == CONST_INT)
3277 addr
3278 = memory_address (mode,
3279 plus_constant (args_addr,
3280 INTVAL (args_so_far)));
3281 else
3282 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3283 args_so_far));
3284 target = addr;
3285 }
3286
3287 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3288
3289 if (current_function_check_memory_usage && ! in_check_memory_usage)
3290 {
3291 in_check_memory_usage = 1;
3292 if (target == 0)
3293 target = get_push_address (GET_MODE_SIZE (mode));
3294
3295 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3296 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3297 target, Pmode,
3298 XEXP (x, 0), Pmode,
3299 GEN_INT (GET_MODE_SIZE (mode)),
3300 TYPE_MODE (sizetype));
3301 else
3302 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3303 target, Pmode,
3304 GEN_INT (GET_MODE_SIZE (mode)),
3305 TYPE_MODE (sizetype),
3306 GEN_INT (MEMORY_USE_RW),
3307 TYPE_MODE (integer_type_node));
3308 in_check_memory_usage = 0;
3309 }
3310 }
3311
3312 ret:
3313 /* If part should go in registers, copy that part
3314 into the appropriate registers. Do this now, at the end,
3315 since mem-to-mem copies above may do function calls. */
3316 if (partial > 0 && reg != 0)
3317 {
3318 /* Handle calls that pass values in multiple non-contiguous locations.
3319 The Irix 6 ABI has examples of this. */
3320 if (GET_CODE (reg) == PARALLEL)
3321 emit_group_load (reg, x, -1, align); /* ??? size? */
3322 else
3323 move_block_to_reg (REGNO (reg), x, partial, mode);
3324 }
3325
3326 if (extra && args_addr == 0 && where_pad == stack_direction)
3327 anti_adjust_stack (GEN_INT (extra));
3328
3329 if (alignment_pad)
3330 anti_adjust_stack (alignment_pad);
3331 }
3332 \f
3333 /* Expand an assignment that stores the value of FROM into TO.
3334 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3335 (This may contain a QUEUED rtx;
3336 if the value is constant, this rtx is a constant.)
3337 Otherwise, the returned value is NULL_RTX.
3338
3339 SUGGEST_REG is no longer actually used.
3340 It used to mean, copy the value through a register
3341 and return that register, if that is possible.
3342 We now use WANT_VALUE to decide whether to do this. */
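
/* A typical use (illustrative; LHS and RHS stand for the already-parsed
   operand trees): expanding the C statement `a = b;' as a statement,
   where the value of the assignment is not reused, is simply

     expand_assignment (lhs, rhs, 0, 0);

   whereas an assignment embedded in a larger expression passes
   WANT_VALUE == 1 and uses the returned rtx.  */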
3343
3344 rtx
3345 expand_assignment (to, from, want_value, suggest_reg)
3346 tree to, from;
3347 int want_value;
3348 int suggest_reg ATTRIBUTE_UNUSED;
3349 {
3350 register rtx to_rtx = 0;
3351 rtx result;
3352
3353 /* Don't crash if the lhs of the assignment was erroneous. */
3354
3355 if (TREE_CODE (to) == ERROR_MARK)
3356 {
3357 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3358 return want_value ? result : NULL_RTX;
3359 }
3360
3361 /* Assignment of a structure component needs special treatment
3362 if the structure component's rtx is not simply a MEM.
3363 Assignment of an array element at a constant index, and assignment of
3364 an array element in an unaligned packed structure field, have the same
3365 problem. */
3366
3367 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3368 || TREE_CODE (to) == ARRAY_REF)
3369 {
3370 enum machine_mode mode1;
3371 HOST_WIDE_INT bitsize, bitpos;
3372 tree offset;
3373 int unsignedp;
3374 int volatilep = 0;
3375 tree tem;
3376 unsigned int alignment;
3377
3378 push_temp_slots ();
3379 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3380 &unsignedp, &volatilep, &alignment);
3381
3382 /* If we are going to use store_bit_field and extract_bit_field,
3383 make sure to_rtx will be safe for multiple use. */
3384
3385 if (mode1 == VOIDmode && want_value)
3386 tem = stabilize_reference (tem);
3387
3388 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3389 if (offset != 0)
3390 {
3391 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3392
3393 if (GET_CODE (to_rtx) != MEM)
3394 abort ();
3395
3396 if (GET_MODE (offset_rtx) != ptr_mode)
3397 {
3398 #ifdef POINTERS_EXTEND_UNSIGNED
3399 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3400 #else
3401 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3402 #endif
3403 }
3404
3405 /* A constant address in TO_RTX can have VOIDmode; we must not try
3406 to call force_reg for that case. Avoid that case. */
3407 if (GET_CODE (to_rtx) == MEM
3408 && GET_MODE (to_rtx) == BLKmode
3409 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3410 && bitsize
3411 && (bitpos % bitsize) == 0
3412 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3413 && alignment == GET_MODE_ALIGNMENT (mode1))
3414 {
3415 rtx temp = change_address (to_rtx, mode1,
3416 plus_constant (XEXP (to_rtx, 0),
3417 (bitpos /
3418 BITS_PER_UNIT)));
3419 if (GET_CODE (XEXP (temp, 0)) == REG)
3420 to_rtx = temp;
3421 else
3422 to_rtx = change_address (to_rtx, mode1,
3423 force_reg (GET_MODE (XEXP (temp, 0)),
3424 XEXP (temp, 0)));
3425 bitpos = 0;
3426 }
3427
3428 to_rtx = change_address (to_rtx, VOIDmode,
3429 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3430 force_reg (ptr_mode,
3431 offset_rtx)));
3432 }
3433
3434 if (volatilep)
3435 {
3436 if (GET_CODE (to_rtx) == MEM)
3437 {
3438 /* When the offset is zero, to_rtx is the address of the
3439 structure we are storing into, and hence may be shared.
3440 We must make a new MEM before setting the volatile bit. */
3441 if (offset == 0)
3442 to_rtx = copy_rtx (to_rtx);
3443
3444 MEM_VOLATILE_P (to_rtx) = 1;
3445 }
3446 #if 0 /* This was turned off because, when a field is volatile
3447 in an object which is not volatile, the object may be in a register,
3448 and then we would abort over here. */
3449 else
3450 abort ();
3451 #endif
3452 }
3453
3454 if (TREE_CODE (to) == COMPONENT_REF
3455 && TREE_READONLY (TREE_OPERAND (to, 1)))
3456 {
3457 if (offset == 0)
3458 to_rtx = copy_rtx (to_rtx);
3459
3460 RTX_UNCHANGING_P (to_rtx) = 1;
3461 }
3462
3463 /* Check the access. */
3464 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3465 {
3466 rtx to_addr;
3467 int size;
3468 int best_mode_size;
3469 enum machine_mode best_mode;
3470
3471 best_mode = get_best_mode (bitsize, bitpos,
3472 TYPE_ALIGN (TREE_TYPE (tem)),
3473 mode1, volatilep);
3474 if (best_mode == VOIDmode)
3475 best_mode = QImode;
3476
3477 best_mode_size = GET_MODE_BITSIZE (best_mode);
3478 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3479 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3480 size *= GET_MODE_SIZE (best_mode);
3481
3482 /* Check the access right of the pointer. */
3483 if (size)
3484 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3485 to_addr, Pmode,
3486 GEN_INT (size), TYPE_MODE (sizetype),
3487 GEN_INT (MEMORY_USE_WO),
3488 TYPE_MODE (integer_type_node));
3489 }
3490
3491 /* If this is a varying-length object, we must get the address of
3492 the source and do an explicit block move. */
3493 if (bitsize < 0)
3494 {
3495 unsigned int from_align;
3496 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3497 rtx inner_to_rtx
3498 = change_address (to_rtx, VOIDmode,
3499 plus_constant (XEXP (to_rtx, 0),
3500 bitpos / BITS_PER_UNIT));
3501
3502 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3503 MIN (alignment, from_align));
3504 free_temp_slots ();
3505 pop_temp_slots ();
3506 return to_rtx;
3507 }
3508 else
3509 {
3510 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3511 (want_value
3512 /* Spurious cast for HPUX compiler. */
3513 ? ((enum machine_mode)
3514 TYPE_MODE (TREE_TYPE (to)))
3515 : VOIDmode),
3516 unsignedp,
3517 alignment,
3518 int_size_in_bytes (TREE_TYPE (tem)),
3519 get_alias_set (to));
3520
3521 preserve_temp_slots (result);
3522 free_temp_slots ();
3523 pop_temp_slots ();
3524
3525 /* If the value is meaningful, convert RESULT to the proper mode.
3526 Otherwise, return nothing. */
3527 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3528 TYPE_MODE (TREE_TYPE (from)),
3529 result,
3530 TREE_UNSIGNED (TREE_TYPE (to)))
3531 : NULL_RTX);
3532 }
3533 }
3534
3535 /* If the rhs is a function call and its value is not an aggregate,
3536 call the function before we start to compute the lhs.
3537 This is needed for correct code for cases such as
3538 val = setjmp (buf) on machines where reference to val
3539 requires loading up part of an address in a separate insn.
3540
3541 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3542 since it might be a promoted variable where the zero- or sign- extension
3543 needs to be done. Handling this in the normal way is safe because no
3544 computation is done before the call. */
3545 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3546 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3547 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3548 && GET_CODE (DECL_RTL (to)) == REG))
3549 {
3550 rtx value;
3551
3552 push_temp_slots ();
3553 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3554 if (to_rtx == 0)
3555 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3556
3557 /* Handle calls that return values in multiple non-contiguous locations.
3558 The Irix 6 ABI has examples of this. */
3559 if (GET_CODE (to_rtx) == PARALLEL)
3560 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3561 TYPE_ALIGN (TREE_TYPE (from)));
3562 else if (GET_MODE (to_rtx) == BLKmode)
3563 emit_block_move (to_rtx, value, expr_size (from),
3564 TYPE_ALIGN (TREE_TYPE (from)));
3565 else
3566 {
3567 #ifdef POINTERS_EXTEND_UNSIGNED
3568 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3569 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3570 value = convert_memory_address (GET_MODE (to_rtx), value);
3571 #endif
3572 emit_move_insn (to_rtx, value);
3573 }
3574 preserve_temp_slots (to_rtx);
3575 free_temp_slots ();
3576 pop_temp_slots ();
3577 return want_value ? to_rtx : NULL_RTX;
3578 }
3579
3580 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3581 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3582
3583 if (to_rtx == 0)
3584 {
3585 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3586 if (GET_CODE (to_rtx) == MEM)
3587 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3588 }
3589
3590 /* Don't move directly into a return register. */
3591 if (TREE_CODE (to) == RESULT_DECL
3592 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3593 {
3594 rtx temp;
3595
3596 push_temp_slots ();
3597 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3598
3599 if (GET_CODE (to_rtx) == PARALLEL)
3600 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3601 TYPE_ALIGN (TREE_TYPE (from)));
3602 else
3603 emit_move_insn (to_rtx, temp);
3604
3605 preserve_temp_slots (to_rtx);
3606 free_temp_slots ();
3607 pop_temp_slots ();
3608 return want_value ? to_rtx : NULL_RTX;
3609 }
3610
3611 /* In case we are returning the contents of an object which overlaps
3612 the place the value is being stored, use a safe function when copying
3613 a value through a pointer into a structure value return block. */
3614 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3615 && current_function_returns_struct
3616 && !current_function_returns_pcc_struct)
3617 {
3618 rtx from_rtx, size;
3619
3620 push_temp_slots ();
3621 size = expr_size (from);
3622 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3623 EXPAND_MEMORY_USE_DONT);
3624
3625 /* Copy the rights of the bitmap. */
3626 if (current_function_check_memory_usage)
3627 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3628 XEXP (to_rtx, 0), Pmode,
3629 XEXP (from_rtx, 0), Pmode,
3630 convert_to_mode (TYPE_MODE (sizetype),
3631 size, TREE_UNSIGNED (sizetype)),
3632 TYPE_MODE (sizetype));
3633
3634 #ifdef TARGET_MEM_FUNCTIONS
3635 emit_library_call (memcpy_libfunc, 0,
3636 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3637 XEXP (from_rtx, 0), Pmode,
3638 convert_to_mode (TYPE_MODE (sizetype),
3639 size, TREE_UNSIGNED (sizetype)),
3640 TYPE_MODE (sizetype));
3641 #else
3642 emit_library_call (bcopy_libfunc, 0,
3643 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3644 XEXP (to_rtx, 0), Pmode,
3645 convert_to_mode (TYPE_MODE (integer_type_node),
3646 size, TREE_UNSIGNED (integer_type_node)),
3647 TYPE_MODE (integer_type_node));
3648 #endif
3649
3650 preserve_temp_slots (to_rtx);
3651 free_temp_slots ();
3652 pop_temp_slots ();
3653 return want_value ? to_rtx : NULL_RTX;
3654 }
3655
3656 /* Compute FROM and store the value in the rtx we got. */
3657
3658 push_temp_slots ();
3659 result = store_expr (from, to_rtx, want_value);
3660 preserve_temp_slots (result);
3661 free_temp_slots ();
3662 pop_temp_slots ();
3663 return want_value ? result : NULL_RTX;
3664 }
3665
3666 /* Generate code for computing expression EXP,
3667 and storing the value into TARGET.
3668 TARGET may contain a QUEUED rtx.
3669
3670 If WANT_VALUE is nonzero, return a copy of the value
3671 not in TARGET, so that we can be sure to use the proper
3672 value in a containing expression even if TARGET has something
3673 else stored in it. If possible, we copy the value through a pseudo
3674 and return that pseudo. Or, if the value is constant, we try to
3675 return the constant. In some cases, we return a pseudo
3676 copied *from* TARGET.
3677
3678 If the mode is BLKmode then we may return TARGET itself.
3679 It turns out that in BLKmode it doesn't cause a problem,
3680 because C has no operators that could combine two different
3681 assignments into the same BLKmode object with different values
3682 with no sequence point. Will other languages need this to
3683 be more thorough?
3684
3685 If WANT_VALUE is 0, we return NULL, to make sure
3686 to catch quickly any cases where the caller uses the value
3687 and fails to set WANT_VALUE. */
3688
3689 rtx
3690 store_expr (exp, target, want_value)
3691 register tree exp;
3692 register rtx target;
3693 int want_value;
3694 {
3695 register rtx temp;
3696 int dont_return_target = 0;
3697
3698 if (TREE_CODE (exp) == COMPOUND_EXPR)
3699 {
3700 /* Perform first part of compound expression, then assign from second
3701 part. */
3702 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3703 emit_queue ();
3704 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3705 }
3706 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3707 {
3708 /* For conditional expression, get safe form of the target. Then
3709 test the condition, doing the appropriate assignment on either
3710 side. This avoids the creation of unnecessary temporaries.
3711 For non-BLKmode, it is more efficient not to do this. */
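/* For instance, given roughly

     struct S s, a, b;
     s = flag ? a : b;

   with S in BLKmode, this emits a test of FLAG and a block store into S
   on each arm, rather than copying both arms into a temporary first.  */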
3712
3713 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3714
3715 emit_queue ();
3716 target = protect_from_queue (target, 1);
3717
3718 do_pending_stack_adjust ();
3719 NO_DEFER_POP;
3720 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3721 start_cleanup_deferral ();
3722 store_expr (TREE_OPERAND (exp, 1), target, 0);
3723 end_cleanup_deferral ();
3724 emit_queue ();
3725 emit_jump_insn (gen_jump (lab2));
3726 emit_barrier ();
3727 emit_label (lab1);
3728 start_cleanup_deferral ();
3729 store_expr (TREE_OPERAND (exp, 2), target, 0);
3730 end_cleanup_deferral ();
3731 emit_queue ();
3732 emit_label (lab2);
3733 OK_DEFER_POP;
3734
3735 return want_value ? target : NULL_RTX;
3736 }
3737 else if (queued_subexp_p (target))
3738 /* If target contains a postincrement, let's not risk
3739 using it as the place to generate the rhs. */
3740 {
3741 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3742 {
3743 /* Expand EXP into a new pseudo. */
3744 temp = gen_reg_rtx (GET_MODE (target));
3745 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3746 }
3747 else
3748 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3749
3750 /* If target is volatile, ANSI requires accessing the value
3751 *from* the target, if it is accessed. So make that happen.
3752 In no case return the target itself. */
3753 if (! MEM_VOLATILE_P (target) && want_value)
3754 dont_return_target = 1;
3755 }
3756 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3757 && GET_MODE (target) != BLKmode)
3758 /* If target is in memory and caller wants value in a register instead,
3759 arrange that. Pass TARGET as target for expand_expr so that,
3760 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3761 We know expand_expr will not use the target in that case.
3762 Don't do this if TARGET is volatile because we are supposed
3763 to write it and then read it. */
3764 {
3765 temp = expand_expr (exp, target, GET_MODE (target), 0);
3766 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3767 temp = copy_to_reg (temp);
3768 dont_return_target = 1;
3769 }
3770 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3771 /* If this is a scalar in a register that is stored in a wider mode
3772 than the declared mode, compute the result into its declared mode
3773 and then convert to the wider mode. Our value is the computed
3774 expression. */
3775 {
3776 /* If we don't want a value, we can do the conversion inside EXP,
3777 which will often result in some optimizations. Do the conversion
3778 in two steps: first change the signedness, if needed, then
3779 the extend. But don't do this if the type of EXP is a subtype
3780 of something else since then the conversion might involve
3781 more than just converting modes. */
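/* For example, if a `signed char' variable is promoted into an SImode
   register, an assignment like

     c = (signed char) i;

   can often have the sign change and the widening simplified by fold at
   the tree level, rather than emitted as separate conversion insns.  */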
3782 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3783 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3784 {
3785 if (TREE_UNSIGNED (TREE_TYPE (exp))
3786 != SUBREG_PROMOTED_UNSIGNED_P (target))
3787 exp
3788 = convert
3789 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3790 TREE_TYPE (exp)),
3791 exp);
3792
3793 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3794 SUBREG_PROMOTED_UNSIGNED_P (target)),
3795 exp);
3796 }
3797
3798 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3799
3800 /* If TEMP is a volatile MEM and we want a result value, make
3801 the access now so it gets done only once. Likewise if
3802 it contains TARGET. */
3803 if (GET_CODE (temp) == MEM && want_value
3804 && (MEM_VOLATILE_P (temp)
3805 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3806 temp = copy_to_reg (temp);
3807
3808 /* If TEMP is a VOIDmode constant, use convert_modes to make
3809 sure that we properly convert it. */
3810 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3811 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3812 TYPE_MODE (TREE_TYPE (exp)), temp,
3813 SUBREG_PROMOTED_UNSIGNED_P (target));
3814
3815 convert_move (SUBREG_REG (target), temp,
3816 SUBREG_PROMOTED_UNSIGNED_P (target));
3817
3818 /* If we promoted a constant, change the mode back down to match
3819 target. Otherwise, the caller might get confused by a result whose
3820 mode is larger than expected. */
3821
3822 if (want_value && GET_MODE (temp) != GET_MODE (target)
3823 && GET_MODE (temp) != VOIDmode)
3824 {
3825 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3826 SUBREG_PROMOTED_VAR_P (temp) = 1;
3827 SUBREG_PROMOTED_UNSIGNED_P (temp)
3828 = SUBREG_PROMOTED_UNSIGNED_P (target);
3829 }
3830
3831 return want_value ? temp : NULL_RTX;
3832 }
3833 else
3834 {
3835 temp = expand_expr (exp, target, GET_MODE (target), 0);
3836 /* Return TARGET if it's a specified hardware register.
3837 If TARGET is a volatile mem ref, either return TARGET
3838 or return a reg copied *from* TARGET; ANSI requires this.
3839
3840 Otherwise, if TEMP is not TARGET, return TEMP
3841 if it is constant (for efficiency),
3842 or if we really want the correct value. */
3843 if (!(target && GET_CODE (target) == REG
3844 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3845 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3846 && ! rtx_equal_p (temp, target)
3847 && (CONSTANT_P (temp) || want_value))
3848 dont_return_target = 1;
3849 }
3850
3851 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3852 the same as that of TARGET, adjust the constant. This is needed, for
3853 example, in case it is a CONST_DOUBLE and we want only a word-sized
3854 value. */
3855 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3856 && TREE_CODE (exp) != ERROR_MARK
3857 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3858 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3859 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3860
3861 if (current_function_check_memory_usage
3862 && GET_CODE (target) == MEM
3863 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3864 {
3865 if (GET_CODE (temp) == MEM)
3866 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3867 XEXP (target, 0), Pmode,
3868 XEXP (temp, 0), Pmode,
3869 expr_size (exp), TYPE_MODE (sizetype));
3870 else
3871 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3872 XEXP (target, 0), Pmode,
3873 expr_size (exp), TYPE_MODE (sizetype),
3874 GEN_INT (MEMORY_USE_WO),
3875 TYPE_MODE (integer_type_node));
3876 }
3877
3878 /* If value was not generated in the target, store it there.
3879 Convert the value to TARGET's type first if necessary. */
3880 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3881 one or both of them are volatile memory refs, we have to distinguish
3882 two cases:
3883 - expand_expr has used TARGET. In this case, we must not generate
3884 another copy. This can be detected by TEMP and TARGET being equal
3885 according to == .
3886 - expand_expr has not used TARGET - that means that the source just
3887 happens to have the same RTX form. Since temp will have been created
3888 by expand_expr, it will compare unequal according to == .
3889 We must generate a copy in this case, to reach the correct number
3890 of volatile memory references. */
3891
3892 if ((! rtx_equal_p (temp, target)
3893 || (temp != target && (side_effects_p (temp)
3894 || side_effects_p (target))))
3895 && TREE_CODE (exp) != ERROR_MARK)
3896 {
3897 target = protect_from_queue (target, 1);
3898 if (GET_MODE (temp) != GET_MODE (target)
3899 && GET_MODE (temp) != VOIDmode)
3900 {
3901 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3902 if (dont_return_target)
3903 {
3904 /* In this case, we will return TEMP,
3905 so make sure it has the proper mode.
3906 But don't forget to store the value into TARGET. */
3907 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3908 emit_move_insn (target, temp);
3909 }
3910 else
3911 convert_move (target, temp, unsignedp);
3912 }
3913
3914 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3915 {
3916 /* Handle copying a string constant into an array.
3917 The string constant may be shorter than the array.
3918 So copy just the string's actual length, and clear the rest. */
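/* For example, an initialization along the lines of

     char buf[8] = "hi";

   copies the three bytes of the string constant (including the trailing
   null) and then clears the remaining five bytes of BUF.  */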
3919 rtx size;
3920 rtx addr;
3921
3922 /* Get the size of the data type of the string,
3923 which is actually the size of the target. */
3924 size = expr_size (exp);
3925 if (GET_CODE (size) == CONST_INT
3926 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3927 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3928 else
3929 {
3930 /* Compute the size of the data to copy from the string. */
3931 tree copy_size
3932 = size_binop (MIN_EXPR,
3933 make_tree (sizetype, size),
3934 size_int (TREE_STRING_LENGTH (exp)));
3935 int align = TYPE_ALIGN (TREE_TYPE (exp));
3936 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3937 VOIDmode, 0);
3938 rtx label = 0;
3939
3940 /* Copy that much. */
3941 emit_block_move (target, temp, copy_size_rtx,
3942 TYPE_ALIGN (TREE_TYPE (exp)));
3943
3944 /* Figure out how much is left in TARGET that we have to clear.
3945 Do all calculations in ptr_mode. */
3946
3947 addr = XEXP (target, 0);
3948 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3949
3950 if (GET_CODE (copy_size_rtx) == CONST_INT)
3951 {
3952 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3953 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3954 align = MIN (align, (BITS_PER_UNIT
3955 * (INTVAL (copy_size_rtx)
3956 & - INTVAL (copy_size_rtx))));
3957 }
3958 else
3959 {
3960 addr = force_reg (ptr_mode, addr);
3961 addr = expand_binop (ptr_mode, add_optab, addr,
3962 copy_size_rtx, NULL_RTX, 0,
3963 OPTAB_LIB_WIDEN);
3964
3965 size = expand_binop (ptr_mode, sub_optab, size,
3966 copy_size_rtx, NULL_RTX, 0,
3967 OPTAB_LIB_WIDEN);
3968
3969 align = BITS_PER_UNIT;
3970 label = gen_label_rtx ();
3971 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3972 GET_MODE (size), 0, 0, label);
3973 }
3974 align = MIN (align, expr_align (copy_size));
3975
3976 if (size != const0_rtx)
3977 {
3978 /* Be sure we can write on ADDR. */
3979 if (current_function_check_memory_usage)
3980 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3981 addr, Pmode,
3982 size, TYPE_MODE (sizetype),
3983 GEN_INT (MEMORY_USE_WO),
3984 TYPE_MODE (integer_type_node));
3985 clear_storage (gen_rtx_MEM (BLKmode, addr), size, align);
3986 }
3987
3988 if (label)
3989 emit_label (label);
3990 }
3991 }
3992 /* Handle calls that return values in multiple non-contiguous locations.
3993 The Irix 6 ABI has examples of this. */
3994 else if (GET_CODE (target) == PARALLEL)
3995 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3996 TYPE_ALIGN (TREE_TYPE (exp)));
3997 else if (GET_MODE (temp) == BLKmode)
3998 emit_block_move (target, temp, expr_size (exp),
3999 TYPE_ALIGN (TREE_TYPE (exp)));
4000 else
4001 emit_move_insn (target, temp);
4002 }
4003
4004 /* If we don't want a value, return NULL_RTX. */
4005 if (! want_value)
4006 return NULL_RTX;
4007
4008 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4009 ??? The latter test doesn't seem to make sense. */
4010 else if (dont_return_target && GET_CODE (temp) != MEM)
4011 return temp;
4012
4013 /* Otherwise copy TARGET into a pseudo, unless TARGET is BLKmode or a hard register, in which case return TARGET itself below. */
4014 else if (want_value && GET_MODE (target) != BLKmode
4015 && ! (GET_CODE (target) == REG
4016 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4017 return copy_to_reg (target);
4018
4019 else
4020 return target;
4021 }
4022 \f
4023 /* Return 1 if EXP just contains zeros. */
4024
4025 static int
4026 is_zeros_p (exp)
4027 tree exp;
4028 {
4029 tree elt;
4030
4031 switch (TREE_CODE (exp))
4032 {
4033 case CONVERT_EXPR:
4034 case NOP_EXPR:
4035 case NON_LVALUE_EXPR:
4036 return is_zeros_p (TREE_OPERAND (exp, 0));
4037
4038 case INTEGER_CST:
4039 return integer_zerop (exp);
4040
4041 case COMPLEX_CST:
4042 return
4043 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4044
4045 case REAL_CST:
4046 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4047
4048 case CONSTRUCTOR:
4049 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4050 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4051 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4052 if (! is_zeros_p (TREE_VALUE (elt)))
4053 return 0;
4054
4055 return 1;
4056
4057 default:
4058 return 0;
4059 }
4060 }
4061
4062 /* Return 1 if EXP contains mostly (3/4) zeros. */
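/* For example, a CONSTRUCTOR for { 0, 0, 0, 5 } counts as mostly zero
   (three of its four elements are zero), while one for { 0, 7, 0, 5 }
   does not.  */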
4063
4064 static int
4065 mostly_zeros_p (exp)
4066 tree exp;
4067 {
4068 if (TREE_CODE (exp) == CONSTRUCTOR)
4069 {
4070 int elts = 0, zeros = 0;
4071 tree elt = CONSTRUCTOR_ELTS (exp);
4072 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4073 {
4074 /* If there are no ranges of true bits, it is all zero. */
4075 return elt == NULL_TREE;
4076 }
4077 for (; elt; elt = TREE_CHAIN (elt))
4078 {
4079 /* We do not handle the case where the index is a RANGE_EXPR,
4080 so the statistic will be somewhat inaccurate.
4081 We do make a more accurate count in store_constructor itself,
4082 so, since this function is only used for nested array elements,
4083 this should be close enough. */
4084 if (mostly_zeros_p (TREE_VALUE (elt)))
4085 zeros++;
4086 elts++;
4087 }
4088
4089 return 4 * zeros >= 3 * elts;
4090 }
4091
4092 return is_zeros_p (exp);
4093 }
4094 \f
4095 /* Helper function for store_constructor.
4096 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4097 TYPE is the type of the CONSTRUCTOR, not the element type.
4098 ALIGN and CLEARED are as for store_constructor.
4099
4100 This provides a recursive shortcut back to store_constructor when it isn't
4101 necessary to go through store_field. This is so that we can pass through
4102 the cleared field to let store_constructor know that we may not have to
4103 clear a substructure if the outer structure has already been cleared. */
4104
4105 static void
4106 store_constructor_field (target, bitsize, bitpos,
4107 mode, exp, type, align, cleared)
4108 rtx target;
4109 unsigned HOST_WIDE_INT bitsize;
4110 HOST_WIDE_INT bitpos;
4111 enum machine_mode mode;
4112 tree exp, type;
4113 unsigned int align;
4114 int cleared;
4115 {
4116 if (TREE_CODE (exp) == CONSTRUCTOR
4117 && bitpos % BITS_PER_UNIT == 0
4118 /* If we have a non-zero bitpos for a register target, then we just
4119 let store_field do the bitfield handling. This is unlikely to
4120 generate unnecessary clear instructions anyway. */
4121 && (bitpos == 0 || GET_CODE (target) == MEM))
4122 {
4123 if (bitpos != 0)
4124 target
4125 = change_address (target,
4126 GET_MODE (target) == BLKmode
4127 || 0 != (bitpos
4128 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4129 ? BLKmode : VOIDmode,
4130 plus_constant (XEXP (target, 0),
4131 bitpos / BITS_PER_UNIT));
4132 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4133 }
4134 else
4135 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4136 int_size_in_bytes (type), 0);
4137 }
4138
4139 /* Store the value of constructor EXP into the rtx TARGET.
4140 TARGET is either a REG or a MEM.
4141 ALIGN is the maximum known alignment for TARGET.
4142 CLEARED is true if TARGET is known to have been zero'd.
4143 SIZE is the number of bytes of TARGET we are allowed to modify: this
4144 may not be the same as the size of EXP if we are assigning to a field
4145 which has been packed to exclude padding bits. */
4146
4147 static void
4148 store_constructor (exp, target, align, cleared, size)
4149 tree exp;
4150 rtx target;
4151 unsigned int align;
4152 int cleared;
4153 HOST_WIDE_INT size;
4154 {
4155 tree type = TREE_TYPE (exp);
4156 #ifdef WORD_REGISTER_OPERATIONS
4157 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4158 #endif
4159
4160 /* We know our target cannot conflict, since safe_from_p has been called. */
4161 #if 0
4162 /* Don't try copying piece by piece into a hard register
4163 since that is vulnerable to being clobbered by EXP.
4164 Instead, construct in a pseudo register and then copy it all. */
4165 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4166 {
4167 rtx temp = gen_reg_rtx (GET_MODE (target));
4168 store_constructor (exp, temp, align, cleared, size);
4169 emit_move_insn (target, temp);
4170 return;
4171 }
4172 #endif
4173
4174 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4175 || TREE_CODE (type) == QUAL_UNION_TYPE)
4176 {
4177 register tree elt;
4178
4179 /* Inform later passes that the whole union value is dead. */
4180 if ((TREE_CODE (type) == UNION_TYPE
4181 || TREE_CODE (type) == QUAL_UNION_TYPE)
4182 && ! cleared)
4183 {
4184 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4185
4186 /* If the constructor is empty, clear the union. */
4187 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4188 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4189 }
4190
4191 /* If we are building a static constructor into a register,
4192 set the initial value as zero so we can fold the value into
4193 a constant. But if more than one register is involved,
4194 this probably loses. */
4195 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4196 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4197 {
4198 if (! cleared)
4199 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4200
4201 cleared = 1;
4202 }
4203
4204 /* If the constructor has fewer fields than the structure
4205 or if we are initializing the structure to mostly zeros,
4206 clear the whole structure first. */
4207 else if (size > 0
4208 && ((list_length (CONSTRUCTOR_ELTS (exp))
4209 != fields_length (type))
4210 || mostly_zeros_p (exp)))
4211 {
4212 if (! cleared)
4213 clear_storage (target, GEN_INT (size), align);
4214
4215 cleared = 1;
4216 }
4217 else if (! cleared)
4218 /* Inform later passes that the old value is dead. */
4219 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4220
4221 /* Store each element of the constructor into
4222 the corresponding field of TARGET. */
4223
4224 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4225 {
4226 register tree field = TREE_PURPOSE (elt);
4227 #ifdef WORD_REGISTER_OPERATIONS
4228 tree value = TREE_VALUE (elt);
4229 #endif
4230 register enum machine_mode mode;
4231 HOST_WIDE_INT bitsize;
4232 HOST_WIDE_INT bitpos = 0;
4233 int unsignedp;
4234 tree offset;
4235 rtx to_rtx = target;
4236
4237 /* Just ignore missing fields.
4238 We cleared the whole structure, above,
4239 if any fields are missing. */
4240 if (field == 0)
4241 continue;
4242
4243 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4244 continue;
4245
4246 if (host_integerp (DECL_SIZE (field), 1))
4247 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4248 else
4249 bitsize = -1;
4250
4251 unsignedp = TREE_UNSIGNED (field);
4252 mode = DECL_MODE (field);
4253 if (DECL_BIT_FIELD (field))
4254 mode = VOIDmode;
4255
4256 offset = DECL_FIELD_OFFSET (field);
4257 if (host_integerp (offset, 0)
4258 && host_integerp (bit_position (field), 0))
4259 {
4260 bitpos = int_bit_position (field);
4261 offset = 0;
4262 }
4263 else
4264 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4265
4266 if (offset)
4267 {
4268 rtx offset_rtx;
4269
4270 if (contains_placeholder_p (offset))
4271 offset = build (WITH_RECORD_EXPR, sizetype,
4272 offset, make_tree (TREE_TYPE (exp), target));
4273
4274 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4275 if (GET_CODE (to_rtx) != MEM)
4276 abort ();
4277
4278 if (GET_MODE (offset_rtx) != ptr_mode)
4279 {
4280 #ifdef POINTERS_EXTEND_UNSIGNED
4281 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4282 #else
4283 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4284 #endif
4285 }
4286
4287 to_rtx
4288 = change_address (to_rtx, VOIDmode,
4289 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4290 force_reg (ptr_mode,
4291 offset_rtx)));
4292 align = DECL_OFFSET_ALIGN (field);
4293 }
4294
4295 if (TREE_READONLY (field))
4296 {
4297 if (GET_CODE (to_rtx) == MEM)
4298 to_rtx = copy_rtx (to_rtx);
4299
4300 RTX_UNCHANGING_P (to_rtx) = 1;
4301 }
4302
4303 #ifdef WORD_REGISTER_OPERATIONS
4304 /* If this initializes a field that is smaller than a word, at the
4305 start of a word, try to widen it to a full word.
4306 This special case allows us to output C++ member function
4307 initializations in a form that the optimizers can understand. */
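/* For example, with 32-bit words, a constant stored into a 16-bit field
   that starts a word of a register target is widened here to a full
   word (and shifted left on big-endian machines), so that the optimizers
   see a plain word store instead of a bit-field insertion.  */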
4308 if (GET_CODE (target) == REG
4309 && bitsize < BITS_PER_WORD
4310 && bitpos % BITS_PER_WORD == 0
4311 && GET_MODE_CLASS (mode) == MODE_INT
4312 && TREE_CODE (value) == INTEGER_CST
4313 && exp_size >= 0
4314 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4315 {
4316 tree type = TREE_TYPE (value);
4317 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4318 {
4319 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4320 value = convert (type, value);
4321 }
4322 if (BYTES_BIG_ENDIAN)
4323 value
4324 = fold (build (LSHIFT_EXPR, type, value,
4325 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4326 bitsize = BITS_PER_WORD;
4327 mode = word_mode;
4328 }
4329 #endif
4330 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4331 TREE_VALUE (elt), type, align, cleared);
4332 }
4333 }
4334 else if (TREE_CODE (type) == ARRAY_TYPE)
4335 {
4336 register tree elt;
4337 register int i;
4338 int need_to_clear;
4339 tree domain = TYPE_DOMAIN (type);
4340 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4341 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4342 tree elttype = TREE_TYPE (type);
4343
4344 /* If the constructor has fewer elements than the array,
4345 clear the whole array first. Similarly if this is
4346 a static constructor of a non-BLKmode object. */
4347 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4348 need_to_clear = 1;
4349 else
4350 {
4351 HOST_WIDE_INT count = 0, zero_count = 0;
4352 need_to_clear = 0;
4353 /* This loop is a more accurate version of the loop in
4354 mostly_zeros_p (it handles RANGE_EXPR in an index).
4355 It is also needed to check for missing elements. */
4356 for (elt = CONSTRUCTOR_ELTS (exp);
4357 elt != NULL_TREE;
4358 elt = TREE_CHAIN (elt))
4359 {
4360 tree index = TREE_PURPOSE (elt);
4361 HOST_WIDE_INT this_node_count;
4362
4363 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4364 {
4365 tree lo_index = TREE_OPERAND (index, 0);
4366 tree hi_index = TREE_OPERAND (index, 1);
4367
4368 if (! host_integerp (lo_index, 1)
4369 || ! host_integerp (hi_index, 1))
4370 {
4371 need_to_clear = 1;
4372 break;
4373 }
4374
4375 this_node_count = (tree_low_cst (hi_index, 1)
4376 - tree_low_cst (lo_index, 1) + 1);
4377 }
4378 else
4379 this_node_count = 1;
4380 count += this_node_count;
4381 if (mostly_zeros_p (TREE_VALUE (elt)))
4382 zero_count += this_node_count;
4383 }
4384 /* Clear the entire array first if there are any missing elements,
4385 or if the incidence of zero elements is >= 75%. */
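/* For example, for

     int a[8] = { 1 };

   seven elements are missing, so the whole array is cleared first and
   only the single nonzero element is stored explicitly.  */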
4386 if (count < maxelt - minelt + 1
4387 || 4 * zero_count >= 3 * count)
4388 need_to_clear = 1;
4389 }
4390 if (need_to_clear && size > 0)
4391 {
4392 if (! cleared)
4393 clear_storage (target, GEN_INT (size), align);
4394 cleared = 1;
4395 }
4396 else
4397 /* Inform later passes that the old value is dead. */
4398 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4399
4400 /* Store each element of the constructor into
4401 the corresponding element of TARGET, determined
4402 by counting the elements. */
4403 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4404 elt;
4405 elt = TREE_CHAIN (elt), i++)
4406 {
4407 register enum machine_mode mode;
4408 HOST_WIDE_INT bitsize;
4409 HOST_WIDE_INT bitpos;
4410 int unsignedp;
4411 tree value = TREE_VALUE (elt);
4412 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4413 tree index = TREE_PURPOSE (elt);
4414 rtx xtarget = target;
4415
4416 if (cleared && is_zeros_p (value))
4417 continue;
4418
4419 unsignedp = TREE_UNSIGNED (elttype);
4420 mode = TYPE_MODE (elttype);
4421 if (mode == BLKmode)
4422 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4423 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4424 : -1);
4425 else
4426 bitsize = GET_MODE_BITSIZE (mode);
4427
4428 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4429 {
4430 tree lo_index = TREE_OPERAND (index, 0);
4431 tree hi_index = TREE_OPERAND (index, 1);
4432 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4433 struct nesting *loop;
4434 HOST_WIDE_INT lo, hi, count;
4435 tree position;
4436
4437 /* If the range is constant and "small", unroll the loop. */
4438 if (host_integerp (lo_index, 0)
4439 && host_integerp (hi_index, 0)
4440 && (lo = tree_low_cst (lo_index, 0),
4441 hi = tree_low_cst (hi_index, 0),
4442 count = hi - lo + 1,
4443 (GET_CODE (target) != MEM
4444 || count <= 2
4445 || (host_integerp (TYPE_SIZE (elttype), 1)
4446 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4447 <= 40 * 8)))))
4448 {
4449 lo -= minelt; hi -= minelt;
4450 for (; lo <= hi; lo++)
4451 {
4452 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4453 store_constructor_field (target, bitsize, bitpos, mode,
4454 value, type, align, cleared);
4455 }
4456 }
4457 else
4458 {
4459 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4460 loop_top = gen_label_rtx ();
4461 loop_end = gen_label_rtx ();
4462
4463 unsignedp = TREE_UNSIGNED (domain);
4464
4465 index = build_decl (VAR_DECL, NULL_TREE, domain);
4466
4467 DECL_RTL (index) = index_r
4468 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4469 &unsignedp, 0));
4470
4471 if (TREE_CODE (value) == SAVE_EXPR
4472 && SAVE_EXPR_RTL (value) == 0)
4473 {
4474 /* Make sure value gets expanded once before the
4475 loop. */
4476 expand_expr (value, const0_rtx, VOIDmode, 0);
4477 emit_queue ();
4478 }
4479 store_expr (lo_index, index_r, 0);
4480 loop = expand_start_loop (0);
4481
4482 /* Assign value to element index. */
4483 position
4484 = convert (ssizetype,
4485 fold (build (MINUS_EXPR, TREE_TYPE (index),
4486 index, TYPE_MIN_VALUE (domain))));
4487 position = size_binop (MULT_EXPR, position,
4488 convert (ssizetype,
4489 TYPE_SIZE_UNIT (elttype)));
4490
4491 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4492 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4493 xtarget = change_address (target, mode, addr);
4494 if (TREE_CODE (value) == CONSTRUCTOR)
4495 store_constructor (value, xtarget, align, cleared,
4496 bitsize / BITS_PER_UNIT);
4497 else
4498 store_expr (value, xtarget, 0);
4499
4500 expand_exit_loop_if_false (loop,
4501 build (LT_EXPR, integer_type_node,
4502 index, hi_index));
4503
4504 expand_increment (build (PREINCREMENT_EXPR,
4505 TREE_TYPE (index),
4506 index, integer_one_node), 0, 0);
4507 expand_end_loop ();
4508 emit_label (loop_end);
4509 }
4510 }
4511 else if ((index != 0 && ! host_integerp (index, 0))
4512 || ! host_integerp (TYPE_SIZE (elttype), 1))
4513 {
4514 rtx pos_rtx, addr;
4515 tree position;
4516
4517 if (index == 0)
4518 index = ssize_int (i);
4519
4520 if (minelt)
4521 index = convert (ssizetype,
4522 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4523 TYPE_MIN_VALUE (domain))));
4524
4525 position = size_binop (MULT_EXPR, index,
4526 convert (ssizetype,
4527 TYPE_SIZE_UNIT (elttype)));
4528 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4529 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4530 xtarget = change_address (target, mode, addr);
4531 store_expr (value, xtarget, 0);
4532 }
4533 else
4534 {
4535 if (index != 0)
4536 bitpos = ((tree_low_cst (index, 0) - minelt)
4537 * tree_low_cst (TYPE_SIZE (elttype), 1));
4538 else
4539 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4540
4541 store_constructor_field (target, bitsize, bitpos, mode, value,
4542 type, align, cleared);
4543 }
4544 }
4545 }
4546
4547 /* Set constructor assignments. */
4548 else if (TREE_CODE (type) == SET_TYPE)
4549 {
4550 tree elt = CONSTRUCTOR_ELTS (exp);
4551 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4552 tree domain = TYPE_DOMAIN (type);
4553 tree domain_min, domain_max, bitlength;
4554
4555 /* The default implementation strategy is to extract the constant
4556 parts of the constructor, use that to initialize the target,
4557 and then "or" in whatever non-constant ranges we need in addition.
4558
4559 If a large set is all zero or all ones, it is
4560 probably better to set it using memset (if available) or bzero.
4561 Also, if a large set has just a single range, it may also be
4562 better to first clear the whole set (using
4563 bzero/memset) and then set the bits we want. */
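/* For instance, a set constructor such as [1, 3..5] (in Pascal-like
   notation) with constant bounds would normally be handled here by
   building the constant bit pattern for bits 1 and 3 through 5 and
   storing it a word at a time.  */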
4564
4565 /* Check for all zeros. */
4566 if (elt == NULL_TREE && size > 0)
4567 {
4568 if (!cleared)
4569 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4570 return;
4571 }
4572
4573 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4574 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4575 bitlength = size_binop (PLUS_EXPR,
4576 size_diffop (domain_max, domain_min),
4577 ssize_int (1));
4578
4579 nbits = tree_low_cst (bitlength, 1);
4580
4581 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4582 are "complicated" (more than one range), initialize (the
4583 constant parts) by copying from a constant. */
4584 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4585 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4586 {
4587 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4588 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4589 char *bit_buffer = (char *) alloca (nbits);
4590 HOST_WIDE_INT word = 0;
4591 unsigned int bit_pos = 0;
4592 unsigned int ibit = 0;
4593 unsigned int offset = 0; /* In bytes from beginning of set. */
4594
4595 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4596 for (;;)
4597 {
4598 if (bit_buffer[ibit])
4599 {
4600 if (BYTES_BIG_ENDIAN)
4601 word |= (1 << (set_word_size - 1 - bit_pos));
4602 else
4603 word |= 1 << bit_pos;
4604 }
4605
4606 bit_pos++; ibit++;
4607 if (bit_pos >= set_word_size || ibit == nbits)
4608 {
4609 if (word != 0 || ! cleared)
4610 {
4611 rtx datum = GEN_INT (word);
4612 rtx to_rtx;
4613
4614 /* The assumption here is that it is safe to use
4615 XEXP if the set is multi-word, but not if
4616 it's single-word. */
4617 if (GET_CODE (target) == MEM)
4618 {
4619 to_rtx = plus_constant (XEXP (target, 0), offset);
4620 to_rtx = change_address (target, mode, to_rtx);
4621 }
4622 else if (offset == 0)
4623 to_rtx = target;
4624 else
4625 abort ();
4626 emit_move_insn (to_rtx, datum);
4627 }
4628
4629 if (ibit == nbits)
4630 break;
4631 word = 0;
4632 bit_pos = 0;
4633 offset += set_word_size / BITS_PER_UNIT;
4634 }
4635 }
4636 }
4637 else if (!cleared)
4638 /* Don't bother clearing storage if the set is all ones. */
4639 if (TREE_CHAIN (elt) != NULL_TREE
4640 || (TREE_PURPOSE (elt) == NULL_TREE
4641 ? nbits != 1
4642 : ( ! host_integerp (TREE_VALUE (elt), 0)
4643 || ! host_integerp (TREE_PURPOSE (elt), 0)
4644 || (tree_low_cst (TREE_VALUE (elt), 0)
4645 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4646 != (HOST_WIDE_INT) nbits))))
4647 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4648
4649 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4650 {
4651 /* start of range of element or NULL */
4652 tree startbit = TREE_PURPOSE (elt);
4653 /* end of range of element, or element value */
4654 tree endbit = TREE_VALUE (elt);
4655 #ifdef TARGET_MEM_FUNCTIONS
4656 HOST_WIDE_INT startb, endb;
4657 #endif
4658 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4659
4660 bitlength_rtx = expand_expr (bitlength,
4661 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4662
4663 /* handle non-range tuple element like [ expr ] */
4664 if (startbit == NULL_TREE)
4665 {
4666 startbit = save_expr (endbit);
4667 endbit = startbit;
4668 }
4669
4670 startbit = convert (sizetype, startbit);
4671 endbit = convert (sizetype, endbit);
4672 if (! integer_zerop (domain_min))
4673 {
4674 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4675 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4676 }
4677 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4678 EXPAND_CONST_ADDRESS);
4679 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4680 EXPAND_CONST_ADDRESS);
4681
4682 if (REG_P (target))
4683 {
4684 targetx = assign_stack_temp (GET_MODE (target),
4685 GET_MODE_SIZE (GET_MODE (target)),
4686 0);
4687 emit_move_insn (targetx, target);
4688 }
4689
4690 else if (GET_CODE (target) == MEM)
4691 targetx = target;
4692 else
4693 abort ();
4694
4695 #ifdef TARGET_MEM_FUNCTIONS
4696 /* Optimization: If startbit and endbit are
4697 constants divisible by BITS_PER_UNIT,
4698 call memset instead. */
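/* E.g. a range covering bits 8 through 31 starts and ends on byte
   boundaries, so it can be set by filling three whole bytes with
   memset rather than calling __setbits.  */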
4699 if (TREE_CODE (startbit) == INTEGER_CST
4700 && TREE_CODE (endbit) == INTEGER_CST
4701 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4702 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4703 {
4704 emit_library_call (memset_libfunc, 0,
4705 VOIDmode, 3,
4706 plus_constant (XEXP (targetx, 0),
4707 startb / BITS_PER_UNIT),
4708 Pmode,
4709 constm1_rtx, TYPE_MODE (integer_type_node),
4710 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4711 TYPE_MODE (sizetype));
4712 }
4713 else
4714 #endif
4715 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4716 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4717 bitlength_rtx, TYPE_MODE (sizetype),
4718 startbit_rtx, TYPE_MODE (sizetype),
4719 endbit_rtx, TYPE_MODE (sizetype));
4720
4721 if (REG_P (target))
4722 emit_move_insn (target, targetx);
4723 }
4724 }
4725
4726 else
4727 abort ();
4728 }
4729
4730 /* Store the value of EXP (an expression tree)
4731 into a subfield of TARGET which has mode MODE and occupies
4732 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4733 If MODE is VOIDmode, it means that we are storing into a bit-field.
4734
4735 If VALUE_MODE is VOIDmode, return nothing in particular.
4736 UNSIGNEDP is not used in this case.
4737
4738 Otherwise, return an rtx for the value stored. This rtx
4739 has mode VALUE_MODE if that is convenient to do.
4740 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4741
4742 ALIGN is the alignment that TARGET is known to have.
4743 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4744
4745 ALIAS_SET is the alias set for the destination. This value will
4746 (in general) be different from that for TARGET, since TARGET is a
4747 reference to the containing structure. */
4748
4749 static rtx
4750 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4751 unsignedp, align, total_size, alias_set)
4752 rtx target;
4753 HOST_WIDE_INT bitsize;
4754 HOST_WIDE_INT bitpos;
4755 enum machine_mode mode;
4756 tree exp;
4757 enum machine_mode value_mode;
4758 int unsignedp;
4759 unsigned int align;
4760 HOST_WIDE_INT total_size;
4761 int alias_set;
4762 {
4763 HOST_WIDE_INT width_mask = 0;
4764
4765 if (TREE_CODE (exp) == ERROR_MARK)
4766 return const0_rtx;
4767
4768 if (bitsize < HOST_BITS_PER_WIDE_INT)
4769 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4770
4771 /* If we are storing into an unaligned field of an aligned union that is
4772 in a register, we may have the mode of TARGET being an integer mode but
4773 MODE == BLKmode. In that case, get an aligned object whose size and
4774 alignment are the same as TARGET and store TARGET into it (we can avoid
4775 the store if the field being stored is the entire width of TARGET). Then
4776 call ourselves recursively to store the field into a BLKmode version of
4777 that object. Finally, load from the object into TARGET. This is not
4778 very efficient in general, but should only be slightly more expensive
4779 than the otherwise-required unaligned accesses. Perhaps this can be
4780 cleaned up later. */
4781
4782 if (mode == BLKmode
4783 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4784 {
4785 rtx object = assign_stack_temp (GET_MODE (target),
4786 GET_MODE_SIZE (GET_MODE (target)), 0);
4787 rtx blk_object = copy_rtx (object);
4788
4789 MEM_SET_IN_STRUCT_P (object, 1);
4790 MEM_SET_IN_STRUCT_P (blk_object, 1);
4791 PUT_MODE (blk_object, BLKmode);
4792
4793 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4794 emit_move_insn (object, target);
4795
4796 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4797 align, total_size, alias_set);
4798
4799 /* Even though we aren't returning target, we need to
4800 give it the updated value. */
4801 emit_move_insn (target, object);
4802
4803 return blk_object;
4804 }
4805
4806 if (GET_CODE (target) == CONCAT)
4807 {
4808 /* We're storing into a struct containing a single __complex. */
4809
4810 if (bitpos != 0)
4811 abort ();
4812 return store_expr (exp, target, 0);
4813 }
4814
4815 /* If the structure is in a register or if the component
4816 is a bit field, we cannot use addressing to access it.
4817 Use bit-field techniques or SUBREG to store in it. */
4818
4819 if (mode == VOIDmode
4820 || (mode != BLKmode && ! direct_store[(int) mode]
4821 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4822 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4823 || GET_CODE (target) == REG
4824 || GET_CODE (target) == SUBREG
4825 /* If the field isn't aligned enough to store as an ordinary memref,
4826 store it as a bit field. */
4827 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4828 && (align < GET_MODE_ALIGNMENT (mode)
4829 || bitpos % GET_MODE_ALIGNMENT (mode)))
4830 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4831 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4832 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4833 /* If the RHS and field are a constant size and the size of the
4834 RHS isn't the same size as the bitfield, we must use bitfield
4835 operations. */
4836 || (bitsize >= 0
4837 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4838 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4839 {
4840 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4841
4842 /* If BITSIZE is narrower than the size of the type of EXP
4843 we will be narrowing TEMP. Normally, what's wanted are the
4844 low-order bits. However, if EXP's type is a record and this is a
4845 big-endian machine, we want the upper BITSIZE bits. */
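/* E.g. storing a 32-bit record value into an 8-bit field on a
   big-endian machine shifts TEMP right by 24 bits first, so that its
   most significant byte is what ends up being stored.  */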
4846 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4847 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4848 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4849 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4850 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4851 - bitsize),
4852 temp, 1);
4853
4854 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4855 MODE. */
4856 if (mode != VOIDmode && mode != BLKmode
4857 && mode != TYPE_MODE (TREE_TYPE (exp)))
4858 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4859
4860 /* If the modes of TARGET and TEMP are both BLKmode, both
4861 must be in memory and BITPOS must be aligned on a byte
4862 boundary. If so, we simply do a block copy. */
4863 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4864 {
4865 unsigned int exp_align = expr_align (exp);
4866
4867 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4868 || bitpos % BITS_PER_UNIT != 0)
4869 abort ();
4870
4871 target = change_address (target, VOIDmode,
4872 plus_constant (XEXP (target, 0),
4873 bitpos / BITS_PER_UNIT));
4874
4875 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4876 align = MIN (exp_align, align);
4877
4878 /* Find an alignment that is consistent with the bit position. */
4879 while ((bitpos % align) != 0)
4880 align >>= 1;
4881
4882 emit_block_move (target, temp,
4883 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4884 / BITS_PER_UNIT),
4885 align);
4886
4887 return value_mode == VOIDmode ? const0_rtx : target;
4888 }
4889
4890 /* Store the value in the bitfield. */
4891 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4892 if (value_mode != VOIDmode)
4893 {
4894 /* The caller wants an rtx for the value. */
4895 /* If possible, avoid refetching from the bitfield itself. */
4896 if (width_mask != 0
4897 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4898 {
4899 tree count;
4900 enum machine_mode tmode;
4901
4902 if (unsignedp)
4903 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4904 tmode = GET_MODE (temp);
4905 if (tmode == VOIDmode)
4906 tmode = value_mode;
4907 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4908 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4909 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4910 }
4911 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4912 NULL_RTX, value_mode, 0, align,
4913 total_size);
4914 }
4915 return const0_rtx;
4916 }
4917 else
4918 {
4919 rtx addr = XEXP (target, 0);
4920 rtx to_rtx;
4921
4922 /* If a value is wanted, it must be the lhs;
4923 so make the address stable for multiple use. */
4924
4925 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4926 && ! CONSTANT_ADDRESS_P (addr)
4927 /* A frame-pointer reference is already stable. */
4928 && ! (GET_CODE (addr) == PLUS
4929 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4930 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4931 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4932 addr = copy_to_reg (addr);
4933
4934 /* Now build a reference to just the desired component. */
4935
4936 to_rtx = copy_rtx (change_address (target, mode,
4937 plus_constant (addr,
4938 (bitpos
4939 / BITS_PER_UNIT))));
4940 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4941 MEM_ALIAS_SET (to_rtx) = alias_set;
4942
4943 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4944 }
4945 }
4946 \f
4947 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4948 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4949 ARRAY_REFs and find the ultimate containing object, which we return.
4950
4951 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4952 bit position, and *PUNSIGNEDP to the signedness of the field.
4953 If the position of the field is variable, we store a tree
4954 giving the variable offset (in units) in *POFFSET.
4955 This offset is in addition to the bit position.
4956 If the position is not variable, we store 0 in *POFFSET.
4957 We set *PALIGNMENT to the alignment of the address that will be
4958 computed. This is the alignment of the thing we return if *POFFSET
4959 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4960
4961 If any of the extraction expressions is volatile,
4962 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4963
4964 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4965 is a mode that can be used to access the field. In that case, *PBITSIZE
4966 is redundant.
4967
4968 If the field describes a variable-sized object, *PMODE is set to
4969 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4970 this case, but the address of the object can be found. */
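/* For example, given a reference such as S.A.B where B is a bit-field,
   this would typically return the object S, with *PBITPOS set to the
   combined bit offset of A and B within S, *PMODE set to VOIDmode, and
   *POFFSET set to zero since nothing in the reference is variable-sized.  */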
4971
4972 tree
4973 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4974 punsignedp, pvolatilep, palignment)
4975 tree exp;
4976 HOST_WIDE_INT *pbitsize;
4977 HOST_WIDE_INT *pbitpos;
4978 tree *poffset;
4979 enum machine_mode *pmode;
4980 int *punsignedp;
4981 int *pvolatilep;
4982 unsigned int *palignment;
4983 {
4984 tree size_tree = 0;
4985 enum machine_mode mode = VOIDmode;
4986 tree offset = size_zero_node;
4987 tree bit_offset = bitsize_zero_node;
4988 unsigned int alignment = BIGGEST_ALIGNMENT;
4989 tree tem;
4990
4991 /* First get the mode, signedness, and size. We do this from just the
4992 outermost expression. */
4993 if (TREE_CODE (exp) == COMPONENT_REF)
4994 {
4995 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4996 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4997 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4998
4999 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5000 }
5001 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5002 {
5003 size_tree = TREE_OPERAND (exp, 1);
5004 *punsignedp = TREE_UNSIGNED (exp);
5005 }
5006 else
5007 {
5008 mode = TYPE_MODE (TREE_TYPE (exp));
5009 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5010
5011 if (mode == BLKmode)
5012 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5013 else
5014 *pbitsize = GET_MODE_BITSIZE (mode);
5015 }
5016
5017 if (size_tree != 0)
5018 {
5019 if (! host_integerp (size_tree, 1))
5020 mode = BLKmode, *pbitsize = -1;
5021 else
5022 *pbitsize = tree_low_cst (size_tree, 1);
5023 }
5024
5025 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5026 and find the ultimate containing object. */
5027 while (1)
5028 {
5029 if (TREE_CODE (exp) == BIT_FIELD_REF)
5030 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5031 else if (TREE_CODE (exp) == COMPONENT_REF)
5032 {
5033 tree field = TREE_OPERAND (exp, 1);
5034 tree this_offset = DECL_FIELD_OFFSET (field);
5035
5036 /* If this field hasn't been filled in yet, don't go
5037 past it. This should only happen when folding expressions
5038 made during type construction. */
5039 if (this_offset == 0)
5040 break;
5041 else if (! TREE_CONSTANT (this_offset)
5042 && contains_placeholder_p (this_offset))
5043 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5044
5045 offset = size_binop (PLUS_EXPR, offset, this_offset);
5046 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5047 DECL_FIELD_BIT_OFFSET (field));
5048
5049 if (! host_integerp (offset, 0))
5050 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5051 }
5052
5053 else if (TREE_CODE (exp) == ARRAY_REF)
5054 {
5055 tree index = TREE_OPERAND (exp, 1);
5056 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5057 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5058 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5059
5060 /* We assume all arrays have sizes that are a multiple of a byte.
5061 First subtract the lower bound, if any, in the type of the
5062 index, then convert to sizetype and multiply by the size of the
5063 array element. */
5064 if (low_bound != 0 && ! integer_zerop (low_bound))
5065 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5066 index, low_bound));
5067
5068 /* If the index has a self-referential type, pass it to a
5069 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5070 component to one. */
5071 if (! TREE_CONSTANT (index)
5072 && contains_placeholder_p (index))
5073 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5074 if (! TREE_CONSTANT (unit_size)
5075 && contains_placeholder_p (unit_size))
5076 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5077 TREE_OPERAND (exp, 0));
5078
5079 offset = size_binop (PLUS_EXPR, offset,
5080 size_binop (MULT_EXPR,
5081 convert (sizetype, index),
5082 unit_size));
5083 }
5084
5085 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5086 && ! ((TREE_CODE (exp) == NOP_EXPR
5087 || TREE_CODE (exp) == CONVERT_EXPR)
5088 && (TYPE_MODE (TREE_TYPE (exp))
5089 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5090 break;
5091
5092 /* If any reference in the chain is volatile, the effect is volatile. */
5093 if (TREE_THIS_VOLATILE (exp))
5094 *pvolatilep = 1;
5095
5096 /* If the offset is non-constant already, then we can't assume any
5097 alignment more than the alignment here. */
5098 if (! TREE_CONSTANT (offset))
5099 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5100
5101 exp = TREE_OPERAND (exp, 0);
5102 }
5103
5104 if (DECL_P (exp))
5105 alignment = MIN (alignment, DECL_ALIGN (exp));
5106 else if (TREE_TYPE (exp) != 0)
5107 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5108
5109 /* If OFFSET is constant, see if we can return the whole thing as a
5110 constant bit position. Otherwise, split it up. */
5111 if (host_integerp (offset, 0)
5112 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5113 bitsize_unit_node))
5114 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5115 && host_integerp (tem, 0))
5116 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5117 else
5118 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5119
5120 *pmode = mode;
5121 *palignment = alignment;
5122 return exp;
5123 }
5124
5125 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5126
5127 static enum memory_use_mode
5128 get_memory_usage_from_modifier (modifier)
5129 enum expand_modifier modifier;
5130 {
5131 switch (modifier)
5132 {
5133 case EXPAND_NORMAL:
5134 case EXPAND_SUM:
5135 return MEMORY_USE_RO;
5136 break;
5137 case EXPAND_MEMORY_USE_WO:
5138 return MEMORY_USE_WO;
5139 break;
5140 case EXPAND_MEMORY_USE_RW:
5141 return MEMORY_USE_RW;
5142 break;
5143 case EXPAND_MEMORY_USE_DONT:
5144 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5145 MEMORY_USE_DONT, because they are modifiers to a call of
5146 expand_expr in the ADDR_EXPR case of expand_expr. */
5147 case EXPAND_CONST_ADDRESS:
5148 case EXPAND_INITIALIZER:
5149 return MEMORY_USE_DONT;
5150 case EXPAND_MEMORY_USE_BAD:
5151 default:
5152 abort ();
5153 }
5154 }
5155 \f
5156 /* Given an rtx VALUE that may contain additions and multiplications,
5157 return an equivalent value that just refers to a register or memory.
5158 This is done by generating instructions to perform the arithmetic
5159 and returning a pseudo-register containing the value.
5160
5161 The returned value may be a REG, SUBREG, MEM or constant. */
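/* For instance, given something like (plus:SI (reg:SI 60) (const_int 4)),
   this would emit an add instruction and return a pseudo register holding
   the sum, so that the caller sees a simple register or constant operand.  */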
5162
5163 rtx
5164 force_operand (value, target)
5165 rtx value, target;
5166 {
5167 register optab binoptab = 0;
5168 /* Use a temporary to force order of execution of calls to
5169 `force_operand'. */
5170 rtx tmp;
5171 register rtx op2;
5172 /* Use subtarget as the target for operand 0 of a binary operation. */
5173 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5174
5175 /* Check for a PIC address load. */
5176 if (flag_pic
5177 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5178 && XEXP (value, 0) == pic_offset_table_rtx
5179 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5180 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5181 || GET_CODE (XEXP (value, 1)) == CONST))
5182 {
5183 if (!subtarget)
5184 subtarget = gen_reg_rtx (GET_MODE (value));
5185 emit_move_insn (subtarget, value);
5186 return subtarget;
5187 }
5188
5189 if (GET_CODE (value) == PLUS)
5190 binoptab = add_optab;
5191 else if (GET_CODE (value) == MINUS)
5192 binoptab = sub_optab;
5193 else if (GET_CODE (value) == MULT)
5194 {
5195 op2 = XEXP (value, 1);
5196 if (!CONSTANT_P (op2)
5197 && !(GET_CODE (op2) == REG && op2 != subtarget))
5198 subtarget = 0;
5199 tmp = force_operand (XEXP (value, 0), subtarget);
5200 return expand_mult (GET_MODE (value), tmp,
5201 force_operand (op2, NULL_RTX),
5202 target, 0);
5203 }
5204
5205 if (binoptab)
5206 {
5207 op2 = XEXP (value, 1);
5208 if (!CONSTANT_P (op2)
5209 && !(GET_CODE (op2) == REG && op2 != subtarget))
5210 subtarget = 0;
5211 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5212 {
5213 binoptab = add_optab;
5214 op2 = negate_rtx (GET_MODE (value), op2);
5215 }
5216
5217 /* Check for an addition with OP2 a constant integer and our first
5218 operand a PLUS of a virtual register and something else. In that
5219 case, we want to emit the sum of the virtual register and the
5220 constant first and then add the other value. This allows virtual
5221 register instantiation to simply modify the constant rather than
5222 creating another one around this addition. */
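/* E.g. for (plus (plus virtual-stack-vars (reg 70)) (const_int 12))
   we first form virtual-stack-vars + 12, which instantiation can later
   turn into a single frame-pointer offset, and only then add (reg 70).  */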
5223 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5224 && GET_CODE (XEXP (value, 0)) == PLUS
5225 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5226 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5227 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5228 {
5229 rtx temp = expand_binop (GET_MODE (value), binoptab,
5230 XEXP (XEXP (value, 0), 0), op2,
5231 subtarget, 0, OPTAB_LIB_WIDEN);
5232 return expand_binop (GET_MODE (value), binoptab, temp,
5233 force_operand (XEXP (XEXP (value, 0), 1), 0),
5234 target, 0, OPTAB_LIB_WIDEN);
5235 }
5236
5237 tmp = force_operand (XEXP (value, 0), subtarget);
5238 return expand_binop (GET_MODE (value), binoptab, tmp,
5239 force_operand (op2, NULL_RTX),
5240 target, 0, OPTAB_LIB_WIDEN);
5241 /* We give UNSIGNEDP = 0 to expand_binop
5242 because the only operations we are expanding here are signed ones. */
5243 }
5244 return value;
5245 }
5246 \f
5247 /* Subroutine of expand_expr:
5248 save the non-copied parts (LIST) of an expr (LHS), and return a list
5249 which can restore these values to their previous values,
5250 should something modify their storage. */
5251
5252 static tree
5253 save_noncopied_parts (lhs, list)
5254 tree lhs;
5255 tree list;
5256 {
5257 tree tail;
5258 tree parts = 0;
5259
5260 for (tail = list; tail; tail = TREE_CHAIN (tail))
5261 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5262 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5263 else
5264 {
5265 tree part = TREE_VALUE (tail);
5266 tree part_type = TREE_TYPE (part);
5267 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5268 rtx target = assign_temp (part_type, 0, 1, 1);
5269 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5270 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5271 parts = tree_cons (to_be_saved,
5272 build (RTL_EXPR, part_type, NULL_TREE,
5273 (tree) target),
5274 parts);
5275 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5276 }
5277 return parts;
5278 }
5279
5280 /* Subroutine of expand_expr:
5281 record the non-copied parts (LIST) of an expr (LHS), and return a list
5282 which specifies the initial values of these parts. */
5283
5284 static tree
5285 init_noncopied_parts (lhs, list)
5286 tree lhs;
5287 tree list;
5288 {
5289 tree tail;
5290 tree parts = 0;
5291
5292 for (tail = list; tail; tail = TREE_CHAIN (tail))
5293 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5294 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5295 else if (TREE_PURPOSE (tail))
5296 {
5297 tree part = TREE_VALUE (tail);
5298 tree part_type = TREE_TYPE (part);
5299 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5300 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5301 }
5302 return parts;
5303 }
5304
5305 /* Subroutine of expand_expr: return nonzero iff there is no way that
5306 EXP can reference X, which is being modified. TOP_P is nonzero if this
5307 call is going to be used to determine whether we need a temporary
5308 for EXP, as opposed to a recursive call to this function.
5309
5310 It is always safe for this routine to return zero since it merely
5311 searches for optimization opportunities. */
5312
5313 static int
5314 safe_from_p (x, exp, top_p)
5315 rtx x;
5316 tree exp;
5317 int top_p;
5318 {
5319 rtx exp_rtl = 0;
5320 int i, nops;
5321 static int save_expr_count;
5322 static int save_expr_size = 0;
5323 static tree *save_expr_rewritten;
5324 static tree save_expr_trees[256];
5325
5326 if (x == 0
5327 /* If EXP has varying size, we MUST use a target since we currently
5328 have no way of allocating temporaries of variable size
5329 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5330 So we assume here that something at a higher level has prevented a
5331 clash. This is somewhat bogus, but the best we can do. Only
5332 do this when X is BLKmode and when we are at the top level. */
5333 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5334 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5335 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5336 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5337 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5338 != INTEGER_CST)
5339 && GET_MODE (x) == BLKmode))
5340 return 1;
5341
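  /* At the top level, set up the bookkeeping used by the SAVE_EXPR case
     below: any SAVE_EXPR temporarily rewritten as an ERROR_MARK during the
     recursive scan is recorded in SAVE_EXPR_REWRITTEN and restored to a
     SAVE_EXPR here before we return.  */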
5342 if (top_p && save_expr_size == 0)
5343 {
5344 int rtn;
5345
5346 save_expr_count = 0;
5347 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5348 save_expr_rewritten = &save_expr_trees[0];
5349
5350 rtn = safe_from_p (x, exp, 1);
5351
5352 for (i = 0; i < save_expr_count; ++i)
5353 {
5354 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5355 abort ();
5356 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5357 }
5358
5359 save_expr_size = 0;
5360
5361 return rtn;
5362 }
5363
5364 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5365 find the underlying pseudo. */
5366 if (GET_CODE (x) == SUBREG)
5367 {
5368 x = SUBREG_REG (x);
5369 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5370 return 0;
5371 }
5372
5373 /* If X is a location in the outgoing argument area, it is always safe. */
5374 if (GET_CODE (x) == MEM
5375 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5376 || (GET_CODE (XEXP (x, 0)) == PLUS
5377 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5378 return 1;
5379
5380 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5381 {
5382 case 'd':
5383 exp_rtl = DECL_RTL (exp);
5384 break;
5385
5386 case 'c':
5387 return 1;
5388
5389 case 'x':
5390 if (TREE_CODE (exp) == TREE_LIST)
5391 return ((TREE_VALUE (exp) == 0
5392 || safe_from_p (x, TREE_VALUE (exp), 0))
5393 && (TREE_CHAIN (exp) == 0
5394 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5395 else if (TREE_CODE (exp) == ERROR_MARK)
5396 return 1; /* An already-visited SAVE_EXPR? */
5397 else
5398 return 0;
5399
5400 case '1':
5401 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5402
5403 case '2':
5404 case '<':
5405 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5406 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5407
5408 case 'e':
5409 case 'r':
5410 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5411 the expression. If it is set, we conflict iff we are that rtx or
5412 both are in memory. Otherwise, we check all operands of the
5413 expression recursively. */
5414
5415 switch (TREE_CODE (exp))
5416 {
5417 case ADDR_EXPR:
5418 return (staticp (TREE_OPERAND (exp, 0))
5419 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5420 || TREE_STATIC (exp));
5421
5422 case INDIRECT_REF:
5423 if (GET_CODE (x) == MEM)
5424 return 0;
5425 break;
5426
5427 case CALL_EXPR:
5428 exp_rtl = CALL_EXPR_RTL (exp);
5429 if (exp_rtl == 0)
5430 {
5431 /* Assume that the call will clobber all hard registers and
5432 all of memory. */
5433 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5434 || GET_CODE (x) == MEM)
5435 return 0;
5436 }
5437
5438 break;
5439
5440 case RTL_EXPR:
5441 /* If a sequence exists, we would have to scan every instruction
5442 in the sequence to see if it was safe. This is probably not
5443 worthwhile. */
5444 if (RTL_EXPR_SEQUENCE (exp))
5445 return 0;
5446
5447 exp_rtl = RTL_EXPR_RTL (exp);
5448 break;
5449
5450 case WITH_CLEANUP_EXPR:
5451 exp_rtl = RTL_EXPR_RTL (exp);
5452 break;
5453
5454 case CLEANUP_POINT_EXPR:
5455 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5456
5457 case SAVE_EXPR:
5458 exp_rtl = SAVE_EXPR_RTL (exp);
5459 if (exp_rtl)
5460 break;
5461
5462 /* This SAVE_EXPR might appear many times in the top-level
5463 safe_from_p() expression, and if it has a complex
5464 subexpression, examining it multiple times could result
5465 in a combinatorial explosion. E.g. on an Alpha
5466 running at 200MHz or faster, a Fortran test case compiled with
5467 optimization took about 28 minutes to compile -- even though
5468 it was only a few lines long, and the complicated line causing
5469 so much time to be spent in the earlier version of safe_from_p()
5470 had only 293 or so unique nodes.
5471
5472 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5473 where it is so we can turn it back in the top-level safe_from_p()
5474 when we're done. */
5475
5476 /* For now, don't bother re-sizing the array. */
5477 if (save_expr_count >= save_expr_size)
5478 return 0;
5479 save_expr_rewritten[save_expr_count++] = exp;
5480
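      /* While scanning each operand, temporarily mark EXP as an ERROR_MARK
	 so that a nested occurrence of this same SAVE_EXPR is treated as
	 already visited (see the 'x' class above) instead of being
	 rescanned.  */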
5481 nops = tree_code_length[(int) SAVE_EXPR];
5482 for (i = 0; i < nops; i++)
5483 {
5484 tree operand = TREE_OPERAND (exp, i);
5485 if (operand == NULL_TREE)
5486 continue;
5487 TREE_SET_CODE (exp, ERROR_MARK);
5488 if (!safe_from_p (x, operand, 0))
5489 return 0;
5490 TREE_SET_CODE (exp, SAVE_EXPR);
5491 }
5492 TREE_SET_CODE (exp, ERROR_MARK);
5493 return 1;
5494
5495 case BIND_EXPR:
5496 /* The only operand we look at is operand 1. The rest aren't
5497 part of the expression. */
5498 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5499
5500 case METHOD_CALL_EXPR:
5501 /* This takes a rtx argument, but shouldn't appear here. */
5502 abort ();
5503
5504 default:
5505 break;
5506 }
5507
5508 /* If we have an rtx, we do not need to scan our operands. */
5509 if (exp_rtl)
5510 break;
5511
5512 nops = tree_code_length[(int) TREE_CODE (exp)];
5513 for (i = 0; i < nops; i++)
5514 if (TREE_OPERAND (exp, i) != 0
5515 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5516 return 0;
5517 }
5518
5519 /* If we have an rtl, find any enclosed object. Then see if we conflict
5520 with it. */
5521 if (exp_rtl)
5522 {
5523 if (GET_CODE (exp_rtl) == SUBREG)
5524 {
5525 exp_rtl = SUBREG_REG (exp_rtl);
5526 if (GET_CODE (exp_rtl) == REG
5527 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5528 return 0;
5529 }
5530
5531 /* If the rtl is X, then it is not safe. Otherwise it is safe, unless
5532 both are in memory and EXP is not readonly. */
5533 return ! (rtx_equal_p (x, exp_rtl)
5534 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5535 && ! TREE_READONLY (exp)));
5536 }
5537
5538 /* If we reach here, it is safe. */
5539 return 1;
5540 }
5541
5542 /* Subroutine of expand_expr: return nonzero iff EXP is an
5543 expression whose type is statically determinable. */
5544
5545 static int
5546 fixed_type_p (exp)
5547 tree exp;
5548 {
5549 if (TREE_CODE (exp) == PARM_DECL
5550 || TREE_CODE (exp) == VAR_DECL
5551 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5552 || TREE_CODE (exp) == COMPONENT_REF
5553 || TREE_CODE (exp) == ARRAY_REF)
5554 return 1;
5555 return 0;
5556 }
5557
5558 /* Subroutine of expand_expr: return rtx if EXP is a
5559 variable or parameter; else return 0. */
5560
5561 static rtx
5562 var_rtx (exp)
5563 tree exp;
5564 {
5565 STRIP_NOPS (exp);
5566 switch (TREE_CODE (exp))
5567 {
5568 case PARM_DECL:
5569 case VAR_DECL:
5570 return DECL_RTL (exp);
5571 default:
5572 return 0;
5573 }
5574 }
5575
5576 #ifdef MAX_INTEGER_COMPUTATION_MODE
5577 void
5578 check_max_integer_computation_mode (exp)
5579 tree exp;
5580 {
5581 enum tree_code code;
5582 enum machine_mode mode;
5583
5584 /* Strip any NOPs that don't change the mode. */
5585 STRIP_NOPS (exp);
5586 code = TREE_CODE (exp);
5587
5588 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5589 if (code == NOP_EXPR
5590 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5591 return;
5592
5593 /* First check the type of the overall operation. We need only look at
5594 unary, binary and relational operations. */
5595 if (TREE_CODE_CLASS (code) == '1'
5596 || TREE_CODE_CLASS (code) == '2'
5597 || TREE_CODE_CLASS (code) == '<')
5598 {
5599 mode = TYPE_MODE (TREE_TYPE (exp));
5600 if (GET_MODE_CLASS (mode) == MODE_INT
5601 && mode > MAX_INTEGER_COMPUTATION_MODE)
5602 fatal ("unsupported wide integer operation");
5603 }
5604
5605 /* Check operand of a unary op. */
5606 if (TREE_CODE_CLASS (code) == '1')
5607 {
5608 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5609 if (GET_MODE_CLASS (mode) == MODE_INT
5610 && mode > MAX_INTEGER_COMPUTATION_MODE)
5611 fatal ("unsupported wide integer operation");
5612 }
5613
5614 /* Check operands of a binary/comparison op. */
5615 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5616 {
5617 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5618 if (GET_MODE_CLASS (mode) == MODE_INT
5619 && mode > MAX_INTEGER_COMPUTATION_MODE)
5620 fatal ("unsupported wide integer operation");
5621
5622 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5623 if (GET_MODE_CLASS (mode) == MODE_INT
5624 && mode > MAX_INTEGER_COMPUTATION_MODE)
5625 fatal ("unsupported wide integer operation");
5626 }
5627 }
5628 #endif
5629
5630 \f
5631 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5632 has any readonly fields. If any of the fields have types that
5633 contain readonly fields, return true as well. */
5634
5635 static int
5636 readonly_fields_p (type)
5637 tree type;
5638 {
5639 tree field;
5640
5641 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5642 if (TREE_CODE (field) == FIELD_DECL
5643 && (TREE_READONLY (field)
5644 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5645 && readonly_fields_p (TREE_TYPE (field)))))
5646 return 1;
5647
5648 return 0;
5649 }
5650 \f
5651 /* expand_expr: generate code for computing expression EXP.
5652 An rtx for the computed value is returned. The value is never null.
5653 In the case of a void EXP, const0_rtx is returned.
5654
5655 The value may be stored in TARGET if TARGET is nonzero.
5656 TARGET is just a suggestion; callers must assume that
5657 the rtx returned may not be the same as TARGET.
5658
5659 If TARGET is CONST0_RTX, it means that the value will be ignored.
5660
5661 If TMODE is not VOIDmode, it suggests generating the
5662 result in mode TMODE. But this is done only when convenient.
5663 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5664 TMODE is just a suggestion; callers must assume that
5665 the rtx returned may not have mode TMODE.
5666
5667 Note that TARGET may have neither TMODE nor MODE. In that case, it
5668 probably will not be used.
5669
5670 If MODIFIER is EXPAND_SUM then when EXP is an addition
5671 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5672 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5673 products as above, or REG or MEM, or constant.
5674 Ordinarily in such cases we would output mul or add instructions
5675 and then return a pseudo reg containing the sum.
5676
5677 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5678 it also marks a label as absolutely required (it can't be dead).
5679 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5680 This is used for outputting expressions used in initializers.
5681
5682 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5683 with a constant address even if that address is not normally legitimate.
5684 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
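/* For example (illustrative): with EXPAND_SUM, an address computation such as
   p + i*4 may come back as (plus (reg) (mult (reg) (const_int 4))), letting
   the caller fold it into an addressing mode rather than emitting the
   multiply and add here.  */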
5685
5686 rtx
5687 expand_expr (exp, target, tmode, modifier)
5688 register tree exp;
5689 rtx target;
5690 enum machine_mode tmode;
5691 enum expand_modifier modifier;
5692 {
5693 register rtx op0, op1, temp;
5694 tree type = TREE_TYPE (exp);
5695 int unsignedp = TREE_UNSIGNED (type);
5696 register enum machine_mode mode;
5697 register enum tree_code code = TREE_CODE (exp);
5698 optab this_optab;
5699 rtx subtarget, original_target;
5700 int ignore;
5701 tree context;
5702 /* Used by check-memory-usage to make modifier read only. */
5703 enum expand_modifier ro_modifier;
5704
5705 /* Handle ERROR_MARK before anybody tries to access its type. */
5706 if (TREE_CODE (exp) == ERROR_MARK)
5707 {
5708 op0 = CONST0_RTX (tmode);
5709 if (op0 != 0)
5710 return op0;
5711 return const0_rtx;
5712 }
5713
5714 mode = TYPE_MODE (type);
5715 /* Use subtarget as the target for operand 0 of a binary operation. */
5716 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5717 original_target = target;
5718 ignore = (target == const0_rtx
5719 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5720 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5721 || code == COND_EXPR)
5722 && TREE_CODE (type) == VOID_TYPE));
5723
5724 /* Make a read-only version of the modifier. */
5725 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5726 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5727 ro_modifier = modifier;
5728 else
5729 ro_modifier = EXPAND_NORMAL;
5730
5731 /* Don't use hard regs as subtargets, because the combiner
5732 can only handle pseudo regs. */
5733 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5734 subtarget = 0;
5735 /* Avoid subtargets inside loops,
5736 since they hide some invariant expressions. */
5737 if (preserve_subexpressions_p ())
5738 subtarget = 0;
5739
5740 /* If we are going to ignore this result, we need only do something
5741 if there is a side-effect somewhere in the expression. If there
5742 is, short-circuit the most common cases here. Note that we must
5743 not call expand_expr with anything but const0_rtx in case this
5744 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5745
5746 if (ignore)
5747 {
5748 if (! TREE_SIDE_EFFECTS (exp))
5749 return const0_rtx;
5750
5751 /* Ensure we reference a volatile object even if value is ignored, but
5752 don't do this if all we are doing is taking its address. */
5753 if (TREE_THIS_VOLATILE (exp)
5754 && TREE_CODE (exp) != FUNCTION_DECL
5755 && mode != VOIDmode && mode != BLKmode
5756 && modifier != EXPAND_CONST_ADDRESS)
5757 {
5758 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5759 if (GET_CODE (temp) == MEM)
5760 temp = copy_to_reg (temp);
5761 return const0_rtx;
5762 }
5763
5764 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5765 || code == INDIRECT_REF || code == BUFFER_REF)
5766 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5767 VOIDmode, ro_modifier);
5768 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5769 || code == ARRAY_REF)
5770 {
5771 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5772 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5773 return const0_rtx;
5774 }
5775 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5776 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5777 /* If the second operand has no side effects, just evaluate
5778 the first. */
5779 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5780 VOIDmode, ro_modifier);
5781 else if (code == BIT_FIELD_REF)
5782 {
5783 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5784 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5785 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5786 return const0_rtx;
5787 }
5788 ;
5789 target = 0;
5790 }
5791
5792 #ifdef MAX_INTEGER_COMPUTATION_MODE
5793 /* Only check stuff here if the mode we want is different from the mode
5794 of the expression; if it's the same, check_max_integer_computation_mode
5795 will handle it. Do we really need to check this stuff at all? */
5796
5797 if (target
5798 && GET_MODE (target) != mode
5799 && TREE_CODE (exp) != INTEGER_CST
5800 && TREE_CODE (exp) != PARM_DECL
5801 && TREE_CODE (exp) != ARRAY_REF
5802 && TREE_CODE (exp) != COMPONENT_REF
5803 && TREE_CODE (exp) != BIT_FIELD_REF
5804 && TREE_CODE (exp) != INDIRECT_REF
5805 && TREE_CODE (exp) != CALL_EXPR
5806 && TREE_CODE (exp) != VAR_DECL
5807 && TREE_CODE (exp) != RTL_EXPR)
5808 {
5809 enum machine_mode mode = GET_MODE (target);
5810
5811 if (GET_MODE_CLASS (mode) == MODE_INT
5812 && mode > MAX_INTEGER_COMPUTATION_MODE)
5813 fatal ("unsupported wide integer operation");
5814 }
5815
5816 if (tmode != mode
5817 && TREE_CODE (exp) != INTEGER_CST
5818 && TREE_CODE (exp) != PARM_DECL
5819 && TREE_CODE (exp) != ARRAY_REF
5820 && TREE_CODE (exp) != COMPONENT_REF
5821 && TREE_CODE (exp) != BIT_FIELD_REF
5822 && TREE_CODE (exp) != INDIRECT_REF
5823 && TREE_CODE (exp) != VAR_DECL
5824 && TREE_CODE (exp) != CALL_EXPR
5825 && TREE_CODE (exp) != RTL_EXPR
5826 && GET_MODE_CLASS (tmode) == MODE_INT
5827 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5828 fatal ("unsupported wide integer operation");
5829
5830 check_max_integer_computation_mode (exp);
5831 #endif
5832
5833 /* If will do cse, generate all results into pseudo registers
5834 since 1) that allows cse to find more things
5835 and 2) otherwise cse could produce an insn the machine
5836 cannot support. */
5837
5838 if (! cse_not_expected && mode != BLKmode && target
5839 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5840 target = subtarget;
5841
5842 switch (code)
5843 {
5844 case LABEL_DECL:
5845 {
5846 tree function = decl_function_context (exp);
5847 /* Handle using a label in a containing function. */
5848 if (function != current_function_decl
5849 && function != inline_function_decl && function != 0)
5850 {
5851 struct function *p = find_function_data (function);
5852 /* Allocate in the memory associated with the function
5853 that the label is in. */
5854 push_obstacks (p->function_obstack,
5855 p->function_maybepermanent_obstack);
5856
5857 p->expr->x_forced_labels
5858 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5859 p->expr->x_forced_labels);
5860 pop_obstacks ();
5861 }
5862 else
5863 {
5864 if (modifier == EXPAND_INITIALIZER)
5865 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5866 label_rtx (exp),
5867 forced_labels);
5868 }
5869
5870 temp = gen_rtx_MEM (FUNCTION_MODE,
5871 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5872 if (function != current_function_decl
5873 && function != inline_function_decl && function != 0)
5874 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5875 return temp;
5876 }
5877
5878 case PARM_DECL:
5879 if (DECL_RTL (exp) == 0)
5880 {
5881 error_with_decl (exp, "prior parameter's size depends on `%s'");
5882 return CONST0_RTX (mode);
5883 }
5884
5885 /* ... fall through ... */
5886
5887 case VAR_DECL:
5888 /* If a static var's type was incomplete when the decl was written,
5889 but the type is complete now, lay out the decl now. */
5890 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5891 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5892 {
5893 push_obstacks_nochange ();
5894 end_temporary_allocation ();
5895 layout_decl (exp, 0);
5896 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5897 pop_obstacks ();
5898 }
5899
5900 /* Although static-storage variables start off initialized, according to
5901 ANSI C, a memcpy could overwrite them with uninitialized values. So
5902 we check them too. This also lets us check for read-only variables
5903 accessed via a non-const declaration, in case it won't be detected
5904 any other way (e.g., in an embedded system or OS kernel without
5905 memory protection).
5906
5907 Aggregates are not checked here; they're handled elsewhere. */
5908 if (cfun && current_function_check_memory_usage
5909 && code == VAR_DECL
5910 && GET_CODE (DECL_RTL (exp)) == MEM
5911 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5912 {
5913 enum memory_use_mode memory_usage;
5914 memory_usage = get_memory_usage_from_modifier (modifier);
5915
5916 if (memory_usage != MEMORY_USE_DONT)
5917 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5918 XEXP (DECL_RTL (exp), 0), Pmode,
5919 GEN_INT (int_size_in_bytes (type)),
5920 TYPE_MODE (sizetype),
5921 GEN_INT (memory_usage),
5922 TYPE_MODE (integer_type_node));
5923 }
5924
5925 /* ... fall through ... */
5926
5927 case FUNCTION_DECL:
5928 case RESULT_DECL:
5929 if (DECL_RTL (exp) == 0)
5930 abort ();
5931
5932 /* Ensure the variable is marked as used even if it doesn't go through
5933 a parser. If it hasn't been used yet, write out an external
5934 definition. */
5935 if (! TREE_USED (exp))
5936 {
5937 assemble_external (exp);
5938 TREE_USED (exp) = 1;
5939 }
5940
5941 /* Show we haven't gotten RTL for this yet. */
5942 temp = 0;
5943
5944 /* Handle variables inherited from containing functions. */
5945 context = decl_function_context (exp);
5946
5947 /* We treat inline_function_decl as an alias for the current function
5948 because that is the inline function whose vars, types, etc.
5949 are being merged into the current function.
5950 See expand_inline_function. */
5951
5952 if (context != 0 && context != current_function_decl
5953 && context != inline_function_decl
5954 /* If var is static, we don't need a static chain to access it. */
5955 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5956 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5957 {
5958 rtx addr;
5959
5960 /* Mark as non-local and addressable. */
5961 DECL_NONLOCAL (exp) = 1;
5962 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5963 abort ();
5964 mark_addressable (exp);
5965 if (GET_CODE (DECL_RTL (exp)) != MEM)
5966 abort ();
5967 addr = XEXP (DECL_RTL (exp), 0);
5968 if (GET_CODE (addr) == MEM)
5969 addr = gen_rtx_MEM (Pmode,
5970 fix_lexical_addr (XEXP (addr, 0), exp));
5971 else
5972 addr = fix_lexical_addr (addr, exp);
5973 temp = change_address (DECL_RTL (exp), mode, addr);
5974 }
5975
5976 /* This is the case of an array whose size is to be determined
5977 from its initializer, while the initializer is still being parsed.
5978 See expand_decl. */
5979
5980 else if (GET_CODE (DECL_RTL (exp)) == MEM
5981 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5982 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5983 XEXP (DECL_RTL (exp), 0));
5984
5985 /* If DECL_RTL is memory, we are in the normal case and either
5986 the address is not valid or it is not a register and -fforce-addr
5987 is specified, get the address into a register. */
5988
5989 else if (GET_CODE (DECL_RTL (exp)) == MEM
5990 && modifier != EXPAND_CONST_ADDRESS
5991 && modifier != EXPAND_SUM
5992 && modifier != EXPAND_INITIALIZER
5993 && (! memory_address_p (DECL_MODE (exp),
5994 XEXP (DECL_RTL (exp), 0))
5995 || (flag_force_addr
5996 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5997 temp = change_address (DECL_RTL (exp), VOIDmode,
5998 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5999
6000 /* If we got something, return it. But first, set the alignment
6001 if the address is a register. */
6002 if (temp != 0)
6003 {
6004 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6005 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6006
6007 return temp;
6008 }
6009
6010 /* If the mode of DECL_RTL does not match that of the decl, it
6011 must be a promoted value. We return a SUBREG of the wanted mode,
6012 but mark it so that we know that it was already extended. */
6013
6014 if (GET_CODE (DECL_RTL (exp)) == REG
6015 && GET_MODE (DECL_RTL (exp)) != mode)
6016 {
6017 /* Get the signedness used for this variable. Ensure we get the
6018 same mode we got when the variable was declared. */
6019 if (GET_MODE (DECL_RTL (exp))
6020 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6021 abort ();
6022
6023 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6024 SUBREG_PROMOTED_VAR_P (temp) = 1;
6025 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6026 return temp;
6027 }
6028
6029 return DECL_RTL (exp);
6030
6031 case INTEGER_CST:
6032 return immed_double_const (TREE_INT_CST_LOW (exp),
6033 TREE_INT_CST_HIGH (exp), mode);
6034
6035 case CONST_DECL:
6036 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6037 EXPAND_MEMORY_USE_BAD);
6038
6039 case REAL_CST:
6040 /* If optimized, generate immediate CONST_DOUBLE
6041 which will be turned into memory by reload if necessary.
6042
6043 We used to force a register so that loop.c could see it. But
6044 this does not allow gen_* patterns to perform optimizations with
6045 the constants. It also produces two insns in cases like "x = 1.0;".
6046 On most machines, floating-point constants are not permitted in
6047 many insns, so we'd end up copying it to a register in any case.
6048
6049 Now, we do the copying in expand_binop, if appropriate. */
6050 return immed_real_const (exp);
6051
6052 case COMPLEX_CST:
6053 case STRING_CST:
6054 if (! TREE_CST_RTL (exp))
6055 output_constant_def (exp);
6056
6057 /* TREE_CST_RTL probably contains a constant address.
6058 On RISC machines where a constant address isn't valid,
6059 make some insns to get that address into a register. */
6060 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6061 && modifier != EXPAND_CONST_ADDRESS
6062 && modifier != EXPAND_INITIALIZER
6063 && modifier != EXPAND_SUM
6064 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6065 || (flag_force_addr
6066 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6067 return change_address (TREE_CST_RTL (exp), VOIDmode,
6068 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6069 return TREE_CST_RTL (exp);
6070
6071 case EXPR_WITH_FILE_LOCATION:
6072 {
6073 rtx to_return;
6074 char *saved_input_filename = input_filename;
6075 int saved_lineno = lineno;
6076 input_filename = EXPR_WFL_FILENAME (exp);
6077 lineno = EXPR_WFL_LINENO (exp);
6078 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6079 emit_line_note (input_filename, lineno);
6080 /* Possibly avoid switching back and forth here. */
6081 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6082 input_filename = saved_input_filename;
6083 lineno = saved_lineno;
6084 return to_return;
6085 }
6086
6087 case SAVE_EXPR:
6088 context = decl_function_context (exp);
6089
6090 /* If this SAVE_EXPR was at global context, assume we are an
6091 initialization function and move it into our context. */
6092 if (context == 0)
6093 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6094
6095 /* We treat inline_function_decl as an alias for the current function
6096 because that is the inline function whose vars, types, etc.
6097 are being merged into the current function.
6098 See expand_inline_function. */
6099 if (context == current_function_decl || context == inline_function_decl)
6100 context = 0;
6101
6102 /* If this is non-local, handle it. */
6103 if (context)
6104 {
6105 /* The following call just exists to abort if the context is
6106 not of a containing function. */
6107 find_function_data (context);
6108
6109 temp = SAVE_EXPR_RTL (exp);
6110 if (temp && GET_CODE (temp) == REG)
6111 {
6112 put_var_into_stack (exp);
6113 temp = SAVE_EXPR_RTL (exp);
6114 }
6115 if (temp == 0 || GET_CODE (temp) != MEM)
6116 abort ();
6117 return change_address (temp, mode,
6118 fix_lexical_addr (XEXP (temp, 0), exp));
6119 }
6120 if (SAVE_EXPR_RTL (exp) == 0)
6121 {
6122 if (mode == VOIDmode)
6123 temp = const0_rtx;
6124 else
6125 temp = assign_temp (type, 3, 0, 0);
6126
6127 SAVE_EXPR_RTL (exp) = temp;
6128 if (!optimize && GET_CODE (temp) == REG)
6129 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6130 save_expr_regs);
6131
6132 /* If the mode of TEMP does not match that of the expression, it
6133 must be a promoted value. We pass store_expr a SUBREG of the
6134 wanted mode but mark it so that we know that it was already
6135 extended. Note that `unsignedp' was modified above in
6136 this case. */
6137
6138 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6139 {
6140 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6141 SUBREG_PROMOTED_VAR_P (temp) = 1;
6142 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6143 }
6144
6145 if (temp == const0_rtx)
6146 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6147 EXPAND_MEMORY_USE_BAD);
6148 else
6149 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6150
6151 TREE_USED (exp) = 1;
6152 }
6153
6154 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6155 must be a promoted value. We return a SUBREG of the wanted mode,
6156 but mark it so that we know that it was already extended. */
6157
6158 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6159 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6160 {
6161 /* Compute the signedness and make the proper SUBREG. */
6162 promote_mode (type, mode, &unsignedp, 0);
6163 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6164 SUBREG_PROMOTED_VAR_P (temp) = 1;
6165 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6166 return temp;
6167 }
6168
6169 return SAVE_EXPR_RTL (exp);
6170
6171 case UNSAVE_EXPR:
6172 {
6173 rtx temp;
6174 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6175 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6176 return temp;
6177 }
6178
6179 case PLACEHOLDER_EXPR:
6180 {
6181 tree placeholder_expr;
6182
6183 /* If there is an object at the head of the placeholder list,
6184 see if some object in it is of type TYPE or a pointer to it. For
6185 further information, see tree.def. */
6186 for (placeholder_expr = placeholder_list;
6187 placeholder_expr != 0;
6188 placeholder_expr = TREE_CHAIN (placeholder_expr))
6189 {
6190 tree need_type = TYPE_MAIN_VARIANT (type);
6191 tree object = 0;
6192 tree old_list = placeholder_list;
6193 tree elt;
6194
6195 /* Find the outermost reference that is of the type we want.
6196 If none, see if any object has a type that is a pointer to
6197 the type we want. */
6198 for (elt = TREE_PURPOSE (placeholder_expr);
6199 elt != 0 && object == 0;
6200 elt
6201 = ((TREE_CODE (elt) == COMPOUND_EXPR
6202 || TREE_CODE (elt) == COND_EXPR)
6203 ? TREE_OPERAND (elt, 1)
6204 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6205 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6206 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6207 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6208 ? TREE_OPERAND (elt, 0) : 0))
6209 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6210 object = elt;
6211
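	  /* Second pass (see the comment above): accept an object whose type
	     is a pointer to the type we need; it is dereferenced below via
	     an INDIRECT_REF.  */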
6212 for (elt = TREE_PURPOSE (placeholder_expr);
6213 elt != 0 && object == 0;
6214 elt
6215 = ((TREE_CODE (elt) == COMPOUND_EXPR
6216 || TREE_CODE (elt) == COND_EXPR)
6217 ? TREE_OPERAND (elt, 1)
6218 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6219 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6220 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6221 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6222 ? TREE_OPERAND (elt, 0) : 0))
6223 if (POINTER_TYPE_P (TREE_TYPE (elt))
6224 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6225 == need_type))
6226 object = build1 (INDIRECT_REF, need_type, elt);
6227
6228 if (object != 0)
6229 {
6230 /* Expand this object, skipping the list entries before
6231 it was found, in case it is also a PLACEHOLDER_EXPR.
6232 In that case, we want to translate it using subsequent
6233 entries. */
6234 placeholder_list = TREE_CHAIN (placeholder_expr);
6235 temp = expand_expr (object, original_target, tmode,
6236 ro_modifier);
6237 placeholder_list = old_list;
6238 return temp;
6239 }
6240 }
6241 }
6242
6243 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6244 abort ();
6245
6246 case WITH_RECORD_EXPR:
6247 /* Put the object on the placeholder list, expand our first operand,
6248 and pop the list. */
6249 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6250 placeholder_list);
6251 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6252 tmode, ro_modifier);
6253 placeholder_list = TREE_CHAIN (placeholder_list);
6254 return target;
6255
6256 case GOTO_EXPR:
6257 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6258 expand_goto (TREE_OPERAND (exp, 0));
6259 else
6260 expand_computed_goto (TREE_OPERAND (exp, 0));
6261 return const0_rtx;
6262
6263 case EXIT_EXPR:
6264 expand_exit_loop_if_false (NULL_PTR,
6265 invert_truthvalue (TREE_OPERAND (exp, 0)));
6266 return const0_rtx;
6267
6268 case LABELED_BLOCK_EXPR:
6269 if (LABELED_BLOCK_BODY (exp))
6270 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6271 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6272 return const0_rtx;
6273
6274 case EXIT_BLOCK_EXPR:
6275 if (EXIT_BLOCK_RETURN (exp))
6276 sorry ("returned value in block_exit_expr");
6277 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6278 return const0_rtx;
6279
6280 case LOOP_EXPR:
6281 push_temp_slots ();
6282 expand_start_loop (1);
6283 expand_expr_stmt (TREE_OPERAND (exp, 0));
6284 expand_end_loop ();
6285 pop_temp_slots ();
6286
6287 return const0_rtx;
6288
6289 case BIND_EXPR:
6290 {
6291 tree vars = TREE_OPERAND (exp, 0);
6292 int vars_need_expansion = 0;
6293
6294 /* Need to open a binding contour here because
6295 if there are any cleanups they must be contained here. */
6296 expand_start_bindings (2);
6297
6298 /* Mark the corresponding BLOCK for output in its proper place. */
6299 if (TREE_OPERAND (exp, 2) != 0
6300 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6301 insert_block (TREE_OPERAND (exp, 2));
6302
6303 /* If VARS have not yet been expanded, expand them now. */
6304 while (vars)
6305 {
6306 if (DECL_RTL (vars) == 0)
6307 {
6308 vars_need_expansion = 1;
6309 expand_decl (vars);
6310 }
6311 expand_decl_init (vars);
6312 vars = TREE_CHAIN (vars);
6313 }
6314
6315 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6316
6317 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6318
6319 return temp;
6320 }
6321
6322 case RTL_EXPR:
6323 if (RTL_EXPR_SEQUENCE (exp))
6324 {
6325 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6326 abort ();
6327 emit_insns (RTL_EXPR_SEQUENCE (exp));
6328 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6329 }
6330 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6331 free_temps_for_rtl_expr (exp);
6332 return RTL_EXPR_RTL (exp);
6333
6334 case CONSTRUCTOR:
6335 /* If we don't need the result, just ensure we evaluate any
6336 subexpressions. */
6337 if (ignore)
6338 {
6339 tree elt;
6340 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6341 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6342 EXPAND_MEMORY_USE_BAD);
6343 return const0_rtx;
6344 }
6345
6346 /* All elts simple constants => refer to a constant in memory. But
6347 if this is a non-BLKmode mode, let it store a field at a time
6348 since that should make a CONST_INT or CONST_DOUBLE when we
6349 fold. Likewise, if we have a target we can use, it is best to
6350 store directly into the target unless the type is large enough
6351 that memcpy will be used. If we are making an initializer and
6352 all operands are constant, put it in memory as well. */
6353 else if ((TREE_STATIC (exp)
6354 && ((mode == BLKmode
6355 && ! (target != 0 && safe_from_p (target, exp, 1)))
6356 || TREE_ADDRESSABLE (exp)
6357 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6358 && (! MOVE_BY_PIECES_P
6359 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6360 TYPE_ALIGN (type)))
6361 && ! mostly_zeros_p (exp))))
6362 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6363 {
6364 rtx constructor = output_constant_def (exp);
6365
6366 if (modifier != EXPAND_CONST_ADDRESS
6367 && modifier != EXPAND_INITIALIZER
6368 && modifier != EXPAND_SUM
6369 && (! memory_address_p (GET_MODE (constructor),
6370 XEXP (constructor, 0))
6371 || (flag_force_addr
6372 && GET_CODE (XEXP (constructor, 0)) != REG)))
6373 constructor = change_address (constructor, VOIDmode,
6374 XEXP (constructor, 0));
6375 return constructor;
6376 }
6377
6378 else
6379 {
6380 /* Handle calls that pass values in multiple non-contiguous
6381 locations. The Irix 6 ABI has examples of this. */
6382 if (target == 0 || ! safe_from_p (target, exp, 1)
6383 || GET_CODE (target) == PARALLEL)
6384 {
6385 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6386 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6387 else
6388 target = assign_temp (type, 0, 1, 1);
6389 }
6390
6391 if (TREE_READONLY (exp))
6392 {
6393 if (GET_CODE (target) == MEM)
6394 target = copy_rtx (target);
6395
6396 RTX_UNCHANGING_P (target) = 1;
6397 }
6398
6399 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6400 int_size_in_bytes (TREE_TYPE (exp)));
6401 return target;
6402 }
6403
6404 case INDIRECT_REF:
6405 {
6406 tree exp1 = TREE_OPERAND (exp, 0);
6407 tree exp2;
6408 tree index;
6409 tree string = string_constant (exp1, &index);
6410
6411 /* Try to optimize reads from const strings. */
6412 if (string
6413 && TREE_CODE (string) == STRING_CST
6414 && TREE_CODE (index) == INTEGER_CST
6415 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6416 && GET_MODE_CLASS (mode) == MODE_INT
6417 && GET_MODE_SIZE (mode) == 1
6418 && modifier != EXPAND_MEMORY_USE_WO)
6419 return
6420 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
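	/* For example (illustrative): *("abc" + 1) is folded above into the
	   constant 'b' without any memory reference being emitted.  */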
6421
6422 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6423 op0 = memory_address (mode, op0);
6424
6425 if (cfun && current_function_check_memory_usage
6426 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6427 {
6428 enum memory_use_mode memory_usage;
6429 memory_usage = get_memory_usage_from_modifier (modifier);
6430
6431 if (memory_usage != MEMORY_USE_DONT)
6432 {
6433 in_check_memory_usage = 1;
6434 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6435 op0, Pmode,
6436 GEN_INT (int_size_in_bytes (type)),
6437 TYPE_MODE (sizetype),
6438 GEN_INT (memory_usage),
6439 TYPE_MODE (integer_type_node));
6440 in_check_memory_usage = 0;
6441 }
6442 }
6443
6444 temp = gen_rtx_MEM (mode, op0);
6445 /* If address was computed by addition,
6446 mark this as an element of an aggregate. */
6447 if (TREE_CODE (exp1) == PLUS_EXPR
6448 || (TREE_CODE (exp1) == SAVE_EXPR
6449 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6450 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6451 || (TREE_CODE (exp1) == ADDR_EXPR
6452 && (exp2 = TREE_OPERAND (exp1, 0))
6453 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6454 MEM_SET_IN_STRUCT_P (temp, 1);
6455
6456 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6457 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6458
6459 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6460 here, because, in C and C++, the fact that a location is accessed
6461 through a pointer to const does not mean that the value there can
6462 never change. Languages where it can never change should
6463 also set TREE_STATIC. */
6464 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6465
6466 /* If we are writing to this object and its type is a record with
6467 readonly fields, we must mark it as readonly so it will
6468 conflict with readonly references to those fields. */
6469 if (modifier == EXPAND_MEMORY_USE_WO
6470 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6471 RTX_UNCHANGING_P (temp) = 1;
6472
6473 return temp;
6474 }
6475
6476 case ARRAY_REF:
6477 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6478 abort ();
6479
6480 {
6481 tree array = TREE_OPERAND (exp, 0);
6482 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6483 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6484 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6485 HOST_WIDE_INT i;
6486
6487 /* Optimize the special-case of a zero lower bound.
6488
6489 We convert the low_bound to sizetype to avoid some problems
6490 with constant folding. (E.g. suppose the lower bound is 1,
6491 and its mode is QI. Without the conversion, (ARRAY
6492 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6493 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6494
6495 if (! integer_zerop (low_bound))
6496 index = size_diffop (index, convert (sizetype, low_bound));
6497
6498 /* Fold an expression like: "foo"[2].
6499 This is not done in fold so it won't happen inside &.
6500 Don't fold if this is for wide characters since it's too
6501 difficult to do correctly and this is a very rare case. */
6502
6503 if (TREE_CODE (array) == STRING_CST
6504 && TREE_CODE (index) == INTEGER_CST
6505 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6506 && GET_MODE_CLASS (mode) == MODE_INT
6507 && GET_MODE_SIZE (mode) == 1)
6508 return
6509 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6510
6511 /* If this is a constant index into a constant array,
6512 just get the value from the array. Handle both the cases when
6513 we have an explicit constructor and when our operand is a variable
6514 that was declared const. */
6515
6516 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6517 && TREE_CODE (index) == INTEGER_CST
6518 && 0 > compare_tree_int (index,
6519 list_length (CONSTRUCTOR_ELTS
6520 (TREE_OPERAND (exp, 0)))))
6521 {
6522 tree elem;
6523
6524 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6525 i = TREE_INT_CST_LOW (index);
6526 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6527 ;
6528
6529 if (elem)
6530 return expand_expr (fold (TREE_VALUE (elem)), target,
6531 tmode, ro_modifier);
6532 }
6533
6534 else if (optimize >= 1
6535 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6536 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6537 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6538 {
6539 if (TREE_CODE (index) == INTEGER_CST)
6540 {
6541 tree init = DECL_INITIAL (array);
6542
6543 if (TREE_CODE (init) == CONSTRUCTOR)
6544 {
6545 tree elem;
6546
6547 for (elem = CONSTRUCTOR_ELTS (init);
6548 (elem
6549 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6550 elem = TREE_CHAIN (elem))
6551 ;
6552
6553 if (elem)
6554 return expand_expr (fold (TREE_VALUE (elem)), target,
6555 tmode, ro_modifier);
6556 }
6557 else if (TREE_CODE (init) == STRING_CST
6558 && 0 > compare_tree_int (index,
6559 TREE_STRING_LENGTH (init)))
6560 return (GEN_INT
6561 (TREE_STRING_POINTER
6562 (init)[TREE_INT_CST_LOW (index)]));
6563 }
6564 }
6565 }
6566
6567 /* ... fall through ... */
6568
6569 case COMPONENT_REF:
6570 case BIT_FIELD_REF:
6571 /* If the operand is a CONSTRUCTOR, we can just extract the
6572 appropriate field if it is present. Don't do this if we have
6573 already written the data since we want to refer to that copy
6574 and varasm.c assumes that's what we'll do. */
6575 if (code != ARRAY_REF
6576 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6577 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6578 {
6579 tree elt;
6580
6581 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6582 elt = TREE_CHAIN (elt))
6583 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6584 /* We can normally use the value of the field in the
6585 CONSTRUCTOR. However, if this is a bitfield in
6586 an integral mode that we can fit in a HOST_WIDE_INT,
6587 we must mask only the number of bits in the bitfield,
6588 since this is done implicitly by the constructor. If
6589 the bitfield does not meet either of those conditions,
6590 we can't do this optimization. */
6591 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6592 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6593 == MODE_INT)
6594 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6595 <= HOST_BITS_PER_WIDE_INT))))
6596 {
6597 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6598 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6599 {
6600 HOST_WIDE_INT bitsize
6601 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6602
6603 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6604 {
6605 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6606 op0 = expand_and (op0, op1, target);
6607 }
6608 else
6609 {
6610 enum machine_mode imode
6611 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6612 tree count
6613 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6614 0);
6615
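		      /* Sign-extend the BITSIZE-bit value: shift it to the
			 top of IMODE and arithmetically shift it back down,
			 so the upper bits become copies of the sign bit.  */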
6616 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6617 target, 0);
6618 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6619 target, 0);
6620 }
6621 }
6622
6623 return op0;
6624 }
6625 }
6626
6627 {
6628 enum machine_mode mode1;
6629 HOST_WIDE_INT bitsize, bitpos;
6630 tree offset;
6631 int volatilep = 0;
6632 unsigned int alignment;
6633 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6634 &mode1, &unsignedp, &volatilep,
6635 &alignment);
6636
6637 /* If we got back the original object, something is wrong. Perhaps
6638 we are evaluating an expression too early. In any event, don't
6639 infinitely recurse. */
6640 if (tem == exp)
6641 abort ();
6642
6643 /* If TEM's type is a union of variable size, pass TARGET to the inner
6644 computation, since it will need a temporary and TARGET is known
6645 to be adequate. This occurs in unchecked conversion in Ada. */
6646
6647 op0 = expand_expr (tem,
6648 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6649 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6650 != INTEGER_CST)
6651 ? target : NULL_RTX),
6652 VOIDmode,
6653 (modifier == EXPAND_INITIALIZER
6654 || modifier == EXPAND_CONST_ADDRESS)
6655 ? modifier : EXPAND_NORMAL);
6656
6657 /* If this is a constant, put it into a register if it is a
6658 legitimate constant and OFFSET is 0; otherwise put it in memory. */
6659 if (CONSTANT_P (op0))
6660 {
6661 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6662 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6663 && offset == 0)
6664 op0 = force_reg (mode, op0);
6665 else
6666 op0 = validize_mem (force_const_mem (mode, op0));
6667 }
6668
6669 if (offset != 0)
6670 {
6671 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6672
6673 /* If this object is in memory, put it into a register.
6674 This case can't occur in C, but can in Ada if we have
6675 unchecked conversion of an expression from a scalar type to
6676 an array or record type. */
6677 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6678 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6679 {
6680 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6681
6682 mark_temp_addr_taken (memloc);
6683 emit_move_insn (memloc, op0);
6684 op0 = memloc;
6685 }
6686
6687 if (GET_CODE (op0) != MEM)
6688 abort ();
6689
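	    /* The offset must be a ptr_mode value before it can be added to
	       the address; convert it if expansion produced another mode.  */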
6690 if (GET_MODE (offset_rtx) != ptr_mode)
6691 {
6692 #ifdef POINTERS_EXTEND_UNSIGNED
6693 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6694 #else
6695 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6696 #endif
6697 }
6698
6699 /* A constant address in OP0 can have VOIDmode; we must not call
6700 force_reg in that case, so avoid it here. */
6701 if (GET_CODE (op0) == MEM
6702 && GET_MODE (op0) == BLKmode
6703 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6704 && bitsize != 0
6705 && (bitpos % bitsize) == 0
6706 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6707 && alignment == GET_MODE_ALIGNMENT (mode1))
6708 {
6709 rtx temp = change_address (op0, mode1,
6710 plus_constant (XEXP (op0, 0),
6711 (bitpos /
6712 BITS_PER_UNIT)));
6713 if (GET_CODE (XEXP (temp, 0)) == REG)
6714 op0 = temp;
6715 else
6716 op0 = change_address (op0, mode1,
6717 force_reg (GET_MODE (XEXP (temp, 0)),
6718 XEXP (temp, 0)));
6719 bitpos = 0;
6720 }
6721
6722
6723 op0 = change_address (op0, VOIDmode,
6724 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6725 force_reg (ptr_mode,
6726 offset_rtx)));
6727 }
6728
6729 /* Don't forget about volatility even if this is a bitfield. */
6730 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6731 {
6732 op0 = copy_rtx (op0);
6733 MEM_VOLATILE_P (op0) = 1;
6734 }
6735
6736 /* Check the access. */
6737 if (cfun != 0 && current_function_check_memory_usage
6738 && GET_CODE (op0) == MEM)
6739 {
6740 enum memory_use_mode memory_usage;
6741 memory_usage = get_memory_usage_from_modifier (modifier);
6742
6743 if (memory_usage != MEMORY_USE_DONT)
6744 {
6745 rtx to;
6746 int size;
6747
6748 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6749 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
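		  /* SIZE is in bits at this point; the division below rounds
		     it up to whole bytes for the checker call.  */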
6750
6751 /* Check the access right of the pointer. */
6752 if (size > BITS_PER_UNIT)
6753 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6754 to, Pmode,
6755 GEN_INT (size / BITS_PER_UNIT),
6756 TYPE_MODE (sizetype),
6757 GEN_INT (memory_usage),
6758 TYPE_MODE (integer_type_node));
6759 }
6760 }
6761
6762 /* In cases where an aligned union has an unaligned object
6763 as a field, we might be extracting a BLKmode value from
6764 an integer-mode (e.g., SImode) object. Handle this case
6765 by doing the extract into an object as wide as the field
6766 (which we know to be the width of a basic mode), then
6767 storing into memory, and changing the mode to BLKmode.
6768 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6769 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6770 if (mode1 == VOIDmode
6771 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6772 || (modifier != EXPAND_CONST_ADDRESS
6773 && modifier != EXPAND_INITIALIZER
6774 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6775 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6776 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6777 /* If the field isn't aligned enough to fetch as a memref,
6778 fetch it as a bit field. */
6779 || (mode1 != BLKmode
6780 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6781 && ((TYPE_ALIGN (TREE_TYPE (tem))
6782 < GET_MODE_ALIGNMENT (mode))
6783 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6784 /* If the type and the field are a constant size and the
6785 size of the type isn't the same size as the bitfield,
6786 we must use bitfield operations. */
6787 || ((bitsize >= 0
6788 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6789 == INTEGER_CST)
6790 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6791 bitsize)))))
6792 || (modifier != EXPAND_CONST_ADDRESS
6793 && modifier != EXPAND_INITIALIZER
6794 && mode == BLKmode
6795 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6796 && (TYPE_ALIGN (type) > alignment
6797 || bitpos % TYPE_ALIGN (type) != 0)))
6798 {
6799 enum machine_mode ext_mode = mode;
6800
6801 if (ext_mode == BLKmode
6802 && ! (target != 0 && GET_CODE (op0) == MEM
6803 && GET_CODE (target) == MEM
6804 && bitpos % BITS_PER_UNIT == 0))
6805 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6806
6807 if (ext_mode == BLKmode)
6808 {
6809 /* In this case, BITPOS must start at a byte boundary and
6810 TARGET, if specified, must be a MEM. */
6811 if (GET_CODE (op0) != MEM
6812 || (target != 0 && GET_CODE (target) != MEM)
6813 || bitpos % BITS_PER_UNIT != 0)
6814 abort ();
6815
6816 op0 = change_address (op0, VOIDmode,
6817 plus_constant (XEXP (op0, 0),
6818 bitpos / BITS_PER_UNIT));
6819 if (target == 0)
6820 target = assign_temp (type, 0, 1, 1);
6821
6822 emit_block_move (target, op0,
6823 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6824 / BITS_PER_UNIT),
6825 BITS_PER_UNIT);
6826
6827 return target;
6828 }
6829
6830 op0 = validize_mem (op0);
6831
6832 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6833 mark_reg_pointer (XEXP (op0, 0), alignment);
6834
6835 op0 = extract_bit_field (op0, bitsize, bitpos,
6836 unsignedp, target, ext_mode, ext_mode,
6837 alignment,
6838 int_size_in_bytes (TREE_TYPE (tem)));
6839
6840 /* If the result is a record type and BITSIZE is narrower than
6841 the mode of OP0, an integral mode, and this is a big endian
6842 machine, we must put the field into the high-order bits. */
6843 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6844 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6845 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6846 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6847 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6848 - bitsize),
6849 op0, 1);
6850
6851 if (mode == BLKmode)
6852 {
6853 rtx new = assign_stack_temp (ext_mode,
6854 bitsize / BITS_PER_UNIT, 0);
6855
6856 emit_move_insn (new, op0);
6857 op0 = copy_rtx (new);
6858 PUT_MODE (op0, BLKmode);
6859 MEM_SET_IN_STRUCT_P (op0, 1);
6860 }
6861
6862 return op0;
6863 }
6864
6865 /* If the result is BLKmode, use that to access the object
6866 now as well. */
6867 if (mode == BLKmode)
6868 mode1 = BLKmode;
6869
6870 /* Get a reference to just this component. */
6871 if (modifier == EXPAND_CONST_ADDRESS
6872 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6873 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6874 (bitpos / BITS_PER_UNIT)));
6875 else
6876 op0 = change_address (op0, mode1,
6877 plus_constant (XEXP (op0, 0),
6878 (bitpos / BITS_PER_UNIT)));
6879
6880 if (GET_CODE (op0) == MEM)
6881 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6882
6883 if (GET_CODE (XEXP (op0, 0)) == REG)
6884 mark_reg_pointer (XEXP (op0, 0), alignment);
6885
6886 MEM_SET_IN_STRUCT_P (op0, 1);
6887 MEM_VOLATILE_P (op0) |= volatilep;
6888 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6889 || modifier == EXPAND_CONST_ADDRESS
6890 || modifier == EXPAND_INITIALIZER)
6891 return op0;
6892 else if (target == 0)
6893 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6894
6895 convert_move (target, op0, unsignedp);
6896 return target;
6897 }
6898
6899 /* Intended for a reference to a buffer of a file-object in Pascal.
6900 But it's not certain that a special tree code will really be
6901 necessary for these. INDIRECT_REF might work for them. */
6902 case BUFFER_REF:
6903 abort ();
6904
6905 case IN_EXPR:
6906 {
6907 /* Pascal set IN expression.
6908
6909 Algorithm:
6910 rlo = set_low - (set_low%bits_per_word);
6911 the_word = set [ (index - rlo)/bits_per_word ];
6912 bit_index = index % bits_per_word;
6913 bitmask = 1 << bit_index;
6914 return !!(the_word & bitmask); */
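	 /* Worked example (illustrative, following the pseudo-code above with
	    bits_per_word == 8): for set_low == 3 and index == 13 we get
	    rlo == 0, the_word == set[(13 - 0) / 8] == byte 1 of the set,
	    bit_index == 13 % 8 == 5, so we test bit 5 of that byte.  */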
6915
6916 tree set = TREE_OPERAND (exp, 0);
6917 tree index = TREE_OPERAND (exp, 1);
6918 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6919 tree set_type = TREE_TYPE (set);
6920 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6921 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6922 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6923 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6924 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6925 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6926 rtx setaddr = XEXP (setval, 0);
6927 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6928 rtx rlow;
6929 rtx diff, quo, rem, addr, bit, result;
6930
6931 preexpand_calls (exp);
6932
6933 /* If domain is empty, answer is no. Likewise if index is constant
6934 and out of bounds. */
6935 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6936 && TREE_CODE (set_low_bound) == INTEGER_CST
6937 && tree_int_cst_lt (set_high_bound, set_low_bound))
6938 || (TREE_CODE (index) == INTEGER_CST
6939 && TREE_CODE (set_low_bound) == INTEGER_CST
6940 && tree_int_cst_lt (index, set_low_bound))
6941 || (TREE_CODE (set_high_bound) == INTEGER_CST
6942 && TREE_CODE (index) == INTEGER_CST
6943 && tree_int_cst_lt (set_high_bound, index))))
6944 return const0_rtx;
6945
6946 if (target == 0)
6947 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6948
6949 /* If we get here, we have to generate the code for both cases
6950 (in range and out of range). */
6951
6952 op0 = gen_label_rtx ();
6953 op1 = gen_label_rtx ();
6954
6955 if (! (GET_CODE (index_val) == CONST_INT
6956 && GET_CODE (lo_r) == CONST_INT))
6957 {
6958 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6959 GET_MODE (index_val), iunsignedp, 0, op1);
6960 }
6961
6962 if (! (GET_CODE (index_val) == CONST_INT
6963 && GET_CODE (hi_r) == CONST_INT))
6964 {
6965 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6966 GET_MODE (index_val), iunsignedp, 0, op1);
6967 }
6968
6969 /* Calculate the element number of bit zero in the first word
6970 of the set. */
6971 if (GET_CODE (lo_r) == CONST_INT)
6972 rlow = GEN_INT (INTVAL (lo_r)
6973 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6974 else
6975 rlow = expand_binop (index_mode, and_optab, lo_r,
6976 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6977 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6978
6979 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6980 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6981
6982 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6983 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6984 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6985 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6986
6987 addr = memory_address (byte_mode,
6988 expand_binop (index_mode, add_optab, diff,
6989 setaddr, NULL_RTX, iunsignedp,
6990 OPTAB_LIB_WIDEN));
6991
6992 /* Extract the bit we want to examine */
6993 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6994 gen_rtx_MEM (byte_mode, addr),
6995 make_tree (TREE_TYPE (index), rem),
6996 NULL_RTX, 1);
6997 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6998 GET_MODE (target) == byte_mode ? target : 0,
6999 1, OPTAB_LIB_WIDEN);
7000
7001 if (result != target)
7002 convert_move (target, result, 1);
7003
7004 /* Output the code to handle the out-of-range case. */
7005 emit_jump (op0);
7006 emit_label (op1);
7007 emit_move_insn (target, const0_rtx);
7008 emit_label (op0);
7009 return target;
7010 }
7011
7012 case WITH_CLEANUP_EXPR:
7013 if (RTL_EXPR_RTL (exp) == 0)
7014 {
7015 RTL_EXPR_RTL (exp)
7016 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7017 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7018
7019 /* That's it for this cleanup. */
7020 TREE_OPERAND (exp, 2) = 0;
7021 }
7022 return RTL_EXPR_RTL (exp);
7023
7024 case CLEANUP_POINT_EXPR:
7025 {
7026 /* Start a new binding layer that will keep track of all cleanup
7027 actions to be performed. */
7028 expand_start_bindings (2);
7029
7030 target_temp_slot_level = temp_slot_level;
7031
7032 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7033 /* If we're going to use this value, load it up now. */
7034 if (! ignore)
7035 op0 = force_not_mem (op0);
7036 preserve_temp_slots (op0);
7037 expand_end_bindings (NULL_TREE, 0, 0);
7038 }
7039 return op0;
7040
7041 case CALL_EXPR:
7042 /* Check for a built-in function. */
7043 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7044 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7045 == FUNCTION_DECL)
7046 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7047 return expand_builtin (exp, target, subtarget, tmode, ignore);
7048
7049 /* If this call was expanded already by preexpand_calls,
7050 just return the result we got. */
7051 if (CALL_EXPR_RTL (exp) != 0)
7052 return CALL_EXPR_RTL (exp);
7053
7054 return expand_call (exp, target, ignore);
7055
7056 case NON_LVALUE_EXPR:
7057 case NOP_EXPR:
7058 case CONVERT_EXPR:
7059 case REFERENCE_EXPR:
7060 if (TREE_CODE (type) == UNION_TYPE)
7061 {
7062 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7063
7064 /* If both input and output are BLKmode, this conversion
7065 isn't actually doing anything unless we need to make the
7066 alignment stricter. */
7067 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7068 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7069 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7070 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7071 modifier);
7072
7073 if (target == 0)
7074 {
7075 if (mode != BLKmode)
7076 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7077 else
7078 target = assign_temp (type, 0, 1, 1);
7079 }
7080
7081 if (GET_CODE (target) == MEM)
7082 /* Store data into beginning of memory target. */
7083 store_expr (TREE_OPERAND (exp, 0),
7084 change_address (target, TYPE_MODE (valtype), 0), 0);
7085
7086 else if (GET_CODE (target) == REG)
7087 /* Store this field into a union of the proper type. */
7088 store_field (target,
7089 MIN ((int_size_in_bytes (TREE_TYPE
7090 (TREE_OPERAND (exp, 0)))
7091 * BITS_PER_UNIT),
7092 GET_MODE_BITSIZE (mode)),
7093 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7094 VOIDmode, 0, BITS_PER_UNIT,
7095 int_size_in_bytes (type), 0);
7096 else
7097 abort ();
7098
7099 /* Return the entire union. */
7100 return target;
7101 }
7102
7103 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7104 {
7105 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7106 ro_modifier);
7107
7108 /* If the signedness of the conversion differs and OP0 is
7109 a promoted SUBREG, clear that indication since we now
7110 have to do the proper extension. */
7111 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7112 && GET_CODE (op0) == SUBREG)
7113 SUBREG_PROMOTED_VAR_P (op0) = 0;
7114
7115 return op0;
7116 }
7117
7118 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7119 if (GET_MODE (op0) == mode)
7120 return op0;
7121
7122 /* If OP0 is a constant, just convert it into the proper mode. */
7123 if (CONSTANT_P (op0))
7124 return
7125 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7126 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7127
7128 if (modifier == EXPAND_INITIALIZER)
7129 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7130
7131 if (target == 0)
7132 return
7133 convert_to_mode (mode, op0,
7134 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7135 else
7136 convert_move (target, op0,
7137 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7138 return target;
7139
7140 case PLUS_EXPR:
7141 /* We come here from MINUS_EXPR when the second operand is a
7142 constant. */
7143 plus_expr:
7144 this_optab = add_optab;
7145
7146 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7147 something else, make sure we add the register to the constant and
7148 then to the other thing. This case can occur during strength
7149 reduction and doing it this way will produce better code if the
7150 frame pointer or argument pointer is eliminated.
7151
7152 fold-const.c will ensure that the constant is always in the inner
7153 PLUS_EXPR, so the only case we need to do anything about is if
7154 sp, ap, or fp is our second argument, in which case we must swap
7155 the innermost first argument and our second argument. */
7156
7157 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7158 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7159 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7160 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7161 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7162 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7163 {
7164 tree t = TREE_OPERAND (exp, 1);
7165
7166 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7167 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7168 }
7169
7170 /* If the result is to be ptr_mode and we are adding an integer to
7171 something, we might be forming a constant. So try to use
7172 plus_constant. If it produces a sum and we can't accept it,
7173 use force_operand. This allows P = &ARR[const] to generate
7174 efficient code on machines where a SYMBOL_REF is not a valid
7175 address.
7176
7177 If this is an EXPAND_SUM call, always return the sum. */
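/* A hedged illustration (names and modes are only examples): for
   "p = &arr[2]" with 4-byte elements, expanding with EXPAND_SUM lets
   plus_constant fold the offset into the address, giving roughly

     (plus (symbol_ref "arr") (const_int 8))

   instead of loading the symbol into a register and adding 8 at run
   time; force_operand is applied only when the caller cannot accept
   such a sum.  */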
7178 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7179 || mode == ptr_mode)
7180 {
7181 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7182 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7183 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7184 {
7185 rtx constant_part;
7186
7187 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7188 EXPAND_SUM);
7189 /* Use immed_double_const to ensure that the constant is
7190 truncated according to the mode of OP1, then sign extended
7191 to a HOST_WIDE_INT. Using the constant directly can result
7192 in non-canonical RTL in a 64x32 cross compile. */
7193 constant_part
7194 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7195 (HOST_WIDE_INT) 0,
7196 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7197 op1 = plus_constant (op1, INTVAL (constant_part));
7198 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7199 op1 = force_operand (op1, target);
7200 return op1;
7201 }
7202
7203 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7204 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7205 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7206 {
7207 rtx constant_part;
7208
7209 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7210 EXPAND_SUM);
7211 if (! CONSTANT_P (op0))
7212 {
7213 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7214 VOIDmode, modifier);
7215 /* Don't go to both_summands if modifier
7216 says it's not right to return a PLUS. */
7217 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7218 goto binop2;
7219 goto both_summands;
7220 }
7221 /* Use immed_double_const to ensure that the constant is
7222 truncated according to the mode of OP0, then sign extended
7223 to a HOST_WIDE_INT. Using the constant directly can result
7224 in non-canonical RTL in a 64x32 cross compile. */
7225 constant_part
7226 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7227 (HOST_WIDE_INT) 0,
7228 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7229 op0 = plus_constant (op0, INTVAL (constant_part));
7230 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7231 op0 = force_operand (op0, target);
7232 return op0;
7233 }
7234 }
7235
7236 /* No sense saving up arithmetic to be done
7237 if it's all in the wrong mode to form part of an address.
7238 And force_operand won't know whether to sign-extend or
7239 zero-extend. */
7240 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7241 || mode != ptr_mode)
7242 goto binop;
7243
7244 preexpand_calls (exp);
7245 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7246 subtarget = 0;
7247
7248 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7249 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7250
7251 both_summands:
7252 /* Make sure any term that's a sum with a constant comes last. */
7253 if (GET_CODE (op0) == PLUS
7254 && CONSTANT_P (XEXP (op0, 1)))
7255 {
7256 temp = op0;
7257 op0 = op1;
7258 op1 = temp;
7259 }
7260 /* If adding to a sum including a constant,
7261 associate it to put the constant outside. */
7262 if (GET_CODE (op1) == PLUS
7263 && CONSTANT_P (XEXP (op1, 1)))
7264 {
7265 rtx constant_term = const0_rtx;
7266
7267 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7268 if (temp != 0)
7269 op0 = temp;
7270 /* Ensure that MULT comes first if there is one. */
7271 else if (GET_CODE (op0) == MULT)
7272 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7273 else
7274 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7275
7276 /* Let's also eliminate constants from op0 if possible. */
7277 op0 = eliminate_constant_term (op0, &constant_term);
7278
7279 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7280 their sum should be a constant. Form it into OP1, since the
7281 result we want will then be OP0 + OP1. */
7282
7283 temp = simplify_binary_operation (PLUS, mode, constant_term,
7284 XEXP (op1, 1));
7285 if (temp != 0)
7286 op1 = temp;
7287 else
7288 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7289 }
7290
7291 /* Put a constant term last and put a multiplication first. */
7292 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7293 temp = op1, op1 = op0, op0 = temp;
7294
7295 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7296 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
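/* Illustrative only: the reordering above means that, e.g., a sum
   built as (plus (const_int 4) (mult X Y)) is returned in the
   canonical shape (plus (mult X Y) (const_int 4)), with
   multiplications first and constants last, which is the form the
   address recognition code downstream expects.  */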
7297
7298 case MINUS_EXPR:
7299 /* For initializers, we are allowed to return a MINUS of two
7300 symbolic constants. Here we handle all cases when both operands
7301 are constant. */
7302 /* Handle difference of two symbolic constants,
7303 for the sake of an initializer. */
7304 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7305 && really_constant_p (TREE_OPERAND (exp, 0))
7306 && really_constant_p (TREE_OPERAND (exp, 1)))
7307 {
7308 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7309 VOIDmode, ro_modifier);
7310 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7311 VOIDmode, ro_modifier);
7312
7313 /* If the last operand is a CONST_INT, use plus_constant of
7314 the negated constant. Else make the MINUS. */
7315 if (GET_CODE (op1) == CONST_INT)
7316 return plus_constant (op0, - INTVAL (op1));
7317 else
7318 return gen_rtx_MINUS (mode, op0, op1);
7319 }
7320 /* Convert A - const to A + (-const). */
7321 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7322 {
7323 tree negated = fold (build1 (NEGATE_EXPR, type,
7324 TREE_OPERAND (exp, 1)));
7325
7326 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7327 /* If we can't negate the constant in TYPE, leave it alone and
7328 expand_binop will negate it for us. We used to try to do it
7329 here in the signed version of TYPE, but that doesn't work
7330 on POINTER_TYPEs. */;
7331 else
7332 {
7333 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7334 goto plus_expr;
7335 }
7336 }
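/* For example (illustrative): "i - 7" is rebuilt here as the tree
   "i + (-7)" and expanded through the PLUS_EXPR code above; only for
   unsigned types, or when negating the constant overflows, do we
   fall through and let expand_binop emit a real subtraction via
   sub_optab.  */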
7337 this_optab = sub_optab;
7338 goto binop;
7339
7340 case MULT_EXPR:
7341 preexpand_calls (exp);
7342 /* If first operand is constant, swap them.
7343 Thus the following special case checks need only
7344 check the second operand. */
7345 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7346 {
7347 register tree t1 = TREE_OPERAND (exp, 0);
7348 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7349 TREE_OPERAND (exp, 1) = t1;
7350 }
7351
7352 /* Attempt to return something suitable for generating an
7353 indexed address, for machines that support that. */
7354
7355 if (modifier == EXPAND_SUM && mode == ptr_mode
7356 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7357 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7358 {
7359 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7360 EXPAND_SUM);
7361
7362 /* Apply distributive law if OP0 is x+c. */
7363 if (GET_CODE (op0) == PLUS
7364 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7365 return
7366 gen_rtx_PLUS
7367 (mode,
7368 gen_rtx_MULT
7369 (mode, XEXP (op0, 0),
7370 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7371 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7372 * INTVAL (XEXP (op0, 1))));
7373
7374 if (GET_CODE (op0) != REG)
7375 op0 = force_operand (op0, NULL_RTX);
7376 if (GET_CODE (op0) != REG)
7377 op0 = copy_to_mode_reg (mode, op0);
7378
7379 return
7380 gen_rtx_MULT (mode, op0,
7381 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7382 }
7383
7384 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7385 subtarget = 0;
7386
7387 /* Check for multiplying things that have been extended
7388 from a narrower type. If this machine supports multiplying
7389 in that narrower type with a result in the desired type,
7390 do it that way, and avoid the explicit type-conversion. */
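/* Sketch of the intent (target details are hypothetical): with
   32-bit int and a 32x32->64 multiply pattern such as mulsidi3, a
   source expression like

     (long long) a * (long long) b

   can be expanded as one widening multiply of the narrow operands
   instead of extending both to the wide mode and doing a full
   64-bit multiply.  */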
7391 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7392 && TREE_CODE (type) == INTEGER_TYPE
7393 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7394 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7395 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7396 && int_fits_type_p (TREE_OPERAND (exp, 1),
7397 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7398 /* Don't use a widening multiply if a shift will do. */
7399 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7400 > HOST_BITS_PER_WIDE_INT)
7401 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7402 ||
7403 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7404 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7405 ==
7406 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7407 /* If both operands are extended, they must either both
7408 be zero-extended or both be sign-extended. */
7409 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7410 ==
7411 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7412 {
7413 enum machine_mode innermode
7414 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7415 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7416 ? smul_widen_optab : umul_widen_optab);
7417 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7418 ? umul_widen_optab : smul_widen_optab);
7419 if (mode == GET_MODE_WIDER_MODE (innermode))
7420 {
7421 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7422 {
7423 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7424 NULL_RTX, VOIDmode, 0);
7425 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7426 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7427 VOIDmode, 0);
7428 else
7429 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7430 NULL_RTX, VOIDmode, 0);
7431 goto binop2;
7432 }
7433 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7434 && innermode == word_mode)
7435 {
7436 rtx htem;
7437 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7438 NULL_RTX, VOIDmode, 0);
7439 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7440 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7441 VOIDmode, 0);
7442 else
7443 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7444 NULL_RTX, VOIDmode, 0);
7445 temp = expand_binop (mode, other_optab, op0, op1, target,
7446 unsignedp, OPTAB_LIB_WIDEN);
7447 htem = expand_mult_highpart_adjust (innermode,
7448 gen_highpart (innermode, temp),
7449 op0, op1,
7450 gen_highpart (innermode, temp),
7451 unsignedp);
7452 emit_move_insn (gen_highpart (innermode, temp), htem);
7453 return temp;
7454 }
7455 }
7456 }
7457 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7458 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7459 return expand_mult (mode, op0, op1, target, unsignedp);
7460
7461 case TRUNC_DIV_EXPR:
7462 case FLOOR_DIV_EXPR:
7463 case CEIL_DIV_EXPR:
7464 case ROUND_DIV_EXPR:
7465 case EXACT_DIV_EXPR:
7466 preexpand_calls (exp);
7467 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7468 subtarget = 0;
7469 /* Possible optimization: compute the dividend with EXPAND_SUM;
7470 then, if the divisor is constant, we could optimize the case
7471 where some terms of the dividend have coefficients divisible by it. */
7472 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7473 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7474 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7475
7476 case RDIV_EXPR:
7477 this_optab = flodiv_optab;
7478 goto binop;
7479
7480 case TRUNC_MOD_EXPR:
7481 case FLOOR_MOD_EXPR:
7482 case CEIL_MOD_EXPR:
7483 case ROUND_MOD_EXPR:
7484 preexpand_calls (exp);
7485 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7486 subtarget = 0;
7487 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7488 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7489 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7490
7491 case FIX_ROUND_EXPR:
7492 case FIX_FLOOR_EXPR:
7493 case FIX_CEIL_EXPR:
7494 abort (); /* Not used for C. */
7495
7496 case FIX_TRUNC_EXPR:
7497 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7498 if (target == 0)
7499 target = gen_reg_rtx (mode);
7500 expand_fix (target, op0, unsignedp);
7501 return target;
7502
7503 case FLOAT_EXPR:
7504 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7505 if (target == 0)
7506 target = gen_reg_rtx (mode);
7507 /* expand_float can't figure out what to do if FROM has VOIDmode.
7508 So give it the correct mode. With -O, cse will optimize this. */
7509 if (GET_MODE (op0) == VOIDmode)
7510 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7511 op0);
7512 expand_float (target, op0,
7513 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7514 return target;
7515
7516 case NEGATE_EXPR:
7517 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7518 temp = expand_unop (mode, neg_optab, op0, target, 0);
7519 if (temp == 0)
7520 abort ();
7521 return temp;
7522
7523 case ABS_EXPR:
7524 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7525
7526 /* Handle complex values specially. */
7527 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7528 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7529 return expand_complex_abs (mode, op0, target, unsignedp);
7530
7531 /* Unsigned abs is simply the operand. Testing here means we don't
7532 risk generating incorrect code below. */
7533 if (TREE_UNSIGNED (type))
7534 return op0;
7535
7536 return expand_abs (mode, op0, target,
7537 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7538
7539 case MAX_EXPR:
7540 case MIN_EXPR:
7541 target = original_target;
7542 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7543 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7544 || GET_MODE (target) != mode
7545 || (GET_CODE (target) == REG
7546 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7547 target = gen_reg_rtx (mode);
7548 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7549 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7550
7551 /* First try to do it with a special MIN or MAX instruction.
7552 If that does not win, use a conditional jump to select the proper
7553 value. */
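/* If no min/max instruction matches, the fallback below emits,
   roughly (shown for MAX_EXPR; MIN_EXPR uses the opposite test):

     target = op0;
     if (target >= op1) goto done;
     target = op1;
   done:

   relying on cse to clean up constant operands.  */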
7554 this_optab = (TREE_UNSIGNED (type)
7555 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7556 : (code == MIN_EXPR ? smin_optab : smax_optab));
7557
7558 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7559 OPTAB_WIDEN);
7560 if (temp != 0)
7561 return temp;
7562
7563 /* At this point, a MEM target is no longer useful; we will get better
7564 code without it. */
7565
7566 if (GET_CODE (target) == MEM)
7567 target = gen_reg_rtx (mode);
7568
7569 if (target != op0)
7570 emit_move_insn (target, op0);
7571
7572 op0 = gen_label_rtx ();
7573
7574 /* If this mode is an integer too wide to compare properly,
7575 compare word by word. Rely on cse to optimize constant cases. */
7576 if (GET_MODE_CLASS (mode) == MODE_INT
7577 && ! can_compare_p (GE, mode, ccp_jump))
7578 {
7579 if (code == MAX_EXPR)
7580 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7581 target, op1, NULL_RTX, op0);
7582 else
7583 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7584 op1, target, NULL_RTX, op0);
7585 }
7586 else
7587 {
7588 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7589 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7590 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7591 op0);
7592 }
7593 emit_move_insn (target, op1);
7594 emit_label (op0);
7595 return target;
7596
7597 case BIT_NOT_EXPR:
7598 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7599 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7600 if (temp == 0)
7601 abort ();
7602 return temp;
7603
7604 case FFS_EXPR:
7605 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7606 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7607 if (temp == 0)
7608 abort ();
7609 return temp;
7610
7611 /* ??? Can optimize bitwise operations with one arg constant.
7612 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7613 and (a bitwise1 b) bitwise2 b (etc)
7614 but that is probably not worth while. */
7615
7616 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7617 boolean values when we want in all cases to compute both of them. In
7618 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7619 as actual zero-or-1 values and then bitwise anding. In cases where
7620 there cannot be any side effects, better code would be made by
7621 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7622 how to recognize those cases. */
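/* For instance, "a && b" arrives as TRUTH_ANDIF_EXPR and is expanded
   with a short-circuit branch that may skip B entirely, whereas a
   TRUTH_AND_EXPR of the same operands evaluates both to 0 or 1 and
   ands the results; front ends use the latter form only when both
   evaluations are wanted anyway.  */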
7623
7624 case TRUTH_AND_EXPR:
7625 case BIT_AND_EXPR:
7626 this_optab = and_optab;
7627 goto binop;
7628
7629 case TRUTH_OR_EXPR:
7630 case BIT_IOR_EXPR:
7631 this_optab = ior_optab;
7632 goto binop;
7633
7634 case TRUTH_XOR_EXPR:
7635 case BIT_XOR_EXPR:
7636 this_optab = xor_optab;
7637 goto binop;
7638
7639 case LSHIFT_EXPR:
7640 case RSHIFT_EXPR:
7641 case LROTATE_EXPR:
7642 case RROTATE_EXPR:
7643 preexpand_calls (exp);
7644 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7645 subtarget = 0;
7646 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7647 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7648 unsignedp);
7649
7650 /* Could determine the answer when only additive constants differ. Also,
7651 the addition of one can be handled by changing the condition. */
7652 case LT_EXPR:
7653 case LE_EXPR:
7654 case GT_EXPR:
7655 case GE_EXPR:
7656 case EQ_EXPR:
7657 case NE_EXPR:
7658 case UNORDERED_EXPR:
7659 case ORDERED_EXPR:
7660 case UNLT_EXPR:
7661 case UNLE_EXPR:
7662 case UNGT_EXPR:
7663 case UNGE_EXPR:
7664 case UNEQ_EXPR:
7665 preexpand_calls (exp);
7666 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7667 if (temp != 0)
7668 return temp;
7669
7670 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7671 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7672 && original_target
7673 && GET_CODE (original_target) == REG
7674 && (GET_MODE (original_target)
7675 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7676 {
7677 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7678 VOIDmode, 0);
7679
7680 if (temp != original_target)
7681 temp = copy_to_reg (temp);
7682
7683 op1 = gen_label_rtx ();
7684 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7685 GET_MODE (temp), unsignedp, 0, op1);
7686 emit_move_insn (temp, const1_rtx);
7687 emit_label (op1);
7688 return temp;
7689 }
7690
7691 /* If no set-flag instruction, must generate a conditional
7692 store into a temporary variable. Drop through
7693 and handle this like && and ||. */
7694
7695 case TRUTH_ANDIF_EXPR:
7696 case TRUTH_ORIF_EXPR:
7697 if (! ignore
7698 && (target == 0 || ! safe_from_p (target, exp, 1)
7699 /* Make sure we don't have a hard reg (such as function's return
7700 value) live across basic blocks, if not optimizing. */
7701 || (!optimize && GET_CODE (target) == REG
7702 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7703 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7704
7705 if (target)
7706 emit_clr_insn (target);
7707
7708 op1 = gen_label_rtx ();
7709 jumpifnot (exp, op1);
7710
7711 if (target)
7712 emit_0_to_1_insn (target);
7713
7714 emit_label (op1);
7715 return ignore ? const0_rtx : target;
7716
7717 case TRUTH_NOT_EXPR:
7718 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7719 /* The parser is careful to generate TRUTH_NOT_EXPR
7720 only with operands that are always zero or one. */
7721 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7722 target, 1, OPTAB_LIB_WIDEN);
7723 if (temp == 0)
7724 abort ();
7725 return temp;
7726
7727 case COMPOUND_EXPR:
7728 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7729 emit_queue ();
7730 return expand_expr (TREE_OPERAND (exp, 1),
7731 (ignore ? const0_rtx : target),
7732 VOIDmode, 0);
7733
7734 case COND_EXPR:
7735 /* If we would have a "singleton" (see below) were it not for a
7736 conversion in each arm, bring that conversion back out. */
7737 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7738 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7739 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7740 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7741 {
7742 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7743 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7744
7745 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7746 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7747 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7748 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7749 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7750 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7751 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7752 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7753 return expand_expr (build1 (NOP_EXPR, type,
7754 build (COND_EXPR, TREE_TYPE (true),
7755 TREE_OPERAND (exp, 0),
7756 true, false)),
7757 target, tmode, modifier);
7758 }
7759
7760 {
7761 /* Note that COND_EXPRs whose type is a structure or union
7762 are required to be constructed to contain assignments of
7763 a temporary variable, so that we can evaluate them here
7764 for side effect only. If type is void, we must do likewise. */
7765
7766 /* If an arm of the branch requires a cleanup,
7767 only that cleanup is performed. */
7768
7769 tree singleton = 0;
7770 tree binary_op = 0, unary_op = 0;
7771
7772 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7773 convert it to our mode, if necessary. */
7774 if (integer_onep (TREE_OPERAND (exp, 1))
7775 && integer_zerop (TREE_OPERAND (exp, 2))
7776 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7777 {
7778 if (ignore)
7779 {
7780 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7781 ro_modifier);
7782 return const0_rtx;
7783 }
7784
7785 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7786 if (GET_MODE (op0) == mode)
7787 return op0;
7788
7789 if (target == 0)
7790 target = gen_reg_rtx (mode);
7791 convert_move (target, op0, unsignedp);
7792 return target;
7793 }
7794
7795 /* Check for X ? A + B : A. If we have this, we can copy A to the
7796 output and conditionally add B. Similarly for unary operations.
7797 Don't do this if X has side-effects because those side effects
7798 might affect A or B and the "?" operation is a sequence point in
7799 ANSI. (operand_equal_p tests for side effects.) */
7800
7801 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7802 && operand_equal_p (TREE_OPERAND (exp, 2),
7803 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7804 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7805 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7806 && operand_equal_p (TREE_OPERAND (exp, 1),
7807 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7808 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7809 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7810 && operand_equal_p (TREE_OPERAND (exp, 2),
7811 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7812 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7813 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7814 && operand_equal_p (TREE_OPERAND (exp, 1),
7815 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7816 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7817
7818 /* If we are not to produce a result, we have no target. Otherwise,
7819 if a target was specified use it; it will not be used as an
7820 intermediate target unless it is safe. If no target, use a
7821 temporary. */
7822
7823 if (ignore)
7824 temp = 0;
7825 else if (original_target
7826 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7827 || (singleton && GET_CODE (original_target) == REG
7828 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7829 && original_target == var_rtx (singleton)))
7830 && GET_MODE (original_target) == mode
7831 #ifdef HAVE_conditional_move
7832 && (! can_conditionally_move_p (mode)
7833 || GET_CODE (original_target) == REG
7834 || TREE_ADDRESSABLE (type))
7835 #endif
7836 && ! (GET_CODE (original_target) == MEM
7837 && MEM_VOLATILE_P (original_target)))
7838 temp = original_target;
7839 else if (TREE_ADDRESSABLE (type))
7840 abort ();
7841 else
7842 temp = assign_temp (type, 0, 0, 1);
7843
7844 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7845 do the test of X as a store-flag operation, do this as
7846 A + ((X != 0) << log C). Similarly for other simple binary
7847 operators. Only do for C == 1 if BRANCH_COST is low. */
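/* A worked example of the transformation attempted below (purely
   illustrative): for "x ? a + 4 : a" with X a comparison, the
   branch-free form is

     a + ((x != 0) << 2)

   since 4 is a power of 2 whose log2 is 2; do_store_flag produces
   the 0-or-1 value and expand_shift supplies the shift.  */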
7848 if (temp && singleton && binary_op
7849 && (TREE_CODE (binary_op) == PLUS_EXPR
7850 || TREE_CODE (binary_op) == MINUS_EXPR
7851 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7852 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7853 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7854 : integer_onep (TREE_OPERAND (binary_op, 1)))
7855 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7856 {
7857 rtx result;
7858 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7859 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7860 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7861 : xor_optab);
7862
7863 /* If we had X ? A : A + 1, do this as A + (X == 0).
7864
7865 We have to invert the truth value here and then put it
7866 back later if do_store_flag fails. We cannot simply copy
7867 TREE_OPERAND (exp, 0) to another variable and modify that
7868 because invert_truthvalue can modify the tree pointed to
7869 by its argument. */
7870 if (singleton == TREE_OPERAND (exp, 1))
7871 TREE_OPERAND (exp, 0)
7872 = invert_truthvalue (TREE_OPERAND (exp, 0));
7873
7874 result = do_store_flag (TREE_OPERAND (exp, 0),
7875 (safe_from_p (temp, singleton, 1)
7876 ? temp : NULL_RTX),
7877 mode, BRANCH_COST <= 1);
7878
7879 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7880 result = expand_shift (LSHIFT_EXPR, mode, result,
7881 build_int_2 (tree_log2
7882 (TREE_OPERAND
7883 (binary_op, 1)),
7884 0),
7885 (safe_from_p (temp, singleton, 1)
7886 ? temp : NULL_RTX), 0);
7887
7888 if (result)
7889 {
7890 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7891 return expand_binop (mode, boptab, op1, result, temp,
7892 unsignedp, OPTAB_LIB_WIDEN);
7893 }
7894 else if (singleton == TREE_OPERAND (exp, 1))
7895 TREE_OPERAND (exp, 0)
7896 = invert_truthvalue (TREE_OPERAND (exp, 0));
7897 }
7898
7899 do_pending_stack_adjust ();
7900 NO_DEFER_POP;
7901 op0 = gen_label_rtx ();
7902
7903 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7904 {
7905 if (temp != 0)
7906 {
7907 /* If the target conflicts with the other operand of the
7908 binary op, we can't use it. Also, we can't use the target
7909 if it is a hard register, because evaluating the condition
7910 might clobber it. */
7911 if ((binary_op
7912 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7913 || (GET_CODE (temp) == REG
7914 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7915 temp = gen_reg_rtx (mode);
7916 store_expr (singleton, temp, 0);
7917 }
7918 else
7919 expand_expr (singleton,
7920 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7921 if (singleton == TREE_OPERAND (exp, 1))
7922 jumpif (TREE_OPERAND (exp, 0), op0);
7923 else
7924 jumpifnot (TREE_OPERAND (exp, 0), op0);
7925
7926 start_cleanup_deferral ();
7927 if (binary_op && temp == 0)
7928 /* Just touch the other operand. */
7929 expand_expr (TREE_OPERAND (binary_op, 1),
7930 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7931 else if (binary_op)
7932 store_expr (build (TREE_CODE (binary_op), type,
7933 make_tree (type, temp),
7934 TREE_OPERAND (binary_op, 1)),
7935 temp, 0);
7936 else
7937 store_expr (build1 (TREE_CODE (unary_op), type,
7938 make_tree (type, temp)),
7939 temp, 0);
7940 op1 = op0;
7941 }
7942 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7943 comparison operator. If we have one of these cases, set the
7944 output to A, branch on A (cse will merge these two references),
7945 then set the output to FOO. */
7946 else if (temp
7947 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7948 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7949 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7950 TREE_OPERAND (exp, 1), 0)
7951 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7952 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7953 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7954 {
7955 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7956 temp = gen_reg_rtx (mode);
7957 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7958 jumpif (TREE_OPERAND (exp, 0), op0);
7959
7960 start_cleanup_deferral ();
7961 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7962 op1 = op0;
7963 }
7964 else if (temp
7965 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7966 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7967 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7968 TREE_OPERAND (exp, 2), 0)
7969 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7970 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7971 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7972 {
7973 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7974 temp = gen_reg_rtx (mode);
7975 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7976 jumpifnot (TREE_OPERAND (exp, 0), op0);
7977
7978 start_cleanup_deferral ();
7979 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7980 op1 = op0;
7981 }
7982 else
7983 {
7984 op1 = gen_label_rtx ();
7985 jumpifnot (TREE_OPERAND (exp, 0), op0);
7986
7987 start_cleanup_deferral ();
7988
7989 /* One branch of the cond can be void, if it never returns. For
7990 example A ? throw : E */
7991 if (temp != 0
7992 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7993 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7994 else
7995 expand_expr (TREE_OPERAND (exp, 1),
7996 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7997 end_cleanup_deferral ();
7998 emit_queue ();
7999 emit_jump_insn (gen_jump (op1));
8000 emit_barrier ();
8001 emit_label (op0);
8002 start_cleanup_deferral ();
8003 if (temp != 0
8004 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8005 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8006 else
8007 expand_expr (TREE_OPERAND (exp, 2),
8008 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8009 }
8010
8011 end_cleanup_deferral ();
8012
8013 emit_queue ();
8014 emit_label (op1);
8015 OK_DEFER_POP;
8016
8017 return temp;
8018 }
8019
8020 case TARGET_EXPR:
8021 {
8022 /* Something needs to be initialized, but we didn't know
8023 where that thing was when building the tree. For example,
8024 it could be the return value of a function, or a parameter
8025 to a function that must be constructed on the stack, or a temporary
8026 variable which must be passed by reference.
8027
8028 We guarantee that the expression will either be constructed
8029 or copied into our original target. */
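/* A typical instance (hypothetical source): an aggregate-returning
   call such as "struct S s = make_s ();" can reach here as a
   TARGET_EXPR whose SLOT is the variable to construct into, whose
   operand 1 is the initializer, and whose operand 2 holds any
   cleanup that must be queued for the slot.  */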
8030
8031 tree slot = TREE_OPERAND (exp, 0);
8032 tree cleanups = NULL_TREE;
8033 tree exp1;
8034
8035 if (TREE_CODE (slot) != VAR_DECL)
8036 abort ();
8037
8038 if (! ignore)
8039 target = original_target;
8040
8041 /* Set this here so that if we get a target that refers to a
8042 register variable that's already been used, put_reg_into_stack
8043 knows that it should fix up those uses. */
8044 TREE_USED (slot) = 1;
8045
8046 if (target == 0)
8047 {
8048 if (DECL_RTL (slot) != 0)
8049 {
8050 target = DECL_RTL (slot);
8051 /* If we have already expanded the slot, don't do
8052 it again. (mrs) */
8053 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8054 return target;
8055 }
8056 else
8057 {
8058 target = assign_temp (type, 2, 0, 1);
8059 /* All temp slots at this level must not conflict. */
8060 preserve_temp_slots (target);
8061 DECL_RTL (slot) = target;
8062 if (TREE_ADDRESSABLE (slot))
8063 {
8064 TREE_ADDRESSABLE (slot) = 0;
8065 mark_addressable (slot);
8066 }
8067
8068 /* Since SLOT is not known to the called function
8069 to belong to its stack frame, we must build an explicit
8070 cleanup. This case occurs when we must build up a reference
8071 to pass as an argument. In this case,
8072 it is very likely that such a reference need not be
8073 built here. */
8074
8075 if (TREE_OPERAND (exp, 2) == 0)
8076 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8077 cleanups = TREE_OPERAND (exp, 2);
8078 }
8079 }
8080 else
8081 {
8082 /* This case does occur when expanding a parameter which
8083 needs to be constructed on the stack. The target
8084 is the actual stack address that we want to initialize.
8085 The function we call will perform the cleanup in this case. */
8086
8087 /* If we have already assigned it space, use that space,
8088 not target that we were passed in, as our target
8089 parameter is only a hint. */
8090 if (DECL_RTL (slot) != 0)
8091 {
8092 target = DECL_RTL (slot);
8093 /* If we have already expanded the slot, don't do
8094 it again. (mrs) */
8095 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8096 return target;
8097 }
8098 else
8099 {
8100 DECL_RTL (slot) = target;
8101 /* If we must have an addressable slot, then make sure that
8102 the RTL that we just stored in slot is OK. */
8103 if (TREE_ADDRESSABLE (slot))
8104 {
8105 TREE_ADDRESSABLE (slot) = 0;
8106 mark_addressable (slot);
8107 }
8108 }
8109 }
8110
8111 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8112 /* Mark it as expanded. */
8113 TREE_OPERAND (exp, 1) = NULL_TREE;
8114
8115 store_expr (exp1, target, 0);
8116
8117 expand_decl_cleanup (NULL_TREE, cleanups);
8118
8119 return target;
8120 }
8121
8122 case INIT_EXPR:
8123 {
8124 tree lhs = TREE_OPERAND (exp, 0);
8125 tree rhs = TREE_OPERAND (exp, 1);
8126 tree noncopied_parts = 0;
8127 tree lhs_type = TREE_TYPE (lhs);
8128
8129 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8130 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8131 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8132 TYPE_NONCOPIED_PARTS (lhs_type));
8133 while (noncopied_parts != 0)
8134 {
8135 expand_assignment (TREE_VALUE (noncopied_parts),
8136 TREE_PURPOSE (noncopied_parts), 0, 0);
8137 noncopied_parts = TREE_CHAIN (noncopied_parts);
8138 }
8139 return temp;
8140 }
8141
8142 case MODIFY_EXPR:
8143 {
8144 /* If lhs is complex, expand calls in rhs before computing it.
8145 That's so we don't compute a pointer and save it over a call.
8146 If lhs is simple, compute it first so we can give it as a
8147 target if the rhs is just a call. This avoids an extra temp and copy
8148 and prevents a partial subsumption that makes bad code.
8149 Actually we could treat component_ref's of vars like vars. */
8150
8151 tree lhs = TREE_OPERAND (exp, 0);
8152 tree rhs = TREE_OPERAND (exp, 1);
8153 tree noncopied_parts = 0;
8154 tree lhs_type = TREE_TYPE (lhs);
8155
8156 temp = 0;
8157
8158 if (TREE_CODE (lhs) != VAR_DECL
8159 && TREE_CODE (lhs) != RESULT_DECL
8160 && TREE_CODE (lhs) != PARM_DECL
8161 && ! (TREE_CODE (lhs) == INDIRECT_REF
8162 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8163 preexpand_calls (exp);
8164
8165 /* Check for |= or &= of a bitfield of size one into another bitfield
8166 of size 1. In this case, (unless we need the result of the
8167 assignment) we can do this more efficiently with a
8168 test followed by an assignment, if necessary.
8169
8170 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8171 things change so we do, this code should be enhanced to
8172 support it. */
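/* Illustration (the field names are made up): with 1-bit fields,

     s.b |= t.c;   becomes   if (t.c) s.b = 1;
     s.b &= t.c;   becomes   if (! t.c) s.b = 0;

   when the value of the assignment itself is not needed, avoiding a
   read-modify-write of the destination bitfield.  */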
8173 if (ignore
8174 && TREE_CODE (lhs) == COMPONENT_REF
8175 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8176 || TREE_CODE (rhs) == BIT_AND_EXPR)
8177 && TREE_OPERAND (rhs, 0) == lhs
8178 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8179 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8180 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8181 {
8182 rtx label = gen_label_rtx ();
8183
8184 do_jump (TREE_OPERAND (rhs, 1),
8185 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8186 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8187 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8188 (TREE_CODE (rhs) == BIT_IOR_EXPR
8189 ? integer_one_node
8190 : integer_zero_node)),
8191 0, 0);
8192 do_pending_stack_adjust ();
8193 emit_label (label);
8194 return const0_rtx;
8195 }
8196
8197 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8198 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8199 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8200 TYPE_NONCOPIED_PARTS (lhs_type));
8201
8202 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8203 while (noncopied_parts != 0)
8204 {
8205 expand_assignment (TREE_PURPOSE (noncopied_parts),
8206 TREE_VALUE (noncopied_parts), 0, 0);
8207 noncopied_parts = TREE_CHAIN (noncopied_parts);
8208 }
8209 return temp;
8210 }
8211
8212 case RETURN_EXPR:
8213 if (!TREE_OPERAND (exp, 0))
8214 expand_null_return ();
8215 else
8216 expand_return (TREE_OPERAND (exp, 0));
8217 return const0_rtx;
8218
8219 case PREINCREMENT_EXPR:
8220 case PREDECREMENT_EXPR:
8221 return expand_increment (exp, 0, ignore);
8222
8223 case POSTINCREMENT_EXPR:
8224 case POSTDECREMENT_EXPR:
8225 /* Faster to treat as pre-increment if result is not used. */
8226 return expand_increment (exp, ! ignore, ignore);
8227
8228 case ADDR_EXPR:
8229 /* If nonzero, TEMP will be set to the address of something that might
8230 be a MEM corresponding to a stack slot. */
8231 temp = 0;
8232
8233 /* Are we taking the address of a nested function? */
8234 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8235 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8236 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8237 && ! TREE_STATIC (exp))
8238 {
8239 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8240 op0 = force_operand (op0, target);
8241 }
8242 /* If we are taking the address of something erroneous, just
8243 return a zero. */
8244 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8245 return const0_rtx;
8246 else
8247 {
8248 /* We make sure to pass const0_rtx down if we came in with
8249 ignore set, to avoid doing the cleanups twice for something. */
8250 op0 = expand_expr (TREE_OPERAND (exp, 0),
8251 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8252 (modifier == EXPAND_INITIALIZER
8253 ? modifier : EXPAND_CONST_ADDRESS));
8254
8255 /* If we are going to ignore the result, OP0 will have been set
8256 to const0_rtx, so just return it. Don't get confused and
8257 think we are taking the address of the constant. */
8258 if (ignore)
8259 return op0;
8260
8261 op0 = protect_from_queue (op0, 0);
8262
8263 /* We would like the object in memory. If it is a constant, we can
8264 have it be statically allocated into memory. For a non-constant,
8265 we need to allocate some memory and store the value into it. */
8266
8267 if (CONSTANT_P (op0))
8268 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8269 op0);
8270 else if (GET_CODE (op0) == MEM)
8271 {
8272 mark_temp_addr_taken (op0);
8273 temp = XEXP (op0, 0);
8274 }
8275
8276 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8277 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8278 {
8279 /* If this object is in a register, it must not
8280 be BLKmode. */
8281 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8282 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8283
8284 mark_temp_addr_taken (memloc);
8285 emit_move_insn (memloc, op0);
8286 op0 = memloc;
8287 }
8288
8289 if (GET_CODE (op0) != MEM)
8290 abort ();
8291
8292 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8293 {
8294 temp = XEXP (op0, 0);
8295 #ifdef POINTERS_EXTEND_UNSIGNED
8296 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8297 && mode == ptr_mode)
8298 temp = convert_memory_address (ptr_mode, temp);
8299 #endif
8300 return temp;
8301 }
8302
8303 op0 = force_operand (XEXP (op0, 0), target);
8304 }
8305
8306 if (flag_force_addr && GET_CODE (op0) != REG)
8307 op0 = force_reg (Pmode, op0);
8308
8309 if (GET_CODE (op0) == REG
8310 && ! REG_USERVAR_P (op0))
8311 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8312
8313 /* If we might have had a temp slot, add an equivalent address
8314 for it. */
8315 if (temp != 0)
8316 update_temp_slot_address (temp, op0);
8317
8318 #ifdef POINTERS_EXTEND_UNSIGNED
8319 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8320 && mode == ptr_mode)
8321 op0 = convert_memory_address (ptr_mode, op0);
8322 #endif
8323
8324 return op0;
8325
8326 case ENTRY_VALUE_EXPR:
8327 abort ();
8328
8329 /* COMPLEX type for Extended Pascal & Fortran */
8330 case COMPLEX_EXPR:
8331 {
8332 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8333 rtx insns;
8334
8335 /* Get the rtx code of the operands. */
8336 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8337 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8338
8339 if (! target)
8340 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8341
8342 start_sequence ();
8343
8344 /* Move the real (op0) and imaginary (op1) parts to their location. */
8345 emit_move_insn (gen_realpart (mode, target), op0);
8346 emit_move_insn (gen_imagpart (mode, target), op1);
8347
8348 insns = get_insns ();
8349 end_sequence ();
8350
8351 /* Complex construction should appear as a single unit. */
8352 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8353 each with a separate pseudo as destination.
8354 It's not correct for flow to treat them as a unit. */
8355 if (GET_CODE (target) != CONCAT)
8356 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8357 else
8358 emit_insns (insns);
8359
8360 return target;
8361 }
8362
8363 case REALPART_EXPR:
8364 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8365 return gen_realpart (mode, op0);
8366
8367 case IMAGPART_EXPR:
8368 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8369 return gen_imagpart (mode, op0);
8370
8371 case CONJ_EXPR:
8372 {
8373 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8374 rtx imag_t;
8375 rtx insns;
8376
8377 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8378
8379 if (! target)
8380 target = gen_reg_rtx (mode);
8381
8382 start_sequence ();
8383
8384 /* Store the realpart and the negated imagpart to target. */
8385 emit_move_insn (gen_realpart (partmode, target),
8386 gen_realpart (partmode, op0));
8387
8388 imag_t = gen_imagpart (partmode, target);
8389 temp = expand_unop (partmode, neg_optab,
8390 gen_imagpart (partmode, op0), imag_t, 0);
8391 if (temp != imag_t)
8392 emit_move_insn (imag_t, temp);
8393
8394 insns = get_insns ();
8395 end_sequence ();
8396
8397 /* Conjugate should appear as a single unit.
8398 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8399 each with a separate pseudo as destination.
8400 It's not correct for flow to treat them as a unit. */
8401 if (GET_CODE (target) != CONCAT)
8402 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8403 else
8404 emit_insns (insns);
8405
8406 return target;
8407 }
8408
8409 case TRY_CATCH_EXPR:
8410 {
8411 tree handler = TREE_OPERAND (exp, 1);
8412
8413 expand_eh_region_start ();
8414
8415 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8416
8417 expand_eh_region_end (handler);
8418
8419 return op0;
8420 }
8421
8422 case TRY_FINALLY_EXPR:
8423 {
8424 tree try_block = TREE_OPERAND (exp, 0);
8425 tree finally_block = TREE_OPERAND (exp, 1);
8426 rtx finally_label = gen_label_rtx ();
8427 rtx done_label = gen_label_rtx ();
8428 rtx return_link = gen_reg_rtx (Pmode);
8429 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8430 (tree) finally_label, (tree) return_link);
8431 TREE_SIDE_EFFECTS (cleanup) = 1;
8432
8433 /* Start a new binding layer that will keep track of all cleanup
8434 actions to be performed. */
8435 expand_start_bindings (2);
8436
8437 target_temp_slot_level = temp_slot_level;
8438
8439 expand_decl_cleanup (NULL_TREE, cleanup);
8440 op0 = expand_expr (try_block, target, tmode, modifier);
8441
8442 preserve_temp_slots (op0);
8443 expand_end_bindings (NULL_TREE, 0, 0);
8444 emit_jump (done_label);
8445 emit_label (finally_label);
8446 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8447 emit_indirect_jump (return_link);
8448 emit_label (done_label);
8449 return op0;
8450 }
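/* In GNU C computed-goto terms, the code emitted above is roughly
   (labels invented for the sketch):

     ... try block ...
     return_link = &&resume; goto finally; resume:   (from the cleanup)
     goto done;
   finally:
     ... finally block ...
     goto *return_link;
   done:

   so every exit path runs the finally block as a subroutine and then
   resumes wherever return_link points.  */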
8451
8452 case GOTO_SUBROUTINE_EXPR:
8453 {
8454 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8455 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8456 rtx return_address = gen_label_rtx ();
8457 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8458 emit_jump (subr);
8459 emit_label (return_address);
8460 return const0_rtx;
8461 }
8462
8463 case POPDCC_EXPR:
8464 {
8465 rtx dcc = get_dynamic_cleanup_chain ();
8466 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8467 return const0_rtx;
8468 }
8469
8470 case POPDHC_EXPR:
8471 {
8472 rtx dhc = get_dynamic_handler_chain ();
8473 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8474 return const0_rtx;
8475 }
8476
8477 case VA_ARG_EXPR:
8478 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8479
8480 default:
8481 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8482 }
8483
8484 /* Here to do an ordinary binary operator, generating an instruction
8485 from the optab already placed in `this_optab'. */
8486 binop:
8487 preexpand_calls (exp);
8488 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8489 subtarget = 0;
8490 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8491 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8492 binop2:
8493 temp = expand_binop (mode, this_optab, op0, op1, target,
8494 unsignedp, OPTAB_LIB_WIDEN);
8495 if (temp == 0)
8496 abort ();
8497 return temp;
8498 }
8499 \f
8500 /* Similar to expand_expr, except that we don't specify a target, target
8501 mode, or modifier and we return the alignment of the inner type. This is
8502 used in cases where it is not necessary to align the result to the
8503 alignment of its type as long as we know the alignment of the result, for
8504 example for comparisons of BLKmode values. */
8505
8506 static rtx
8507 expand_expr_unaligned (exp, palign)
8508 register tree exp;
8509 unsigned int *palign;
8510 {
8511 register rtx op0;
8512 tree type = TREE_TYPE (exp);
8513 register enum machine_mode mode = TYPE_MODE (type);
8514
8515 /* Default the alignment we return to that of the type. */
8516 *palign = TYPE_ALIGN (type);
8517
8518 /* The only cases in which we do anything special is if the resulting mode
8519 is BLKmode. */
8520 if (mode != BLKmode)
8521 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8522
8523 switch (TREE_CODE (exp))
8524 {
8525 case CONVERT_EXPR:
8526 case NOP_EXPR:
8527 case NON_LVALUE_EXPR:
8528 /* Conversions between BLKmode values don't change the underlying
8529 alignment or value. */
8530 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8531 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8532 break;
8533
8534 case ARRAY_REF:
8535 /* Much of the code for this case is copied directly from expand_expr.
8536 We need to duplicate it here because we will do something different
8537 in the fall-through case, so we need to handle the same exceptions
8538 it does. */
8539 {
8540 tree array = TREE_OPERAND (exp, 0);
8541 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8542 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8543 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8544 HOST_WIDE_INT i;
8545
8546 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8547 abort ();
8548
8549 /* Optimize the special-case of a zero lower bound.
8550
8551 We convert the low_bound to sizetype to avoid some problems
8552 with constant folding. (E.g. suppose the lower bound is 1,
8553 and its mode is QI. Without the conversion, (ARRAY
8554 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8555 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8556
8557 if (! integer_zerop (low_bound))
8558 index = size_diffop (index, convert (sizetype, low_bound));
8559
8560 /* If this is a constant index into a constant array,
8561 just get the value from the array. Handle both the cases when
8562 we have an explicit constructor and when our operand is a variable
8563 that was declared const. */
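/* Illustrative example (types invented): for

     static const struct pt { int x, y; } tbl[] = { {1, 2}, {3, 4} };

   a BLKmode reference like tbl[1] can be resolved right here to the
   constructor element {3, 4}, either from an explicit CONSTRUCTOR
   operand or from the DECL_INITIAL of the const variable.  */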
8564
8565 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8566 && 0 > compare_tree_int (index,
8567 list_length (CONSTRUCTOR_ELTS
8568 (TREE_OPERAND (exp, 0)))))
8569 {
8570 tree elem;
8571
8572 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8573 i = TREE_INT_CST_LOW (index);
8574 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8575 ;
8576
8577 if (elem)
8578 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8579 }
8580
8581 else if (optimize >= 1
8582 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8583 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8584 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8585 {
8586 if (TREE_CODE (index) == INTEGER_CST)
8587 {
8588 tree init = DECL_INITIAL (array);
8589
8590 if (TREE_CODE (init) == CONSTRUCTOR)
8591 {
8592 tree elem;
8593
8594 for (elem = CONSTRUCTOR_ELTS (init);
8595 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8596 elem = TREE_CHAIN (elem))
8597 ;
8598
8599 if (elem)
8600 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8601 palign);
8602 }
8603 }
8604 }
8605 }
8606
8607 /* ... fall through ... */
8608
8609 case COMPONENT_REF:
8610 case BIT_FIELD_REF:
8611 /* If the operand is a CONSTRUCTOR, we can just extract the
8612 appropriate field if it is present. Don't do this if we have
8613 already written the data since we want to refer to that copy
8614 and varasm.c assumes that's what we'll do. */
8615 if (TREE_CODE (exp) != ARRAY_REF
8616 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8617 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8618 {
8619 tree elt;
8620
8621 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8622 elt = TREE_CHAIN (elt))
8623 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8624 /* Note that unlike the case in expand_expr, we know this is
8625 BLKmode and hence not an integer. */
8626 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8627 }
8628
8629 {
8630 enum machine_mode mode1;
8631 HOST_WIDE_INT bitsize, bitpos;
8632 tree offset;
8633 int volatilep = 0;
8634 unsigned int alignment;
8635 int unsignedp;
8636 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8637 &mode1, &unsignedp, &volatilep,
8638 &alignment);
8639
8640 /* If we got back the original object, something is wrong. Perhaps
8641 we are evaluating an expression too early. In any event, don't
8642 infinitely recurse. */
8643 if (tem == exp)
8644 abort ();
8645
8646 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8647
8648 /* If this is a constant, put it into a register if it is a
8649 legitimate constant and OFFSET is 0; otherwise put it in memory. */
8650 if (CONSTANT_P (op0))
8651 {
8652 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8653
8654 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8655 && offset == 0)
8656 op0 = force_reg (inner_mode, op0);
8657 else
8658 op0 = validize_mem (force_const_mem (inner_mode, op0));
8659 }
8660
8661 if (offset != 0)
8662 {
8663 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8664
8665 /* If this object is in a register, put it into memory.
8666 This case can't occur in C, but can in Ada if we have
8667 unchecked conversion of an expression from a scalar type to
8668 an array or record type. */
8669 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8670 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8671 {
8672 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8673
8674 mark_temp_addr_taken (memloc);
8675 emit_move_insn (memloc, op0);
8676 op0 = memloc;
8677 }
8678
8679 if (GET_CODE (op0) != MEM)
8680 abort ();
8681
8682 if (GET_MODE (offset_rtx) != ptr_mode)
8683 {
8684 #ifdef POINTERS_EXTEND_UNSIGNED
8685 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8686 #else
8687 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8688 #endif
8689 }
8690
8691 op0 = change_address (op0, VOIDmode,
8692 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8693 force_reg (ptr_mode,
8694 offset_rtx)));
8695 }
8696
8697 /* Don't forget about volatility even if this is a bitfield. */
8698 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8699 {
8700 op0 = copy_rtx (op0);
8701 MEM_VOLATILE_P (op0) = 1;
8702 }
8703
8704 /* Check the access. */
8705 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8706 {
8707 rtx to;
8708 int size;
8709
8710 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8711 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8712
8713 /* Check the access right of the pointer. */
8714 if (size > BITS_PER_UNIT)
8715 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8716 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8717 TYPE_MODE (sizetype),
8718 GEN_INT (MEMORY_USE_RO),
8719 TYPE_MODE (integer_type_node));
8720 }
8721
8722 /* In cases where an aligned union has an unaligned object
8723 as a field, we might be extracting a BLKmode value from
8724 an integer-mode (e.g., SImode) object. Handle this case
8725 by doing the extract into an object as wide as the field
8726 (which we know to be the width of a basic mode), then
8727 storing into memory, and changing the mode to BLKmode.
8728 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8729 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8730 if (mode1 == VOIDmode
8731 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8732 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8733 && (TYPE_ALIGN (type) > alignment
8734 || bitpos % TYPE_ALIGN (type) != 0)))
8735 {
8736 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8737
8738 if (ext_mode == BLKmode)
8739 {
8740 /* In this case, BITPOS must start at a byte boundary. */
8741 if (GET_CODE (op0) != MEM
8742 || bitpos % BITS_PER_UNIT != 0)
8743 abort ();
8744
8745 op0 = change_address (op0, VOIDmode,
8746 plus_constant (XEXP (op0, 0),
8747 bitpos / BITS_PER_UNIT));
8748 }
8749 else
8750 {
8751 rtx new = assign_stack_temp (ext_mode,
8752 bitsize / BITS_PER_UNIT, 0);
8753
8754 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8755 unsignedp, NULL_RTX, ext_mode,
8756 ext_mode, alignment,
8757 int_size_in_bytes (TREE_TYPE (tem)));
8758
8759 /* If the result is a record type and BITSIZE is narrower than
8760 the mode of OP0, an integral mode, and this is a big endian
8761 machine, we must put the field into the high-order bits. */
8762 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8763 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8764 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8765 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8766 size_int (GET_MODE_BITSIZE
8767 (GET_MODE (op0))
8768 - bitsize),
8769 op0, 1);
8770
8771
8772 emit_move_insn (new, op0);
8773 op0 = copy_rtx (new);
8774 PUT_MODE (op0, BLKmode);
8775 }
8776 }
8777 else
8778 /* Get a reference to just this component. */
8779 op0 = change_address (op0, mode1,
8780 plus_constant (XEXP (op0, 0),
8781 (bitpos / BITS_PER_UNIT)));
8782
8783 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8784
8785 /* Adjust the alignment in case the bit position is not
8786 a multiple of the alignment of the inner object. */
8787 while (bitpos % alignment != 0)
8788 alignment >>= 1;
8789
8790 if (GET_CODE (XEXP (op0, 0)) == REG)
8791 mark_reg_pointer (XEXP (op0, 0), alignment);
8792
8793 MEM_IN_STRUCT_P (op0) = 1;
8794 MEM_VOLATILE_P (op0) |= volatilep;
8795
8796 *palign = alignment;
8797 return op0;
8798 }
8799
8800 default:
8801 break;
8802
8803 }
8804
8805 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8806 }
8807 \f
8808 /* Return the tree node if ARG corresponds to a string constant, or zero
8809 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8810 in bytes within the string that ARG is accessing. The type of the
8811 offset will be `sizetype'. */
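/* For example, if ARG is the address of the string "hello" plus 3
   (a PLUS_EXPR of an ADDR_EXPR and the constant 3), the STRING_CST
   for "hello" is returned and *PTR_OFFSET is set to a sizetype 3.  */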
8812
8813 tree
8814 string_constant (arg, ptr_offset)
8815 tree arg;
8816 tree *ptr_offset;
8817 {
8818 STRIP_NOPS (arg);
8819
8820 if (TREE_CODE (arg) == ADDR_EXPR
8821 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8822 {
8823 *ptr_offset = size_zero_node;
8824 return TREE_OPERAND (arg, 0);
8825 }
8826 else if (TREE_CODE (arg) == PLUS_EXPR)
8827 {
8828 tree arg0 = TREE_OPERAND (arg, 0);
8829 tree arg1 = TREE_OPERAND (arg, 1);
8830
8831 STRIP_NOPS (arg0);
8832 STRIP_NOPS (arg1);
8833
8834 if (TREE_CODE (arg0) == ADDR_EXPR
8835 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8836 {
8837 *ptr_offset = convert (sizetype, arg1);
8838 return TREE_OPERAND (arg0, 0);
8839 }
8840 else if (TREE_CODE (arg1) == ADDR_EXPR
8841 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8842 {
8843 *ptr_offset = convert (sizetype, arg0);
8844 return TREE_OPERAND (arg1, 0);
8845 }
8846 }
8847
8848 return 0;
8849 }
8850 \f
8851 /* Expand code for a post- or pre- increment or decrement
8852 and return the RTX for the result.
8853 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8854
8855 static rtx
8856 expand_increment (exp, post, ignore)
8857 register tree exp;
8858 int post, ignore;
8859 {
8860 register rtx op0, op1;
8861 register rtx temp, value;
8862 register tree incremented = TREE_OPERAND (exp, 0);
8863 optab this_optab = add_optab;
8864 int icode;
8865 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8866 int op0_is_copy = 0;
8867 int single_insn = 0;
8868 /* 1 means we can't store into OP0 directly,
8869 because it is a subreg narrower than a word,
8870 and we don't dare clobber the rest of the word. */
8871 int bad_subreg = 0;
8872
8873 /* Stabilize any component ref that might need to be
8874 evaluated more than once below. */
8875 if (!post
8876 || TREE_CODE (incremented) == BIT_FIELD_REF
8877 || (TREE_CODE (incremented) == COMPONENT_REF
8878 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8879 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8880 incremented = stabilize_reference (incremented);
8881 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8882 ones into save exprs so that they don't accidentally get evaluated
8883 more than once by the code below. */
8884 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8885 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8886 incremented = save_expr (incremented);
8887
8888 /* Compute the operands as RTX.
8889 Note whether OP0 is the actual lvalue or a copy of it:
8890 I believe it is a copy iff it is a register or subreg
8891 and insns were generated in computing it. */
8892
8893 temp = get_last_insn ();
8894 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8895
8896 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8897 in place but instead must do sign- or zero-extension during assignment,
8898 so we copy it into a new register and let the code below use it as
8899 a copy.
8900
8901 Note that we can safely modify this SUBREG since it is known not to be
8902 shared (it was made by the expand_expr call above). */
8903
8904 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8905 {
8906 if (post)
8907 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8908 else
8909 bad_subreg = 1;
8910 }
8911 else if (GET_CODE (op0) == SUBREG
8912 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8913 {
8914 /* We cannot increment this SUBREG in place. If we are
8915 post-incrementing, get a copy of the old value. Otherwise,
8916 just mark that we cannot increment in place. */
8917 if (post)
8918 op0 = copy_to_reg (op0);
8919 else
8920 bad_subreg = 1;
8921 }
8922
8923 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8924 && temp != get_last_insn ());
8925 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8926 EXPAND_MEMORY_USE_BAD);
8927
8928 /* Decide whether incrementing or decrementing. */
8929 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8930 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8931 this_optab = sub_optab;
8932
8933 /* Convert decrement by a constant into a negative increment. */
8934 if (this_optab == sub_optab
8935 && GET_CODE (op1) == CONST_INT)
8936 {
8937 op1 = GEN_INT (- INTVAL (op1));
8938 this_optab = add_optab;
8939 }
8940
8941 /* For a preincrement, see if we can do this with a single instruction. */
8942 if (!post)
8943 {
8944 icode = (int) this_optab->handlers[(int) mode].insn_code;
8945 if (icode != (int) CODE_FOR_nothing
8946 /* Make sure that OP0 is valid for operands 0 and 1
8947 of the insn we want to queue. */
8948 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8949 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8950 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8951 single_insn = 1;
8952 }
8953
8954 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8955 then we cannot just increment OP0. We must therefore contrive to
8956 increment the original value. Then, for postincrement, we can return
8957 OP0 since it is a copy of the old value. For preincrement, expand here
8958 unless we can do it with a single insn.
8959
8960 Likewise if storing directly into OP0 would clobber high bits
8961 we need to preserve (bad_subreg). */
8962 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8963 {
8964 /* This is the easiest way to increment the value wherever it is.
8965 Problems with multiple evaluation of INCREMENTED are prevented
8966 because either (1) it is a component_ref or preincrement,
8967 in which case it was stabilized above, or (2) it is an array_ref
8968 with constant index in an array in a register, which is
8969 safe to reevaluate. */
8970 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8971 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8972 ? MINUS_EXPR : PLUS_EXPR),
8973 TREE_TYPE (exp),
8974 incremented,
8975 TREE_OPERAND (exp, 1));
8976
8977 while (TREE_CODE (incremented) == NOP_EXPR
8978 || TREE_CODE (incremented) == CONVERT_EXPR)
8979 {
8980 newexp = convert (TREE_TYPE (incremented), newexp);
8981 incremented = TREE_OPERAND (incremented, 0);
8982 }
8983
8984 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8985 return post ? op0 : temp;
8986 }
8987
8988 if (post)
8989 {
8990 /* We have a true reference to the value in OP0.
8991 If there is an insn to add or subtract in this mode, queue it.
8992 Queueing the increment insn avoids the register shuffling
8993 that often results if we must increment now and first save
8994 the old value for subsequent use. */
8995
8996 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8997 op0 = stabilize (op0);
8998 #endif
8999
9000 icode = (int) this_optab->handlers[(int) mode].insn_code;
9001 if (icode != (int) CODE_FOR_nothing
9002 /* Make sure that OP0 is valid for operands 0 and 1
9003 of the insn we want to queue. */
9004 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9005 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9006 {
9007 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9008 op1 = force_reg (mode, op1);
9009
9010 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9011 }
9012 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9013 {
9014 rtx addr = (general_operand (XEXP (op0, 0), mode)
9015 ? force_reg (Pmode, XEXP (op0, 0))
9016 : copy_to_reg (XEXP (op0, 0)));
9017 rtx temp, result;
9018
9019 op0 = change_address (op0, VOIDmode, addr);
9020 temp = force_reg (GET_MODE (op0), op0);
9021 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9022 op1 = force_reg (mode, op1);
9023
9024 /* The increment queue is LIFO, thus we have to `queue'
9025 the instructions in reverse order. */
9026 enqueue_insn (op0, gen_move_insn (op0, temp));
9027 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9028 return result;
9029 }
9030 }
9031
9032 /* Preincrement, or we can't increment with one simple insn. */
9033 if (post)
9034 /* Save a copy of the value before inc or dec, to return it later. */
9035 temp = value = copy_to_reg (op0);
9036 else
9037 /* Arrange to return the incremented value. */
9038 /* Copy the rtx because expand_binop will protect from the queue,
9039 and the results of that would be invalid for us to return
9040 if our caller does emit_queue before using our result. */
9041 temp = copy_rtx (value = op0);
9042
9043 /* Increment however we can. */
9044 op1 = expand_binop (mode, this_optab, value, op1,
9045 current_function_check_memory_usage ? NULL_RTX : op0,
9046 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9047 /* Make sure the value is stored into OP0. */
9048 if (op1 != op0)
9049 emit_move_insn (op0, op1);
9050
9051 return temp;
9052 }
9053 \f
9054 /* Expand all function calls contained within EXP, innermost ones first.
9055 But don't look within expressions that have sequence points.
9056 For each CALL_EXPR, record the rtx for its value
9057 in the CALL_EXPR_RTL field. */
9058
9059 static void
9060 preexpand_calls (exp)
9061 tree exp;
9062 {
9063 register int nops, i;
9064 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9065
9066 if (! do_preexpand_calls)
9067 return;
9068
9069 /* Only expressions and references can contain calls. */
9070
9071 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9072 return;
9073
9074 switch (TREE_CODE (exp))
9075 {
9076 case CALL_EXPR:
9077 /* Do nothing if already expanded. */
9078 if (CALL_EXPR_RTL (exp) != 0
9079 /* Do nothing if the call returns a variable-sized object. */
9080 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9081 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9082 /* Do nothing to built-in functions. */
9083 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9084 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9085 == FUNCTION_DECL)
9086 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9087 return;
9088
9089 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9090 return;
9091
9092 case COMPOUND_EXPR:
9093 case COND_EXPR:
9094 case TRUTH_ANDIF_EXPR:
9095 case TRUTH_ORIF_EXPR:
9096 /* If we find one of these, then we can be sure
9097 the adjust will be done for it (since it makes jumps).
9098 Do it now, so that if this is inside an argument
9099 of a function, we don't get the stack adjustment
9100 after some other args have already been pushed. */
9101 do_pending_stack_adjust ();
9102 return;
9103
9104 case BLOCK:
9105 case RTL_EXPR:
9106 case WITH_CLEANUP_EXPR:
9107 case CLEANUP_POINT_EXPR:
9108 case TRY_CATCH_EXPR:
9109 return;
9110
9111 case SAVE_EXPR:
9112 if (SAVE_EXPR_RTL (exp) != 0)
9113 return;
9114
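      /* ... fall through ... */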
9115 default:
9116 break;
9117 }
9118
9119 nops = tree_code_length[(int) TREE_CODE (exp)];
9120 for (i = 0; i < nops; i++)
9121 if (TREE_OPERAND (exp, i) != 0)
9122 {
9123 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9124 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9125 It doesn't happen before the call is made. */
9126 ;
9127 else
9128 {
9129 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9130 if (type == 'e' || type == '<' || type == '1' || type == '2'
9131 || type == 'r')
9132 preexpand_calls (TREE_OPERAND (exp, i));
9133 }
9134 }
9135 }
9136 \f
9137 /* At the start of a function, record that we have no previously-pushed
9138 arguments waiting to be popped. */
9139
9140 void
9141 init_pending_stack_adjust ()
9142 {
9143 pending_stack_adjust = 0;
9144 }
9145
9146 /* When exiting from a function, if safe, clear out any pending stack adjust
9147 so the adjustment won't get done.
9148
9149 Note, if the current function calls alloca, then it must have a
9150 frame pointer regardless of the value of flag_omit_frame_pointer. */
9151
9152 void
9153 clear_pending_stack_adjust ()
9154 {
9155 #ifdef EXIT_IGNORE_STACK
9156 if (optimize > 0
9157 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9158 && EXIT_IGNORE_STACK
9159 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9160 && ! flag_inline_functions)
9161 {
9162 stack_pointer_delta -= pending_stack_adjust,
9163 pending_stack_adjust = 0;
9164 }
9165 #endif
9166 }
9167
9168 /* Pop any previously-pushed arguments that have not been popped yet. */
9169
9170 void
9171 do_pending_stack_adjust ()
9172 {
9173 if (inhibit_defer_pop == 0)
9174 {
9175 if (pending_stack_adjust != 0)
9176 adjust_stack (GEN_INT (pending_stack_adjust));
9177 pending_stack_adjust = 0;
9178 }
9179 }
9180 \f
9181 /* Expand conditional expressions. */
9182
9183 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9184 LABEL is an rtx of code CODE_LABEL, in this function and all the
9185 functions here. */
9186
9187 void
9188 jumpifnot (exp, label)
9189 tree exp;
9190 rtx label;
9191 {
9192 do_jump (exp, label, NULL_RTX);
9193 }
9194
9195 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9196
9197 void
9198 jumpif (exp, label)
9199 tree exp;
9200 rtx label;
9201 {
9202 do_jump (exp, NULL_RTX, label);
9203 }
9204
9205 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9206 the result is zero, or IF_TRUE_LABEL if the result is one.
9207 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9208 meaning fall through in that case.
9209
9210 do_jump always does any pending stack adjust except when it does not
9211 actually perform a jump. An example where there is no jump
9212 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9213
9214 This function is responsible for optimizing cases such as
9215 &&, || and comparison operators in EXP. */
9216
9217 void
9218 do_jump (exp, if_false_label, if_true_label)
9219 tree exp;
9220 rtx if_false_label, if_true_label;
9221 {
9222 register enum tree_code code = TREE_CODE (exp);
9223 /* Some cases need to create a label to jump to
9224 in order to properly fall through.
9225 These cases set DROP_THROUGH_LABEL nonzero. */
9226 rtx drop_through_label = 0;
9227 rtx temp;
9228 int i;
9229 tree type;
9230 enum machine_mode mode;
9231
9232 #ifdef MAX_INTEGER_COMPUTATION_MODE
9233 check_max_integer_computation_mode (exp);
9234 #endif
9235
9236 emit_queue ();
9237
9238 switch (code)
9239 {
9240 case ERROR_MARK:
9241 break;
9242
9243 case INTEGER_CST:
9244 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9245 if (temp)
9246 emit_jump (temp);
9247 break;
9248
9249 #if 0
9250 /* This is not true with #pragma weak */
9251 case ADDR_EXPR:
9252 /* The address of something can never be zero. */
9253 if (if_true_label)
9254 emit_jump (if_true_label);
9255 break;
9256 #endif
9257
9258 case NOP_EXPR:
9259 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9260 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9261 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9262 goto normal;
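      /* ... fall through ... */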
9263 case CONVERT_EXPR:
9264 /* If we are narrowing the operand, we have to do the compare in the
9265 narrower mode. */
9266 if ((TYPE_PRECISION (TREE_TYPE (exp))
9267 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9268 goto normal;
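      /* ... fall through ... */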
9269 case NON_LVALUE_EXPR:
9270 case REFERENCE_EXPR:
9271 case ABS_EXPR:
9272 case NEGATE_EXPR:
9273 case LROTATE_EXPR:
9274 case RROTATE_EXPR:
9275 /* These cannot change zero->non-zero or vice versa. */
9276 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9277 break;
9278
9279 case WITH_RECORD_EXPR:
9280 /* Put the object on the placeholder list, recurse through our first
9281 operand, and pop the list. */
9282 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9283 placeholder_list);
9284 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9285 placeholder_list = TREE_CHAIN (placeholder_list);
9286 break;
9287
9288 #if 0
9289 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9290 a test and can be longer if the test is eliminated. */
9291 case PLUS_EXPR:
9292 /* Reduce to minus. */
9293 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9294 TREE_OPERAND (exp, 0),
9295 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9296 TREE_OPERAND (exp, 1))));
9297 /* Process as MINUS. */
9298 #endif
9299
9300 case MINUS_EXPR:
9301 /* Non-zero iff operands of minus differ. */
9302 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9303 TREE_OPERAND (exp, 0),
9304 TREE_OPERAND (exp, 1)),
9305 NE, NE, if_false_label, if_true_label);
9306 break;
9307
9308 case BIT_AND_EXPR:
9309 /* If we are AND'ing with a small constant, do this comparison in the
9310 smallest type that fits. If the machine doesn't have comparisons
9311 that small, it will be converted back to the wider comparison.
9312 This helps if we are testing the sign bit of a narrower object.
9313 combine can't do this for us because it can't know whether a
9314 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
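      /* For example, when the target has QImode compare insns and byte
         access is not slow, a test of (X & 0x80) on a full-width X is
         narrowed to a QImode comparison.  */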
9315
9316 if (! SLOW_BYTE_ACCESS
9317 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9318 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9319 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9320 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9321 && (type = type_for_mode (mode, 1)) != 0
9322 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9323 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9324 != CODE_FOR_nothing))
9325 {
9326 do_jump (convert (type, exp), if_false_label, if_true_label);
9327 break;
9328 }
9329 goto normal;
9330
9331 case TRUTH_NOT_EXPR:
9332 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9333 break;
9334
9335 case TRUTH_ANDIF_EXPR:
9336 if (if_false_label == 0)
9337 if_false_label = drop_through_label = gen_label_rtx ();
9338 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9339 start_cleanup_deferral ();
9340 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9341 end_cleanup_deferral ();
9342 break;
9343
9344 case TRUTH_ORIF_EXPR:
9345 if (if_true_label == 0)
9346 if_true_label = drop_through_label = gen_label_rtx ();
9347 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9348 start_cleanup_deferral ();
9349 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9350 end_cleanup_deferral ();
9351 break;
9352
9353 case COMPOUND_EXPR:
9354 push_temp_slots ();
9355 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9356 preserve_temp_slots (NULL_RTX);
9357 free_temp_slots ();
9358 pop_temp_slots ();
9359 emit_queue ();
9360 do_pending_stack_adjust ();
9361 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9362 break;
9363
9364 case COMPONENT_REF:
9365 case BIT_FIELD_REF:
9366 case ARRAY_REF:
9367 {
9368 HOST_WIDE_INT bitsize, bitpos;
9369 int unsignedp;
9370 enum machine_mode mode;
9371 tree type;
9372 tree offset;
9373 int volatilep = 0;
9374 unsigned int alignment;
9375
9376 /* Get description of this reference. We don't actually care
9377 about the underlying object here. */
9378 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9379 &unsignedp, &volatilep, &alignment);
9380
9381 type = type_for_size (bitsize, unsignedp);
9382 if (! SLOW_BYTE_ACCESS
9383 && type != 0 && bitsize >= 0
9384 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9385 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9386 != CODE_FOR_nothing))
9387 {
9388 do_jump (convert (type, exp), if_false_label, if_true_label);
9389 break;
9390 }
9391 goto normal;
9392 }
9393
9394 case COND_EXPR:
9395 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9396 if (integer_onep (TREE_OPERAND (exp, 1))
9397 && integer_zerop (TREE_OPERAND (exp, 2)))
9398 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9399
9400 else if (integer_zerop (TREE_OPERAND (exp, 1))
9401 && integer_onep (TREE_OPERAND (exp, 2)))
9402 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9403
9404 else
9405 {
9406 register rtx label1 = gen_label_rtx ();
9407 drop_through_label = gen_label_rtx ();
9408
9409 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9410
9411 start_cleanup_deferral ();
9412 /* Now the THEN-expression. */
9413 do_jump (TREE_OPERAND (exp, 1),
9414 if_false_label ? if_false_label : drop_through_label,
9415 if_true_label ? if_true_label : drop_through_label);
9416 /* In case the do_jump just above never jumps. */
9417 do_pending_stack_adjust ();
9418 emit_label (label1);
9419
9420 /* Now the ELSE-expression. */
9421 do_jump (TREE_OPERAND (exp, 2),
9422 if_false_label ? if_false_label : drop_through_label,
9423 if_true_label ? if_true_label : drop_through_label);
9424 end_cleanup_deferral ();
9425 }
9426 break;
9427
9428 case EQ_EXPR:
9429 {
9430 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9431
9432 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9433 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9434 {
9435 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9436 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9437 do_jump
9438 (fold
9439 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9440 fold (build (EQ_EXPR, TREE_TYPE (exp),
9441 fold (build1 (REALPART_EXPR,
9442 TREE_TYPE (inner_type),
9443 exp0)),
9444 fold (build1 (REALPART_EXPR,
9445 TREE_TYPE (inner_type),
9446 exp1)))),
9447 fold (build (EQ_EXPR, TREE_TYPE (exp),
9448 fold (build1 (IMAGPART_EXPR,
9449 TREE_TYPE (inner_type),
9450 exp0)),
9451 fold (build1 (IMAGPART_EXPR,
9452 TREE_TYPE (inner_type),
9453 exp1)))))),
9454 if_false_label, if_true_label);
9455 }
9456
9457 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9458 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9459
9460 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9461 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9462 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9463 else
9464 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9465 break;
9466 }
9467
9468 case NE_EXPR:
9469 {
9470 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9471
9472 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9473 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9474 {
9475 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9476 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9477 do_jump
9478 (fold
9479 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9480 fold (build (NE_EXPR, TREE_TYPE (exp),
9481 fold (build1 (REALPART_EXPR,
9482 TREE_TYPE (inner_type),
9483 exp0)),
9484 fold (build1 (REALPART_EXPR,
9485 TREE_TYPE (inner_type),
9486 exp1)))),
9487 fold (build (NE_EXPR, TREE_TYPE (exp),
9488 fold (build1 (IMAGPART_EXPR,
9489 TREE_TYPE (inner_type),
9490 exp0)),
9491 fold (build1 (IMAGPART_EXPR,
9492 TREE_TYPE (inner_type),
9493 exp1)))))),
9494 if_false_label, if_true_label);
9495 }
9496
9497 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9498 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9499
9500 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9501 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9502 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9503 else
9504 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9505 break;
9506 }
9507
9508 case LT_EXPR:
9509 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9510 if (GET_MODE_CLASS (mode) == MODE_INT
9511 && ! can_compare_p (LT, mode, ccp_jump))
9512 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9513 else
9514 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9515 break;
9516
9517 case LE_EXPR:
9518 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9519 if (GET_MODE_CLASS (mode) == MODE_INT
9520 && ! can_compare_p (LE, mode, ccp_jump))
9521 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9522 else
9523 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9524 break;
9525
9526 case GT_EXPR:
9527 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9528 if (GET_MODE_CLASS (mode) == MODE_INT
9529 && ! can_compare_p (GT, mode, ccp_jump))
9530 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9531 else
9532 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9533 break;
9534
9535 case GE_EXPR:
9536 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9537 if (GET_MODE_CLASS (mode) == MODE_INT
9538 && ! can_compare_p (GE, mode, ccp_jump))
9539 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9540 else
9541 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9542 break;
9543
9544 case UNORDERED_EXPR:
9545 case ORDERED_EXPR:
9546 {
9547 enum rtx_code cmp, rcmp;
9548 int do_rev;
9549
9550 if (code == UNORDERED_EXPR)
9551 cmp = UNORDERED, rcmp = ORDERED;
9552 else
9553 cmp = ORDERED, rcmp = UNORDERED;
9554 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9555
9556 do_rev = 0;
9557 if (! can_compare_p (cmp, mode, ccp_jump)
9558 && (can_compare_p (rcmp, mode, ccp_jump)
9559 /* If the target doesn't provide either UNORDERED or ORDERED
9560 comparisons, canonicalize on UNORDERED for the library. */
9561 || rcmp == UNORDERED))
9562 do_rev = 1;
9563
9564 if (! do_rev)
9565 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9566 else
9567 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9568 }
9569 break;
9570
9571 {
9572 enum rtx_code rcode1;
9573 enum tree_code tcode2;
9574
9575 case UNLT_EXPR:
9576 rcode1 = UNLT;
9577 tcode2 = LT_EXPR;
9578 goto unordered_bcc;
9579 case UNLE_EXPR:
9580 rcode1 = UNLE;
9581 tcode2 = LE_EXPR;
9582 goto unordered_bcc;
9583 case UNGT_EXPR:
9584 rcode1 = UNGT;
9585 tcode2 = GT_EXPR;
9586 goto unordered_bcc;
9587 case UNGE_EXPR:
9588 rcode1 = UNGE;
9589 tcode2 = GE_EXPR;
9590 goto unordered_bcc;
9591 case UNEQ_EXPR:
9592 rcode1 = UNEQ;
9593 tcode2 = EQ_EXPR;
9594 goto unordered_bcc;
9595
9596 unordered_bcc:
9597 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9598 if (can_compare_p (rcode1, mode, ccp_jump))
9599 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9600 if_true_label);
9601 else
9602 {
9603 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9604 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9605 tree cmp0, cmp1;
9606
9607 /* If the target doesn't support combined unordered
9608 compares, decompose into UNORDERED + comparison. */
9609 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9610 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9611 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9612 do_jump (exp, if_false_label, if_true_label);
9613 }
9614 }
9615 break;
9616
9617 default:
9618 normal:
9619 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9620 #if 0
9621 /* This is not needed any more and causes poor code since it causes
9622 comparisons and tests from non-SI objects to have different code
9623 sequences. */
9624 /* Copy to register to avoid generating bad insns by cse
9625 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9626 if (!cse_not_expected && GET_CODE (temp) == MEM)
9627 temp = copy_to_reg (temp);
9628 #endif
9629 do_pending_stack_adjust ();
9630 /* Do any postincrements in the expression that was tested. */
9631 emit_queue ();
9632
9633 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9634 {
9635 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9636 if (target)
9637 emit_jump (target);
9638 }
9639 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9640 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9641 /* Note swapping the labels gives us not-equal. */
9642 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9643 else if (GET_MODE (temp) != VOIDmode)
9644 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9645 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9646 GET_MODE (temp), NULL_RTX, 0,
9647 if_false_label, if_true_label);
9648 else
9649 abort ();
9650 }
9651
9652 if (drop_through_label)
9653 {
9654 /* If do_jump produces code that might be jumped around,
9655 do any stack adjusts from that code, before the place
9656 where control merges in. */
9657 do_pending_stack_adjust ();
9658 emit_label (drop_through_label);
9659 }
9660 }
9661 \f
9662 /* Given a comparison expression EXP for values too wide to be compared
9663 with one insn, test the comparison and jump to the appropriate label.
9664 The code of EXP is ignored; we always test GT if SWAP is 0,
9665 and LT if SWAP is 1. */
9666
9667 static void
9668 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9669 tree exp;
9670 int swap;
9671 rtx if_false_label, if_true_label;
9672 {
9673 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9674 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9675 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9676 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9677
9678 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9679 }
9680
9681 /* Compare OP0 with OP1, word at a time, in mode MODE.
9682 UNSIGNEDP says to do unsigned comparison.
9683 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
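/* For a DImode comparison on a 32-bit target this jumps to IF_TRUE_LABEL
   if the high words compare greater, to IF_FALSE_LABEL if they differ
   otherwise, and only then compares the low words (always unsigned);
   equal operands end up at IF_FALSE_LABEL.  */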
9684
9685 void
9686 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9687 enum machine_mode mode;
9688 int unsignedp;
9689 rtx op0, op1;
9690 rtx if_false_label, if_true_label;
9691 {
9692 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9693 rtx drop_through_label = 0;
9694 int i;
9695
9696 if (! if_true_label || ! if_false_label)
9697 drop_through_label = gen_label_rtx ();
9698 if (! if_true_label)
9699 if_true_label = drop_through_label;
9700 if (! if_false_label)
9701 if_false_label = drop_through_label;
9702
9703 /* Compare a word at a time, high order first. */
9704 for (i = 0; i < nwords; i++)
9705 {
9706 rtx op0_word, op1_word;
9707
9708 if (WORDS_BIG_ENDIAN)
9709 {
9710 op0_word = operand_subword_force (op0, i, mode);
9711 op1_word = operand_subword_force (op1, i, mode);
9712 }
9713 else
9714 {
9715 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9716 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9717 }
9718
9719 /* All but the high-order word must be compared as unsigned. */
9720 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9721 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9722 NULL_RTX, if_true_label);
9723
9724 /* Consider lower words only if these are equal. */
9725 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9726 NULL_RTX, 0, NULL_RTX, if_false_label);
9727 }
9728
9729 if (if_false_label)
9730 emit_jump (if_false_label);
9731 if (drop_through_label)
9732 emit_label (drop_through_label);
9733 }
9734
9735 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9736 with one insn, test the comparison and jump to the appropriate label. */
9737
9738 static void
9739 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9740 tree exp;
9741 rtx if_false_label, if_true_label;
9742 {
9743 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9744 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9745 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9746 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9747 int i;
9748 rtx drop_through_label = 0;
9749
9750 if (! if_false_label)
9751 drop_through_label = if_false_label = gen_label_rtx ();
9752
9753 for (i = 0; i < nwords; i++)
9754 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9755 operand_subword_force (op1, i, mode),
9756 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9757 word_mode, NULL_RTX, 0, if_false_label,
9758 NULL_RTX);
9759
9760 if (if_true_label)
9761 emit_jump (if_true_label);
9762 if (drop_through_label)
9763 emit_label (drop_through_label);
9764 }
9765 \f
9766 /* Jump according to whether OP0 is 0.
9767 We assume that OP0 has an integer mode that is too wide
9768 for the available compare insns. */
9769
9770 void
9771 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9772 rtx op0;
9773 rtx if_false_label, if_true_label;
9774 {
9775 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9776 rtx part;
9777 int i;
9778 rtx drop_through_label = 0;
9779
9780 /* The fastest way of doing this comparison on almost any machine is to
9781 "or" all the words and compare the result. If all have to be loaded
9782 from memory and this is a very wide item, it's possible this may
9783 be slower, but that's highly unlikely. */
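   /* For example, on a 32-bit target a DImode OP0 is reduced by IOR'ing
      its two SImode subwords into PART below, and that single word is
      then compared against zero.  */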
9784
9785 part = gen_reg_rtx (word_mode);
9786 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9787 for (i = 1; i < nwords && part != 0; i++)
9788 part = expand_binop (word_mode, ior_optab, part,
9789 operand_subword_force (op0, i, GET_MODE (op0)),
9790 part, 1, OPTAB_WIDEN);
9791
9792 if (part != 0)
9793 {
9794 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9795 NULL_RTX, 0, if_false_label, if_true_label);
9796
9797 return;
9798 }
9799
9800 /* If we couldn't do the "or" simply, do this with a series of compares. */
9801 if (! if_false_label)
9802 drop_through_label = if_false_label = gen_label_rtx ();
9803
9804 for (i = 0; i < nwords; i++)
9805 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9806 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9807 if_false_label, NULL_RTX);
9808
9809 if (if_true_label)
9810 emit_jump (if_true_label);
9811
9812 if (drop_through_label)
9813 emit_label (drop_through_label);
9814 }
9815 \f
9816 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9817 (including code to compute the values to be compared)
9818 and set (CC0) according to the result.
9819 The decision as to signed or unsigned comparison must be made by the caller.
9820
9821 We force a stack adjustment unless there are currently
9822 things pushed on the stack that aren't yet used.
9823
9824 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9825 compared.
9826
9827 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9828 size of MODE should be used. */
9829
9830 rtx
9831 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9832 register rtx op0, op1;
9833 enum rtx_code code;
9834 int unsignedp;
9835 enum machine_mode mode;
9836 rtx size;
9837 unsigned int align;
9838 {
9839 rtx tem;
9840
9841 /* If one operand is constant, make it the second one. Only do this
9842 if the other operand is not constant as well. */
9843
9844 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9845 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9846 {
9847 tem = op0;
9848 op0 = op1;
9849 op1 = tem;
9850 code = swap_condition (code);
9851 }
9852
9853 if (flag_force_mem)
9854 {
9855 op0 = force_not_mem (op0);
9856 op1 = force_not_mem (op1);
9857 }
9858
9859 do_pending_stack_adjust ();
9860
9861 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9862 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9863 return tem;
9864
9865 #if 0
9866 /* There's no need to do this now that combine.c can eliminate lots of
9867 sign extensions. This can be less efficient in certain cases on other
9868 machines. */
9869
9870 /* If this is a signed equality comparison, we can do it as an
9871 unsigned comparison since zero-extension is cheaper than sign
9872 extension and comparisons with zero are done as unsigned. This is
9873 the case even on machines that can do fast sign extension, since
9874 zero-extension is easier to combine with other operations than
9875 sign-extension is. If we are comparing against a constant, we must
9876 convert it to what it would look like unsigned. */
9877 if ((code == EQ || code == NE) && ! unsignedp
9878 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9879 {
9880 if (GET_CODE (op1) == CONST_INT
9881 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9882 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9883 unsignedp = 1;
9884 }
9885 #endif
9886
9887 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9888
9889 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9890 }
9891
9892 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9893 The decision as to signed or unsigned comparison must be made by the caller.
9894
9895 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9896 compared.
9897
9898 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9899 size of MODE should be used. */
9900
9901 void
9902 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9903 if_false_label, if_true_label)
9904 register rtx op0, op1;
9905 enum rtx_code code;
9906 int unsignedp;
9907 enum machine_mode mode;
9908 rtx size;
9909 unsigned int align;
9910 rtx if_false_label, if_true_label;
9911 {
9912 rtx tem;
9913 int dummy_true_label = 0;
9914
9915 /* Reverse the comparison if that is safe and we want to jump if it is
9916 false. */
9917 if (! if_true_label && ! FLOAT_MODE_P (mode))
9918 {
9919 if_true_label = if_false_label;
9920 if_false_label = 0;
9921 code = reverse_condition (code);
9922 }
9923
9924 /* If one operand is constant, make it the second one. Only do this
9925 if the other operand is not constant as well. */
9926
9927 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9928 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9929 {
9930 tem = op0;
9931 op0 = op1;
9932 op1 = tem;
9933 code = swap_condition (code);
9934 }
9935
9936 if (flag_force_mem)
9937 {
9938 op0 = force_not_mem (op0);
9939 op1 = force_not_mem (op1);
9940 }
9941
9942 do_pending_stack_adjust ();
9943
9944 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9945 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9946 {
9947 if (tem == const_true_rtx)
9948 {
9949 if (if_true_label)
9950 emit_jump (if_true_label);
9951 }
9952 else
9953 {
9954 if (if_false_label)
9955 emit_jump (if_false_label);
9956 }
9957 return;
9958 }
9959
9960 #if 0
9961 /* There's no need to do this now that combine.c can eliminate lots of
9962 sign extensions. This can be less efficient in certain cases on other
9963 machines. */
9964
9965 /* If this is a signed equality comparison, we can do it as an
9966 unsigned comparison since zero-extension is cheaper than sign
9967 extension and comparisons with zero are done as unsigned. This is
9968 the case even on machines that can do fast sign extension, since
9969 zero-extension is easier to combine with other operations than
9970 sign-extension is. If we are comparing against a constant, we must
9971 convert it to what it would look like unsigned. */
9972 if ((code == EQ || code == NE) && ! unsignedp
9973 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9974 {
9975 if (GET_CODE (op1) == CONST_INT
9976 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9977 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9978 unsignedp = 1;
9979 }
9980 #endif
9981
9982 if (! if_true_label)
9983 {
9984 dummy_true_label = 1;
9985 if_true_label = gen_label_rtx ();
9986 }
9987
9988 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9989 if_true_label);
9990
9991 if (if_false_label)
9992 emit_jump (if_false_label);
9993 if (dummy_true_label)
9994 emit_label (if_true_label);
9995 }
9996
9997 /* Generate code for a comparison expression EXP (including code to compute
9998 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9999 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10000 generated code will drop through.
10001 SIGNED_CODE should be the rtx operation for this comparison for
10002 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10003
10004 We force a stack adjustment unless there are currently
10005 things pushed on the stack that aren't yet used. */
10006
10007 static void
10008 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10009 if_true_label)
10010 register tree exp;
10011 enum rtx_code signed_code, unsigned_code;
10012 rtx if_false_label, if_true_label;
10013 {
10014 unsigned int align0, align1;
10015 register rtx op0, op1;
10016 register tree type;
10017 register enum machine_mode mode;
10018 int unsignedp;
10019 enum rtx_code code;
10020
10021 /* Don't crash if the comparison was erroneous. */
10022 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10023 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10024 return;
10025
10026 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10027 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10028 mode = TYPE_MODE (type);
10029 unsignedp = TREE_UNSIGNED (type);
10030 code = unsignedp ? unsigned_code : signed_code;
10031
10032 #ifdef HAVE_canonicalize_funcptr_for_compare
10033 /* If function pointers need to be "canonicalized" before they can
10034 be reliably compared, then canonicalize them. */
10035 if (HAVE_canonicalize_funcptr_for_compare
10036 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10037 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10038 == FUNCTION_TYPE))
10039 {
10040 rtx new_op0 = gen_reg_rtx (mode);
10041
10042 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10043 op0 = new_op0;
10044 }
10045
10046 if (HAVE_canonicalize_funcptr_for_compare
10047 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10048 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10049 == FUNCTION_TYPE))
10050 {
10051 rtx new_op1 = gen_reg_rtx (mode);
10052
10053 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10054 op1 = new_op1;
10055 }
10056 #endif
10057
10058 /* Do any postincrements in the expression that was tested. */
10059 emit_queue ();
10060
10061 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10062 ((mode == BLKmode)
10063 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10064 MIN (align0, align1),
10065 if_false_label, if_true_label);
10066 }
10067 \f
10068 /* Generate code to calculate EXP using a store-flag instruction
10069 and return an rtx for the result. EXP is either a comparison
10070 or a TRUTH_NOT_EXPR whose operand is a comparison.
10071
10072 If TARGET is nonzero, store the result there if convenient.
10073
10074 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10075 cheap.
10076
10077 Return zero if there is no suitable set-flag instruction
10078 available on this machine.
10079
10080 Once expand_expr has been called on the arguments of the comparison,
10081 we are committed to doing the store flag, since it is not safe to
10082 re-evaluate the expression. We emit the store-flag insn by calling
10083 emit_store_flag, but only expand the arguments if we have a reason
10084 to believe that emit_store_flag will be successful. If we think that
10085 it will, but it isn't, we have to simulate the store-flag with a
10086 set/jump/set sequence. */
10087
10088 static rtx
10089 do_store_flag (exp, target, mode, only_cheap)
10090 tree exp;
10091 rtx target;
10092 enum machine_mode mode;
10093 int only_cheap;
10094 {
10095 enum rtx_code code;
10096 tree arg0, arg1, type;
10097 tree tem;
10098 enum machine_mode operand_mode;
10099 int invert = 0;
10100 int unsignedp;
10101 rtx op0, op1;
10102 enum insn_code icode;
10103 rtx subtarget = target;
10104 rtx result, label;
10105
10106 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10107 result at the end. We can't simply invert the test since it would
10108 have already been inverted if it were valid. This case occurs for
10109 some floating-point comparisons. */
10110
10111 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10112 invert = 1, exp = TREE_OPERAND (exp, 0);
10113
10114 arg0 = TREE_OPERAND (exp, 0);
10115 arg1 = TREE_OPERAND (exp, 1);
10116 type = TREE_TYPE (arg0);
10117 operand_mode = TYPE_MODE (type);
10118 unsignedp = TREE_UNSIGNED (type);
10119
10120 /* We won't bother with BLKmode store-flag operations because it would mean
10121 passing a lot of information to emit_store_flag. */
10122 if (operand_mode == BLKmode)
10123 return 0;
10124
10125 /* We won't bother with store-flag operations involving function pointers
10126 when function pointers must be canonicalized before comparisons. */
10127 #ifdef HAVE_canonicalize_funcptr_for_compare
10128 if (HAVE_canonicalize_funcptr_for_compare
10129 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10130 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10131 == FUNCTION_TYPE))
10132 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10133 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10134 == FUNCTION_TYPE))))
10135 return 0;
10136 #endif
10137
10138 STRIP_NOPS (arg0);
10139 STRIP_NOPS (arg1);
10140
10141 /* Get the rtx comparison code to use. We know that EXP is a comparison
10142 operation of some type. Some comparisons against 1 and -1 can be
10143 converted to comparisons with zero. Do so here so that the tests
10144 below will be aware that we have a comparison with zero. These
10145 tests will not catch constants in the first operand, but constants
10146 are rarely passed as the first operand. */
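   /* For example, a signed X < 1 becomes X <= 0, and a signed X > -1
      becomes X >= 0.  */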
10147
10148 switch (TREE_CODE (exp))
10149 {
10150 case EQ_EXPR:
10151 code = EQ;
10152 break;
10153 case NE_EXPR:
10154 code = NE;
10155 break;
10156 case LT_EXPR:
10157 if (integer_onep (arg1))
10158 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10159 else
10160 code = unsignedp ? LTU : LT;
10161 break;
10162 case LE_EXPR:
10163 if (! unsignedp && integer_all_onesp (arg1))
10164 arg1 = integer_zero_node, code = LT;
10165 else
10166 code = unsignedp ? LEU : LE;
10167 break;
10168 case GT_EXPR:
10169 if (! unsignedp && integer_all_onesp (arg1))
10170 arg1 = integer_zero_node, code = GE;
10171 else
10172 code = unsignedp ? GTU : GT;
10173 break;
10174 case GE_EXPR:
10175 if (integer_onep (arg1))
10176 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10177 else
10178 code = unsignedp ? GEU : GE;
10179 break;
10180
10181 case UNORDERED_EXPR:
10182 code = UNORDERED;
10183 break;
10184 case ORDERED_EXPR:
10185 code = ORDERED;
10186 break;
10187 case UNLT_EXPR:
10188 code = UNLT;
10189 break;
10190 case UNLE_EXPR:
10191 code = UNLE;
10192 break;
10193 case UNGT_EXPR:
10194 code = UNGT;
10195 break;
10196 case UNGE_EXPR:
10197 code = UNGE;
10198 break;
10199 case UNEQ_EXPR:
10200 code = UNEQ;
10201 break;
10202
10203 default:
10204 abort ();
10205 }
10206
10207 /* Put a constant second. */
10208 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10209 {
10210 tem = arg0; arg0 = arg1; arg1 = tem;
10211 code = swap_condition (code);
10212 }
10213
10214 /* If this is an equality or inequality test of a single bit, we can
10215 do this by shifting the bit being tested to the low-order bit and
10216 masking the result with the constant 1. If the condition was EQ,
10217 we xor it with 1. This does not require an scc insn and is faster
10218 than an scc insn even if we have it. */
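   /* For example, (X & 8) != 0 becomes (X >> 3) & 1; for an EQ test the
      shifted value is XOR'd with 1 before the final AND.  */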
10219
10220 if ((code == NE || code == EQ)
10221 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10222 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10223 {
10224 tree inner = TREE_OPERAND (arg0, 0);
10225 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10226 int ops_unsignedp;
10227
10228 /* If INNER is a right shift of a constant and it plus BITNUM does
10229 not overflow, adjust BITNUM and INNER. */
10230
10231 if (TREE_CODE (inner) == RSHIFT_EXPR
10232 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10233 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10234 && bitnum < TYPE_PRECISION (type)
10235 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10236 bitnum - TYPE_PRECISION (type)))
10237 {
10238 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10239 inner = TREE_OPERAND (inner, 0);
10240 }
10241
10242 /* If we are going to be able to omit the AND below, we must do our
10243 operations as unsigned. If we must use the AND, we have a choice.
10244 Normally unsigned is faster, but for some machines signed is. */
10245 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10246 #ifdef LOAD_EXTEND_OP
10247 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10248 #else
10249 : 1
10250 #endif
10251 );
10252
10253 if (subtarget == 0 || GET_CODE (subtarget) != REG
10254 || GET_MODE (subtarget) != operand_mode
10255 || ! safe_from_p (subtarget, inner, 1))
10256 subtarget = 0;
10257
10258 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10259
10260 if (bitnum != 0)
10261 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10262 size_int (bitnum), subtarget, ops_unsignedp);
10263
10264 if (GET_MODE (op0) != mode)
10265 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10266
10267 if ((code == EQ && ! invert) || (code == NE && invert))
10268 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10269 ops_unsignedp, OPTAB_LIB_WIDEN);
10270
10271 /* Put the AND last so it can combine with more things. */
10272 if (bitnum != TYPE_PRECISION (type) - 1)
10273 op0 = expand_and (op0, const1_rtx, subtarget);
10274
10275 return op0;
10276 }
10277
10278 /* Now see if we are likely to be able to do this. Return if not. */
10279 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10280 return 0;
10281
10282 icode = setcc_gen_code[(int) code];
10283 if (icode == CODE_FOR_nothing
10284 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10285 {
10286 /* We can only do this if it is one of the special cases that
10287 can be handled without an scc insn. */
10288 if ((code == LT && integer_zerop (arg1))
10289 || (! only_cheap && code == GE && integer_zerop (arg1)))
10290 ;
10291 else if (BRANCH_COST >= 0
10292 && ! only_cheap && (code == NE || code == EQ)
10293 && TREE_CODE (type) != REAL_TYPE
10294 && ((abs_optab->handlers[(int) operand_mode].insn_code
10295 != CODE_FOR_nothing)
10296 || (ffs_optab->handlers[(int) operand_mode].insn_code
10297 != CODE_FOR_nothing)))
10298 ;
10299 else
10300 return 0;
10301 }
10302
10303 preexpand_calls (exp);
10304 if (subtarget == 0 || GET_CODE (subtarget) != REG
10305 || GET_MODE (subtarget) != operand_mode
10306 || ! safe_from_p (subtarget, arg1, 1))
10307 subtarget = 0;
10308
10309 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10310 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10311
10312 if (target == 0)
10313 target = gen_reg_rtx (mode);
10314
10315 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10316 because, if emit_store_flag does anything, it will succeed and
10317 OP0 and OP1 will not be used subsequently. */
10318
10319 result = emit_store_flag (target, code,
10320 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10321 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10322 operand_mode, unsignedp, 1);
10323
10324 if (result)
10325 {
10326 if (invert)
10327 result = expand_binop (mode, xor_optab, result, const1_rtx,
10328 result, 0, OPTAB_LIB_WIDEN);
10329 return result;
10330 }
10331
10332 /* If this failed, we have to do this with set/compare/jump/set code. */
10333 if (GET_CODE (target) != REG
10334 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10335 target = gen_reg_rtx (GET_MODE (target));
10336
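/* Emit:  target = 1;  if (op0 <cond> op1) goto label;  target = 0;
   label:  -- with the two constants swapped when INVERT is set. */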
10337 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10338 result = compare_from_rtx (op0, op1, code, unsignedp,
10339 operand_mode, NULL_RTX, 0);
10340 if (GET_CODE (result) == CONST_INT)
10341 return (((result == const0_rtx && ! invert)
10342 || (result != const0_rtx && invert))
10343 ? const0_rtx : const1_rtx);
10344
10345 label = gen_label_rtx ();
10346 if (bcc_gen_fctn[(int) code] == 0)
10347 abort ();
10348
10349 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10350 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10351 emit_label (label);
10352
10353 return target;
10354 }
10355 \f
10356 /* Generate a tablejump instruction (used for switch statements). */
10357
10358 #ifdef HAVE_tablejump
10359
10360 /* INDEX is the value being switched on, with the lowest value
10361 in the table already subtracted.
10362 MODE is its expected mode (needed if INDEX is constant).
10363 RANGE is the length of the jump table.
10364 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10365
10366 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10367 index value is out of range. */
10368
10369 void
10370 do_tablejump (index, mode, range, table_label, default_label)
10371 rtx index, range, table_label, default_label;
10372 enum machine_mode mode;
10373 {
10374 register rtx temp, vector;
10375
10376 /* Do an unsigned comparison (in the proper mode) between the index
10377 expression and the value which represents the length of the range.
10378 Since we just finished subtracting the lower bound of the range
10379 from the index expression, this comparison allows us to simultaneously
10380 check that the original index expression value is both greater than
10381 or equal to the minimum value of the range and less than or equal to
10382 the maximum value of the range. */
10383
10384 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10385 0, default_label);
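/* For example, with RANGE equal to 7 any INDEX above 7 branches to
   DEFAULT_LABEL, while indices 0 through 7 fall through and select a
   table entry below. */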
10386
10387 /* If index is in range, it must fit in Pmode.
10388 Convert to Pmode so we can index with it. */
10389 if (mode != Pmode)
10390 index = convert_to_mode (Pmode, index, 1);
10391
10392 /* Don't let a MEM slip through, because then the INDEX that comes
10393 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10394 and break_out_memory_refs will go to work on it and mess it up. */
10395 #ifdef PIC_CASE_VECTOR_ADDRESS
10396 if (flag_pic && GET_CODE (index) != REG)
10397 index = copy_to_mode_reg (Pmode, index);
10398 #endif
10399
10400 /* If flag_force_addr were to affect this address
10401 it could interfere with the tricky assumptions made
10402 about addresses that contain label-refs,
10403 which may be valid only very near the tablejump itself. */
10404 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10405 GET_MODE_SIZE, because this indicates how large insns are. The other
10406 uses should all be Pmode, because they are addresses. This code
10407 could fail if addresses and insns are not the same size. */
10408 index = gen_rtx_PLUS (Pmode,
10409 gen_rtx_MULT (Pmode, index,
10410 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10411 gen_rtx_LABEL_REF (Pmode, table_label));
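/* The address is now table_label + index * (size of one table entry);
   e.g. with 4-byte entries, index 3 selects the fourth element of the
   ADDR_VEC. */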
10412 #ifdef PIC_CASE_VECTOR_ADDRESS
10413 if (flag_pic)
10414 index = PIC_CASE_VECTOR_ADDRESS (index);
10415 else
10416 #endif
10417 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10418 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10419 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10420 RTX_UNCHANGING_P (vector) = 1;
10421 convert_move (temp, vector, 0);
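/* TEMP now holds the dispatch-table entry selected by INDEX, loaded as
   a CASE_VECTOR_MODE value. */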
10422
10423 emit_jump_insn (gen_tablejump (temp, table_label));
10424
10425 /* If we are generating PIC code or if the table is PC-relative, the
10426 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10427 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10428 emit_barrier ();
10429 }
10430
10431 #endif /* HAVE_tablejump */