expr.c (store_constructor): SIZE now signed.
[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
47
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
50
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
53
54 #ifdef PUSH_ROUNDING
55
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
59
60 #endif
61
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
69
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
74
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
81 int cse_not_expected;
82
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
87
88 /* Don't check memory usage, since code is being emitted to check memory
89 usage. Used when current_function_check_memory_usage is true, to avoid
90 infinite recursion. */
91 static int in_check_memory_usage;
92
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 static tree placeholder_list = 0;
95
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
99 {
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 int to_struct;
105 int to_readonly;
106 rtx from;
107 rtx from_addr;
108 int autinc_from;
109 int explicit_inc_from;
110 int from_struct;
111 int from_readonly;
112 int len;
113 int offset;
114 int reverse;
115 };
116
117 /* This structure is used by clear_by_pieces to describe the clear to
118 be performed. */
119
120 struct clear_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 int to_struct;
127 int len;
128 int offset;
129 int reverse;
130 };
131
132 extern struct obstack permanent_obstack;
133
134 static rtx get_push_address PARAMS ((int));
135
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
137 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
141 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
142 enum machine_mode,
143 struct clear_by_pieces *));
144 static int is_zeros_p PARAMS ((tree));
145 static int mostly_zeros_p PARAMS ((tree));
146 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 HOST_WIDE_INT, enum machine_mode,
148 tree, tree, unsigned int, int));
149 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
150 HOST_WIDE_INT));
151 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
152 HOST_WIDE_INT, enum machine_mode,
153 tree, enum machine_mode, int,
154 unsigned int, HOST_WIDE_INT, int));
155 static enum memory_use_mode
156 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
157 static tree save_noncopied_parts PARAMS ((tree, tree));
158 static tree init_noncopied_parts PARAMS ((tree, tree));
159 static int safe_from_p PARAMS ((rtx, tree, int));
160 static int fixed_type_p PARAMS ((tree));
161 static rtx var_rtx PARAMS ((tree));
162 static int readonly_fields_p PARAMS ((tree));
163 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
164 static rtx expand_increment PARAMS ((tree, int, int));
165 static void preexpand_calls PARAMS ((tree));
166 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
167 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
168 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
169 rtx, rtx));
170 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
171
172 /* Record for each mode whether we can move a register directly to or
173 from an object of that mode in memory. If we can't, we won't try
174 to use that mode directly when accessing a field of that mode. */
175
176 static char direct_load[NUM_MACHINE_MODES];
177 static char direct_store[NUM_MACHINE_MODES];
178
179 /* If a memory-to-memory move would take MOVE_RATIO or more simple
180 move-instruction sequences, we will do a movstr or libcall instead. */
181
182 #ifndef MOVE_RATIO
183 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
184 #define MOVE_RATIO 2
185 #else
186 /* If we are optimizing for space (-Os), cut down the default move ratio. */
187 #define MOVE_RATIO (optimize_size ? 3 : 15)
188 #endif
189 #endif
190
191 /* This macro is used to determine whether move_by_pieces should be called
192 to perform a structure copy. */
193 #ifndef MOVE_BY_PIECES_P
194 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
195 (SIZE, ALIGN) < MOVE_RATIO)
196 #endif
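/* Editor's note: illustrative arithmetic, not part of the original source.
   With the defaults above, on a target that provides no movstrM patterns
   and is not optimizing for size, MOVE_RATIO is 15.  A 16-byte copy whose
   operands are known to be word-aligned on a hypothetical 32-bit target
   (ALIGN == 4) takes 16/4 == 4 word moves, so MOVE_BY_PIECES_P (16, 4) is
   true and emit_block_move below expands the copy inline with
   move_by_pieces instead of calling memcpy/bcopy.  */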
197
198 /* This array records the insn_code of insns to perform block moves. */
199 enum insn_code movstr_optab[NUM_MACHINE_MODES];
200
201 /* This array records the insn_code of insns to perform block clears. */
202 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
203
204 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
205
206 #ifndef SLOW_UNALIGNED_ACCESS
207 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
208 #endif
209 \f
210 /* This is run once per compilation to set up which modes can be used
211 directly in memory and to initialize the block move optab. */
212
213 void
214 init_expr_once ()
215 {
216 rtx insn, pat;
217 enum machine_mode mode;
218 int num_clobbers;
219 rtx mem, mem1;
220 char *free_point;
221
222 start_sequence ();
223
224 /* Since we are on the permanent obstack, we must be sure we save this
225 spot AFTER we call start_sequence, since it will reuse the rtl it
226 makes. */
227 free_point = (char *) oballoc (0);
228
229 /* Try indexing by frame ptr and try by stack ptr.
230 It is known that on the Convex the stack ptr isn't a valid index.
231 With luck, one or the other is valid on any machine. */
232 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
233 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234
235 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
236 pat = PATTERN (insn);
237
238 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
239 mode = (enum machine_mode) ((int) mode + 1))
240 {
241 int regno;
242 rtx reg;
243
244 direct_load[(int) mode] = direct_store[(int) mode] = 0;
245 PUT_MODE (mem, mode);
246 PUT_MODE (mem1, mode);
247
248 /* See if there is some register that can be used in this mode and
249 directly loaded or stored from memory. */
250
251 if (mode != VOIDmode && mode != BLKmode)
252 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
253 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
254 regno++)
255 {
256 if (! HARD_REGNO_MODE_OK (regno, mode))
257 continue;
258
259 reg = gen_rtx_REG (mode, regno);
260
261 SET_SRC (pat) = mem;
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
265
266 SET_SRC (pat) = mem1;
267 SET_DEST (pat) = reg;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_load[(int) mode] = 1;
270
271 SET_SRC (pat) = reg;
272 SET_DEST (pat) = mem;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
275
276 SET_SRC (pat) = reg;
277 SET_DEST (pat) = mem1;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_store[(int) mode] = 1;
280 }
281 }
282
283 end_sequence ();
284 obfree (free_point);
285 }
286
287 /* This is run at the start of compiling a function. */
288
289 void
290 init_expr ()
291 {
292 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
293
294 pending_chain = 0;
295 pending_stack_adjust = 0;
296 arg_space_so_far = 0;
297 inhibit_defer_pop = 0;
298 saveregs_value = 0;
299 apply_args_value = 0;
300 forced_labels = 0;
301 }
302
303 void
304 mark_expr_status (p)
305 struct expr_status *p;
306 {
307 if (p == NULL)
308 return;
309
310 ggc_mark_rtx (p->x_saveregs_value);
311 ggc_mark_rtx (p->x_apply_args_value);
312 ggc_mark_rtx (p->x_forced_labels);
313 }
314
315 void
316 free_expr_status (f)
317 struct function *f;
318 {
319 free (f->expr);
320 f->expr = NULL;
321 }
322
323 /* Small sanity check that the queue is empty at the end of a function. */
324 void
325 finish_expr_for_function ()
326 {
327 if (pending_chain)
328 abort ();
329 }
330 \f
331 /* Manage the queue of increment instructions to be output
332 for POSTINCREMENT_EXPR expressions, etc. */
333
334 /* Queue up to increment (or change) VAR later. BODY says how:
335 BODY should be the same thing you would pass to emit_insn
336 to increment right away. It will go to emit_insn later on.
337
338 The value is a QUEUED expression to be used in place of VAR
339 where you want to guarantee the pre-incrementation value of VAR. */
340
341 static rtx
342 enqueue_insn (var, body)
343 rtx var, body;
344 {
345 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
346 body, pending_chain);
347 return pending_chain;
348 }
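/* Editor's note: usage sketch, not part of the original source.  For a
   post-increment such as "a[i++]", the expanders first use the current
   value of I and queue the increment for later, roughly:

	rtx inc = gen_add2_insn (i_rtx, GEN_INT (1));	/* I_RTX is hypothetical */
	rtx q = enqueue_insn (i_rtx, inc);

   Q is a QUEUED rtx standing for the pre-increment value; the increment
   itself is only emitted when emit_queue is called at the next sequence
   point.  */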
349
350 /* Use protect_from_queue to convert a QUEUED expression
351 into something that you can put immediately into an instruction.
352 If the queued incrementation has not happened yet,
353 protect_from_queue returns the variable itself.
354 If the incrementation has happened, protect_from_queue returns a temp
355 that contains a copy of the old value of the variable.
356
357 Any time an rtx which might possibly be a QUEUED is to be put
358 into an instruction, it must be passed through protect_from_queue first.
359 QUEUED expressions are not meaningful in instructions.
360
361 Do not pass a value through protect_from_queue and then hold
362 on to it for a while before putting it in an instruction!
363 If the queue is flushed in between, incorrect code will result. */
364
365 rtx
366 protect_from_queue (x, modify)
367 register rtx x;
368 int modify;
369 {
370 register RTX_CODE code = GET_CODE (x);
371
372 #if 0 /* A QUEUED can hang around after the queue is forced out. */
373 /* Shortcut for most common case. */
374 if (pending_chain == 0)
375 return x;
376 #endif
377
378 if (code != QUEUED)
379 {
380 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
381 use of autoincrement. Make a copy of the contents of the memory
382 location rather than a copy of the address, but not if the value is
383 of mode BLKmode. Don't modify X in place since it might be
384 shared. */
385 if (code == MEM && GET_MODE (x) != BLKmode
386 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
387 {
388 register rtx y = XEXP (x, 0);
389 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
390
391 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
392 MEM_COPY_ATTRIBUTES (new, x);
393 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
394
395 if (QUEUED_INSN (y))
396 {
397 register rtx temp = gen_reg_rtx (GET_MODE (new));
398 emit_insn_before (gen_move_insn (temp, new),
399 QUEUED_INSN (y));
400 return temp;
401 }
402 return new;
403 }
404 /* Otherwise, recursively protect the subexpressions of all
405 the kinds of rtx's that can contain a QUEUED. */
406 if (code == MEM)
407 {
408 rtx tem = protect_from_queue (XEXP (x, 0), 0);
409 if (tem != XEXP (x, 0))
410 {
411 x = copy_rtx (x);
412 XEXP (x, 0) = tem;
413 }
414 }
415 else if (code == PLUS || code == MULT)
416 {
417 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
418 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
419 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
420 {
421 x = copy_rtx (x);
422 XEXP (x, 0) = new0;
423 XEXP (x, 1) = new1;
424 }
425 }
426 return x;
427 }
428 /* If the increment has not happened, use the variable itself. */
429 if (QUEUED_INSN (x) == 0)
430 return QUEUED_VAR (x);
431 /* If the increment has happened and a pre-increment copy exists,
432 use that copy. */
433 if (QUEUED_COPY (x) != 0)
434 return QUEUED_COPY (x);
435 /* The increment has happened but we haven't set up a pre-increment copy.
436 Set one up now, and use it. */
437 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
438 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
439 QUEUED_INSN (x));
440 return QUEUED_COPY (x);
441 }
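/* Editor's note: usage sketch, not part of the original source.  A caller
   that may have been handed a QUEUED value protects each operand right
   before building an insn, and does not cache the result across a possible
   emit_queue:

	to = protect_from_queue (to, 1);	/* will be stored into */
	from = protect_from_queue (from, 0);	/* only read */
	emit_move_insn (to, from);

   This is the same pattern convert_move and emit_block_move use below.  */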
442
443 /* Return nonzero if X contains a QUEUED expression:
444 if it contains anything that will be altered by a queued increment.
445 We handle only combinations of MEM, PLUS, MINUS and MULT operators
446 since memory addresses generally contain only those. */
447
448 int
449 queued_subexp_p (x)
450 rtx x;
451 {
452 register enum rtx_code code = GET_CODE (x);
453 switch (code)
454 {
455 case QUEUED:
456 return 1;
457 case MEM:
458 return queued_subexp_p (XEXP (x, 0));
459 case MULT:
460 case PLUS:
461 case MINUS:
462 return (queued_subexp_p (XEXP (x, 0))
463 || queued_subexp_p (XEXP (x, 1)));
464 default:
465 return 0;
466 }
467 }
468
469 /* Perform all the pending incrementations. */
470
471 void
472 emit_queue ()
473 {
474 register rtx p;
475 while ((p = pending_chain))
476 {
477 rtx body = QUEUED_BODY (p);
478
479 if (GET_CODE (body) == SEQUENCE)
480 {
481 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
482 emit_insn (QUEUED_BODY (p));
483 }
484 else
485 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
486 pending_chain = QUEUED_NEXT (p);
487 }
488 }
489 \f
490 /* Copy data from FROM to TO, where the machine modes are not the same.
491 Both modes may be integer, or both may be floating.
492 UNSIGNEDP should be nonzero if FROM is an unsigned type.
493 This causes zero-extension instead of sign-extension. */
494
495 void
496 convert_move (to, from, unsignedp)
497 register rtx to, from;
498 int unsignedp;
499 {
500 enum machine_mode to_mode = GET_MODE (to);
501 enum machine_mode from_mode = GET_MODE (from);
502 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
503 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
504 enum insn_code code;
505 rtx libcall;
506
507 /* rtx code for making an equivalent value. */
508 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
509
510 to = protect_from_queue (to, 1);
511 from = protect_from_queue (from, 0);
512
513 if (to_real != from_real)
514 abort ();
515
516 /* If FROM is a SUBREG that indicates that we have already done at least
517 the required extension, strip it. We don't handle such SUBREGs as
518 TO here. */
519
520 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
521 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
522 >= GET_MODE_SIZE (to_mode))
523 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
524 from = gen_lowpart (to_mode, from), from_mode = to_mode;
525
526 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
527 abort ();
528
529 if (to_mode == from_mode
530 || (from_mode == VOIDmode && CONSTANT_P (from)))
531 {
532 emit_move_insn (to, from);
533 return;
534 }
535
536 if (to_real)
537 {
538 rtx value;
539
540 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
541 {
542 /* Try converting directly if the insn is supported. */
543 if ((code = can_extend_p (to_mode, from_mode, 0))
544 != CODE_FOR_nothing)
545 {
546 emit_unop_insn (code, to, from, UNKNOWN);
547 return;
548 }
549 }
550
551 #ifdef HAVE_trunchfqf2
552 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
553 {
554 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
555 return;
556 }
557 #endif
558 #ifdef HAVE_trunctqfqf2
559 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
560 {
561 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
562 return;
563 }
564 #endif
565 #ifdef HAVE_truncsfqf2
566 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
567 {
568 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
569 return;
570 }
571 #endif
572 #ifdef HAVE_truncdfqf2
573 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
574 {
575 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
576 return;
577 }
578 #endif
579 #ifdef HAVE_truncxfqf2
580 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_trunctfqf2
587 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593
594 #ifdef HAVE_trunctqfhf2
595 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
596 {
597 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
598 return;
599 }
600 #endif
601 #ifdef HAVE_truncsfhf2
602 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
603 {
604 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
605 return;
606 }
607 #endif
608 #ifdef HAVE_truncdfhf2
609 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
610 {
611 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615 #ifdef HAVE_truncxfhf2
616 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
617 {
618 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622 #ifdef HAVE_trunctfhf2
623 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629
630 #ifdef HAVE_truncsftqf2
631 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
632 {
633 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
634 return;
635 }
636 #endif
637 #ifdef HAVE_truncdftqf2
638 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
639 {
640 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
641 return;
642 }
643 #endif
644 #ifdef HAVE_truncxftqf2
645 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
646 {
647 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651 #ifdef HAVE_trunctftqf2
652 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
653 {
654 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658
659 #ifdef HAVE_truncdfsf2
660 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
661 {
662 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
663 return;
664 }
665 #endif
666 #ifdef HAVE_truncxfsf2
667 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
668 {
669 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
670 return;
671 }
672 #endif
673 #ifdef HAVE_trunctfsf2
674 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
675 {
676 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_truncxfdf2
681 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_trunctfdf2
688 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
689 {
690 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694
695 libcall = (rtx) 0;
696 switch (from_mode)
697 {
698 case SFmode:
699 switch (to_mode)
700 {
701 case DFmode:
702 libcall = extendsfdf2_libfunc;
703 break;
704
705 case XFmode:
706 libcall = extendsfxf2_libfunc;
707 break;
708
709 case TFmode:
710 libcall = extendsftf2_libfunc;
711 break;
712
713 default:
714 break;
715 }
716 break;
717
718 case DFmode:
719 switch (to_mode)
720 {
721 case SFmode:
722 libcall = truncdfsf2_libfunc;
723 break;
724
725 case XFmode:
726 libcall = extenddfxf2_libfunc;
727 break;
728
729 case TFmode:
730 libcall = extenddftf2_libfunc;
731 break;
732
733 default:
734 break;
735 }
736 break;
737
738 case XFmode:
739 switch (to_mode)
740 {
741 case SFmode:
742 libcall = truncxfsf2_libfunc;
743 break;
744
745 case DFmode:
746 libcall = truncxfdf2_libfunc;
747 break;
748
749 default:
750 break;
751 }
752 break;
753
754 case TFmode:
755 switch (to_mode)
756 {
757 case SFmode:
758 libcall = trunctfsf2_libfunc;
759 break;
760
761 case DFmode:
762 libcall = trunctfdf2_libfunc;
763 break;
764
765 default:
766 break;
767 }
768 break;
769
770 default:
771 break;
772 }
773
774 if (libcall == (rtx) 0)
775 /* This conversion is not implemented yet. */
776 abort ();
777
778 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
779 1, from, from_mode);
780 emit_move_insn (to, value);
781 return;
782 }
783
784 /* Now both modes are integers. */
785
786 /* Handle expanding beyond a word. */
787 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
788 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
789 {
790 rtx insns;
791 rtx lowpart;
792 rtx fill_value;
793 rtx lowfrom;
794 int i;
795 enum machine_mode lowpart_mode;
796 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
797
798 /* Try converting directly if the insn is supported. */
799 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
800 != CODE_FOR_nothing)
801 {
802 /* If FROM is a SUBREG, put it into a register. Do this
803 so that we always generate the same set of insns for
804 better cse'ing; if an intermediate assignment occurred,
805 we won't be doing the operation directly on the SUBREG. */
806 if (optimize > 0 && GET_CODE (from) == SUBREG)
807 from = force_reg (from_mode, from);
808 emit_unop_insn (code, to, from, equiv_code);
809 return;
810 }
811 /* Next, try converting via full word. */
812 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
813 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
814 != CODE_FOR_nothing))
815 {
816 if (GET_CODE (to) == REG)
817 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
818 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
819 emit_unop_insn (code, to,
820 gen_lowpart (word_mode, to), equiv_code);
821 return;
822 }
823
824 /* No special multiword conversion insn; do it by hand. */
825 start_sequence ();
826
827 /* Since we will turn this into a no conflict block, we must ensure
828 that the source does not overlap the target. */
829
830 if (reg_overlap_mentioned_p (to, from))
831 from = force_reg (from_mode, from);
832
833 /* Get a copy of FROM widened to a word, if necessary. */
834 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
835 lowpart_mode = word_mode;
836 else
837 lowpart_mode = from_mode;
838
839 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
840
841 lowpart = gen_lowpart (lowpart_mode, to);
842 emit_move_insn (lowpart, lowfrom);
843
844 /* Compute the value to put in each remaining word. */
845 if (unsignedp)
846 fill_value = const0_rtx;
847 else
848 {
849 #ifdef HAVE_slt
850 if (HAVE_slt
851 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
852 && STORE_FLAG_VALUE == -1)
853 {
854 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
855 lowpart_mode, 0, 0);
856 fill_value = gen_reg_rtx (word_mode);
857 emit_insn (gen_slt (fill_value));
858 }
859 else
860 #endif
861 {
862 fill_value
863 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
864 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
865 NULL_RTX, 0);
866 fill_value = convert_to_mode (word_mode, fill_value, 1);
867 }
868 }
869
870 /* Fill the remaining words. */
871 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
872 {
873 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
874 rtx subword = operand_subword (to, index, 1, to_mode);
875
876 if (subword == 0)
877 abort ();
878
879 if (fill_value != subword)
880 emit_move_insn (subword, fill_value);
881 }
882
883 insns = get_insns ();
884 end_sequence ();
885
886 emit_no_conflict_block (insns, to, from, NULL_RTX,
887 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
888 return;
889 }
890
891 /* Truncating multi-word to a word or less. */
892 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
893 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
894 {
895 if (!((GET_CODE (from) == MEM
896 && ! MEM_VOLATILE_P (from)
897 && direct_load[(int) to_mode]
898 && ! mode_dependent_address_p (XEXP (from, 0)))
899 || GET_CODE (from) == REG
900 || GET_CODE (from) == SUBREG))
901 from = force_reg (from_mode, from);
902 convert_move (to, gen_lowpart (word_mode, from), 0);
903 return;
904 }
905
906 /* Handle pointer conversion */ /* SPEE 900220 */
907 if (to_mode == PQImode)
908 {
909 if (from_mode != QImode)
910 from = convert_to_mode (QImode, from, unsignedp);
911
912 #ifdef HAVE_truncqipqi2
913 if (HAVE_truncqipqi2)
914 {
915 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
916 return;
917 }
918 #endif /* HAVE_truncqipqi2 */
919 abort ();
920 }
921
922 if (from_mode == PQImode)
923 {
924 if (to_mode != QImode)
925 {
926 from = convert_to_mode (QImode, from, unsignedp);
927 from_mode = QImode;
928 }
929 else
930 {
931 #ifdef HAVE_extendpqiqi2
932 if (HAVE_extendpqiqi2)
933 {
934 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
935 return;
936 }
937 #endif /* HAVE_extendpqiqi2 */
938 abort ();
939 }
940 }
941
942 if (to_mode == PSImode)
943 {
944 if (from_mode != SImode)
945 from = convert_to_mode (SImode, from, unsignedp);
946
947 #ifdef HAVE_truncsipsi2
948 if (HAVE_truncsipsi2)
949 {
950 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
951 return;
952 }
953 #endif /* HAVE_truncsipsi2 */
954 abort ();
955 }
956
957 if (from_mode == PSImode)
958 {
959 if (to_mode != SImode)
960 {
961 from = convert_to_mode (SImode, from, unsignedp);
962 from_mode = SImode;
963 }
964 else
965 {
966 #ifdef HAVE_extendpsisi2
967 if (HAVE_extendpsisi2)
968 {
969 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
970 return;
971 }
972 #endif /* HAVE_extendpsisi2 */
973 abort ();
974 }
975 }
976
977 if (to_mode == PDImode)
978 {
979 if (from_mode != DImode)
980 from = convert_to_mode (DImode, from, unsignedp);
981
982 #ifdef HAVE_truncdipdi2
983 if (HAVE_truncdipdi2)
984 {
985 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
986 return;
987 }
988 #endif /* HAVE_truncdipdi2 */
989 abort ();
990 }
991
992 if (from_mode == PDImode)
993 {
994 if (to_mode != DImode)
995 {
996 from = convert_to_mode (DImode, from, unsignedp);
997 from_mode = DImode;
998 }
999 else
1000 {
1001 #ifdef HAVE_extendpdidi2
1002 if (HAVE_extendpdidi2)
1003 {
1004 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1005 return;
1006 }
1007 #endif /* HAVE_extendpdidi2 */
1008 abort ();
1009 }
1010 }
1011
1012 /* Now follow all the conversions between integers
1013 no more than a word long. */
1014
1015 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1016 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1017 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1018 GET_MODE_BITSIZE (from_mode)))
1019 {
1020 if (!((GET_CODE (from) == MEM
1021 && ! MEM_VOLATILE_P (from)
1022 && direct_load[(int) to_mode]
1023 && ! mode_dependent_address_p (XEXP (from, 0)))
1024 || GET_CODE (from) == REG
1025 || GET_CODE (from) == SUBREG))
1026 from = force_reg (from_mode, from);
1027 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1028 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1029 from = copy_to_reg (from);
1030 emit_move_insn (to, gen_lowpart (to_mode, from));
1031 return;
1032 }
1033
1034 /* Handle extension. */
1035 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1036 {
1037 /* Convert directly if that works. */
1038 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1039 != CODE_FOR_nothing)
1040 {
1041 emit_unop_insn (code, to, from, equiv_code);
1042 return;
1043 }
1044 else
1045 {
1046 enum machine_mode intermediate;
1047 rtx tmp;
1048 tree shift_amount;
1049
1050 /* Search for a mode to convert via. */
1051 for (intermediate = from_mode; intermediate != VOIDmode;
1052 intermediate = GET_MODE_WIDER_MODE (intermediate))
1053 if (((can_extend_p (to_mode, intermediate, unsignedp)
1054 != CODE_FOR_nothing)
1055 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1056 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1057 GET_MODE_BITSIZE (intermediate))))
1058 && (can_extend_p (intermediate, from_mode, unsignedp)
1059 != CODE_FOR_nothing))
1060 {
1061 convert_move (to, convert_to_mode (intermediate, from,
1062 unsignedp), unsignedp);
1063 return;
1064 }
1065
1066 /* No suitable intermediate mode.
1067 Generate what we need with shifts. */
1068 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1069 - GET_MODE_BITSIZE (from_mode), 0);
1070 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1071 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1072 to, unsignedp);
1073 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1074 to, unsignedp);
1075 if (tmp != to)
1076 emit_move_insn (to, tmp);
1077 return;
1078 }
1079 }
1080
1081 /* Support special truncate insns for certain modes. */
1082
1083 if (from_mode == DImode && to_mode == SImode)
1084 {
1085 #ifdef HAVE_truncdisi2
1086 if (HAVE_truncdisi2)
1087 {
1088 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1089 return;
1090 }
1091 #endif
1092 convert_move (to, force_reg (from_mode, from), unsignedp);
1093 return;
1094 }
1095
1096 if (from_mode == DImode && to_mode == HImode)
1097 {
1098 #ifdef HAVE_truncdihi2
1099 if (HAVE_truncdihi2)
1100 {
1101 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1102 return;
1103 }
1104 #endif
1105 convert_move (to, force_reg (from_mode, from), unsignedp);
1106 return;
1107 }
1108
1109 if (from_mode == DImode && to_mode == QImode)
1110 {
1111 #ifdef HAVE_truncdiqi2
1112 if (HAVE_truncdiqi2)
1113 {
1114 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1115 return;
1116 }
1117 #endif
1118 convert_move (to, force_reg (from_mode, from), unsignedp);
1119 return;
1120 }
1121
1122 if (from_mode == SImode && to_mode == HImode)
1123 {
1124 #ifdef HAVE_truncsihi2
1125 if (HAVE_truncsihi2)
1126 {
1127 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1128 return;
1129 }
1130 #endif
1131 convert_move (to, force_reg (from_mode, from), unsignedp);
1132 return;
1133 }
1134
1135 if (from_mode == SImode && to_mode == QImode)
1136 {
1137 #ifdef HAVE_truncsiqi2
1138 if (HAVE_truncsiqi2)
1139 {
1140 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1141 return;
1142 }
1143 #endif
1144 convert_move (to, force_reg (from_mode, from), unsignedp);
1145 return;
1146 }
1147
1148 if (from_mode == HImode && to_mode == QImode)
1149 {
1150 #ifdef HAVE_trunchiqi2
1151 if (HAVE_trunchiqi2)
1152 {
1153 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1154 return;
1155 }
1156 #endif
1157 convert_move (to, force_reg (from_mode, from), unsignedp);
1158 return;
1159 }
1160
1161 if (from_mode == TImode && to_mode == DImode)
1162 {
1163 #ifdef HAVE_trunctidi2
1164 if (HAVE_trunctidi2)
1165 {
1166 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1167 return;
1168 }
1169 #endif
1170 convert_move (to, force_reg (from_mode, from), unsignedp);
1171 return;
1172 }
1173
1174 if (from_mode == TImode && to_mode == SImode)
1175 {
1176 #ifdef HAVE_trunctisi2
1177 if (HAVE_trunctisi2)
1178 {
1179 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1180 return;
1181 }
1182 #endif
1183 convert_move (to, force_reg (from_mode, from), unsignedp);
1184 return;
1185 }
1186
1187 if (from_mode == TImode && to_mode == HImode)
1188 {
1189 #ifdef HAVE_trunctihi2
1190 if (HAVE_trunctihi2)
1191 {
1192 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1193 return;
1194 }
1195 #endif
1196 convert_move (to, force_reg (from_mode, from), unsignedp);
1197 return;
1198 }
1199
1200 if (from_mode == TImode && to_mode == QImode)
1201 {
1202 #ifdef HAVE_trunctiqi2
1203 if (HAVE_trunctiqi2)
1204 {
1205 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1206 return;
1207 }
1208 #endif
1209 convert_move (to, force_reg (from_mode, from), unsignedp);
1210 return;
1211 }
1212
1213 /* Handle truncation of volatile memrefs, and so on;
1214 the things that couldn't be truncated directly,
1215 and for which there was no special instruction. */
1216 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1217 {
1218 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1219 emit_move_insn (to, temp);
1220 return;
1221 }
1222
1223 /* Mode combination is not recognized. */
1224 abort ();
1225 }
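/* Editor's note: usage sketch, not part of the original source.  A typical
   call widens a narrow value into an existing target, e.g. sign-extending
   a QImode pseudo into an SImode one:

	rtx to = gen_reg_rtx (SImode);
	rtx from = gen_reg_rtx (QImode);	/* assumed to hold a value */
	convert_move (to, from, 0);		/* UNSIGNEDP == 0 => SIGN_EXTEND */

   With UNSIGNEDP nonzero the extension would be ZERO_EXTEND instead.  */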
1226
1227 /* Return an rtx for a value that would result
1228 from converting X to mode MODE.
1229 Both X and MODE may be floating, or both integer.
1230 UNSIGNEDP is nonzero if X is an unsigned value.
1231 This can be done by referring to a part of X in place
1232 or by copying to a new temporary with conversion.
1233
1234 This function *must not* call protect_from_queue
1235 except when putting X into an insn (in which case convert_move does it). */
1236
1237 rtx
1238 convert_to_mode (mode, x, unsignedp)
1239 enum machine_mode mode;
1240 rtx x;
1241 int unsignedp;
1242 {
1243 return convert_modes (mode, VOIDmode, x, unsignedp);
1244 }
1245
1246 /* Return an rtx for a value that would result
1247 from converting X from mode OLDMODE to mode MODE.
1248 Both modes may be floating, or both integer.
1249 UNSIGNEDP is nonzero if X is an unsigned value.
1250
1251 This can be done by referring to a part of X in place
1252 or by copying to a new temporary with conversion.
1253
1254 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1255
1256 This function *must not* call protect_from_queue
1257 except when putting X into an insn (in which case convert_move does it). */
1258
1259 rtx
1260 convert_modes (mode, oldmode, x, unsignedp)
1261 enum machine_mode mode, oldmode;
1262 rtx x;
1263 int unsignedp;
1264 {
1265 register rtx temp;
1266
1267 /* If FROM is a SUBREG that indicates that we have already done at least
1268 the required extension, strip it. */
1269
1270 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1271 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1272 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1273 x = gen_lowpart (mode, x);
1274
1275 if (GET_MODE (x) != VOIDmode)
1276 oldmode = GET_MODE (x);
1277
1278 if (mode == oldmode)
1279 return x;
1280
1281 /* There is one case that we must handle specially: If we are converting
1282 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1283 we are to interpret the constant as unsigned, gen_lowpart will do
1284 the wrong if the constant appears negative. What we want to do is
1285 make the high-order word of the constant zero, not all ones. */
1286
1287 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1288 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1289 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1290 {
1291 HOST_WIDE_INT val = INTVAL (x);
1292
1293 if (oldmode != VOIDmode
1294 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1295 {
1296 int width = GET_MODE_BITSIZE (oldmode);
1297
1298 /* We need to zero extend VAL. */
1299 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1300 }
1301
1302 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1303 }
1304
1305 /* We can do this with a gen_lowpart if both desired and current modes
1306 are integer, and this is either a constant integer, a register, or a
1307 non-volatile MEM. Except for the constant case where MODE is no
1308 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1309
1310 if ((GET_CODE (x) == CONST_INT
1311 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1312 || (GET_MODE_CLASS (mode) == MODE_INT
1313 && GET_MODE_CLASS (oldmode) == MODE_INT
1314 && (GET_CODE (x) == CONST_DOUBLE
1315 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1316 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1317 && direct_load[(int) mode])
1318 || (GET_CODE (x) == REG
1319 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1320 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1321 {
1322 /* ?? If we don't know OLDMODE, we have to assume here that
1323 X does not need sign- or zero-extension. This may not be
1324 the case, but it's the best we can do. */
1325 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1326 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1327 {
1328 HOST_WIDE_INT val = INTVAL (x);
1329 int width = GET_MODE_BITSIZE (oldmode);
1330
1331 /* We must sign or zero-extend in this case. Start by
1332 zero-extending, then sign extend if we need to. */
1333 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1334 if (! unsignedp
1335 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1336 val |= (HOST_WIDE_INT) (-1) << width;
1337
1338 return GEN_INT (val);
1339 }
1340
1341 return gen_lowpart (mode, x);
1342 }
1343
1344 temp = gen_reg_rtx (mode);
1345 convert_move (temp, x, unsignedp);
1346 return temp;
1347 }
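/* Editor's note: worked example, not part of the original source.
   Constants are adjusted in place rather than copied through a register.
   For instance, widening the QImode constant -1 treated as unsigned:

	convert_modes (HImode, QImode, GEN_INT (-1), 1)

   zero-extends it and returns (const_int 255), while the same call with
   UNSIGNEDP == 0 simply returns (const_int -1).  */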
1348 \f
1349
1350 /* This macro is used to determine the largest unit size that
1351 move_by_pieces can use. */
1352
1353 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1354 move efficiently, as opposed to MOVE_MAX which is the maximum
1355 number of bytes we can move with a single instruction. */
1356
1357 #ifndef MOVE_MAX_PIECES
1358 #define MOVE_MAX_PIECES MOVE_MAX
1359 #endif
1360
1361 /* Generate several move instructions to copy LEN bytes
1362 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1363 The caller must pass FROM and TO
1364 through protect_from_queue before calling.
1365 ALIGN (in bytes) is maximum alignment we can assume. */
1366
1367 void
1368 move_by_pieces (to, from, len, align)
1369 rtx to, from;
1370 int len;
1371 unsigned int align;
1372 {
1373 struct move_by_pieces data;
1374 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1375 unsigned int max_size = MOVE_MAX_PIECES + 1;
1376 enum machine_mode mode = VOIDmode, tmode;
1377 enum insn_code icode;
1378
1379 data.offset = 0;
1380 data.to_addr = to_addr;
1381 data.from_addr = from_addr;
1382 data.to = to;
1383 data.from = from;
1384 data.autinc_to
1385 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1386 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1387 data.autinc_from
1388 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1389 || GET_CODE (from_addr) == POST_INC
1390 || GET_CODE (from_addr) == POST_DEC);
1391
1392 data.explicit_inc_from = 0;
1393 data.explicit_inc_to = 0;
1394 data.reverse
1395 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1396 if (data.reverse) data.offset = len;
1397 data.len = len;
1398
1399 data.to_struct = MEM_IN_STRUCT_P (to);
1400 data.from_struct = MEM_IN_STRUCT_P (from);
1401 data.to_readonly = RTX_UNCHANGING_P (to);
1402 data.from_readonly = RTX_UNCHANGING_P (from);
1403
1404 /* If copying requires more than two move insns,
1405 copy addresses to registers (to make displacements shorter)
1406 and use post-increment if available. */
1407 if (!(data.autinc_from && data.autinc_to)
1408 && move_by_pieces_ninsns (len, align) > 2)
1409 {
1410 /* Find the mode of the largest move... */
1411 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1412 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1413 if (GET_MODE_SIZE (tmode) < max_size)
1414 mode = tmode;
1415
1416 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1417 {
1418 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1419 data.autinc_from = 1;
1420 data.explicit_inc_from = -1;
1421 }
1422 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1423 {
1424 data.from_addr = copy_addr_to_reg (from_addr);
1425 data.autinc_from = 1;
1426 data.explicit_inc_from = 1;
1427 }
1428 if (!data.autinc_from && CONSTANT_P (from_addr))
1429 data.from_addr = copy_addr_to_reg (from_addr);
1430 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1431 {
1432 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1433 data.autinc_to = 1;
1434 data.explicit_inc_to = -1;
1435 }
1436 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1437 {
1438 data.to_addr = copy_addr_to_reg (to_addr);
1439 data.autinc_to = 1;
1440 data.explicit_inc_to = 1;
1441 }
1442 if (!data.autinc_to && CONSTANT_P (to_addr))
1443 data.to_addr = copy_addr_to_reg (to_addr);
1444 }
1445
1446 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1447 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1448 align = MOVE_MAX;
1449
1450 /* First move what we can in the largest integer mode, then go to
1451 successively smaller modes. */
1452
1453 while (max_size > 1)
1454 {
1455 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1456 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1457 if (GET_MODE_SIZE (tmode) < max_size)
1458 mode = tmode;
1459
1460 if (mode == VOIDmode)
1461 break;
1462
1463 icode = mov_optab->handlers[(int) mode].insn_code;
1464 if (icode != CODE_FOR_nothing
1465 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1466 (unsigned int) GET_MODE_SIZE (mode)))
1467 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1468
1469 max_size = GET_MODE_SIZE (mode);
1470 }
1471
1472 /* The code above should have handled everything. */
1473 if (data.len > 0)
1474 abort ();
1475 }
1476
1477 /* Return number of insns required to move L bytes by pieces.
1478 ALIGN (in bytes) is maximum alignment we can assume. */
1479
1480 static int
1481 move_by_pieces_ninsns (l, align)
1482 unsigned int l;
1483 unsigned int align;
1484 {
1485 register int n_insns = 0;
1486 unsigned int max_size = MOVE_MAX + 1;
1487
1488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1489 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1490 align = MOVE_MAX;
1491
1492 while (max_size > 1)
1493 {
1494 enum machine_mode mode = VOIDmode, tmode;
1495 enum insn_code icode;
1496
1497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1499 if (GET_MODE_SIZE (tmode) < max_size)
1500 mode = tmode;
1501
1502 if (mode == VOIDmode)
1503 break;
1504
1505 icode = mov_optab->handlers[(int) mode].insn_code;
1506 if (icode != CODE_FOR_nothing
1507 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1508 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1509
1510 max_size = GET_MODE_SIZE (mode);
1511 }
1512
1513 return n_insns;
1514 }
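/* Editor's note: worked example, not part of the original source.  On a
   hypothetical 32-bit target with word-aligned operands (ALIGN == 4),
   move_by_pieces_ninsns (11, 4) counts 11/4 == 2 SImode moves (3 bytes
   left), then 1 HImode move (1 byte left), then 1 QImode move, for a
   total of 4 insns.  That count is what MOVE_BY_PIECES_P compares
   against MOVE_RATIO above.  */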
1515
1516 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1517 with move instructions for mode MODE. GENFUN is the gen_... function
1518 to make a move insn for that mode. DATA has all the other info. */
1519
1520 static void
1521 move_by_pieces_1 (genfun, mode, data)
1522 rtx (*genfun) PARAMS ((rtx, ...));
1523 enum machine_mode mode;
1524 struct move_by_pieces *data;
1525 {
1526 register int size = GET_MODE_SIZE (mode);
1527 register rtx to1, from1;
1528
1529 while (data->len >= size)
1530 {
1531 if (data->reverse) data->offset -= size;
1532
1533 to1 = (data->autinc_to
1534 ? gen_rtx_MEM (mode, data->to_addr)
1535 : copy_rtx (change_address (data->to, mode,
1536 plus_constant (data->to_addr,
1537 data->offset))));
1538 MEM_IN_STRUCT_P (to1) = data->to_struct;
1539 RTX_UNCHANGING_P (to1) = data->to_readonly;
1540
1541 from1
1542 = (data->autinc_from
1543 ? gen_rtx_MEM (mode, data->from_addr)
1544 : copy_rtx (change_address (data->from, mode,
1545 plus_constant (data->from_addr,
1546 data->offset))));
1547 MEM_IN_STRUCT_P (from1) = data->from_struct;
1548 RTX_UNCHANGING_P (from1) = data->from_readonly;
1549
1550 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1551 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1552 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1553 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1554
1555 emit_insn ((*genfun) (to1, from1));
1556 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1557 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1558 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1559 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1560
1561 if (! data->reverse) data->offset += size;
1562
1563 data->len -= size;
1564 }
1565 }
1566 \f
1567 /* Emit code to move a block Y to a block X.
1568 This may be done with string-move instructions,
1569 with multiple scalar move instructions, or with a library call.
1570
1571 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1572 with mode BLKmode.
1573 SIZE is an rtx that says how long they are.
1574 ALIGN is the maximum alignment we can assume they have,
1575 measured in bytes.
1576
1577 Return the address of the new block, if memcpy is called and returns it,
1578 0 otherwise. */
1579
1580 rtx
1581 emit_block_move (x, y, size, align)
1582 rtx x, y;
1583 rtx size;
1584 unsigned int align;
1585 {
1586 rtx retval = 0;
1587 #ifdef TARGET_MEM_FUNCTIONS
1588 static tree fn;
1589 tree call_expr, arg_list;
1590 #endif
1591
1592 if (GET_MODE (x) != BLKmode)
1593 abort ();
1594
1595 if (GET_MODE (y) != BLKmode)
1596 abort ();
1597
1598 x = protect_from_queue (x, 1);
1599 y = protect_from_queue (y, 0);
1600 size = protect_from_queue (size, 0);
1601
1602 if (GET_CODE (x) != MEM)
1603 abort ();
1604 if (GET_CODE (y) != MEM)
1605 abort ();
1606 if (size == 0)
1607 abort ();
1608
1609 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1610 move_by_pieces (x, y, INTVAL (size), align);
1611 else
1612 {
1613 /* Try the most limited insn first, because there's no point
1614 including more than one in the machine description unless
1615 the more limited one has some advantage. */
1616
1617 rtx opalign = GEN_INT (align);
1618 enum machine_mode mode;
1619
1620 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1621 mode = GET_MODE_WIDER_MODE (mode))
1622 {
1623 enum insn_code code = movstr_optab[(int) mode];
1624 insn_operand_predicate_fn pred;
1625
1626 if (code != CODE_FOR_nothing
1627 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1628 here because if SIZE is less than the mode mask, as it is
1629 returned by the macro, it will definitely be less than the
1630 actual mode mask. */
1631 && ((GET_CODE (size) == CONST_INT
1632 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1633 <= (GET_MODE_MASK (mode) >> 1)))
1634 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1635 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1636 || (*pred) (x, BLKmode))
1637 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1638 || (*pred) (y, BLKmode))
1639 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1640 || (*pred) (opalign, VOIDmode)))
1641 {
1642 rtx op2;
1643 rtx last = get_last_insn ();
1644 rtx pat;
1645
1646 op2 = convert_to_mode (mode, size, 1);
1647 pred = insn_data[(int) code].operand[2].predicate;
1648 if (pred != 0 && ! (*pred) (op2, mode))
1649 op2 = copy_to_mode_reg (mode, op2);
1650
1651 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1652 if (pat)
1653 {
1654 emit_insn (pat);
1655 return 0;
1656 }
1657 else
1658 delete_insns_since (last);
1659 }
1660 }
1661
1662 /* X, Y, or SIZE may have been passed through protect_from_queue.
1663
1664 It is unsafe to save the value generated by protect_from_queue
1665 and reuse it later. Consider what happens if emit_queue is
1666 called before the return value from protect_from_queue is used.
1667
1668 Expansion of the CALL_EXPR below will call emit_queue before
1669 we are finished emitting RTL for argument setup. So if we are
1670 not careful we could get the wrong value for an argument.
1671
1672 To avoid this problem we go ahead and emit code to copy X, Y &
1673 SIZE into new pseudos. We can then place those new pseudos
1674 into an RTL_EXPR and use them later, even after a call to
1675 emit_queue.
1676
1677 Note this is not strictly needed for library calls since they
1678 do not call emit_queue before loading their arguments. However,
1679 we may need to have library calls call emit_queue in the future
1680 since failing to do so could cause problems for targets which
1681 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1682 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1683 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1684
1685 #ifdef TARGET_MEM_FUNCTIONS
1686 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1687 #else
1688 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1689 TREE_UNSIGNED (integer_type_node));
1690 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1691 #endif
1692
1693 #ifdef TARGET_MEM_FUNCTIONS
1694 /* It is incorrect to use the libcall calling conventions to call
1695 memcpy in this context.
1696
1697 This could be a user call to memcpy and the user may wish to
1698 examine the return value from memcpy.
1699
1700 For targets where libcalls and normal calls have different conventions
1701 for returning pointers, we could end up generating incorrect code.
1702
1703 So instead of using a libcall sequence we build up a suitable
1704 CALL_EXPR and expand the call in the normal fashion. */
1705 if (fn == NULL_TREE)
1706 {
1707 tree fntype;
1708
1709 /* This was copied from except.c; I don't know whether all of this is
1710 necessary in this context. */
1711 fn = get_identifier ("memcpy");
1712 push_obstacks_nochange ();
1713 end_temporary_allocation ();
1714 fntype = build_pointer_type (void_type_node);
1715 fntype = build_function_type (fntype, NULL_TREE);
1716 fn = build_decl (FUNCTION_DECL, fn, fntype);
1717 ggc_add_tree_root (&fn, 1);
1718 DECL_EXTERNAL (fn) = 1;
1719 TREE_PUBLIC (fn) = 1;
1720 DECL_ARTIFICIAL (fn) = 1;
1721 make_decl_rtl (fn, NULL_PTR, 1);
1722 assemble_external (fn);
1723 pop_obstacks ();
1724 }
1725
1726 /* We need to make an argument list for the function call.
1727
1728 memcpy has three arguments, the first two are void * addresses and
1729 the last is a size_t byte count for the copy. */
1730 arg_list
1731 = build_tree_list (NULL_TREE,
1732 make_tree (build_pointer_type (void_type_node), x));
1733 TREE_CHAIN (arg_list)
1734 = build_tree_list (NULL_TREE,
1735 make_tree (build_pointer_type (void_type_node), y));
1736 TREE_CHAIN (TREE_CHAIN (arg_list))
1737 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1738 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1739
1740 /* Now we have to build up the CALL_EXPR itself. */
1741 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1742 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1743 call_expr, arg_list, NULL_TREE);
1744 TREE_SIDE_EFFECTS (call_expr) = 1;
1745
1746 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1747 #else
1748 emit_library_call (bcopy_libfunc, 0,
1749 VOIDmode, 3, y, Pmode, x, Pmode,
1750 convert_to_mode (TYPE_MODE (integer_type_node), size,
1751 TREE_UNSIGNED (integer_type_node)),
1752 TYPE_MODE (integer_type_node));
1753 #endif
1754 }
1755
1756 return retval;
1757 }
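/* Editor's note: usage sketch, not part of the original source.  Expanding
   a small structure assignment arrives here with two BLKmode MEMs and a
   constant size, e.g. (DST_MEM and SRC_MEM are hypothetical):

	emit_block_move (dst_mem, src_mem, GEN_INT (16), 4);

   The call tries move_by_pieces first (constant size whose insn count is
   below MOVE_RATIO), then any movstrM pattern the target provides, and
   finally falls back to calling memcpy (or bcopy when
   TARGET_MEM_FUNCTIONS is not defined).  */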
1758 \f
1759 /* Copy all or part of a value X into registers starting at REGNO.
1760 The number of registers to be filled is NREGS. */
1761
1762 void
1763 move_block_to_reg (regno, x, nregs, mode)
1764 int regno;
1765 rtx x;
1766 int nregs;
1767 enum machine_mode mode;
1768 {
1769 int i;
1770 #ifdef HAVE_load_multiple
1771 rtx pat;
1772 rtx last;
1773 #endif
1774
1775 if (nregs == 0)
1776 return;
1777
1778 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1779 x = validize_mem (force_const_mem (mode, x));
1780
1781 /* See if the machine can do this with a load multiple insn. */
1782 #ifdef HAVE_load_multiple
1783 if (HAVE_load_multiple)
1784 {
1785 last = get_last_insn ();
1786 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1787 GEN_INT (nregs));
1788 if (pat)
1789 {
1790 emit_insn (pat);
1791 return;
1792 }
1793 else
1794 delete_insns_since (last);
1795 }
1796 #endif
1797
1798 for (i = 0; i < nregs; i++)
1799 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1800 operand_subword_force (x, i, mode));
1801 }
1802
1803 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1804 The number of registers to be filled is NREGS. SIZE indicates the number
1805 of bytes in the object X. */
1806
1807
1808 void
1809 move_block_from_reg (regno, x, nregs, size)
1810 int regno;
1811 rtx x;
1812 int nregs;
1813 int size;
1814 {
1815 int i;
1816 #ifdef HAVE_store_multiple
1817 rtx pat;
1818 rtx last;
1819 #endif
1820 enum machine_mode mode;
1821
1822 /* If SIZE is that of a mode no bigger than a word, just use that
1823 mode's store operation. */
1824 if (size <= UNITS_PER_WORD
1825 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1826 {
1827 emit_move_insn (change_address (x, mode, NULL),
1828 gen_rtx_REG (mode, regno));
1829 return;
1830 }
1831
1832 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1833 to the left before storing to memory. Note that the previous test
1834 doesn't handle all cases (e.g. SIZE == 3). */
1835 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1836 {
1837 rtx tem = operand_subword (x, 0, 1, BLKmode);
1838 rtx shift;
1839
1840 if (tem == 0)
1841 abort ();
1842
1843 shift = expand_shift (LSHIFT_EXPR, word_mode,
1844 gen_rtx_REG (word_mode, regno),
1845 build_int_2 ((UNITS_PER_WORD - size)
1846 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1847 emit_move_insn (tem, shift);
1848 return;
1849 }
1850
1851 /* See if the machine can do this with a store multiple insn. */
1852 #ifdef HAVE_store_multiple
1853 if (HAVE_store_multiple)
1854 {
1855 last = get_last_insn ();
1856 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1857 GEN_INT (nregs));
1858 if (pat)
1859 {
1860 emit_insn (pat);
1861 return;
1862 }
1863 else
1864 delete_insns_since (last);
1865 }
1866 #endif
1867
1868 for (i = 0; i < nregs; i++)
1869 {
1870 rtx tem = operand_subword (x, i, 1, BLKmode);
1871
1872 if (tem == 0)
1873 abort ();
1874
1875 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1876 }
1877 }
1878
1879 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1880 registers represented by a PARALLEL. SSIZE represents the total size of
1881 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1882 SRC in bits. */
1883 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1884 the balance will be in what would be the low-order memory addresses, i.e.
1885 left justified for big endian, right justified for little endian. This
1886 happens to be true for the targets currently using this support. If this
1887 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1888 would be needed. */
1889
1890 void
1891 emit_group_load (dst, orig_src, ssize, align)
1892 rtx dst, orig_src;
1893 unsigned int align;
1894 int ssize;
1895 {
1896 rtx *tmps, src;
1897 int start, i;
1898
1899 if (GET_CODE (dst) != PARALLEL)
1900 abort ();
1901
1902 /* Check for a NULL entry, used to indicate that the parameter goes
1903 both on the stack and in registers. */
1904 if (XEXP (XVECEXP (dst, 0, 0), 0))
1905 start = 0;
1906 else
1907 start = 1;
1908
1909 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1910
1911 /* If we won't be loading directly from memory, protect the real source
1912 from strange tricks we might play. */
1913 src = orig_src;
1914 if (GET_CODE (src) != MEM)
1915 {
1916       if (GET_MODE (src) == VOIDmode)
1917 src = gen_reg_rtx (GET_MODE (dst));
1918 else
1919 src = gen_reg_rtx (GET_MODE (orig_src));
1920 emit_move_insn (src, orig_src);
1921 }
1922
1923 /* Process the pieces. */
1924 for (i = start; i < XVECLEN (dst, 0); i++)
1925 {
1926 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1927 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1928 unsigned int bytelen = GET_MODE_SIZE (mode);
1929 int shift = 0;
1930
1931 /* Handle trailing fragments that run over the size of the struct. */
1932 if (ssize >= 0 && bytepos + bytelen > ssize)
1933 {
1934 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1935 bytelen = ssize - bytepos;
1936 if (bytelen <= 0)
1937 abort ();
1938 }
1939
1940 /* Optimize the access just a bit. */
1941 if (GET_CODE (src) == MEM
1942 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1943 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1944 && bytelen == GET_MODE_SIZE (mode))
1945 {
1946 tmps[i] = gen_reg_rtx (mode);
1947 emit_move_insn (tmps[i],
1948 change_address (src, mode,
1949 plus_constant (XEXP (src, 0),
1950 bytepos)));
1951 }
1952 else if (GET_CODE (src) == CONCAT)
1953 {
1954 if (bytepos == 0
1955 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1956 tmps[i] = XEXP (src, 0);
1957 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1958 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1959 tmps[i] = XEXP (src, 1);
1960 else
1961 abort ();
1962 }
1963 else
1964 {
1965 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1966 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1967 mode, mode, align, ssize);
1968 }
1969
1970 if (BYTES_BIG_ENDIAN && shift)
1971 {
1972 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1973 tmps[i], 0, OPTAB_WIDEN);
1974 }
1975 }
1976   emit_queue ();
1977
1978 /* Copy the extracted pieces into the proper (probable) hard regs. */
1979 for (i = start; i < XVECLEN (dst, 0); i++)
1980 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1981 }
1982
1983 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1984 registers represented by a PARALLEL. SSIZE represents the total size of
1985    block DST, or -1 if not known.  ALIGN is the known alignment of DST in bytes. */
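/* Illustrative sketch (hypothetical): the mirror of emit_group_load above,
   scattering a PARALLEL of return registers into a structure in memory.
   STRUCT_MEM, RET_PARALLEL and the 12-byte size are made-up values.

       emit_group_store (struct_mem, ret_parallel, 12, struct_align);

   Each register is first copied into a pseudo; each piece is then written
   with a plain move when it is aligned and exactly fills its mode, and with
   store_bit_field otherwise.  */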
1986
1987 void
1988 emit_group_store (orig_dst, src, ssize, align)
1989 rtx orig_dst, src;
1990 int ssize;
1991 unsigned int align;
1992 {
1993 rtx *tmps, dst;
1994 int start, i;
1995
1996 if (GET_CODE (src) != PARALLEL)
1997 abort ();
1998
1999 /* Check for a NULL entry, used to indicate that the parameter goes
2000 both on the stack and in registers. */
2001 if (XEXP (XVECEXP (src, 0, 0), 0))
2002 start = 0;
2003 else
2004 start = 1;
2005
2006   tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2007
2008 /* Copy the (probable) hard regs into pseudos. */
2009 for (i = start; i < XVECLEN (src, 0); i++)
2010 {
2011 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2012 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2013 emit_move_insn (tmps[i], reg);
2014 }
2015   emit_queue ();
2016
2017 /* If we won't be storing directly into memory, protect the real destination
2018 from strange tricks we might play. */
2019 dst = orig_dst;
2020 if (GET_CODE (dst) == PARALLEL)
2021 {
2022 rtx temp;
2023
2024 /* We can get a PARALLEL dst if there is a conditional expression in
2025 a return statement. In that case, the dst and src are the same,
2026 so no action is necessary. */
2027 if (rtx_equal_p (dst, src))
2028 return;
2029
2030 /* It is unclear if we can ever reach here, but we may as well handle
2031 it. Allocate a temporary, and split this into a store/load to/from
2032 the temporary. */
2033
2034 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2035 emit_group_store (temp, src, ssize, align);
2036 emit_group_load (dst, temp, ssize, align);
2037 return;
2038 }
2039 else if (GET_CODE (dst) != MEM)
2040 {
2041 dst = gen_reg_rtx (GET_MODE (orig_dst));
2042 /* Make life a bit easier for combine. */
2043 emit_move_insn (dst, const0_rtx);
2044 }
2045 else if (! MEM_IN_STRUCT_P (dst))
2046 {
2047 /* store_bit_field requires that memory operations have
2048 mem_in_struct_p set; we might not. */
2049
2050 dst = copy_rtx (orig_dst);
2051 MEM_SET_IN_STRUCT_P (dst, 1);
2052 }
2053
2054 /* Process the pieces. */
2055 for (i = start; i < XVECLEN (src, 0); i++)
2056 {
2057 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2058 enum machine_mode mode = GET_MODE (tmps[i]);
2059 unsigned int bytelen = GET_MODE_SIZE (mode);
2060
2061 /* Handle trailing fragments that run over the size of the struct. */
2062 if (ssize >= 0 && bytepos + bytelen > ssize)
2063 {
2064 if (BYTES_BIG_ENDIAN)
2065 {
2066 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2067 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2068 tmps[i], 0, OPTAB_WIDEN);
2069 }
2070 bytelen = ssize - bytepos;
2071 }
2072
2073 /* Optimize the access just a bit. */
2074 if (GET_CODE (dst) == MEM
2075 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2076 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2077 && bytelen == GET_MODE_SIZE (mode))
2078 emit_move_insn (change_address (dst, mode,
2079 plus_constant (XEXP (dst, 0),
2080 bytepos)),
2081 tmps[i]);
2082 else
2083 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2084 mode, tmps[i], align, ssize);
2085 }
2086
2087   emit_queue ();
2088
2089 /* Copy from the pseudo into the (probable) hard reg. */
2090 if (GET_CODE (dst) == REG)
2091 emit_move_insn (orig_dst, dst);
2092 }
2093
2094 /* Generate code to copy a BLKmode object of TYPE out of a
2095 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2096 is null, a stack temporary is created. TGTBLK is returned.
2097
2098 The primary purpose of this routine is to handle functions
2099 that return BLKmode structures in registers. Some machines
2100 (the PA for example) want to return all small structures
2101 in registers regardless of the structure's alignment. */
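/* Illustrative sketch (hypothetical): unpacking a 6-byte structure that the
   ABI returns in registers, on a 32-bit target.

       rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   Passing a null TGTBLK makes the function allocate a stack temporary.  On a
   big-endian target, bytes % UNITS_PER_WORD == 2, so the copy starts with a
   big-endian correction of BITS_PER_WORD - 2 * BITS_PER_UNIT == 16 bits.  */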
2102
2103 rtx
2104 copy_blkmode_from_reg (tgtblk, srcreg, type)
2105 rtx tgtblk;
2106 rtx srcreg;
2107 tree type;
2108 {
2109 int bytes = int_size_in_bytes (type);
2110 rtx src = NULL, dst = NULL;
2111 int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2112 int bitpos, xbitpos, big_endian_correction = 0;
2113
2114 if (tgtblk == 0)
2115 {
2116 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2117 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2118 preserve_temp_slots (tgtblk);
2119 }
2120
2121 /* This code assumes srcreg is at least a full word. If it isn't,
2122 copy it into a new pseudo which is a full word. */
2123 if (GET_MODE (srcreg) != BLKmode
2124 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2125 srcreg = convert_to_mode (word_mode, srcreg,
2126 TREE_UNSIGNED (type));
2127
2128 /* Structures whose size is not a multiple of a word are aligned
2129 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2130 machine, this means we must skip the empty high order bytes when
2131 calculating the bit offset. */
2132 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2133 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2134 * BITS_PER_UNIT));
2135
2136   /* Copy the structure BITSIZE bits at a time.
2137
2138 We could probably emit more efficient code for machines
2139 which do not use strict alignment, but it doesn't seem
2140 worth the effort at the current time. */
2141 for (bitpos = 0, xbitpos = big_endian_correction;
2142 bitpos < bytes * BITS_PER_UNIT;
2143 bitpos += bitsize, xbitpos += bitsize)
2144 {
2145
2146 /* We need a new source operand each time xbitpos is on a
2147          word boundary or when xbitpos == big_endian_correction
2148 (the first time through). */
2149 if (xbitpos % BITS_PER_WORD == 0
2150 || xbitpos == big_endian_correction)
2151 src = operand_subword_force (srcreg,
2152 xbitpos / BITS_PER_WORD,
2153 BLKmode);
2154
2155 /* We need a new destination operand each time bitpos is on
2156 a word boundary. */
2157 if (bitpos % BITS_PER_WORD == 0)
2158 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2159
2160 /* Use xbitpos for the source extraction (right justified) and
2161          bitpos for the destination store (left justified). */
2162 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2163 extract_bit_field (src, bitsize,
2164 xbitpos % BITS_PER_WORD, 1,
2165 NULL_RTX, word_mode,
2166 word_mode,
2167 bitsize / BITS_PER_UNIT,
2168 BITS_PER_WORD),
2169 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2170 }
2171 return tgtblk;
2172 }
2173
2174
2175 /* Add a USE expression for REG to the (possibly empty) list pointed
2176 to by CALL_FUSAGE. REG must denote a hard register. */
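/* Illustrative sketch (hypothetical): building the CALL_INSN_FUNCTION_USAGE
   list for a call that passes two arguments in consecutive hard registers.
   FIRST_ARG_REGNO is a made-up name.

       rtx call_fusage = NULL_RTX;
       use_regs (&call_fusage, FIRST_ARG_REGNO, 2);

   The resulting list is later attached to the CALL_INSN so that flow
   analysis knows the argument registers are read by the call.  */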
2177
2178 void
2179 use_reg (call_fusage, reg)
2180 rtx *call_fusage, reg;
2181 {
2182 if (GET_CODE (reg) != REG
2183 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2184     abort ();
2185
2186 *call_fusage
2187 = gen_rtx_EXPR_LIST (VOIDmode,
2188 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2189 }
2190
2191 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2192 starting at REGNO. All of these registers must be hard registers. */
2193
2194 void
2195 use_regs (call_fusage, regno, nregs)
2196 rtx *call_fusage;
2197 int regno;
2198 int nregs;
2199 {
2200 int i;
2201
2202 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2203 abort ();
2204
2205 for (i = 0; i < nregs; i++)
2206 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2207 }
2208
2209 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2210 PARALLEL REGS. This is for calls that pass values in multiple
2211 non-contiguous locations. The Irix 6 ABI has examples of this. */
2212
2213 void
2214 use_group_regs (call_fusage, regs)
2215 rtx *call_fusage;
2216 rtx regs;
2217 {
2218 int i;
2219
2220 for (i = 0; i < XVECLEN (regs, 0); i++)
2221 {
2222 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2223
2224 /* A NULL entry means the parameter goes both on the stack and in
2225 registers. This can also be a MEM for targets that pass values
2226 partially on the stack and partially in registers. */
2227 if (reg != 0 && GET_CODE (reg) == REG)
2228 use_reg (call_fusage, reg);
2229 }
2230 }
2231 \f
2232 /* Generate several move instructions to clear LEN bytes of block TO.
2233 (A MEM rtx with BLKmode). The caller must pass TO through
2234    protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2235 we can assume. */
2236
2237 static void
2238 clear_by_pieces (to, len, align)
2239 rtx to;
2240 int len;
2241 unsigned int align;
2242 {
2243 struct clear_by_pieces data;
2244 rtx to_addr = XEXP (to, 0);
2245 unsigned int max_size = MOVE_MAX_PIECES + 1;
2246 enum machine_mode mode = VOIDmode, tmode;
2247 enum insn_code icode;
2248
2249 data.offset = 0;
2250 data.to_addr = to_addr;
2251 data.to = to;
2252 data.autinc_to
2253 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2254 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2255
2256 data.explicit_inc_to = 0;
2257 data.reverse
2258 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2259 if (data.reverse) data.offset = len;
2260 data.len = len;
2261
2262 data.to_struct = MEM_IN_STRUCT_P (to);
2263
2264 /* If copying requires more than two move insns,
2265 copy addresses to registers (to make displacements shorter)
2266 and use post-increment if available. */
2267 if (!data.autinc_to
2268 && move_by_pieces_ninsns (len, align) > 2)
2269 {
2270 /* Determine the main mode we'll be using */
2271 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2272 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2273 if (GET_MODE_SIZE (tmode) < max_size)
2274 mode = tmode;
2275
2276 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2277 {
2278 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2279 data.autinc_to = 1;
2280 data.explicit_inc_to = -1;
2281 }
2282 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2283 {
2284 data.to_addr = copy_addr_to_reg (to_addr);
2285 data.autinc_to = 1;
2286 data.explicit_inc_to = 1;
2287 }
2288 if (!data.autinc_to && CONSTANT_P (to_addr))
2289 data.to_addr = copy_addr_to_reg (to_addr);
2290 }
2291
2292 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2293 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2294 align = MOVE_MAX;
2295
2296 /* First move what we can in the largest integer mode, then go to
2297 successively smaller modes. */
2298
2299 while (max_size > 1)
2300 {
2301 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2302 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2303 if (GET_MODE_SIZE (tmode) < max_size)
2304 mode = tmode;
2305
2306 if (mode == VOIDmode)
2307 break;
2308
2309 icode = mov_optab->handlers[(int) mode].insn_code;
2310 if (icode != CODE_FOR_nothing
2311 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2312 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2313
2314 max_size = GET_MODE_SIZE (mode);
2315 }
2316
2317 /* The code above should have handled everything. */
2318 if (data.len != 0)
2319 abort ();
2320 }
2321
2322 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2323 with move instructions for mode MODE. GENFUN is the gen_... function
2324 to make a move insn for that mode. DATA has all the other info. */
2325
2326 static void
2327 clear_by_pieces_1 (genfun, mode, data)
2328 rtx (*genfun) PARAMS ((rtx, ...));
2329 enum machine_mode mode;
2330 struct clear_by_pieces *data;
2331 {
2332 register int size = GET_MODE_SIZE (mode);
2333 register rtx to1;
2334
2335 while (data->len >= size)
2336 {
2337 if (data->reverse) data->offset -= size;
2338
2339 to1 = (data->autinc_to
2340 ? gen_rtx_MEM (mode, data->to_addr)
2341 : copy_rtx (change_address (data->to, mode,
2342 plus_constant (data->to_addr,
2343 data->offset))));
2344 MEM_IN_STRUCT_P (to1) = data->to_struct;
2345
2346 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2347 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2348
2349 emit_insn ((*genfun) (to1, const0_rtx));
2350 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2351 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2352
2353 if (! data->reverse) data->offset += size;
2354
2355 data->len -= size;
2356 }
2357 }
2358 \f
2359 /* Write zeros through the storage of OBJECT.
2360 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2361    the maximum alignment we can assume it has, measured in bytes.
2362
2363 If we call a function that returns the length of the block, return it. */
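/* Illustrative sketch (hypothetical): zeroing a 32-byte BLKmode temporary,
   with the alignment given in bytes as the callers in this file do.

       clear_storage (blk_mem, GEN_INT (32), UNITS_PER_WORD);

   A small constant length is cleared inline by clear_by_pieces; otherwise a
   clrstr pattern is tried, and failing that a call to memset or bzero is
   emitted.  Only the memset expansion yields a non-null return value.  */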
2364
2365 rtx
2366 clear_storage (object, size, align)
2367 rtx object;
2368 rtx size;
2369 unsigned int align;
2370 {
2371 #ifdef TARGET_MEM_FUNCTIONS
2372 static tree fn;
2373 tree call_expr, arg_list;
2374 #endif
2375 rtx retval = 0;
2376
2377 if (GET_MODE (object) == BLKmode)
2378 {
2379 object = protect_from_queue (object, 1);
2380 size = protect_from_queue (size, 0);
2381
2382 if (GET_CODE (size) == CONST_INT
2383 && MOVE_BY_PIECES_P (INTVAL (size), align))
2384 clear_by_pieces (object, INTVAL (size), align);
2385
2386 else
2387 {
2388 /* Try the most limited insn first, because there's no point
2389 including more than one in the machine description unless
2390 the more limited one has some advantage. */
2391
2392 rtx opalign = GEN_INT (align);
2393 enum machine_mode mode;
2394
2395 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2396 mode = GET_MODE_WIDER_MODE (mode))
2397 {
2398 enum insn_code code = clrstr_optab[(int) mode];
2399 insn_operand_predicate_fn pred;
2400
2401 if (code != CODE_FOR_nothing
2402 /* We don't need MODE to be narrower than
2403 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2404 the mode mask, as it is returned by the macro, it will
2405 definitely be less than the actual mode mask. */
2406 && ((GET_CODE (size) == CONST_INT
2407 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2408 <= (GET_MODE_MASK (mode) >> 1)))
2409 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2410 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2411 || (*pred) (object, BLKmode))
2412 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2413 || (*pred) (opalign, VOIDmode)))
2414 {
2415 rtx op1;
2416 rtx last = get_last_insn ();
2417 rtx pat;
2418
2419 op1 = convert_to_mode (mode, size, 1);
2420 pred = insn_data[(int) code].operand[1].predicate;
2421 if (pred != 0 && ! (*pred) (op1, mode))
2422 op1 = copy_to_mode_reg (mode, op1);
2423
2424 pat = GEN_FCN ((int) code) (object, op1, opalign);
2425 if (pat)
2426 {
2427 emit_insn (pat);
2428 return 0;
2429 }
2430 else
2431 delete_insns_since (last);
2432 }
2433 }
2434
2435 /* OBJECT or SIZE may have been passed through protect_from_queue.
2436
2437 It is unsafe to save the value generated by protect_from_queue
2438 and reuse it later. Consider what happens if emit_queue is
2439 called before the return value from protect_from_queue is used.
2440
2441 Expansion of the CALL_EXPR below will call emit_queue before
2442 we are finished emitting RTL for argument setup. So if we are
2443 not careful we could get the wrong value for an argument.
2444
2445 To avoid this problem we go ahead and emit code to copy OBJECT
2446 and SIZE into new pseudos. We can then place those new pseudos
2447 into an RTL_EXPR and use them later, even after a call to
2448 emit_queue.
2449
2450 Note this is not strictly needed for library calls since they
2451 do not call emit_queue before loading their arguments. However,
2452 we may need to have library calls call emit_queue in the future
2453 since failing to do so could cause problems for targets which
2454 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2455 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2456
2457 #ifdef TARGET_MEM_FUNCTIONS
2458 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2459 #else
2460 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2461 TREE_UNSIGNED (integer_type_node));
2462 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2463 #endif
2464
2465
2466 #ifdef TARGET_MEM_FUNCTIONS
2467 /* It is incorrect to use the libcall calling conventions to call
2468 memset in this context.
2469
2470 This could be a user call to memset and the user may wish to
2471 examine the return value from memset.
2472
2473 For targets where libcalls and normal calls have different
2474 conventions for returning pointers, we could end up generating
2475 incorrect code.
2476
2477 So instead of using a libcall sequence we build up a suitable
2478 CALL_EXPR and expand the call in the normal fashion. */
2479 if (fn == NULL_TREE)
2480 {
2481 tree fntype;
2482
2483              /* This was copied from except.c; I don't know if all this is
2484 necessary in this context or not. */
2485 fn = get_identifier ("memset");
2486 push_obstacks_nochange ();
2487 end_temporary_allocation ();
2488 fntype = build_pointer_type (void_type_node);
2489 fntype = build_function_type (fntype, NULL_TREE);
2490 fn = build_decl (FUNCTION_DECL, fn, fntype);
2491 ggc_add_tree_root (&fn, 1);
2492 DECL_EXTERNAL (fn) = 1;
2493 TREE_PUBLIC (fn) = 1;
2494 DECL_ARTIFICIAL (fn) = 1;
2495 make_decl_rtl (fn, NULL_PTR, 1);
2496 assemble_external (fn);
2497 pop_obstacks ();
2498 }
2499
2500 /* We need to make an argument list for the function call.
2501
2502          memset has three arguments, the first is a void * address, the
2503          second an integer with the initialization value, and the last is a
2504 size_t byte count for the copy. */
2505 arg_list
2506 = build_tree_list (NULL_TREE,
2507 make_tree (build_pointer_type (void_type_node),
2508 object));
2509 TREE_CHAIN (arg_list)
2510 = build_tree_list (NULL_TREE,
2511 make_tree (integer_type_node, const0_rtx));
2512 TREE_CHAIN (TREE_CHAIN (arg_list))
2513 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2514 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2515
2516 /* Now we have to build up the CALL_EXPR itself. */
2517 call_expr = build1 (ADDR_EXPR,
2518 build_pointer_type (TREE_TYPE (fn)), fn);
2519 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2520 call_expr, arg_list, NULL_TREE);
2521 TREE_SIDE_EFFECTS (call_expr) = 1;
2522
2523 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2524 #else
2525 emit_library_call (bzero_libfunc, 0,
2526 VOIDmode, 2, object, Pmode, size,
2527 TYPE_MODE (integer_type_node));
2528 #endif
2529 }
2530 }
2531 else
2532 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2533
2534 return retval;
2535 }
2536
2537 /* Generate code to copy Y into X.
2538 Both Y and X must have the same mode, except that
2539 Y can be a constant with VOIDmode.
2540 This mode cannot be BLKmode; use emit_block_move for that.
2541
2542 Return the last instruction emitted. */
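/* Illustrative sketch (hypothetical): the usual way a caller copies one
   value into another, letting this routine legitimize addresses and force
   awkward constants into the constant pool.  LOW and HIGH are made-up
   values.

       rtx tmp = gen_reg_rtx (DImode);
       emit_move_insn (tmp, immed_double_const (low, high, DImode));

   If the target has no movdi pattern, emit_move_insn_1 below falls back to
   copying the value one word at a time.  */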
2543
2544 rtx
2545 emit_move_insn (x, y)
2546 rtx x, y;
2547 {
2548 enum machine_mode mode = GET_MODE (x);
2549
2550 x = protect_from_queue (x, 1);
2551 y = protect_from_queue (y, 0);
2552
2553 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2554 abort ();
2555
2556 /* Never force constant_p_rtx to memory. */
2557 if (GET_CODE (y) == CONSTANT_P_RTX)
2558 ;
2559 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2560 y = force_const_mem (mode, y);
2561
2562 /* If X or Y are memory references, verify that their addresses are valid
2563 for the machine. */
2564 if (GET_CODE (x) == MEM
2565 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2566 && ! push_operand (x, GET_MODE (x)))
2567 || (flag_force_addr
2568 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2569 x = change_address (x, VOIDmode, XEXP (x, 0));
2570
2571 if (GET_CODE (y) == MEM
2572 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2573 || (flag_force_addr
2574 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2575 y = change_address (y, VOIDmode, XEXP (y, 0));
2576
2577 if (mode == BLKmode)
2578 abort ();
2579
2580 return emit_move_insn_1 (x, y);
2581 }
2582
2583 /* Low level part of emit_move_insn.
2584 Called just like emit_move_insn, but assumes X and Y
2585 are basically valid. */
2586
2587 rtx
2588 emit_move_insn_1 (x, y)
2589 rtx x, y;
2590 {
2591 enum machine_mode mode = GET_MODE (x);
2592 enum machine_mode submode;
2593 enum mode_class class = GET_MODE_CLASS (mode);
2594 unsigned int i;
2595
2596 if (mode >= MAX_MACHINE_MODE)
2597 abort ();
2598
2599 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2600 return
2601 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2602
2603 /* Expand complex moves by moving real part and imag part, if possible. */
2604 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2605 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2606 * BITS_PER_UNIT),
2607 (class == MODE_COMPLEX_INT
2608 ? MODE_INT : MODE_FLOAT),
2609 0))
2610 && (mov_optab->handlers[(int) submode].insn_code
2611 != CODE_FOR_nothing))
2612 {
2613 /* Don't split destination if it is a stack push. */
2614 int stack = push_operand (x, GET_MODE (x));
2615
2616          /* If this is a stack push, push the highpart first, so it
2617 will be in the argument order.
2618
2619 In that case, change_address is used only to convert
2620 the mode, not to change the address. */
2621 if (stack)
2622 {
2623 /* Note that the real part always precedes the imag part in memory
2624 regardless of machine's endianness. */
2625 #ifdef STACK_GROWS_DOWNWARD
2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2628 gen_imagpart (submode, y)));
2629 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2630 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2631 gen_realpart (submode, y)));
2632 #else
2633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2634 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2635 gen_realpart (submode, y)));
2636 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2637 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2638 gen_imagpart (submode, y)));
2639 #endif
2640 }
2641 else
2642 {
2643 rtx realpart_x, realpart_y;
2644 rtx imagpart_x, imagpart_y;
2645
2646 /* If this is a complex value with each part being smaller than a
2647 word, the usual calling sequence will likely pack the pieces into
2648 a single register. Unfortunately, SUBREG of hard registers only
2649 deals in terms of words, so we have a problem converting input
2650 arguments to the CONCAT of two registers that is used elsewhere
2651 for complex values. If this is before reload, we can copy it into
2652 memory and reload. FIXME, we should see about using extract and
2653 insert on integer registers, but complex short and complex char
2654 variables should be rarely used. */
2655 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2656 && (reload_in_progress | reload_completed) == 0)
2657 {
2658 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2659 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2660
2661 if (packed_dest_p || packed_src_p)
2662 {
2663 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2664 ? MODE_FLOAT : MODE_INT);
2665
2666 enum machine_mode reg_mode =
2667 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2668
2669 if (reg_mode != BLKmode)
2670 {
2671 rtx mem = assign_stack_temp (reg_mode,
2672 GET_MODE_SIZE (mode), 0);
2673
2674 rtx cmem = change_address (mem, mode, NULL_RTX);
2675
2676 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2677
2678 if (packed_dest_p)
2679 {
2680 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2681 emit_move_insn_1 (cmem, y);
2682 return emit_move_insn_1 (sreg, mem);
2683 }
2684 else
2685 {
2686 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2687 emit_move_insn_1 (mem, sreg);
2688 return emit_move_insn_1 (x, cmem);
2689 }
2690 }
2691 }
2692 }
2693
2694 realpart_x = gen_realpart (submode, x);
2695 realpart_y = gen_realpart (submode, y);
2696 imagpart_x = gen_imagpart (submode, x);
2697 imagpart_y = gen_imagpart (submode, y);
2698
2699 /* Show the output dies here. This is necessary for SUBREGs
2700 of pseudos since we cannot track their lifetimes correctly;
2701 hard regs shouldn't appear here except as return values.
2702 We never want to emit such a clobber after reload. */
2703 if (x != y
2704 && ! (reload_in_progress || reload_completed)
2705 && (GET_CODE (realpart_x) == SUBREG
2706 || GET_CODE (imagpart_x) == SUBREG))
2707 {
2708 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2709 }
2710
2711 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2712 (realpart_x, realpart_y));
2713 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2714 (imagpart_x, imagpart_y));
2715 }
2716
2717 return get_last_insn ();
2718 }
2719
2720 /* This will handle any multi-word mode that lacks a move_insn pattern.
2721 However, you will get better code if you define such patterns,
2722 even if they must turn into multiple assembler instructions. */
2723 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2724 {
2725 rtx last_insn = 0;
2726 rtx seq;
2727 int need_clobber;
2728
2729 #ifdef PUSH_ROUNDING
2730
2731 /* If X is a push on the stack, do the push now and replace
2732 X with a reference to the stack pointer. */
2733 if (push_operand (x, GET_MODE (x)))
2734 {
2735 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2736 x = change_address (x, VOIDmode, stack_pointer_rtx);
2737 }
2738 #endif
2739
2740 start_sequence ();
2741
2742 need_clobber = 0;
2743 for (i = 0;
2744 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2745 i++)
2746 {
2747 rtx xpart = operand_subword (x, i, 1, mode);
2748 rtx ypart = operand_subword (y, i, 1, mode);
2749
2750 /* If we can't get a part of Y, put Y into memory if it is a
2751 constant. Otherwise, force it into a register. If we still
2752 can't get a part of Y, abort. */
2753 if (ypart == 0 && CONSTANT_P (y))
2754 {
2755 y = force_const_mem (mode, y);
2756 ypart = operand_subword (y, i, 1, mode);
2757 }
2758 else if (ypart == 0)
2759 ypart = operand_subword_force (y, i, mode);
2760
2761 if (xpart == 0 || ypart == 0)
2762 abort ();
2763
2764 need_clobber |= (GET_CODE (xpart) == SUBREG);
2765
2766 last_insn = emit_move_insn (xpart, ypart);
2767 }
2768
2769 seq = gen_sequence ();
2770 end_sequence ();
2771
2772 /* Show the output dies here. This is necessary for SUBREGs
2773 of pseudos since we cannot track their lifetimes correctly;
2774 hard regs shouldn't appear here except as return values.
2775 We never want to emit such a clobber after reload. */
2776 if (x != y
2777 && ! (reload_in_progress || reload_completed)
2778 && need_clobber != 0)
2779 {
2780 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2781 }
2782
2783 emit_insn (seq);
2784
2785 return last_insn;
2786 }
2787 else
2788 abort ();
2789 }
2790 \f
2791 /* Pushing data onto the stack. */
2792
2793 /* Push a block of length SIZE (perhaps variable)
2794 and return an rtx to address the beginning of the block.
2795 Note that it is not possible for the value returned to be a QUEUED.
2796 The value may be virtual_outgoing_args_rtx.
2797
2798 EXTRA is the number of bytes of padding to push in addition to SIZE.
2799 BELOW nonzero means this padding comes at low addresses;
2800 otherwise, the padding comes at high addresses. */
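/* Illustrative sketch (hypothetical): reserving 64 bytes of argument space
   plus 8 bytes of padding at low addresses.

       rtx block_addr = push_block (GEN_INT (64), 8, 1);

   The stack pointer is adjusted by 72 bytes; on a typical downward-growing
   stack the returned address then points just past the padding, at the
   start of the 64-byte block.  */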
2801
2802 rtx
2803 push_block (size, extra, below)
2804 rtx size;
2805 int extra, below;
2806 {
2807 register rtx temp;
2808
2809 size = convert_modes (Pmode, ptr_mode, size, 1);
2810 if (CONSTANT_P (size))
2811 anti_adjust_stack (plus_constant (size, extra));
2812 else if (GET_CODE (size) == REG && extra == 0)
2813 anti_adjust_stack (size);
2814 else
2815 {
2816 rtx temp = copy_to_mode_reg (Pmode, size);
2817 if (extra != 0)
2818 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2819 temp, 0, OPTAB_LIB_WIDEN);
2820 anti_adjust_stack (temp);
2821 }
2822
2823 #if defined (STACK_GROWS_DOWNWARD) \
2824 || (defined (ARGS_GROW_DOWNWARD) \
2825 && !defined (ACCUMULATE_OUTGOING_ARGS))
2826
2827 /* Return the lowest stack address when STACK or ARGS grow downward and
2828      we are not accumulating outgoing arguments (the c4x port uses such
2829 conventions). */
2830 temp = virtual_outgoing_args_rtx;
2831 if (extra != 0 && below)
2832 temp = plus_constant (temp, extra);
2833 #else
2834 if (GET_CODE (size) == CONST_INT)
2835 temp = plus_constant (virtual_outgoing_args_rtx,
2836 - INTVAL (size) - (below ? 0 : extra));
2837 else if (extra != 0 && !below)
2838 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2839 negate_rtx (Pmode, plus_constant (size, extra)));
2840 else
2841 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2842 negate_rtx (Pmode, size));
2843 #endif
2844
2845 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2846 }
2847
2848 rtx
2849 gen_push_operand ()
2850 {
2851 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2852 }
2853
2854 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2855 block of SIZE bytes. */
2856
2857 static rtx
2858 get_push_address (size)
2859 int size;
2860 {
2861 register rtx temp;
2862
2863 if (STACK_PUSH_CODE == POST_DEC)
2864 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2865 else if (STACK_PUSH_CODE == POST_INC)
2866 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2867 else
2868 temp = stack_pointer_rtx;
2869
2870 return copy_to_reg (temp);
2871 }
2872
2873 /* Generate code to push X onto the stack, assuming it has mode MODE and
2874 type TYPE.
2875 MODE is redundant except when X is a CONST_INT (since they don't
2876 carry mode info).
2877 SIZE is an rtx for the size of data to be copied (in bytes),
2878 needed only if X is BLKmode.
2879
2880    ALIGN (in bytes) is the maximum alignment we can assume.
2881
2882 If PARTIAL and REG are both nonzero, then copy that many of the first
2883 words of X into registers starting with REG, and push the rest of X.
2884 The amount of space pushed is decreased by PARTIAL words,
2885 rounded *down* to a multiple of PARM_BOUNDARY.
2886 REG must be a hard register in this case.
2887    If REG is zero but PARTIAL is not, take all other actions for an
2888 argument partially in registers, but do not actually load any
2889 registers.
2890
2891 EXTRA is the amount in bytes of extra space to leave next to this arg.
2892 This is ignored if an argument block has already been allocated.
2893
2894 On a machine that lacks real push insns, ARGS_ADDR is the address of
2895 the bottom of the argument block for this call. We use indexing off there
2896    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2897 argument block has not been preallocated.
2898
2899 ARGS_SO_FAR is the size of args previously pushed for this call.
2900
2901 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2902 for arguments passed in registers. If nonzero, it will be the number
2903 of bytes required. */
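/* Illustrative sketch (hypothetical): pushing a 12-byte BLKmode argument
   whose first word travels in a register.  ARG_RTX, ARG_TYPE, ARG_ALIGN and
   ARG_REG are made-up names.

       emit_push_insn (arg_rtx, BLKmode, arg_type, GEN_INT (12), arg_align,
                       1, arg_reg, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);

   With PARTIAL == 1 the BLKmode branch skips the bytes already covered by
   the register when copying to the stack; the register part itself is
   loaded only at the end, at the `ret:' label, after any mem-to-mem copies
   have been done.  */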
2904
2905 void
2906 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2907 args_addr, args_so_far, reg_parm_stack_space,
2908 alignment_pad)
2909 register rtx x;
2910 enum machine_mode mode;
2911 tree type;
2912 rtx size;
2913 unsigned int align;
2914 int partial;
2915 rtx reg;
2916 int extra;
2917 rtx args_addr;
2918 rtx args_so_far;
2919 int reg_parm_stack_space;
2920 rtx alignment_pad;
2921 {
2922 rtx xinner;
2923 enum direction stack_direction
2924 #ifdef STACK_GROWS_DOWNWARD
2925 = downward;
2926 #else
2927 = upward;
2928 #endif
2929
2930 /* Decide where to pad the argument: `downward' for below,
2931 `upward' for above, or `none' for don't pad it.
2932 Default is below for small data on big-endian machines; else above. */
2933 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2934
2935 /* Invert direction if stack is post-update. */
2936 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2937 if (where_pad != none)
2938 where_pad = (where_pad == downward ? upward : downward);
2939
2940 xinner = x = protect_from_queue (x, 0);
2941
2942 if (mode == BLKmode)
2943 {
2944 /* Copy a block into the stack, entirely or partially. */
2945
2946 register rtx temp;
2947 int used = partial * UNITS_PER_WORD;
2948 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2949 int skip;
2950
2951 if (size == 0)
2952 abort ();
2953
2954 used -= offset;
2955
2956 /* USED is now the # of bytes we need not copy to the stack
2957 because registers will take care of them. */
2958
2959 if (partial != 0)
2960 xinner = change_address (xinner, BLKmode,
2961 plus_constant (XEXP (xinner, 0), used));
2962
2963 /* If the partial register-part of the arg counts in its stack size,
2964 skip the part of stack space corresponding to the registers.
2965 Otherwise, start copying to the beginning of the stack space,
2966 by setting SKIP to 0. */
2967 skip = (reg_parm_stack_space == 0) ? 0 : used;
2968
2969 #ifdef PUSH_ROUNDING
2970 /* Do it with several push insns if that doesn't take lots of insns
2971 and if there is no difficulty with push insns that skip bytes
2972 on the stack for alignment purposes. */
2973 if (args_addr == 0
2974 && GET_CODE (size) == CONST_INT
2975 && skip == 0
2976 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2977 /* Here we avoid the case of a structure whose weak alignment
2978 forces many pushes of a small amount of data,
2979 and such small pushes do rounding that causes trouble. */
2980 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
2981 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2982 || PUSH_ROUNDING (align) == align)
2983 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2984 {
2985 /* Push padding now if padding above and stack grows down,
2986 or if padding below and stack grows up.
2987 But if space already allocated, this has already been done. */
2988 if (extra && args_addr == 0
2989 && where_pad != none && where_pad != stack_direction)
2990 anti_adjust_stack (GEN_INT (extra));
2991
2992 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2993 INTVAL (size) - used, align);
2994
2995 if (current_function_check_memory_usage && ! in_check_memory_usage)
2996 {
2997 rtx temp;
2998
2999 in_check_memory_usage = 1;
3000 temp = get_push_address (INTVAL(size) - used);
3001 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3002 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3003 temp, Pmode,
3004 XEXP (xinner, 0), Pmode,
3005 GEN_INT (INTVAL(size) - used),
3006 TYPE_MODE (sizetype));
3007 else
3008 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3009 temp, Pmode,
3010 GEN_INT (INTVAL(size) - used),
3011 TYPE_MODE (sizetype),
3012 GEN_INT (MEMORY_USE_RW),
3013 TYPE_MODE (integer_type_node));
3014 in_check_memory_usage = 0;
3015 }
3016 }
3017 else
3018 #endif /* PUSH_ROUNDING */
3019 {
3020 /* Otherwise make space on the stack and copy the data
3021 to the address of that space. */
3022
3023 /* Deduct words put into registers from the size we must copy. */
3024 if (partial != 0)
3025 {
3026 if (GET_CODE (size) == CONST_INT)
3027 size = GEN_INT (INTVAL (size) - used);
3028 else
3029 size = expand_binop (GET_MODE (size), sub_optab, size,
3030 GEN_INT (used), NULL_RTX, 0,
3031 OPTAB_LIB_WIDEN);
3032 }
3033
3034 /* Get the address of the stack space.
3035 In this case, we do not deal with EXTRA separately.
3036 A single stack adjust will do. */
3037 if (! args_addr)
3038 {
3039 temp = push_block (size, extra, where_pad == downward);
3040 extra = 0;
3041 }
3042 else if (GET_CODE (args_so_far) == CONST_INT)
3043 temp = memory_address (BLKmode,
3044 plus_constant (args_addr,
3045 skip + INTVAL (args_so_far)));
3046 else
3047 temp = memory_address (BLKmode,
3048 plus_constant (gen_rtx_PLUS (Pmode,
3049 args_addr,
3050 args_so_far),
3051 skip));
3052 if (current_function_check_memory_usage && ! in_check_memory_usage)
3053 {
3054 rtx target;
3055
3056 in_check_memory_usage = 1;
3057 target = copy_to_reg (temp);
3058 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3059 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3060 target, Pmode,
3061 XEXP (xinner, 0), Pmode,
3062 size, TYPE_MODE (sizetype));
3063 else
3064 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3065 target, Pmode,
3066 size, TYPE_MODE (sizetype),
3067 GEN_INT (MEMORY_USE_RW),
3068 TYPE_MODE (integer_type_node));
3069 in_check_memory_usage = 0;
3070 }
3071
3072 /* TEMP is the address of the block. Copy the data there. */
3073 if (GET_CODE (size) == CONST_INT
3074 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3075 {
3076 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3077 INTVAL (size), align);
3078 goto ret;
3079 }
3080 else
3081 {
3082 rtx opalign = GEN_INT (align);
3083 enum machine_mode mode;
3084 rtx target = gen_rtx_MEM (BLKmode, temp);
3085
3086 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3087 mode != VOIDmode;
3088 mode = GET_MODE_WIDER_MODE (mode))
3089 {
3090 enum insn_code code = movstr_optab[(int) mode];
3091 insn_operand_predicate_fn pred;
3092
3093 if (code != CODE_FOR_nothing
3094 && ((GET_CODE (size) == CONST_INT
3095 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3096 <= (GET_MODE_MASK (mode) >> 1)))
3097 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3098 && (!(pred = insn_data[(int) code].operand[0].predicate)
3099 || ((*pred) (target, BLKmode)))
3100 && (!(pred = insn_data[(int) code].operand[1].predicate)
3101 || ((*pred) (xinner, BLKmode)))
3102 && (!(pred = insn_data[(int) code].operand[3].predicate)
3103 || ((*pred) (opalign, VOIDmode))))
3104 {
3105 rtx op2 = convert_to_mode (mode, size, 1);
3106 rtx last = get_last_insn ();
3107 rtx pat;
3108
3109 pred = insn_data[(int) code].operand[2].predicate;
3110 if (pred != 0 && ! (*pred) (op2, mode))
3111 op2 = copy_to_mode_reg (mode, op2);
3112
3113 pat = GEN_FCN ((int) code) (target, xinner,
3114 op2, opalign);
3115 if (pat)
3116 {
3117 emit_insn (pat);
3118 goto ret;
3119 }
3120 else
3121 delete_insns_since (last);
3122 }
3123 }
3124 }
3125
3126 #ifndef ACCUMULATE_OUTGOING_ARGS
3127 /* If the source is referenced relative to the stack pointer,
3128 copy it to another register to stabilize it. We do not need
3129 to do this if we know that we won't be changing sp. */
3130
3131 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3132 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3133 temp = copy_to_reg (temp);
3134 #endif
3135
3136 /* Make inhibit_defer_pop nonzero around the library call
3137 to force it to pop the bcopy-arguments right away. */
3138 NO_DEFER_POP;
3139 #ifdef TARGET_MEM_FUNCTIONS
3140 emit_library_call (memcpy_libfunc, 0,
3141 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3142 convert_to_mode (TYPE_MODE (sizetype),
3143 size, TREE_UNSIGNED (sizetype)),
3144 TYPE_MODE (sizetype));
3145 #else
3146 emit_library_call (bcopy_libfunc, 0,
3147 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3148 convert_to_mode (TYPE_MODE (integer_type_node),
3149 size,
3150 TREE_UNSIGNED (integer_type_node)),
3151 TYPE_MODE (integer_type_node));
3152 #endif
3153 OK_DEFER_POP;
3154 }
3155 }
3156 else if (partial > 0)
3157 {
3158 /* Scalar partly in registers. */
3159
3160 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3161 int i;
3162 int not_stack;
3163 /* # words of start of argument
3164 that we must make space for but need not store. */
3165 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3166 int args_offset = INTVAL (args_so_far);
3167 int skip;
3168
3169 /* Push padding now if padding above and stack grows down,
3170 or if padding below and stack grows up.
3171 But if space already allocated, this has already been done. */
3172 if (extra && args_addr == 0
3173 && where_pad != none && where_pad != stack_direction)
3174 anti_adjust_stack (GEN_INT (extra));
3175
3176 /* If we make space by pushing it, we might as well push
3177 the real data. Otherwise, we can leave OFFSET nonzero
3178 and leave the space uninitialized. */
3179 if (args_addr == 0)
3180 offset = 0;
3181
3182 /* Now NOT_STACK gets the number of words that we don't need to
3183 allocate on the stack. */
3184 not_stack = partial - offset;
3185
3186 /* If the partial register-part of the arg counts in its stack size,
3187 skip the part of stack space corresponding to the registers.
3188 Otherwise, start copying to the beginning of the stack space,
3189 by setting SKIP to 0. */
3190 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3191
3192 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3193 x = validize_mem (force_const_mem (mode, x));
3194
3195 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3196 SUBREGs of such registers are not allowed. */
3197 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3198 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3199 x = copy_to_reg (x);
3200
3201 /* Loop over all the words allocated on the stack for this arg. */
3202 /* We can do it by words, because any scalar bigger than a word
3203      has a size that is a multiple of a word. */
3204 #ifndef PUSH_ARGS_REVERSED
3205 for (i = not_stack; i < size; i++)
3206 #else
3207 for (i = size - 1; i >= not_stack; i--)
3208 #endif
3209 if (i >= not_stack + offset)
3210 emit_push_insn (operand_subword_force (x, i, mode),
3211 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3212 0, args_addr,
3213 GEN_INT (args_offset + ((i - not_stack + skip)
3214 * UNITS_PER_WORD)),
3215 reg_parm_stack_space, alignment_pad);
3216 }
3217 else
3218 {
3219 rtx addr;
3220 rtx target = NULL_RTX;
3221
3222 /* Push padding now if padding above and stack grows down,
3223 or if padding below and stack grows up.
3224 But if space already allocated, this has already been done. */
3225 if (extra && args_addr == 0
3226 && where_pad != none && where_pad != stack_direction)
3227 anti_adjust_stack (GEN_INT (extra));
3228
3229 #ifdef PUSH_ROUNDING
3230 if (args_addr == 0)
3231 addr = gen_push_operand ();
3232 else
3233 #endif
3234 {
3235 if (GET_CODE (args_so_far) == CONST_INT)
3236 addr
3237 = memory_address (mode,
3238 plus_constant (args_addr,
3239 INTVAL (args_so_far)));
3240 else
3241 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3242 args_so_far));
3243 target = addr;
3244 }
3245
3246 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3247
3248 if (current_function_check_memory_usage && ! in_check_memory_usage)
3249 {
3250 in_check_memory_usage = 1;
3251 if (target == 0)
3252 target = get_push_address (GET_MODE_SIZE (mode));
3253
3254 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3255 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3256 target, Pmode,
3257 XEXP (x, 0), Pmode,
3258 GEN_INT (GET_MODE_SIZE (mode)),
3259 TYPE_MODE (sizetype));
3260 else
3261 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3262 target, Pmode,
3263 GEN_INT (GET_MODE_SIZE (mode)),
3264 TYPE_MODE (sizetype),
3265 GEN_INT (MEMORY_USE_RW),
3266 TYPE_MODE (integer_type_node));
3267 in_check_memory_usage = 0;
3268 }
3269 }
3270
3271 ret:
3272 /* If part should go in registers, copy that part
3273 into the appropriate registers. Do this now, at the end,
3274 since mem-to-mem copies above may do function calls. */
3275 if (partial > 0 && reg != 0)
3276 {
3277 /* Handle calls that pass values in multiple non-contiguous locations.
3278 The Irix 6 ABI has examples of this. */
3279 if (GET_CODE (reg) == PARALLEL)
3280 emit_group_load (reg, x, -1, align); /* ??? size? */
3281 else
3282 move_block_to_reg (REGNO (reg), x, partial, mode);
3283 }
3284
3285 if (extra && args_addr == 0 && where_pad == stack_direction)
3286 anti_adjust_stack (GEN_INT (extra));
3287
3288 if (alignment_pad)
3289 anti_adjust_stack (alignment_pad);
3290 }
3291 \f
3292 /* Expand an assignment that stores the value of FROM into TO.
3293 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3294 (This may contain a QUEUED rtx;
3295 if the value is constant, this rtx is a constant.)
3296 Otherwise, the returned value is NULL_RTX.
3297
3298 SUGGEST_REG is no longer actually used.
3299 It used to mean, copy the value through a register
3300 and return that register, if that is possible.
3301 We now use WANT_VALUE to decide whether to do this. */
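/* Illustrative sketch (hypothetical): for an assignment such as `s.f = x + 1'
   the front end ends up calling

       expand_assignment (lhs_tree, rhs_tree, 0, 0);

   Because the left-hand side is a COMPONENT_REF, the branch below uses
   get_inner_reference to find the field's position and stores the value
   through store_field rather than through a plain move.  */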
3302
3303 rtx
3304 expand_assignment (to, from, want_value, suggest_reg)
3305 tree to, from;
3306 int want_value;
3307 int suggest_reg ATTRIBUTE_UNUSED;
3308 {
3309 register rtx to_rtx = 0;
3310 rtx result;
3311
3312 /* Don't crash if the lhs of the assignment was erroneous. */
3313
3314 if (TREE_CODE (to) == ERROR_MARK)
3315 {
3316 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3317 return want_value ? result : NULL_RTX;
3318 }
3319
3320 /* Assignment of a structure component needs special treatment
3321 if the structure component's rtx is not simply a MEM.
3322 Assignment of an array element at a constant index, and assignment of
3323    an array element in an unaligned packed structure field, have the same
3324 problem. */
3325
3326 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3327 || TREE_CODE (to) == ARRAY_REF)
3328 {
3329 enum machine_mode mode1;
3330 HOST_WIDE_INT bitsize, bitpos;
3331 tree offset;
3332 int unsignedp;
3333 int volatilep = 0;
3334 tree tem;
3335 unsigned int alignment;
3336
3337 push_temp_slots ();
3338 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3339 &unsignedp, &volatilep, &alignment);
3340
3341 /* If we are going to use store_bit_field and extract_bit_field,
3342 make sure to_rtx will be safe for multiple use. */
3343
3344 if (mode1 == VOIDmode && want_value)
3345 tem = stabilize_reference (tem);
3346
3347 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3348 if (offset != 0)
3349 {
3350 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3351
3352 if (GET_CODE (to_rtx) != MEM)
3353 abort ();
3354
3355 if (GET_MODE (offset_rtx) != ptr_mode)
3356 {
3357 #ifdef POINTERS_EXTEND_UNSIGNED
3358 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3359 #else
3360 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3361 #endif
3362 }
3363
3364          /* A constant address in TO_RTX can have VOIDmode; we must not try
3365             to call force_reg for that case, so avoid it. */
3366 if (GET_CODE (to_rtx) == MEM
3367 && GET_MODE (to_rtx) == BLKmode
3368 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3369 && bitsize
3370 && (bitpos % bitsize) == 0
3371 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3372 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3373 {
3374 rtx temp = change_address (to_rtx, mode1,
3375 plus_constant (XEXP (to_rtx, 0),
3376 (bitpos /
3377 BITS_PER_UNIT)));
3378 if (GET_CODE (XEXP (temp, 0)) == REG)
3379 to_rtx = temp;
3380 else
3381 to_rtx = change_address (to_rtx, mode1,
3382 force_reg (GET_MODE (XEXP (temp, 0)),
3383 XEXP (temp, 0)));
3384 bitpos = 0;
3385 }
3386
3387 to_rtx = change_address (to_rtx, VOIDmode,
3388 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3389 force_reg (ptr_mode,
3390 offset_rtx)));
3391 }
3392
3393 if (volatilep)
3394 {
3395 if (GET_CODE (to_rtx) == MEM)
3396 {
3397 /* When the offset is zero, to_rtx is the address of the
3398 structure we are storing into, and hence may be shared.
3399 We must make a new MEM before setting the volatile bit. */
3400 if (offset == 0)
3401 to_rtx = copy_rtx (to_rtx);
3402
3403 MEM_VOLATILE_P (to_rtx) = 1;
3404 }
3405 #if 0 /* This was turned off because, when a field is volatile
3406 in an object which is not volatile, the object may be in a register,
3407 and then we would abort over here. */
3408 else
3409 abort ();
3410 #endif
3411 }
3412
3413 if (TREE_CODE (to) == COMPONENT_REF
3414 && TREE_READONLY (TREE_OPERAND (to, 1)))
3415 {
3416 if (offset == 0)
3417 to_rtx = copy_rtx (to_rtx);
3418
3419 RTX_UNCHANGING_P (to_rtx) = 1;
3420 }
3421
3422 /* Check the access. */
3423 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3424 {
3425 rtx to_addr;
3426 int size;
3427 int best_mode_size;
3428 enum machine_mode best_mode;
3429
3430 best_mode = get_best_mode (bitsize, bitpos,
3431 TYPE_ALIGN (TREE_TYPE (tem)),
3432 mode1, volatilep);
3433 if (best_mode == VOIDmode)
3434 best_mode = QImode;
3435
3436 best_mode_size = GET_MODE_BITSIZE (best_mode);
3437 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3438 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3439 size *= GET_MODE_SIZE (best_mode);
3440
3441 /* Check the access right of the pointer. */
3442 if (size)
3443 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3444 to_addr, Pmode,
3445 GEN_INT (size), TYPE_MODE (sizetype),
3446 GEN_INT (MEMORY_USE_WO),
3447 TYPE_MODE (integer_type_node));
3448 }
3449
3450 /* If this is a varying-length object, we must get the address of
3451 the source and do an explicit block move. */
3452 if (bitsize < 0)
3453 {
3454 unsigned int from_align;
3455 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3456 rtx inner_to_rtx
3457 = change_address (to_rtx, VOIDmode,
3458 plus_constant (XEXP (to_rtx, 0),
3459 bitpos / BITS_PER_UNIT));
3460
3461 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3462 MIN (alignment, from_align / BITS_PER_UNIT));
3463 free_temp_slots ();
3464 pop_temp_slots ();
3465 return to_rtx;
3466 }
3467 else
3468 {
3469 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3470 (want_value
3471 /* Spurious cast for HPUX compiler. */
3472 ? ((enum machine_mode)
3473 TYPE_MODE (TREE_TYPE (to)))
3474 : VOIDmode),
3475 unsignedp,
3476 /* Required alignment of containing datum. */
3477 alignment,
3478 int_size_in_bytes (TREE_TYPE (tem)),
3479 get_alias_set (to));
3480
3481 preserve_temp_slots (result);
3482 free_temp_slots ();
3483 pop_temp_slots ();
3484
3485 /* If the value is meaningful, convert RESULT to the proper mode.
3486 Otherwise, return nothing. */
3487 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3488 TYPE_MODE (TREE_TYPE (from)),
3489 result,
3490 TREE_UNSIGNED (TREE_TYPE (to)))
3491 : NULL_RTX);
3492 }
3493 }
3494
3495 /* If the rhs is a function call and its value is not an aggregate,
3496 call the function before we start to compute the lhs.
3497 This is needed for correct code for cases such as
3498 val = setjmp (buf) on machines where reference to val
3499 requires loading up part of an address in a separate insn.
3500
3501 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3502    since it might be a promoted variable where the zero- or sign-extension
3503 needs to be done. Handling this in the normal way is safe because no
3504 computation is done before the call. */
3505 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3506 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3507 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3508 && GET_CODE (DECL_RTL (to)) == REG))
3509 {
3510 rtx value;
3511
3512 push_temp_slots ();
3513 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3514 if (to_rtx == 0)
3515 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3516
3517 /* Handle calls that return values in multiple non-contiguous locations.
3518 The Irix 6 ABI has examples of this. */
3519 if (GET_CODE (to_rtx) == PARALLEL)
3520 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3521 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3522 else if (GET_MODE (to_rtx) == BLKmode)
3523 emit_block_move (to_rtx, value, expr_size (from),
3524 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3525 else
3526 {
3527 #ifdef POINTERS_EXTEND_UNSIGNED
3528 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3529 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3530 value = convert_memory_address (GET_MODE (to_rtx), value);
3531 #endif
3532 emit_move_insn (to_rtx, value);
3533 }
3534 preserve_temp_slots (to_rtx);
3535 free_temp_slots ();
3536 pop_temp_slots ();
3537 return want_value ? to_rtx : NULL_RTX;
3538 }
3539
3540 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3541 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3542
3543 if (to_rtx == 0)
3544 {
3545 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3546 if (GET_CODE (to_rtx) == MEM)
3547 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3548 }
3549
3550 /* Don't move directly into a return register. */
3551 if (TREE_CODE (to) == RESULT_DECL
3552 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3553 {
3554 rtx temp;
3555
3556 push_temp_slots ();
3557 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3558
3559 if (GET_CODE (to_rtx) == PARALLEL)
3560 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3561 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3562 else
3563 emit_move_insn (to_rtx, temp);
3564
3565 preserve_temp_slots (to_rtx);
3566 free_temp_slots ();
3567 pop_temp_slots ();
3568 return want_value ? to_rtx : NULL_RTX;
3569 }
3570
3571 /* In case we are returning the contents of an object which overlaps
3572 the place the value is being stored, use a safe function when copying
3573 a value through a pointer into a structure value return block. */
3574 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3575 && current_function_returns_struct
3576 && !current_function_returns_pcc_struct)
3577 {
3578 rtx from_rtx, size;
3579
3580 push_temp_slots ();
3581 size = expr_size (from);
3582 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3583 EXPAND_MEMORY_USE_DONT);
3584
3585 /* Copy the rights of the bitmap. */
3586 if (current_function_check_memory_usage)
3587 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3588 XEXP (to_rtx, 0), Pmode,
3589 XEXP (from_rtx, 0), Pmode,
3590 convert_to_mode (TYPE_MODE (sizetype),
3591 size, TREE_UNSIGNED (sizetype)),
3592 TYPE_MODE (sizetype));
3593
3594 #ifdef TARGET_MEM_FUNCTIONS
3595 emit_library_call (memcpy_libfunc, 0,
3596 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3597 XEXP (from_rtx, 0), Pmode,
3598 convert_to_mode (TYPE_MODE (sizetype),
3599 size, TREE_UNSIGNED (sizetype)),
3600 TYPE_MODE (sizetype));
3601 #else
3602 emit_library_call (bcopy_libfunc, 0,
3603 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3604 XEXP (to_rtx, 0), Pmode,
3605 convert_to_mode (TYPE_MODE (integer_type_node),
3606 size, TREE_UNSIGNED (integer_type_node)),
3607 TYPE_MODE (integer_type_node));
3608 #endif
3609
3610 preserve_temp_slots (to_rtx);
3611 free_temp_slots ();
3612 pop_temp_slots ();
3613 return want_value ? to_rtx : NULL_RTX;
3614 }
3615
3616 /* Compute FROM and store the value in the rtx we got. */
3617
3618 push_temp_slots ();
3619 result = store_expr (from, to_rtx, want_value);
3620 preserve_temp_slots (result);
3621 free_temp_slots ();
3622 pop_temp_slots ();
3623 return want_value ? result : NULL_RTX;
3624 }
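
/* Illustrative note (a sketch, not part of the original sources): the
   overlapping-return case handled above arises for C source along the
   lines of

     struct S { int v[32]; } *p;
     struct S f (void) { return *p; }

   where P may point into the block the caller supplied for the return
   value, so the copy is done with a memcpy/bcopy library call rather
   than piecemeal.  */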
3625
3626 /* Generate code for computing expression EXP,
3627 and storing the value into TARGET.
3628 TARGET may contain a QUEUED rtx.
3629
3630 If WANT_VALUE is nonzero, return a copy of the value
3631 not in TARGET, so that we can be sure to use the proper
3632 value in a containing expression even if TARGET has something
3633 else stored in it. If possible, we copy the value through a pseudo
3634 and return that pseudo. Or, if the value is constant, we try to
3635 return the constant. In some cases, we return a pseudo
3636 copied *from* TARGET.
3637
3638 If the mode is BLKmode then we may return TARGET itself.
3639 It turns out that in BLKmode it doesn't cause a problem,
3640 because C has no operators that could combine two different
3641 assignments into the same BLKmode object with different values
3642 with no sequence point. Will other languages need this to
3643 be more thorough?
3644
3645 If WANT_VALUE is 0, we return NULL, to make sure
3646 to catch quickly any cases where the caller uses the value
3647 and fails to set WANT_VALUE. */
3648
3649 rtx
3650 store_expr (exp, target, want_value)
3651 register tree exp;
3652 register rtx target;
3653 int want_value;
3654 {
3655 register rtx temp;
3656 int dont_return_target = 0;
3657
3658 if (TREE_CODE (exp) == COMPOUND_EXPR)
3659 {
3660 /* Perform first part of compound expression, then assign from second
3661 part. */
3662 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3663 emit_queue ();
3664 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3665 }
3666 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3667 {
3668 /* For conditional expression, get safe form of the target. Then
3669 test the condition, doing the appropriate assignment on either
3670 side. This avoids the creation of unnecessary temporaries.
3671 For non-BLKmode, it is more efficient not to do this. */
3672
3673 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3674
3675 emit_queue ();
3676 target = protect_from_queue (target, 1);
3677
3678 do_pending_stack_adjust ();
3679 NO_DEFER_POP;
3680 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3681 start_cleanup_deferral ();
3682 store_expr (TREE_OPERAND (exp, 1), target, 0);
3683 end_cleanup_deferral ();
3684 emit_queue ();
3685 emit_jump_insn (gen_jump (lab2));
3686 emit_barrier ();
3687 emit_label (lab1);
3688 start_cleanup_deferral ();
3689 store_expr (TREE_OPERAND (exp, 2), target, 0);
3690 end_cleanup_deferral ();
3691 emit_queue ();
3692 emit_label (lab2);
3693 OK_DEFER_POP;
3694
3695 return want_value ? target : NULL_RTX;
3696 }
3697 else if (queued_subexp_p (target))
3698 /* If target contains a postincrement, let's not risk
3699 using it as the place to generate the rhs. */
3700 {
3701 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3702 {
3703 /* Expand EXP into a new pseudo. */
3704 temp = gen_reg_rtx (GET_MODE (target));
3705 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3706 }
3707 else
3708 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3709
3710 /* If target is volatile, ANSI requires accessing the value
3711 *from* the target, if it is accessed. So make that happen.
3712 In no case return the target itself. */
3713 if (! MEM_VOLATILE_P (target) && want_value)
3714 dont_return_target = 1;
3715 }
3716 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3717 && GET_MODE (target) != BLKmode)
3718 /* If target is in memory and caller wants value in a register instead,
3719 arrange that. Pass TARGET as target for expand_expr so that,
3720 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3721 We know expand_expr will not use the target in that case.
3722 Don't do this if TARGET is volatile because we are supposed
3723 to write it and then read it. */
3724 {
3725 temp = expand_expr (exp, target, GET_MODE (target), 0);
3726 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3727 temp = copy_to_reg (temp);
3728 dont_return_target = 1;
3729 }
3730 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3731 /* If this is a scalar in a register that is stored in a wider mode
3732 than the declared mode, compute the result into its declared mode
3733 and then convert to the wider mode. Our value is the computed
3734 expression. */
3735 {
3736 /* If we don't want a value, we can do the conversion inside EXP,
3737 which will often result in some optimizations. Do the conversion
3738 in two steps: first change the signedness, if needed, then
3739 the extend. But don't do this if the type of EXP is a subtype
3740 of something else since then the conversion might involve
3741 more than just converting modes. */
3742 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3743 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3744 {
3745 if (TREE_UNSIGNED (TREE_TYPE (exp))
3746 != SUBREG_PROMOTED_UNSIGNED_P (target))
3747 exp
3748 = convert
3749 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3750 TREE_TYPE (exp)),
3751 exp);
3752
3753 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3754 SUBREG_PROMOTED_UNSIGNED_P (target)),
3755 exp);
3756 }
3757
3758 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3759
3760 /* If TEMP is a volatile MEM and we want a result value, make
3761 the access now so it gets done only once. Likewise if
3762 it contains TARGET. */
3763 if (GET_CODE (temp) == MEM && want_value
3764 && (MEM_VOLATILE_P (temp)
3765 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3766 temp = copy_to_reg (temp);
3767
3768 /* If TEMP is a VOIDmode constant, use convert_modes to make
3769 sure that we properly convert it. */
3770 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3771 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3772 TYPE_MODE (TREE_TYPE (exp)), temp,
3773 SUBREG_PROMOTED_UNSIGNED_P (target));
3774
3775 convert_move (SUBREG_REG (target), temp,
3776 SUBREG_PROMOTED_UNSIGNED_P (target));
3777
3778 /* If we promoted a constant, change the mode back down to match
3779 target. Otherwise, the caller might get confused by a result whose
3780 mode is larger than expected. */
3781
3782 if (want_value && GET_MODE (temp) != GET_MODE (target)
3783 && GET_MODE (temp) != VOIDmode)
3784 {
3785 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3786 SUBREG_PROMOTED_VAR_P (temp) = 1;
3787 SUBREG_PROMOTED_UNSIGNED_P (temp)
3788 = SUBREG_PROMOTED_UNSIGNED_P (target);
3789 }
3790
3791 return want_value ? temp : NULL_RTX;
3792 }
3793 else
3794 {
3795 temp = expand_expr (exp, target, GET_MODE (target), 0);
3796 /* Return TARGET if it's a specified hardware register.
3797 If TARGET is a volatile mem ref, either return TARGET
3798 or return a reg copied *from* TARGET; ANSI requires this.
3799
3800 Otherwise, if TEMP is not TARGET, return TEMP
3801 if it is constant (for efficiency),
3802 or if we really want the correct value. */
3803 if (!(target && GET_CODE (target) == REG
3804 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3805 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3806 && ! rtx_equal_p (temp, target)
3807 && (CONSTANT_P (temp) || want_value))
3808 dont_return_target = 1;
3809 }
3810
3811 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3812 the same as that of TARGET, adjust the constant. This is needed, for
3813 example, in case it is a CONST_DOUBLE and we want only a word-sized
3814 value. */
3815 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3816 && TREE_CODE (exp) != ERROR_MARK
3817 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3818 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3819 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3820
3821 if (current_function_check_memory_usage
3822 && GET_CODE (target) == MEM
3823 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3824 {
3825 if (GET_CODE (temp) == MEM)
3826 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3827 XEXP (target, 0), Pmode,
3828 XEXP (temp, 0), Pmode,
3829 expr_size (exp), TYPE_MODE (sizetype));
3830 else
3831 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3832 XEXP (target, 0), Pmode,
3833 expr_size (exp), TYPE_MODE (sizetype),
3834 GEN_INT (MEMORY_USE_WO),
3835 TYPE_MODE (integer_type_node));
3836 }
3837
3838 /* If value was not generated in the target, store it there.
3839 Convert the value to TARGET's type first if necessary. */
3840 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3841 one or both of them are volatile memory refs, we have to distinguish
3842 two cases:
3843 - expand_expr has used TARGET. In this case, we must not generate
3844 another copy. This can be detected by TEMP being equal to TARGET
3845 according to == .
3846 - expand_expr has not used TARGET - that means that the source just
3847 happens to have the same RTX form. Since temp will have been created
3848 by expand_expr, it will compare unequal according to == .
3849 We must generate a copy in this case, to reach the correct number
3850 of volatile memory references. */
3851
3852 if ((! rtx_equal_p (temp, target)
3853 || (temp != target && (side_effects_p (temp)
3854 || side_effects_p (target))))
3855 && TREE_CODE (exp) != ERROR_MARK)
3856 {
3857 target = protect_from_queue (target, 1);
3858 if (GET_MODE (temp) != GET_MODE (target)
3859 && GET_MODE (temp) != VOIDmode)
3860 {
3861 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3862 if (dont_return_target)
3863 {
3864 /* In this case, we will return TEMP,
3865 so make sure it has the proper mode.
3866 But don't forget to store the value into TARGET. */
3867 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3868 emit_move_insn (target, temp);
3869 }
3870 else
3871 convert_move (target, temp, unsignedp);
3872 }
3873
3874 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3875 {
3876 /* Handle copying a string constant into an array.
3877 The string constant may be shorter than the array.
3878 So copy just the string's actual length, and clear the rest. */
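/* For illustration (this example is not in the original comment): with
   `char buf[10] = "hi";' the bytes of the string literal are copied and
   the remaining bytes of BUF are cleared by the code below.  */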
3879 rtx size;
3880 rtx addr;
3881
3882 /* Get the size of the data type of the string,
3883 which is actually the size of the target. */
3884 size = expr_size (exp);
3885 if (GET_CODE (size) == CONST_INT
3886 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3887 emit_block_move (target, temp, size,
3888 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3889 else
3890 {
3891 /* Compute the size of the data to copy from the string. */
3892 tree copy_size
3893 = size_binop (MIN_EXPR,
3894 make_tree (sizetype, size),
3895 size_int (TREE_STRING_LENGTH (exp)));
3896 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3897 VOIDmode, 0);
3898 rtx label = 0;
3899
3900 /* Copy that much. */
3901 emit_block_move (target, temp, copy_size_rtx,
3902 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3903
3904 /* Figure out how much is left in TARGET that we have to clear.
3905 Do all calculations in ptr_mode. */
3906
3907 addr = XEXP (target, 0);
3908 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3909
3910 if (GET_CODE (copy_size_rtx) == CONST_INT)
3911 {
3912 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3913 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3914 }
3915 else
3916 {
3917 addr = force_reg (ptr_mode, addr);
3918 addr = expand_binop (ptr_mode, add_optab, addr,
3919 copy_size_rtx, NULL_RTX, 0,
3920 OPTAB_LIB_WIDEN);
3921
3922 size = expand_binop (ptr_mode, sub_optab, size,
3923 copy_size_rtx, NULL_RTX, 0,
3924 OPTAB_LIB_WIDEN);
3925
3926 label = gen_label_rtx ();
3927 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3928 GET_MODE (size), 0, 0, label);
3929 }
3930
3931 if (size != const0_rtx)
3932 {
3933 /* Be sure we can write on ADDR. */
3934 if (current_function_check_memory_usage)
3935 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3936 addr, Pmode,
3937 size, TYPE_MODE (sizetype),
3938 GEN_INT (MEMORY_USE_WO),
3939 TYPE_MODE (integer_type_node));
3940 #ifdef TARGET_MEM_FUNCTIONS
3941 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3942 addr, ptr_mode,
3943 const0_rtx, TYPE_MODE (integer_type_node),
3944 convert_to_mode (TYPE_MODE (sizetype),
3945 size,
3946 TREE_UNSIGNED (sizetype)),
3947 TYPE_MODE (sizetype));
3948 #else
3949 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3950 addr, ptr_mode,
3951 convert_to_mode (TYPE_MODE (integer_type_node),
3952 size,
3953 TREE_UNSIGNED (integer_type_node)),
3954 TYPE_MODE (integer_type_node));
3955 #endif
3956 }
3957
3958 if (label)
3959 emit_label (label);
3960 }
3961 }
3962 /* Handle calls that return values in multiple non-contiguous locations.
3963 The Irix 6 ABI has examples of this. */
3964 else if (GET_CODE (target) == PARALLEL)
3965 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3966 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3967 else if (GET_MODE (temp) == BLKmode)
3968 emit_block_move (target, temp, expr_size (exp),
3969 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3970 else
3971 emit_move_insn (target, temp);
3972 }
3973
3974 /* If we don't want a value, return NULL_RTX. */
3975 if (! want_value)
3976 return NULL_RTX;
3977
3978 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3979 ??? The latter test doesn't seem to make sense. */
3980 else if (dont_return_target && GET_CODE (temp) != MEM)
3981 return temp;
3982
3983 /* Return TARGET itself if it is a hard register. */
3984 else if (want_value && GET_MODE (target) != BLKmode
3985 && ! (GET_CODE (target) == REG
3986 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3987 return copy_to_reg (target);
3988
3989 else
3990 return target;
3991 }
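
/* Hypothetical caller sketch (illustration only; LHS and RHS below are
   made-up names): expanding the statement `x = y + 1;' eventually
   reduces to roughly

     rtx to_rtx = expand_expr (lhs, NULL_RTX, VOIDmode,
			       EXPAND_MEMORY_USE_WO);
     store_expr (rhs, to_rtx, 0);

   with WANT_VALUE zero because the value of the assignment is unused.  */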
3992 \f
3993 /* Return 1 if EXP just contains zeros. */
3994
3995 static int
3996 is_zeros_p (exp)
3997 tree exp;
3998 {
3999 tree elt;
4000
4001 switch (TREE_CODE (exp))
4002 {
4003 case CONVERT_EXPR:
4004 case NOP_EXPR:
4005 case NON_LVALUE_EXPR:
4006 return is_zeros_p (TREE_OPERAND (exp, 0));
4007
4008 case INTEGER_CST:
4009 return integer_zerop (exp);
4010
4011 case COMPLEX_CST:
4012 return
4013 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4014
4015 case REAL_CST:
4016 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4017
4018 case CONSTRUCTOR:
4019 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4020 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4021 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4022 if (! is_zeros_p (TREE_VALUE (elt)))
4023 return 0;
4024
4025 return 1;
4026
4027 default:
4028 return 0;
4029 }
4030 }
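
/* E.g. (illustration only): the initializer `{ 0, 0.0, { 0, 0 } }' is
   all zeros in this sense, while `{ 0, 1 }' is not.  */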
4031
4032 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4033
4034 static int
4035 mostly_zeros_p (exp)
4036 tree exp;
4037 {
4038 if (TREE_CODE (exp) == CONSTRUCTOR)
4039 {
4040 int elts = 0, zeros = 0;
4041 tree elt = CONSTRUCTOR_ELTS (exp);
4042 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4043 {
4044 /* If there are no ranges of true bits, it is all zero. */
4045 return elt == NULL_TREE;
4046 }
4047 for (; elt; elt = TREE_CHAIN (elt))
4048 {
4049 /* We do not handle the case where the index is a RANGE_EXPR,
4050 so the statistic will be somewhat inaccurate.
4051 We do make a more accurate count in store_constructor itself,
4052 so since this function is only used for nested array elements,
4053 this should be close enough. */
4054 if (mostly_zeros_p (TREE_VALUE (elt)))
4055 zeros++;
4056 elts++;
4057 }
4058
4059 return 4 * zeros >= 3 * elts;
4060 }
4061
4062 return is_zeros_p (exp);
4063 }
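
/* Worked example (for illustration): a CONSTRUCTOR for
   `int a[4] = { 0, 0, 0, 7 };' has zeros == 3 and elts == 4, so
   4 * zeros >= 3 * elts (12 >= 12) holds and the aggregate counts as
   mostly zero; for `{ 0, 7, 7, 7 }' the test is 4 >= 12 and fails.  */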
4064 \f
4065 /* Helper function for store_constructor.
4066 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4067 TYPE is the type of the CONSTRUCTOR, not the element type.
4068 ALIGN and CLEARED are as for store_constructor.
4069
4070 This provides a recursive shortcut back to store_constructor when it isn't
4071 necessary to go through store_field. This is so that we can pass through
4072 the cleared field to let store_constructor know that we may not have to
4073 clear a substructure if the outer structure has already been cleared. */
4074
4075 static void
4076 store_constructor_field (target, bitsize, bitpos,
4077 mode, exp, type, align, cleared)
4078 rtx target;
4079 unsigned HOST_WIDE_INT bitsize;
4080 HOST_WIDE_INT bitpos;
4081 enum machine_mode mode;
4082 tree exp, type;
4083 unsigned int align;
4084 int cleared;
4085 {
4086 if (TREE_CODE (exp) == CONSTRUCTOR
4087 && bitpos % BITS_PER_UNIT == 0
4088 /* If we have a non-zero bitpos for a register target, then we just
4089 let store_field do the bitfield handling. This is unlikely to
4090 generate unnecessary clear instructions anyway. */
4091 && (bitpos == 0 || GET_CODE (target) == MEM))
4092 {
4093 if (bitpos != 0)
4094 target
4095 = change_address (target,
4096 GET_MODE (target) == BLKmode
4097 || 0 != (bitpos
4098 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4099 ? BLKmode : VOIDmode,
4100 plus_constant (XEXP (target, 0),
4101 bitpos / BITS_PER_UNIT));
4102 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4103 }
4104 else
4105 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4106 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4107 int_size_in_bytes (type), 0);
4108 }
4109
4110 /* Store the value of constructor EXP into the rtx TARGET.
4111 TARGET is either a REG or a MEM.
4112 ALIGN is the maximum known alignment for TARGET, in bits.
4113 CLEARED is true if TARGET is known to have been zero'd.
4114 SIZE is the number of bytes of TARGET we are allowed to modify: this
4115 may not be the same as the size of EXP if we are assigning to a field
4116 which has been packed to exclude padding bits. */
4117
4118 static void
4119 store_constructor (exp, target, align, cleared, size)
4120 tree exp;
4121 rtx target;
4122 unsigned int align;
4123 int cleared;
4124 HOST_WIDE_INT size;
4125 {
4126 tree type = TREE_TYPE (exp);
4127 #ifdef WORD_REGISTER_OPERATIONS
4128 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4129 #endif
4130
4131 /* We know our target cannot conflict, since safe_from_p has been called. */
4132 #if 0
4133 /* Don't try copying piece by piece into a hard register
4134 since that is vulnerable to being clobbered by EXP.
4135 Instead, construct in a pseudo register and then copy it all. */
4136 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4137 {
4138 rtx temp = gen_reg_rtx (GET_MODE (target));
4139 store_constructor (exp, temp, align, cleared, size);
4140 emit_move_insn (target, temp);
4141 return;
4142 }
4143 #endif
4144
4145 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4146 || TREE_CODE (type) == QUAL_UNION_TYPE)
4147 {
4148 register tree elt;
4149
4150 /* Inform later passes that the whole union value is dead. */
4151 if ((TREE_CODE (type) == UNION_TYPE
4152 || TREE_CODE (type) == QUAL_UNION_TYPE)
4153 && ! cleared)
4154 {
4155 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4156
4157 /* If the constructor is empty, clear the union. */
4158 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4159 clear_storage (target, expr_size (exp),
4160 TYPE_ALIGN (type) / BITS_PER_UNIT);
4161 }
4162
4163 /* If we are building a static constructor into a register,
4164 set the initial value as zero so we can fold the value into
4165 a constant. But if more than one register is involved,
4166 this probably loses. */
4167 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4168 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4169 {
4170 if (! cleared)
4171 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4172
4173 cleared = 1;
4174 }
4175
4176 /* If the constructor has fewer fields than the structure
4177 or if we are initializing the structure to mostly zeros,
4178 clear the whole structure first. */
4179 else if (size > 0
4180 && ((list_length (CONSTRUCTOR_ELTS (exp))
4181 != fields_length (type))
4182 || mostly_zeros_p (exp)))
4183 {
4184 if (! cleared)
4185 clear_storage (target, GEN_INT (size),
4186 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4187
4188 cleared = 1;
4189 }
4190 else if (! cleared)
4191 /* Inform later passes that the old value is dead. */
4192 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4193
4194 /* Store each element of the constructor into
4195 the corresponding field of TARGET. */
4196
4197 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4198 {
4199 register tree field = TREE_PURPOSE (elt);
4200 #ifdef WORD_REGISTER_OPERATIONS
4201 tree value = TREE_VALUE (elt);
4202 #endif
4203 register enum machine_mode mode;
4204 HOST_WIDE_INT bitsize;
4205 HOST_WIDE_INT bitpos = 0;
4206 int unsignedp;
4207 tree offset;
4208 rtx to_rtx = target;
4209
4210 /* Just ignore missing fields.
4211 We cleared the whole structure, above,
4212 if any fields are missing. */
4213 if (field == 0)
4214 continue;
4215
4216 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4217 continue;
4218
4219 if (host_integerp (DECL_SIZE (field), 1))
4220 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4221 else
4222 bitsize = -1;
4223
4224 unsignedp = TREE_UNSIGNED (field);
4225 mode = DECL_MODE (field);
4226 if (DECL_BIT_FIELD (field))
4227 mode = VOIDmode;
4228
4229 offset = DECL_FIELD_OFFSET (field);
4230 if (host_integerp (offset, 0)
4231 && host_integerp (bit_position (field), 0))
4232 {
4233 bitpos = int_bit_position (field);
4234 offset = 0;
4235 }
4236 else
4237 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4238
4239 if (offset)
4240 {
4241 rtx offset_rtx;
4242
4243 if (contains_placeholder_p (offset))
4244 offset = build (WITH_RECORD_EXPR, bitsizetype,
4245 offset, make_tree (TREE_TYPE (exp), target));
4246
4247 offset = size_binop (EXACT_DIV_EXPR, offset, bitsize_unit_node);
4248 offset = convert (sizetype, offset);
4249
4250 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4251 if (GET_CODE (to_rtx) != MEM)
4252 abort ();
4253
4254 if (GET_MODE (offset_rtx) != ptr_mode)
4255 {
4256 #ifdef POINTERS_EXTEND_UNSIGNED
4257 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4258 #else
4259 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4260 #endif
4261 }
4262
4263 to_rtx
4264 = change_address (to_rtx, VOIDmode,
4265 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4266 force_reg (ptr_mode,
4267 offset_rtx)));
4268 }
4269
4270 if (TREE_READONLY (field))
4271 {
4272 if (GET_CODE (to_rtx) == MEM)
4273 to_rtx = copy_rtx (to_rtx);
4274
4275 RTX_UNCHANGING_P (to_rtx) = 1;
4276 }
4277
4278 #ifdef WORD_REGISTER_OPERATIONS
4279 /* If this initializes a field that is smaller than a word, at the
4280 start of a word, try to widen it to a full word.
4281 This special case allows us to output C++ member function
4282 initializations in a form that the optimizers can understand. */
4283 if (GET_CODE (target) == REG
4284 && bitsize < BITS_PER_WORD
4285 && bitpos % BITS_PER_WORD == 0
4286 && GET_MODE_CLASS (mode) == MODE_INT
4287 && TREE_CODE (value) == INTEGER_CST
4288 && exp_size >= 0
4289 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4290 {
4291 tree type = TREE_TYPE (value);
4292 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4293 {
4294 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4295 value = convert (type, value);
4296 }
4297 if (BYTES_BIG_ENDIAN)
4298 value
4299 = fold (build (LSHIFT_EXPR, type, value,
4300 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4301 bitsize = BITS_PER_WORD;
4302 mode = word_mode;
4303 }
4304 #endif
4305 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4306 TREE_VALUE (elt), type,
4307 MIN (align,
4308 DECL_ALIGN (TREE_PURPOSE (elt))),
4309 cleared);
4310 }
4311 }
4312 else if (TREE_CODE (type) == ARRAY_TYPE)
4313 {
4314 register tree elt;
4315 register int i;
4316 int need_to_clear;
4317 tree domain = TYPE_DOMAIN (type);
4318 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4319 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4320 tree elttype = TREE_TYPE (type);
4321
4322 /* If the constructor has fewer elements than the array,
4323 clear the whole array first. Similarly if this is
4324 a static constructor of a non-BLKmode object. */
4325 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4326 need_to_clear = 1;
4327 else
4328 {
4329 HOST_WIDE_INT count = 0, zero_count = 0;
4330 need_to_clear = 0;
4331 /* This loop is a more accurate version of the loop in
4332 mostly_zeros_p (it handles RANGE_EXPR in an index).
4333 It is also needed to check for missing elements. */
4334 for (elt = CONSTRUCTOR_ELTS (exp);
4335 elt != NULL_TREE;
4336 elt = TREE_CHAIN (elt))
4337 {
4338 tree index = TREE_PURPOSE (elt);
4339 HOST_WIDE_INT this_node_count;
4340 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4341 {
4342 tree lo_index = TREE_OPERAND (index, 0);
4343 tree hi_index = TREE_OPERAND (index, 1);
4344
4345 if (TREE_CODE (lo_index) != INTEGER_CST
4346 || TREE_CODE (hi_index) != INTEGER_CST)
4347 {
4348 need_to_clear = 1;
4349 break;
4350 }
4351 this_node_count = (TREE_INT_CST_LOW (hi_index)
4352 - TREE_INT_CST_LOW (lo_index) + 1);
4353 }
4354 else
4355 this_node_count = 1;
4356 count += this_node_count;
4357 if (mostly_zeros_p (TREE_VALUE (elt)))
4358 zero_count += this_node_count;
4359 }
4360 /* Clear the entire array first if there are any missing elements,
4361 or if the incidence of zero elements is >= 75%. */
4362 if (count < maxelt - minelt + 1
4363 || 4 * zero_count >= 3 * count)
4364 need_to_clear = 1;
4365 }
4366 if (need_to_clear && size > 0)
4367 {
4368 if (! cleared)
4369 clear_storage (target, GEN_INT (size),
4370 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4371 cleared = 1;
4372 }
4373 else
4374 /* Inform later passes that the old value is dead. */
4375 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4376
4377 /* Store each element of the constructor into
4378 the corresponding element of TARGET, determined
4379 by counting the elements. */
4380 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4381 elt;
4382 elt = TREE_CHAIN (elt), i++)
4383 {
4384 register enum machine_mode mode;
4385 int bitsize;
4386 int bitpos;
4387 int unsignedp;
4388 tree value = TREE_VALUE (elt);
4389 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4390 tree index = TREE_PURPOSE (elt);
4391 rtx xtarget = target;
4392
4393 if (cleared && is_zeros_p (value))
4394 continue;
4395
4396 unsignedp = TREE_UNSIGNED (elttype);
4397 mode = TYPE_MODE (elttype);
4398 if (mode == BLKmode)
4399 {
4400 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4401 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4402 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4403 else
4404 bitsize = -1;
4405 }
4406 else
4407 bitsize = GET_MODE_BITSIZE (mode);
4408
4409 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4410 {
4411 tree lo_index = TREE_OPERAND (index, 0);
4412 tree hi_index = TREE_OPERAND (index, 1);
4413 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4414 struct nesting *loop;
4415 HOST_WIDE_INT lo, hi, count;
4416 tree position;
4417
4418 /* If the range is constant and "small", unroll the loop. */
4419 if (TREE_CODE (lo_index) == INTEGER_CST
4420 && TREE_CODE (hi_index) == INTEGER_CST
4421 && (lo = TREE_INT_CST_LOW (lo_index),
4422 hi = TREE_INT_CST_LOW (hi_index),
4423 count = hi - lo + 1,
4424 (GET_CODE (target) != MEM
4425 || count <= 2
4426 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4427 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4428 <= 40 * 8))))
4429 {
4430 lo -= minelt; hi -= minelt;
4431 for (; lo <= hi; lo++)
4432 {
4433 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4434 store_constructor_field (target, bitsize, bitpos, mode,
4435 value, type, align, cleared);
4436 }
4437 }
4438 else
4439 {
4440 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4441 loop_top = gen_label_rtx ();
4442 loop_end = gen_label_rtx ();
4443
4444 unsignedp = TREE_UNSIGNED (domain);
4445
4446 index = build_decl (VAR_DECL, NULL_TREE, domain);
4447
4448 DECL_RTL (index) = index_r
4449 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4450 &unsignedp, 0));
4451
4452 if (TREE_CODE (value) == SAVE_EXPR
4453 && SAVE_EXPR_RTL (value) == 0)
4454 {
4455 /* Make sure value gets expanded once before the
4456 loop. */
4457 expand_expr (value, const0_rtx, VOIDmode, 0);
4458 emit_queue ();
4459 }
4460 store_expr (lo_index, index_r, 0);
4461 loop = expand_start_loop (0);
4462
4463 /* Assign value to element index. */
4464 position
4465 = convert (ssizetype,
4466 fold (build (MINUS_EXPR, TREE_TYPE (index),
4467 index, TYPE_MIN_VALUE (domain))));
4468 position = size_binop (MULT_EXPR, position,
4469 convert (ssizetype,
4470 TYPE_SIZE_UNIT (elttype)));
4471
4472 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4473 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4474 xtarget = change_address (target, mode, addr);
4475 if (TREE_CODE (value) == CONSTRUCTOR)
4476 store_constructor (value, xtarget, align, cleared,
4477 bitsize / BITS_PER_UNIT);
4478 else
4479 store_expr (value, xtarget, 0);
4480
4481 expand_exit_loop_if_false (loop,
4482 build (LT_EXPR, integer_type_node,
4483 index, hi_index));
4484
4485 expand_increment (build (PREINCREMENT_EXPR,
4486 TREE_TYPE (index),
4487 index, integer_one_node), 0, 0);
4488 expand_end_loop ();
4489 emit_label (loop_end);
4490 }
4491 }
4492 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4493 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4494 {
4495 rtx pos_rtx, addr;
4496 tree position;
4497
4498 if (index == 0)
4499 index = ssize_int (1);
4500
4501 if (minelt)
4502 index = convert (ssizetype,
4503 fold (build (MINUS_EXPR, index,
4504 TYPE_MIN_VALUE (domain))));
4505 position = size_binop (MULT_EXPR, index,
4506 convert (ssizetype,
4507 TYPE_SIZE_UNIT (elttype)));
4508 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4509 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4510 xtarget = change_address (target, mode, addr);
4511 store_expr (value, xtarget, 0);
4512 }
4513 else
4514 {
4515 if (index != 0)
4516 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4517 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4518 else
4519 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4520 store_constructor_field (target, bitsize, bitpos, mode, value,
4521 type, align, cleared);
4522 }
4523 }
4524 }
4525 /* set constructor assignments */
4526 else if (TREE_CODE (type) == SET_TYPE)
4527 {
4528 tree elt = CONSTRUCTOR_ELTS (exp);
4529 int nbytes = int_size_in_bytes (type), nbits;
4530 tree domain = TYPE_DOMAIN (type);
4531 tree domain_min, domain_max, bitlength;
4532
4533 /* The default implementation strategy is to extract the constant
4534 parts of the constructor, use that to initialize the target,
4535 and then "or" in whatever non-constant ranges we need in addition.
4536
4537 If a large set is all zero or all ones, it is
4538 probably better to set it using memset (if available) or bzero.
4539 Also, if a large set has just a single range, it may also be
4540 better to first clear the whole set (using
4541 bzero/memset), and then set the bits we want. */
4542
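/* For instance (illustrative; SET_TYPE comes from front ends such as
   Pascal or CHILL, not from C): a set-valued initializer along the lines
   of `[1, 3..5]' reaches this code as a CONSTRUCTOR whose constant bits
   are packed into words below, while non-constant ranges are handed to
   __setbits.  */
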
4543 /* Check for all zeros. */
4544 if (elt == NULL_TREE && size > 0)
4545 {
4546 if (!cleared)
4547 clear_storage (target, GEN_INT (size),
4548 TYPE_ALIGN (type) / BITS_PER_UNIT);
4549 return;
4550 }
4551
4552 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4553 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4554 bitlength = size_binop (PLUS_EXPR,
4555 size_diffop (domain_max, domain_min),
4556 ssize_int (1));
4557
4558 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4559 abort ();
4560 nbits = TREE_INT_CST_LOW (bitlength);
4561
4562 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4563 are "complicated" (more than one range), initialize (the
4564 constant parts) by copying from a constant. */
4565 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4566 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4567 {
4568 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4569 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4570 char *bit_buffer = (char *) alloca (nbits);
4571 HOST_WIDE_INT word = 0;
4572 int bit_pos = 0;
4573 int ibit = 0;
4574 int offset = 0; /* In bytes from beginning of set. */
4575 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4576 for (;;)
4577 {
4578 if (bit_buffer[ibit])
4579 {
4580 if (BYTES_BIG_ENDIAN)
4581 word |= (1 << (set_word_size - 1 - bit_pos));
4582 else
4583 word |= 1 << bit_pos;
4584 }
4585 bit_pos++; ibit++;
4586 if (bit_pos >= set_word_size || ibit == nbits)
4587 {
4588 if (word != 0 || ! cleared)
4589 {
4590 rtx datum = GEN_INT (word);
4591 rtx to_rtx;
4592 /* The assumption here is that it is safe to use
4593 XEXP if the set is multi-word, but not if
4594 it's single-word. */
4595 if (GET_CODE (target) == MEM)
4596 {
4597 to_rtx = plus_constant (XEXP (target, 0), offset);
4598 to_rtx = change_address (target, mode, to_rtx);
4599 }
4600 else if (offset == 0)
4601 to_rtx = target;
4602 else
4603 abort ();
4604 emit_move_insn (to_rtx, datum);
4605 }
4606 if (ibit == nbits)
4607 break;
4608 word = 0;
4609 bit_pos = 0;
4610 offset += set_word_size / BITS_PER_UNIT;
4611 }
4612 }
4613 }
4614 else if (!cleared)
4615 {
4616 /* Don't bother clearing storage if the set is all ones. */
4617 if (TREE_CHAIN (elt) != NULL_TREE
4618 || (TREE_PURPOSE (elt) == NULL_TREE
4619 ? nbits != 1
4620 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4621 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4622 || ((HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_VALUE (elt))
4623 - (HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4624 != nbits))))
4625 clear_storage (target, expr_size (exp),
4626 TYPE_ALIGN (type) / BITS_PER_UNIT);
4627 }
4628
4629 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4630 {
4631 /* start of range of element or NULL */
4632 tree startbit = TREE_PURPOSE (elt);
4633 /* end of range of element, or element value */
4634 tree endbit = TREE_VALUE (elt);
4635 #ifdef TARGET_MEM_FUNCTIONS
4636 HOST_WIDE_INT startb, endb;
4637 #endif
4638 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4639
4640 bitlength_rtx = expand_expr (bitlength,
4641 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4642
4643 /* handle non-range tuple element like [ expr ] */
4644 if (startbit == NULL_TREE)
4645 {
4646 startbit = save_expr (endbit);
4647 endbit = startbit;
4648 }
4649 startbit = convert (sizetype, startbit);
4650 endbit = convert (sizetype, endbit);
4651 if (! integer_zerop (domain_min))
4652 {
4653 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4654 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4655 }
4656 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4657 EXPAND_CONST_ADDRESS);
4658 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4659 EXPAND_CONST_ADDRESS);
4660
4661 if (REG_P (target))
4662 {
4663 targetx = assign_stack_temp (GET_MODE (target),
4664 GET_MODE_SIZE (GET_MODE (target)),
4665 0);
4666 emit_move_insn (targetx, target);
4667 }
4668 else if (GET_CODE (target) == MEM)
4669 targetx = target;
4670 else
4671 abort ();
4672
4673 #ifdef TARGET_MEM_FUNCTIONS
4674 /* Optimization: If startbit and endbit are
4675 constants divisible by BITS_PER_UNIT,
4676 call memset instead. */
4677 if (TREE_CODE (startbit) == INTEGER_CST
4678 && TREE_CODE (endbit) == INTEGER_CST
4679 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4680 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4681 {
4682 emit_library_call (memset_libfunc, 0,
4683 VOIDmode, 3,
4684 plus_constant (XEXP (targetx, 0),
4685 startb / BITS_PER_UNIT),
4686 Pmode,
4687 constm1_rtx, TYPE_MODE (integer_type_node),
4688 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4689 TYPE_MODE (sizetype));
4690 }
4691 else
4692 #endif
4693 {
4694 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4695 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4696 bitlength_rtx, TYPE_MODE (sizetype),
4697 startbit_rtx, TYPE_MODE (sizetype),
4698 endbit_rtx, TYPE_MODE (sizetype));
4699 }
4700 if (REG_P (target))
4701 emit_move_insn (target, targetx);
4702 }
4703 }
4704
4705 else
4706 abort ();
4707 }
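
/* Illustrative inputs (a sketch, not exhaustive): store_constructor is
   reached for aggregate initializers such as

     struct pt { int x, y; } p = { 1, 2 };
     int v[100] = { [10 ... 19] = 3 };

   The first is stored field by field through store_constructor_field;
   the range index in the second (a RANGE_EXPR, from the GNU C designated
   initializer extension) is either unrolled or emitted as a small loop,
   as in the code above.  */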
4708
4709 /* Store the value of EXP (an expression tree)
4710 into a subfield of TARGET which has mode MODE and occupies
4711 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4712 If MODE is VOIDmode, it means that we are storing into a bit-field.
4713
4714 If VALUE_MODE is VOIDmode, return nothing in particular.
4715 UNSIGNEDP is not used in this case.
4716
4717 Otherwise, return an rtx for the value stored. This rtx
4718 has mode VALUE_MODE if that is convenient to do.
4719 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4720
4721 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4722 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4723
4724 ALIAS_SET is the alias set for the destination. This value will
4725 (in general) be different from that for TARGET, since TARGET is a
4726 reference to the containing structure. */
4727
4728 static rtx
4729 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4730 unsignedp, align, total_size, alias_set)
4731 rtx target;
4732 HOST_WIDE_INT bitsize;
4733 HOST_WIDE_INT bitpos;
4734 enum machine_mode mode;
4735 tree exp;
4736 enum machine_mode value_mode;
4737 int unsignedp;
4738 unsigned int align;
4739 HOST_WIDE_INT total_size;
4740 int alias_set;
4741 {
4742 HOST_WIDE_INT width_mask = 0;
4743
4744 if (TREE_CODE (exp) == ERROR_MARK)
4745 return const0_rtx;
4746
4747 if (bitsize < HOST_BITS_PER_WIDE_INT)
4748 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4749
4750 /* If we are storing into an unaligned field of an aligned union that is
4751 in a register, we may have the mode of TARGET being an integer mode but
4752 MODE == BLKmode. In that case, get an aligned object whose size and
4753 alignment are the same as TARGET and store TARGET into it (we can avoid
4754 the store if the field being stored is the entire width of TARGET). Then
4755 call ourselves recursively to store the field into a BLKmode version of
4756 that object. Finally, load from the object into TARGET. This is not
4757 very efficient in general, but should only be slightly more expensive
4758 than the otherwise-required unaligned accesses. Perhaps this can be
4759 cleaned up later. */
4760
4761 if (mode == BLKmode
4762 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4763 {
4764 rtx object = assign_stack_temp (GET_MODE (target),
4765 GET_MODE_SIZE (GET_MODE (target)), 0);
4766 rtx blk_object = copy_rtx (object);
4767
4768 MEM_SET_IN_STRUCT_P (object, 1);
4769 MEM_SET_IN_STRUCT_P (blk_object, 1);
4770 PUT_MODE (blk_object, BLKmode);
4771
4772 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4773 emit_move_insn (object, target);
4774
4775 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4776 align, total_size, alias_set);
4777
4778 /* Even though we aren't returning target, we need to
4779 give it the updated value. */
4780 emit_move_insn (target, object);
4781
4782 return blk_object;
4783 }
4784
4785 if (GET_CODE (target) == CONCAT)
4786 {
4787 /* We're storing into a struct containing a single __complex. */
4788
4789 if (bitpos != 0)
4790 abort ();
4791 return store_expr (exp, target, 0);
4792 }
4793
4794 /* If the structure is in a register or if the component
4795 is a bit field, we cannot use addressing to access it.
4796 Use bit-field techniques or SUBREG to store in it. */
4797
4798 if (mode == VOIDmode
4799 || (mode != BLKmode && ! direct_store[(int) mode]
4800 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4801 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4802 || GET_CODE (target) == REG
4803 || GET_CODE (target) == SUBREG
4804 /* If the field isn't aligned enough to store as an ordinary memref,
4805 store it as a bit field. */
4806 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4807 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4808 || bitpos % GET_MODE_ALIGNMENT (mode)))
4809 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4810 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4811 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4812 /* If the RHS and field are a constant size and the size of the
4813 RHS isn't the same size as the bitfield, we must use bitfield
4814 operations. */
4815 || (bitsize >= 0
4816 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4817 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4818 {
4819 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4820
4821 /* If BITSIZE is narrower than the size of the type of EXP
4822 we will be narrowing TEMP. Normally, what's wanted are the
4823 low-order bits. However, if EXP's type is a record and this is
4824 a big-endian machine, we want the upper BITSIZE bits.
4825 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4826 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4827 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4828 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4829 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4830 - bitsize),
4831 temp, 1);
4832
4833 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4834 MODE. */
4835 if (mode != VOIDmode && mode != BLKmode
4836 && mode != TYPE_MODE (TREE_TYPE (exp)))
4837 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4838
4839 /* If the modes of TARGET and TEMP are both BLKmode, both
4840 must be in memory and BITPOS must be aligned on a byte
4841 boundary. If so, we simply do a block copy. */
4842 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4843 {
4844 unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
4845
4846 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4847 || bitpos % BITS_PER_UNIT != 0)
4848 abort ();
4849
4850 target = change_address (target, VOIDmode,
4851 plus_constant (XEXP (target, 0),
4852 bitpos / BITS_PER_UNIT));
4853
4854 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4855 align = MIN (exp_align, align);
4856
4857 /* Find an alignment that is consistent with the bit position. */
4858 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4859 align >>= 1;
4860
4861 emit_block_move (target, temp,
4862 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4863 / BITS_PER_UNIT),
4864 align);
4865
4866 return value_mode == VOIDmode ? const0_rtx : target;
4867 }
4868
4869 /* Store the value in the bitfield. */
4870 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4871 if (value_mode != VOIDmode)
4872 {
4873 /* The caller wants an rtx for the value. */
4874 /* If possible, avoid refetching from the bitfield itself. */
4875 if (width_mask != 0
4876 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4877 {
4878 tree count;
4879 enum machine_mode tmode;
4880
4881 if (unsignedp)
4882 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4883 tmode = GET_MODE (temp);
4884 if (tmode == VOIDmode)
4885 tmode = value_mode;
4886 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4887 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4888 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4889 }
4890 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4891 NULL_RTX, value_mode, 0, align,
4892 total_size);
4893 }
4894 return const0_rtx;
4895 }
4896 else
4897 {
4898 rtx addr = XEXP (target, 0);
4899 rtx to_rtx;
4900
4901 /* If a value is wanted, it must be the lhs;
4902 so make the address stable for multiple use. */
4903
4904 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4905 && ! CONSTANT_ADDRESS_P (addr)
4906 /* A frame-pointer reference is already stable. */
4907 && ! (GET_CODE (addr) == PLUS
4908 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4909 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4910 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4911 addr = copy_to_reg (addr);
4912
4913 /* Now build a reference to just the desired component. */
4914
4915 to_rtx = copy_rtx (change_address (target, mode,
4916 plus_constant (addr,
4917 (bitpos
4918 / BITS_PER_UNIT))));
4919 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4920 MEM_ALIAS_SET (to_rtx) = alias_set;
4921
4922 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4923 }
4924 }
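
/* Illustrative case (hypothetical example): for

     struct s { unsigned int a : 3, b : 5; } x;
     void g (void) { x.b = 7; }

   the store to B arrives here with MODE == VOIDmode because B is a
   bit-field, so it is performed with store_bit_field rather than through
   an ordinary memory reference.  */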
4925 \f
4926 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4927 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4928 ARRAY_REFs and find the ultimate containing object, which we return.
4929
4930 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4931 bit position, and *PUNSIGNEDP to the signedness of the field.
4932 If the position of the field is variable, we store a tree
4933 giving the variable offset (in units) in *POFFSET.
4934 This offset is in addition to the bit position.
4935 If the position is not variable, we store 0 in *POFFSET.
4936 We set *PALIGNMENT to the alignment in bytes of the address that will be
4937 computed. This is the alignment of the thing we return if *POFFSET
4938 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4939
4940 If any of the extraction expressions is volatile,
4941 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4942
4943 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4944 is a mode that can be used to access the field. In that case, *PBITSIZE
4945 is redundant.
4946
4947 If the field describes a variable-sized object, *PMODE is set to
4948 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4949 this case, but the address of the object can be found. */
4950
4951 tree
4952 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4953 punsignedp, pvolatilep, palignment)
4954 tree exp;
4955 HOST_WIDE_INT *pbitsize;
4956 HOST_WIDE_INT *pbitpos;
4957 tree *poffset;
4958 enum machine_mode *pmode;
4959 int *punsignedp;
4960 int *pvolatilep;
4961 unsigned int *palignment;
4962 {
4963 tree size_tree = 0;
4964 enum machine_mode mode = VOIDmode;
4965 tree offset = size_zero_node;
4966 tree bit_offset = bitsize_zero_node;
4967 unsigned int alignment = BIGGEST_ALIGNMENT;
4968 tree tem;
4969
4970 /* First get the mode, signedness, and size. We do this from just the
4971 outermost expression. */
4972 if (TREE_CODE (exp) == COMPONENT_REF)
4973 {
4974 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4975 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4976 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4977
4978 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4979 }
4980 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4981 {
4982 size_tree = TREE_OPERAND (exp, 1);
4983 *punsignedp = TREE_UNSIGNED (exp);
4984 }
4985 else
4986 {
4987 mode = TYPE_MODE (TREE_TYPE (exp));
4988 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4989
4990 if (mode == BLKmode)
4991 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4992 else
4993 *pbitsize = GET_MODE_BITSIZE (mode);
4994 }
4995
4996 if (size_tree != 0)
4997 {
4998 if (! host_integerp (size_tree, 1))
4999 mode = BLKmode, *pbitsize = -1;
5000 else
5001 *pbitsize = tree_low_cst (size_tree, 1);
5002 }
5003
5004 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5005 and find the ultimate containing object. */
5006 while (1)
5007 {
5008 if (TREE_CODE (exp) == BIT_FIELD_REF)
5009 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5010 else if (TREE_CODE (exp) == COMPONENT_REF)
5011 {
5012 tree field = TREE_OPERAND (exp, 1);
5013 tree this_offset = DECL_FIELD_OFFSET (field);
5014
5015 /* If this field hasn't been filled in yet, don't go
5016 past it. This should only happen when folding expressions
5017 made during type construction. */
5018 if (this_offset == 0)
5019 break;
5020 else if (! TREE_CONSTANT (this_offset)
5021 && contains_placeholder_p (this_offset))
5022 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5023
5024 offset = size_binop (PLUS_EXPR, offset, DECL_FIELD_OFFSET (field));
5025 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5026 DECL_FIELD_BIT_OFFSET (field));
5027
5028 if (! host_integerp (offset, 0))
5029 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5030 }
5031 else if (TREE_CODE (exp) == ARRAY_REF)
5032 {
5033 tree index = TREE_OPERAND (exp, 1);
5034 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5035 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5036
5037 /* We assume all arrays have sizes that are a multiple of a byte.
5038 First subtract the lower bound, if any, in the type of the
5039 index, then convert to sizetype and multiply by the size of the
5040 array element. */
5041 if (low_bound != 0 && ! integer_zerop (low_bound))
5042 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5043 index, low_bound));
5044
5045 if (! TREE_CONSTANT (index)
5046 && contains_placeholder_p (index))
5047 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5048
5049 offset = size_binop (PLUS_EXPR, offset,
5050 size_binop (MULT_EXPR,
5051 convert (sizetype, index),
5052 TYPE_SIZE_UNIT (TREE_TYPE (exp))));
5053 }
5054 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5055 && ! ((TREE_CODE (exp) == NOP_EXPR
5056 || TREE_CODE (exp) == CONVERT_EXPR)
5057 && (TYPE_MODE (TREE_TYPE (exp))
5058 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5059 break;
5060
5061 /* If any reference in the chain is volatile, the effect is volatile. */
5062 if (TREE_THIS_VOLATILE (exp))
5063 *pvolatilep = 1;
5064
5065 /* If the offset is non-constant already, then we can't assume any
5066 alignment more than the alignment here. */
5067 if (! TREE_CONSTANT (offset))
5068 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5069
5070 exp = TREE_OPERAND (exp, 0);
5071 }
5072
5073 if (DECL_P (exp))
5074 alignment = MIN (alignment, DECL_ALIGN (exp));
5075 else if (TREE_TYPE (exp) != 0)
5076 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5077
5078 /* If OFFSET is constant, see if we can return the whole thing as a
5079 constant bit position. Otherwise, split it up. */
5080 if (host_integerp (offset, 0)
5081 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5082 bitsize_unit_node))
5083 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5084 && host_integerp (tem, 0))
5085 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5086 else
5087 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5088
5089 *pmode = mode;
5090 *palignment = alignment / BITS_PER_UNIT;
5091 return exp;
5092 }
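
/* Worked example (for illustration): for a reference such as `a.b[i].c',
   the loop above peels the COMPONENT_REF and ARRAY_REF nodes from the
   outside in, accumulating the constant displacement of `c' and `b' in
   BIT_OFFSET, the variable part (a multiple of `i') in OFFSET, and
   finally returns the declaration of `a' as the containing object.  */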
5093
5094 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5095
5096 static enum memory_use_mode
5097 get_memory_usage_from_modifier (modifier)
5098 enum expand_modifier modifier;
5099 {
5100 switch (modifier)
5101 {
5102 case EXPAND_NORMAL:
5103 case EXPAND_SUM:
5104 return MEMORY_USE_RO;
5105 break;
5106 case EXPAND_MEMORY_USE_WO:
5107 return MEMORY_USE_WO;
5108 break;
5109 case EXPAND_MEMORY_USE_RW:
5110 return MEMORY_USE_RW;
5111 break;
5112 case EXPAND_MEMORY_USE_DONT:
5113 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5114 MEMORY_USE_DONT, because they are modifiers to a call of
5115 expand_expr in the ADDR_EXPR case of expand_expr. */
5116 case EXPAND_CONST_ADDRESS:
5117 case EXPAND_INITIALIZER:
5118 return MEMORY_USE_DONT;
5119 case EXPAND_MEMORY_USE_BAD:
5120 default:
5121 abort ();
5122 }
5123 }
5124 \f
5125 /* Given an rtx VALUE that may contain additions and multiplications,
5126 return an equivalent value that just refers to a register or memory.
5127 This is done by generating instructions to perform the arithmetic
5128 and returning a pseudo-register containing the value.
5129
5130 The returned value may be a REG, SUBREG, MEM or constant. */
5131
5132 rtx
5133 force_operand (value, target)
5134 rtx value, target;
5135 {
5136 register optab binoptab = 0;
5137 /* Use a temporary to force order of execution of calls to
5138 `force_operand'. */
5139 rtx tmp;
5140 register rtx op2;
5141 /* Use subtarget as the target for operand 0 of a binary operation. */
5142 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5143
5144 /* Check for a PIC address load. */
5145 if (flag_pic
5146 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5147 && XEXP (value, 0) == pic_offset_table_rtx
5148 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5149 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5150 || GET_CODE (XEXP (value, 1)) == CONST))
5151 {
5152 if (!subtarget)
5153 subtarget = gen_reg_rtx (GET_MODE (value));
5154 emit_move_insn (subtarget, value);
5155 return subtarget;
5156 }
5157
5158 if (GET_CODE (value) == PLUS)
5159 binoptab = add_optab;
5160 else if (GET_CODE (value) == MINUS)
5161 binoptab = sub_optab;
5162 else if (GET_CODE (value) == MULT)
5163 {
5164 op2 = XEXP (value, 1);
5165 if (!CONSTANT_P (op2)
5166 && !(GET_CODE (op2) == REG && op2 != subtarget))
5167 subtarget = 0;
5168 tmp = force_operand (XEXP (value, 0), subtarget);
5169 return expand_mult (GET_MODE (value), tmp,
5170 force_operand (op2, NULL_RTX),
5171 target, 0);
5172 }
5173
5174 if (binoptab)
5175 {
5176 op2 = XEXP (value, 1);
5177 if (!CONSTANT_P (op2)
5178 && !(GET_CODE (op2) == REG && op2 != subtarget))
5179 subtarget = 0;
5180 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5181 {
5182 binoptab = add_optab;
5183 op2 = negate_rtx (GET_MODE (value), op2);
5184 }
5185
5186 /* Check for an addition with OP2 a constant integer and our first
5187 operand a PLUS of a virtual register and something else. In that
5188 case, we want to emit the sum of the virtual register and the
5189 constant first and then add the other value. This allows virtual
5190 register instantiation to simply modify the constant rather than
5191 creating another one around this addition. */
5192 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5193 && GET_CODE (XEXP (value, 0)) == PLUS
5194 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5195 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5196 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5197 {
5198 rtx temp = expand_binop (GET_MODE (value), binoptab,
5199 XEXP (XEXP (value, 0), 0), op2,
5200 subtarget, 0, OPTAB_LIB_WIDEN);
5201 return expand_binop (GET_MODE (value), binoptab, temp,
5202 force_operand (XEXP (XEXP (value, 0), 1), 0),
5203 target, 0, OPTAB_LIB_WIDEN);
5204 }
5205
5206 tmp = force_operand (XEXP (value, 0), subtarget);
5207 return expand_binop (GET_MODE (value), binoptab, tmp,
5208 force_operand (op2, NULL_RTX),
5209 target, 0, OPTAB_LIB_WIDEN);
5210 /* We give UNSIGNEDP = 0 to expand_binop
5211 because the only operations we are expanding here are signed ones. */
5212 }
5213 return value;
5214 }
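
/* Usage sketch (the operands are hypothetical): given
   VALUE == (plus (reg 100) (const_int 4)), force_operand emits the
   addition via expand_binop and returns a pseudo register holding the
   sum; a VALUE that is already a REG, MEM or constant comes back
   unchanged.  */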
5215 \f
5216 /* Subroutine of expand_expr:
5217 save the non-copied parts (LIST) of an expr (LHS), and return a list
5218 which can restore these values to their previous values,
5219 should something modify their storage. */
5220
5221 static tree
5222 save_noncopied_parts (lhs, list)
5223 tree lhs;
5224 tree list;
5225 {
5226 tree tail;
5227 tree parts = 0;
5228
5229 for (tail = list; tail; tail = TREE_CHAIN (tail))
5230 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5231 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5232 else
5233 {
5234 tree part = TREE_VALUE (tail);
5235 tree part_type = TREE_TYPE (part);
5236 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5237 rtx target = assign_temp (part_type, 0, 1, 1);
5238 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5239 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5240 parts = tree_cons (to_be_saved,
5241 build (RTL_EXPR, part_type, NULL_TREE,
5242 (tree) target),
5243 parts);
5244 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5245 }
5246 return parts;
5247 }
5248
5249 /* Subroutine of expand_expr:
5250 record the non-copied parts (LIST) of an expr (LHS), and return a list
5251 which specifies the initial values of these parts. */
5252
5253 static tree
5254 init_noncopied_parts (lhs, list)
5255 tree lhs;
5256 tree list;
5257 {
5258 tree tail;
5259 tree parts = 0;
5260
5261 for (tail = list; tail; tail = TREE_CHAIN (tail))
5262 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5263 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5264 else if (TREE_PURPOSE (tail))
5265 {
5266 tree part = TREE_VALUE (tail);
5267 tree part_type = TREE_TYPE (part);
5268 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5269 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5270 }
5271 return parts;
5272 }
5273
5274 /* Subroutine of expand_expr: return nonzero iff there is no way that
5275 EXP can reference X, which is being modified. TOP_P is nonzero if this
5276 call is going to be used to determine whether we need a temporary
5277 for EXP, as opposed to a recursive call to this function.
5278
5279 It is always safe for this routine to return zero since it merely
5280 searches for optimization opportunities. */
5281
5282 static int
5283 safe_from_p (x, exp, top_p)
5284 rtx x;
5285 tree exp;
5286 int top_p;
5287 {
5288 rtx exp_rtl = 0;
5289 int i, nops;
5290 static int save_expr_count;
5291 static int save_expr_size = 0;
5292 static tree *save_expr_rewritten;
5293 static tree save_expr_trees[256];
5294
5295 if (x == 0
5296 /* If EXP has varying size, we MUST use a target since we currently
5297 have no way of allocating temporaries of variable size
5298 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5299 So we assume here that something at a higher level has prevented a
5300 clash. This is somewhat bogus, but the best we can do. Only
5301 do this when X is BLKmode and when we are at the top level. */
5302 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5303 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5304 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5305 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5306 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5307 != INTEGER_CST)
5308 && GET_MODE (x) == BLKmode))
5309 return 1;
5310
5311 if (top_p && save_expr_size == 0)
5312 {
5313 int rtn;
5314
5315 save_expr_count = 0;
5316 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5317 save_expr_rewritten = &save_expr_trees[0];
5318
5319 rtn = safe_from_p (x, exp, 1);
5320
5321 for (i = 0; i < save_expr_count; ++i)
5322 {
5323 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5324 abort ();
5325 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5326 }
5327
5328 save_expr_size = 0;
5329
5330 return rtn;
5331 }
5332
5333 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5334 find the underlying pseudo. */
5335 if (GET_CODE (x) == SUBREG)
5336 {
5337 x = SUBREG_REG (x);
5338 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5339 return 0;
5340 }
5341
5342 /* If X is a location in the outgoing argument area, it is always safe. */
5343 if (GET_CODE (x) == MEM
5344 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5345 || (GET_CODE (XEXP (x, 0)) == PLUS
5346 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5347 return 1;
5348
5349 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5350 {
5351 case 'd':
5352 exp_rtl = DECL_RTL (exp);
5353 break;
5354
5355 case 'c':
5356 return 1;
5357
5358 case 'x':
5359 if (TREE_CODE (exp) == TREE_LIST)
5360 return ((TREE_VALUE (exp) == 0
5361 || safe_from_p (x, TREE_VALUE (exp), 0))
5362 && (TREE_CHAIN (exp) == 0
5363 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5364 else if (TREE_CODE (exp) == ERROR_MARK)
5365 return 1; /* An already-visited SAVE_EXPR? */
5366 else
5367 return 0;
5368
5369 case '1':
5370 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5371
5372 case '2':
5373 case '<':
5374 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5375 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5376
5377 case 'e':
5378 case 'r':
5379 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5380 the expression. If it is set, we conflict iff we are that rtx or
5381 both are in memory. Otherwise, we check all operands of the
5382 expression recursively. */
5383
5384 switch (TREE_CODE (exp))
5385 {
5386 case ADDR_EXPR:
5387 return (staticp (TREE_OPERAND (exp, 0))
5388 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5389 || TREE_STATIC (exp));
5390
5391 case INDIRECT_REF:
5392 if (GET_CODE (x) == MEM)
5393 return 0;
5394 break;
5395
5396 case CALL_EXPR:
5397 exp_rtl = CALL_EXPR_RTL (exp);
5398 if (exp_rtl == 0)
5399 {
5400 /* Assume that the call will clobber all hard registers and
5401 all of memory. */
5402 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5403 || GET_CODE (x) == MEM)
5404 return 0;
5405 }
5406
5407 break;
5408
5409 case RTL_EXPR:
5410 /* If a sequence exists, we would have to scan every instruction
5411 in the sequence to see if it was safe. This is probably not
5412 worthwhile. */
5413 if (RTL_EXPR_SEQUENCE (exp))
5414 return 0;
5415
5416 exp_rtl = RTL_EXPR_RTL (exp);
5417 break;
5418
5419 case WITH_CLEANUP_EXPR:
5420 exp_rtl = RTL_EXPR_RTL (exp);
5421 break;
5422
5423 case CLEANUP_POINT_EXPR:
5424 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5425
5426 case SAVE_EXPR:
5427 exp_rtl = SAVE_EXPR_RTL (exp);
5428 if (exp_rtl)
5429 break;
5430
5431 /* This SAVE_EXPR might appear many times in the top-level
5432 safe_from_p() expression, and if it has a complex
5433 subexpression, examining it multiple times could result
5434 in a combinatorial explosion. E.g. on an Alpha
5435 running at least 200MHz, a Fortran test case compiled with
5436 optimization took about 28 minutes to compile -- even though
5437 it was only a few lines long, and the complicated line causing
5438 so much time to be spent in the earlier version of safe_from_p()
5439 had only 293 or so unique nodes.
5440
5441 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5442 where it is so we can turn it back in the top-level safe_from_p()
5443 when we're done. */
5444
5445 /* For now, don't bother re-sizing the array. */
5446 if (save_expr_count >= save_expr_size)
5447 return 0;
5448 save_expr_rewritten[save_expr_count++] = exp;
5449
5450 nops = tree_code_length[(int) SAVE_EXPR];
5451 for (i = 0; i < nops; i++)
5452 {
5453 tree operand = TREE_OPERAND (exp, i);
5454 if (operand == NULL_TREE)
5455 continue;
5456 TREE_SET_CODE (exp, ERROR_MARK);
5457 if (!safe_from_p (x, operand, 0))
5458 return 0;
5459 TREE_SET_CODE (exp, SAVE_EXPR);
5460 }
5461 TREE_SET_CODE (exp, ERROR_MARK);
5462 return 1;
5463
5464 case BIND_EXPR:
5465 /* The only operand we look at is operand 1. The rest aren't
5466 part of the expression. */
5467 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5468
5469 case METHOD_CALL_EXPR:
5470 	  /* This takes an rtx argument, but shouldn't appear here. */
5471 abort ();
5472
5473 default:
5474 break;
5475 }
5476
5477 /* If we have an rtx, we do not need to scan our operands. */
5478 if (exp_rtl)
5479 break;
5480
5481 nops = tree_code_length[(int) TREE_CODE (exp)];
5482 for (i = 0; i < nops; i++)
5483 if (TREE_OPERAND (exp, i) != 0
5484 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5485 return 0;
5486 }
5487
5488 /* If we have an rtl, find any enclosed object. Then see if we conflict
5489 with it. */
5490 if (exp_rtl)
5491 {
5492 if (GET_CODE (exp_rtl) == SUBREG)
5493 {
5494 exp_rtl = SUBREG_REG (exp_rtl);
5495 if (GET_CODE (exp_rtl) == REG
5496 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5497 return 0;
5498 }
5499
5500 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5501 are memory and EXP is not readonly. */
5502 return ! (rtx_equal_p (x, exp_rtl)
5503 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5504 && ! TREE_READONLY (exp)));
5505 }
5506
5507 /* If we reach here, it is safe. */
5508 return 1;
5509 }
5510
5511 /* Subroutine of expand_expr: return nonzero iff EXP is an
5512 expression whose type is statically determinable. */
5513
5514 static int
5515 fixed_type_p (exp)
5516 tree exp;
5517 {
5518 if (TREE_CODE (exp) == PARM_DECL
5519 || TREE_CODE (exp) == VAR_DECL
5520 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5521 || TREE_CODE (exp) == COMPONENT_REF
5522 || TREE_CODE (exp) == ARRAY_REF)
5523 return 1;
5524 return 0;
5525 }
5526
5527 /* Subroutine of expand_expr: return rtx if EXP is a
5528 variable or parameter; else return 0. */
5529
5530 static rtx
5531 var_rtx (exp)
5532 tree exp;
5533 {
5534 STRIP_NOPS (exp);
5535 switch (TREE_CODE (exp))
5536 {
5537 case PARM_DECL:
5538 case VAR_DECL:
5539 return DECL_RTL (exp);
5540 default:
5541 return 0;
5542 }
5543 }
5544
5545 #ifdef MAX_INTEGER_COMPUTATION_MODE
5546 void
5547 check_max_integer_computation_mode (exp)
5548 tree exp;
5549 {
5550 enum tree_code code;
5551 enum machine_mode mode;
5552
5553 /* Strip any NOPs that don't change the mode. */
5554 STRIP_NOPS (exp);
5555 code = TREE_CODE (exp);
5556
5557 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5558 if (code == NOP_EXPR
5559 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5560 return;
5561
5562 /* First check the type of the overall operation. We need only look at
5563 unary, binary and relational operations. */
5564 if (TREE_CODE_CLASS (code) == '1'
5565 || TREE_CODE_CLASS (code) == '2'
5566 || TREE_CODE_CLASS (code) == '<')
5567 {
5568 mode = TYPE_MODE (TREE_TYPE (exp));
5569 if (GET_MODE_CLASS (mode) == MODE_INT
5570 && mode > MAX_INTEGER_COMPUTATION_MODE)
5571 fatal ("unsupported wide integer operation");
5572 }
5573
5574 /* Check operand of a unary op. */
5575 if (TREE_CODE_CLASS (code) == '1')
5576 {
5577 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5578 if (GET_MODE_CLASS (mode) == MODE_INT
5579 && mode > MAX_INTEGER_COMPUTATION_MODE)
5580 fatal ("unsupported wide integer operation");
5581 }
5582
5583 /* Check operands of a binary/comparison op. */
5584 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5585 {
5586 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5587 if (GET_MODE_CLASS (mode) == MODE_INT
5588 && mode > MAX_INTEGER_COMPUTATION_MODE)
5589 fatal ("unsupported wide integer operation");
5590
5591 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5592 if (GET_MODE_CLASS (mode) == MODE_INT
5593 && mode > MAX_INTEGER_COMPUTATION_MODE)
5594 fatal ("unsupported wide integer operation");
5595 }
5596 }
5597 #endif
5598
5599 \f
5600 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5601 has any readonly fields. If any of the fields have types that
5602 contain readonly fields, return true as well. */
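/* For example (illustrative C source, not from any test case):

     struct inner { const int a; };
     struct outer { struct inner i; int b; };

   readonly_fields_p returns 1 for both RECORD_TYPEs: `inner' because of
   the const field `a', and `outer' because its field `i' has a type
   that in turn contains a readonly field.  */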
5603
5604 static int
5605 readonly_fields_p (type)
5606 tree type;
5607 {
5608 tree field;
5609
5610 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5611 if (TREE_CODE (field) == FIELD_DECL
5612 && (TREE_READONLY (field)
5613 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5614 && readonly_fields_p (TREE_TYPE (field)))))
5615 return 1;
5616
5617 return 0;
5618 }
5619 \f
5620 /* expand_expr: generate code for computing expression EXP.
5621 An rtx for the computed value is returned. The value is never null.
5622 In the case of a void EXP, const0_rtx is returned.
5623
5624 The value may be stored in TARGET if TARGET is nonzero.
5625 TARGET is just a suggestion; callers must assume that
5626 the rtx returned may not be the same as TARGET.
5627
5628 If TARGET is CONST0_RTX, it means that the value will be ignored.
5629
5630 If TMODE is not VOIDmode, it suggests generating the
5631 result in mode TMODE. But this is done only when convenient.
5632    Otherwise, TMODE is ignored and the value is generated in its natural mode.
5633 TMODE is just a suggestion; callers must assume that
5634 the rtx returned may not have mode TMODE.
5635
5636 Note that TARGET may have neither TMODE nor MODE. In that case, it
5637 probably will not be used.
5638
5639 If MODIFIER is EXPAND_SUM then when EXP is an addition
5640 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5641 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5642 products as above, or REG or MEM, or constant.
5643 Ordinarily in such cases we would output mul or add instructions
5644 and then return a pseudo reg containing the sum.
5645
5646 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5647 it also marks a label as absolutely required (it can't be dead).
5648 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5649 This is used for outputting expressions used in initializers.
5650
5651 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5652 with a constant address even if that address is not normally legitimate.
5653 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
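/* A minimal calling sketch (the argument choices are illustrative only,
   not taken from a particular caller):

     rtx r = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   Passing NULL_RTX and VOIDmode lets expand_expr choose both the
   location and the mode of the result; a caller that does pass TARGET
   or TMODE must still use the rtx actually returned, since both are
   only suggestions.  */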
5654
5655 rtx
5656 expand_expr (exp, target, tmode, modifier)
5657 register tree exp;
5658 rtx target;
5659 enum machine_mode tmode;
5660 enum expand_modifier modifier;
5661 {
5662 register rtx op0, op1, temp;
5663 tree type = TREE_TYPE (exp);
5664 int unsignedp = TREE_UNSIGNED (type);
5665 register enum machine_mode mode;
5666 register enum tree_code code = TREE_CODE (exp);
5667 optab this_optab;
5668 rtx subtarget, original_target;
5669 int ignore;
5670 tree context;
5671 /* Used by check-memory-usage to make modifier read only. */
5672 enum expand_modifier ro_modifier;
5673
5674 /* Handle ERROR_MARK before anybody tries to access its type. */
5675 if (TREE_CODE (exp) == ERROR_MARK)
5676 {
5677 op0 = CONST0_RTX (tmode);
5678 if (op0 != 0)
5679 return op0;
5680 return const0_rtx;
5681 }
5682
5683 mode = TYPE_MODE (type);
5684 /* Use subtarget as the target for operand 0 of a binary operation. */
5685 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5686 original_target = target;
5687 ignore = (target == const0_rtx
5688 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5689 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5690 || code == COND_EXPR)
5691 && TREE_CODE (type) == VOID_TYPE));
5692
5693 /* Make a read-only version of the modifier. */
5694 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5695 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5696 ro_modifier = modifier;
5697 else
5698 ro_modifier = EXPAND_NORMAL;
5699
5700 /* Don't use hard regs as subtargets, because the combiner
5701 can only handle pseudo regs. */
5702 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5703 subtarget = 0;
5704 /* Avoid subtargets inside loops,
5705 since they hide some invariant expressions. */
5706 if (preserve_subexpressions_p ())
5707 subtarget = 0;
5708
5709 /* If we are going to ignore this result, we need only do something
5710 if there is a side-effect somewhere in the expression. If there
5711 is, short-circuit the most common cases here. Note that we must
5712 not call expand_expr with anything but const0_rtx in case this
5713 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5714
5715 if (ignore)
5716 {
5717 if (! TREE_SIDE_EFFECTS (exp))
5718 return const0_rtx;
5719
5720 /* Ensure we reference a volatile object even if value is ignored, but
5721 don't do this if all we are doing is taking its address. */
5722 if (TREE_THIS_VOLATILE (exp)
5723 && TREE_CODE (exp) != FUNCTION_DECL
5724 && mode != VOIDmode && mode != BLKmode
5725 && modifier != EXPAND_CONST_ADDRESS)
5726 {
5727 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5728 if (GET_CODE (temp) == MEM)
5729 temp = copy_to_reg (temp);
5730 return const0_rtx;
5731 }
5732
5733 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5734 || code == INDIRECT_REF || code == BUFFER_REF)
5735 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5736 VOIDmode, ro_modifier);
5737 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5738 || code == ARRAY_REF)
5739 {
5740 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5741 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5742 return const0_rtx;
5743 }
5744 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5745 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5746 /* If the second operand has no side effects, just evaluate
5747 the first. */
5748 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5749 VOIDmode, ro_modifier);
5750 else if (code == BIT_FIELD_REF)
5751 {
5752 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5753 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5754 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5755 return const0_rtx;
5756 }
5757 ;
5758 target = 0;
5759 }
5760
5761 #ifdef MAX_INTEGER_COMPUTATION_MODE
5762 /* Only check stuff here if the mode we want is different from the mode
5763    of the expression; if it's the same, check_max_integer_computation_mode
5764 will handle it. Do we really need to check this stuff at all? */
5765
5766 if (target
5767 && GET_MODE (target) != mode
5768 && TREE_CODE (exp) != INTEGER_CST
5769 && TREE_CODE (exp) != PARM_DECL
5770 && TREE_CODE (exp) != ARRAY_REF
5771 && TREE_CODE (exp) != COMPONENT_REF
5772 && TREE_CODE (exp) != BIT_FIELD_REF
5773 && TREE_CODE (exp) != INDIRECT_REF
5774 && TREE_CODE (exp) != CALL_EXPR
5775 && TREE_CODE (exp) != VAR_DECL
5776 && TREE_CODE (exp) != RTL_EXPR)
5777 {
5778 enum machine_mode mode = GET_MODE (target);
5779
5780 if (GET_MODE_CLASS (mode) == MODE_INT
5781 && mode > MAX_INTEGER_COMPUTATION_MODE)
5782 fatal ("unsupported wide integer operation");
5783 }
5784
5785 if (tmode != mode
5786 && TREE_CODE (exp) != INTEGER_CST
5787 && TREE_CODE (exp) != PARM_DECL
5788 && TREE_CODE (exp) != ARRAY_REF
5789 && TREE_CODE (exp) != COMPONENT_REF
5790 && TREE_CODE (exp) != BIT_FIELD_REF
5791 && TREE_CODE (exp) != INDIRECT_REF
5792 && TREE_CODE (exp) != VAR_DECL
5793 && TREE_CODE (exp) != CALL_EXPR
5794 && TREE_CODE (exp) != RTL_EXPR
5795 && GET_MODE_CLASS (tmode) == MODE_INT
5796 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5797 fatal ("unsupported wide integer operation");
5798
5799 check_max_integer_computation_mode (exp);
5800 #endif
5801
5802   /* If we will do cse, generate all results into pseudo registers
5803 since 1) that allows cse to find more things
5804 and 2) otherwise cse could produce an insn the machine
5805 cannot support. */
5806
5807 if (! cse_not_expected && mode != BLKmode && target
5808 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5809 target = subtarget;
5810
5811 switch (code)
5812 {
5813 case LABEL_DECL:
5814 {
5815 tree function = decl_function_context (exp);
5816 /* Handle using a label in a containing function. */
5817 if (function != current_function_decl
5818 && function != inline_function_decl && function != 0)
5819 {
5820 struct function *p = find_function_data (function);
5821 /* Allocate in the memory associated with the function
5822 that the label is in. */
5823 push_obstacks (p->function_obstack,
5824 p->function_maybepermanent_obstack);
5825
5826 p->expr->x_forced_labels
5827 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5828 p->expr->x_forced_labels);
5829 pop_obstacks ();
5830 }
5831 else
5832 {
5833 if (modifier == EXPAND_INITIALIZER)
5834 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5835 label_rtx (exp),
5836 forced_labels);
5837 }
5838
5839 temp = gen_rtx_MEM (FUNCTION_MODE,
5840 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5841 if (function != current_function_decl
5842 && function != inline_function_decl && function != 0)
5843 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5844 return temp;
5845 }
5846
5847 case PARM_DECL:
5848 if (DECL_RTL (exp) == 0)
5849 {
5850 error_with_decl (exp, "prior parameter's size depends on `%s'");
5851 return CONST0_RTX (mode);
5852 }
5853
5854 /* ... fall through ... */
5855
5856 case VAR_DECL:
5857 /* If a static var's type was incomplete when the decl was written,
5858 but the type is complete now, lay out the decl now. */
5859 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5860 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5861 {
5862 push_obstacks_nochange ();
5863 end_temporary_allocation ();
5864 layout_decl (exp, 0);
5865 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5866 pop_obstacks ();
5867 }
5868
5869 /* Although static-storage variables start off initialized, according to
5870 ANSI C, a memcpy could overwrite them with uninitialized values. So
5871 we check them too. This also lets us check for read-only variables
5872 accessed via a non-const declaration, in case it won't be detected
5873 any other way (e.g., in an embedded system or OS kernel without
5874 memory protection).
5875
5876 Aggregates are not checked here; they're handled elsewhere. */
5877 if (cfun && current_function_check_memory_usage
5878 && code == VAR_DECL
5879 && GET_CODE (DECL_RTL (exp)) == MEM
5880 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5881 {
5882 enum memory_use_mode memory_usage;
5883 memory_usage = get_memory_usage_from_modifier (modifier);
5884
5885 if (memory_usage != MEMORY_USE_DONT)
5886 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5887 XEXP (DECL_RTL (exp), 0), Pmode,
5888 GEN_INT (int_size_in_bytes (type)),
5889 TYPE_MODE (sizetype),
5890 GEN_INT (memory_usage),
5891 TYPE_MODE (integer_type_node));
5892 }
5893
5894 /* ... fall through ... */
5895
5896 case FUNCTION_DECL:
5897 case RESULT_DECL:
5898 if (DECL_RTL (exp) == 0)
5899 abort ();
5900
5901       /* Ensure the variable is marked as used even if it doesn't go through
5902 	 a parser.  If it hasn't been used yet, write out an external
5903 	 definition. */
5904 if (! TREE_USED (exp))
5905 {
5906 assemble_external (exp);
5907 TREE_USED (exp) = 1;
5908 }
5909
5910 /* Show we haven't gotten RTL for this yet. */
5911 temp = 0;
5912
5913 /* Handle variables inherited from containing functions. */
5914 context = decl_function_context (exp);
5915
5916 /* We treat inline_function_decl as an alias for the current function
5917 because that is the inline function whose vars, types, etc.
5918 are being merged into the current function.
5919 See expand_inline_function. */
5920
5921 if (context != 0 && context != current_function_decl
5922 && context != inline_function_decl
5923 /* If var is static, we don't need a static chain to access it. */
5924 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5925 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5926 {
5927 rtx addr;
5928
5929 /* Mark as non-local and addressable. */
5930 DECL_NONLOCAL (exp) = 1;
5931 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5932 abort ();
5933 mark_addressable (exp);
5934 if (GET_CODE (DECL_RTL (exp)) != MEM)
5935 abort ();
5936 addr = XEXP (DECL_RTL (exp), 0);
5937 if (GET_CODE (addr) == MEM)
5938 addr = gen_rtx_MEM (Pmode,
5939 fix_lexical_addr (XEXP (addr, 0), exp));
5940 else
5941 addr = fix_lexical_addr (addr, exp);
5942 temp = change_address (DECL_RTL (exp), mode, addr);
5943 }
5944
5945 /* This is the case of an array whose size is to be determined
5946 from its initializer, while the initializer is still being parsed.
5947 See expand_decl. */
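      /* Illustrative example (an assumption about the source, not a
	 test case): while the initializer of "int a[] = { f (), g () };"
	 is still being parsed, the size of `a' is unknown, so its
	 DECL_RTL can be a MEM whose address is a bare pseudo REG to be
	 filled in later.  */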
5948
5949 else if (GET_CODE (DECL_RTL (exp)) == MEM
5950 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5951 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5952 XEXP (DECL_RTL (exp), 0));
5953
5954       /* If DECL_RTL is memory, we are in the normal case; if either
5955 	 the address is not valid, or it is not a register and -fforce-addr
5956 	 is specified, get the address into a register. */
5957
5958 else if (GET_CODE (DECL_RTL (exp)) == MEM
5959 && modifier != EXPAND_CONST_ADDRESS
5960 && modifier != EXPAND_SUM
5961 && modifier != EXPAND_INITIALIZER
5962 && (! memory_address_p (DECL_MODE (exp),
5963 XEXP (DECL_RTL (exp), 0))
5964 || (flag_force_addr
5965 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5966 temp = change_address (DECL_RTL (exp), VOIDmode,
5967 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5968
5969       /* If we got something, return it.  But first, set the alignment
5970 	 if the address is a register. */
5971 if (temp != 0)
5972 {
5973 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5974 mark_reg_pointer (XEXP (temp, 0),
5975 DECL_ALIGN (exp) / BITS_PER_UNIT);
5976
5977 return temp;
5978 }
5979
5980 /* If the mode of DECL_RTL does not match that of the decl, it
5981 must be a promoted value. We return a SUBREG of the wanted mode,
5982 but mark it so that we know that it was already extended. */
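      /* Illustrative, target-dependent example (an assumption, since it
	 depends on the target's PROMOTE_MODE): on a machine that
	 promotes HImode locals to SImode, a `short' variable may have
	 DECL_RTL (reg:SI n) while MODE is HImode; we then return
	 (subreg:HI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set so
	 later code knows the value was already extended.  */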
5983
5984 if (GET_CODE (DECL_RTL (exp)) == REG
5985 && GET_MODE (DECL_RTL (exp)) != mode)
5986 {
5987 /* Get the signedness used for this variable. Ensure we get the
5988 same mode we got when the variable was declared. */
5989 if (GET_MODE (DECL_RTL (exp))
5990 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5991 abort ();
5992
5993 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5994 SUBREG_PROMOTED_VAR_P (temp) = 1;
5995 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5996 return temp;
5997 }
5998
5999 return DECL_RTL (exp);
6000
6001 case INTEGER_CST:
6002 return immed_double_const (TREE_INT_CST_LOW (exp),
6003 TREE_INT_CST_HIGH (exp), mode);
6004
6005 case CONST_DECL:
6006 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6007 EXPAND_MEMORY_USE_BAD);
6008
6009 case REAL_CST:
6010 /* If optimized, generate immediate CONST_DOUBLE
6011 which will be turned into memory by reload if necessary.
6012
6013 We used to force a register so that loop.c could see it. But
6014 this does not allow gen_* patterns to perform optimizations with
6015 the constants. It also produces two insns in cases like "x = 1.0;".
6016 On most machines, floating-point constants are not permitted in
6017 many insns, so we'd end up copying it to a register in any case.
6018
6019 Now, we do the copying in expand_binop, if appropriate. */
6020 return immed_real_const (exp);
6021
6022 case COMPLEX_CST:
6023 case STRING_CST:
6024 if (! TREE_CST_RTL (exp))
6025 output_constant_def (exp);
6026
6027 /* TREE_CST_RTL probably contains a constant address.
6028 On RISC machines where a constant address isn't valid,
6029 make some insns to get that address into a register. */
6030 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6031 && modifier != EXPAND_CONST_ADDRESS
6032 && modifier != EXPAND_INITIALIZER
6033 && modifier != EXPAND_SUM
6034 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6035 || (flag_force_addr
6036 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6037 return change_address (TREE_CST_RTL (exp), VOIDmode,
6038 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6039 return TREE_CST_RTL (exp);
6040
6041 case EXPR_WITH_FILE_LOCATION:
6042 {
6043 rtx to_return;
6044 char *saved_input_filename = input_filename;
6045 int saved_lineno = lineno;
6046 input_filename = EXPR_WFL_FILENAME (exp);
6047 lineno = EXPR_WFL_LINENO (exp);
6048 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6049 emit_line_note (input_filename, lineno);
6050 	/* Possibly avoid switching back and forth here.  */
6051 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6052 input_filename = saved_input_filename;
6053 lineno = saved_lineno;
6054 return to_return;
6055 }
6056
6057 case SAVE_EXPR:
6058 context = decl_function_context (exp);
6059
6060 /* If this SAVE_EXPR was at global context, assume we are an
6061 initialization function and move it into our context. */
6062 if (context == 0)
6063 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6064
6065 /* We treat inline_function_decl as an alias for the current function
6066 because that is the inline function whose vars, types, etc.
6067 are being merged into the current function.
6068 See expand_inline_function. */
6069 if (context == current_function_decl || context == inline_function_decl)
6070 context = 0;
6071
6072 /* If this is non-local, handle it. */
6073 if (context)
6074 {
6075 /* The following call just exists to abort if the context is
6076 not of a containing function. */
6077 find_function_data (context);
6078
6079 temp = SAVE_EXPR_RTL (exp);
6080 if (temp && GET_CODE (temp) == REG)
6081 {
6082 put_var_into_stack (exp);
6083 temp = SAVE_EXPR_RTL (exp);
6084 }
6085 if (temp == 0 || GET_CODE (temp) != MEM)
6086 abort ();
6087 return change_address (temp, mode,
6088 fix_lexical_addr (XEXP (temp, 0), exp));
6089 }
6090 if (SAVE_EXPR_RTL (exp) == 0)
6091 {
6092 if (mode == VOIDmode)
6093 temp = const0_rtx;
6094 else
6095 temp = assign_temp (type, 3, 0, 0);
6096
6097 SAVE_EXPR_RTL (exp) = temp;
6098 if (!optimize && GET_CODE (temp) == REG)
6099 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6100 save_expr_regs);
6101
6102 /* If the mode of TEMP does not match that of the expression, it
6103 must be a promoted value. We pass store_expr a SUBREG of the
6104 wanted mode but mark it so that we know that it was already
6105 extended. Note that `unsignedp' was modified above in
6106 this case. */
6107
6108 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6109 {
6110 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6111 SUBREG_PROMOTED_VAR_P (temp) = 1;
6112 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6113 }
6114
6115 if (temp == const0_rtx)
6116 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6117 EXPAND_MEMORY_USE_BAD);
6118 else
6119 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6120
6121 TREE_USED (exp) = 1;
6122 }
6123
6124 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6125 must be a promoted value. We return a SUBREG of the wanted mode,
6126 but mark it so that we know that it was already extended. */
6127
6128 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6129 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6130 {
6131 /* Compute the signedness and make the proper SUBREG. */
6132 promote_mode (type, mode, &unsignedp, 0);
6133 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6134 SUBREG_PROMOTED_VAR_P (temp) = 1;
6135 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6136 return temp;
6137 }
6138
6139 return SAVE_EXPR_RTL (exp);
6140
6141 case UNSAVE_EXPR:
6142 {
6143 rtx temp;
6144 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6145 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6146 return temp;
6147 }
6148
6149 case PLACEHOLDER_EXPR:
6150 {
6151 tree placeholder_expr;
6152
6153 	/* If there is an object at the head of the placeholder list,
6154 	   see if some object in it is of type TYPE or is a pointer to it.  For
6155 	   further information, see tree.def. */
6156 for (placeholder_expr = placeholder_list;
6157 placeholder_expr != 0;
6158 placeholder_expr = TREE_CHAIN (placeholder_expr))
6159 {
6160 tree need_type = TYPE_MAIN_VARIANT (type);
6161 tree object = 0;
6162 tree old_list = placeholder_list;
6163 tree elt;
6164
6165 /* Find the outermost reference that is of the type we want.
6166 If none, see if any object has a type that is a pointer to
6167 the type we want. */
6168 for (elt = TREE_PURPOSE (placeholder_expr);
6169 elt != 0 && object == 0;
6170 elt
6171 = ((TREE_CODE (elt) == COMPOUND_EXPR
6172 || TREE_CODE (elt) == COND_EXPR)
6173 ? TREE_OPERAND (elt, 1)
6174 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6175 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6176 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6177 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6178 ? TREE_OPERAND (elt, 0) : 0))
6179 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6180 object = elt;
6181
6182 for (elt = TREE_PURPOSE (placeholder_expr);
6183 elt != 0 && object == 0;
6184 elt
6185 = ((TREE_CODE (elt) == COMPOUND_EXPR
6186 || TREE_CODE (elt) == COND_EXPR)
6187 ? TREE_OPERAND (elt, 1)
6188 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6189 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6190 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6191 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6192 ? TREE_OPERAND (elt, 0) : 0))
6193 if (POINTER_TYPE_P (TREE_TYPE (elt))
6194 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6195 == need_type))
6196 object = build1 (INDIRECT_REF, need_type, elt);
6197
6198 if (object != 0)
6199 {
6200 /* Expand this object skipping the list entries before
6201 it was found in case it is also a PLACEHOLDER_EXPR.
6202 In that case, we want to translate it using subsequent
6203 entries. */
6204 placeholder_list = TREE_CHAIN (placeholder_expr);
6205 temp = expand_expr (object, original_target, tmode,
6206 ro_modifier);
6207 placeholder_list = old_list;
6208 return temp;
6209 }
6210 }
6211 }
6212
6213 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6214 abort ();
6215
6216 case WITH_RECORD_EXPR:
6217 /* Put the object on the placeholder list, expand our first operand,
6218 and pop the list. */
6219 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6220 placeholder_list);
6221 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6222 tmode, ro_modifier);
6223 placeholder_list = TREE_CHAIN (placeholder_list);
6224 return target;
6225
6226 case GOTO_EXPR:
6227 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6228 expand_goto (TREE_OPERAND (exp, 0));
6229 else
6230 expand_computed_goto (TREE_OPERAND (exp, 0));
6231 return const0_rtx;
6232
6233 case EXIT_EXPR:
6234 expand_exit_loop_if_false (NULL_PTR,
6235 invert_truthvalue (TREE_OPERAND (exp, 0)));
6236 return const0_rtx;
6237
6238 case LABELED_BLOCK_EXPR:
6239 if (LABELED_BLOCK_BODY (exp))
6240 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6241 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6242 return const0_rtx;
6243
6244 case EXIT_BLOCK_EXPR:
6245 if (EXIT_BLOCK_RETURN (exp))
6246 sorry ("returned value in block_exit_expr");
6247 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6248 return const0_rtx;
6249
6250 case LOOP_EXPR:
6251 push_temp_slots ();
6252 expand_start_loop (1);
6253 expand_expr_stmt (TREE_OPERAND (exp, 0));
6254 expand_end_loop ();
6255 pop_temp_slots ();
6256
6257 return const0_rtx;
6258
6259 case BIND_EXPR:
6260 {
6261 tree vars = TREE_OPERAND (exp, 0);
6262 int vars_need_expansion = 0;
6263
6264 /* Need to open a binding contour here because
6265 if there are any cleanups they must be contained here. */
6266 expand_start_bindings (2);
6267
6268 /* Mark the corresponding BLOCK for output in its proper place. */
6269 if (TREE_OPERAND (exp, 2) != 0
6270 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6271 insert_block (TREE_OPERAND (exp, 2));
6272
6273 /* If VARS have not yet been expanded, expand them now. */
6274 while (vars)
6275 {
6276 if (DECL_RTL (vars) == 0)
6277 {
6278 vars_need_expansion = 1;
6279 expand_decl (vars);
6280 }
6281 expand_decl_init (vars);
6282 vars = TREE_CHAIN (vars);
6283 }
6284
6285 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6286
6287 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6288
6289 return temp;
6290 }
6291
6292 case RTL_EXPR:
6293 if (RTL_EXPR_SEQUENCE (exp))
6294 {
6295 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6296 abort ();
6297 emit_insns (RTL_EXPR_SEQUENCE (exp));
6298 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6299 }
6300 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6301 free_temps_for_rtl_expr (exp);
6302 return RTL_EXPR_RTL (exp);
6303
6304 case CONSTRUCTOR:
6305 /* If we don't need the result, just ensure we evaluate any
6306 subexpressions. */
6307 if (ignore)
6308 {
6309 tree elt;
6310 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6311 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6312 EXPAND_MEMORY_USE_BAD);
6313 return const0_rtx;
6314 }
6315
6316 /* All elts simple constants => refer to a constant in memory. But
6317 if this is a non-BLKmode mode, let it store a field at a time
6318 since that should make a CONST_INT or CONST_DOUBLE when we
6319 fold. Likewise, if we have a target we can use, it is best to
6320 store directly into the target unless the type is large enough
6321 that memcpy will be used. If we are making an initializer and
6322 all operands are constant, put it in memory as well. */
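      /* Rough illustration (an assumed example): for an automatic
	 "struct S s = { 1, 2, ..., N };" whose initializer is entirely
	 constant and too large to move by pieces, the branch below
	 emits the constructor once as anonymous static data to be
	 copied from; a small or partly non-constant initializer instead
	 falls through and is built field by field into TARGET by
	 store_constructor.  */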
6323 else if ((TREE_STATIC (exp)
6324 && ((mode == BLKmode
6325 && ! (target != 0 && safe_from_p (target, exp, 1)))
6326 || TREE_ADDRESSABLE (exp)
6327 || (TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST
6328 && TREE_INT_CST_HIGH (TYPE_SIZE_UNIT (type)) == 0
6329 && (! MOVE_BY_PIECES_P
6330 (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type)),
6331 TYPE_ALIGN (type) / BITS_PER_UNIT))
6332 && ! mostly_zeros_p (exp))))
6333 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6334 {
6335 rtx constructor = output_constant_def (exp);
6336 if (modifier != EXPAND_CONST_ADDRESS
6337 && modifier != EXPAND_INITIALIZER
6338 && modifier != EXPAND_SUM
6339 && (! memory_address_p (GET_MODE (constructor),
6340 XEXP (constructor, 0))
6341 || (flag_force_addr
6342 && GET_CODE (XEXP (constructor, 0)) != REG)))
6343 constructor = change_address (constructor, VOIDmode,
6344 XEXP (constructor, 0));
6345 return constructor;
6346 }
6347
6348 else
6349 {
6350 /* Handle calls that pass values in multiple non-contiguous
6351 locations. The Irix 6 ABI has examples of this. */
6352 if (target == 0 || ! safe_from_p (target, exp, 1)
6353 || GET_CODE (target) == PARALLEL)
6354 {
6355 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6356 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6357 else
6358 target = assign_temp (type, 0, 1, 1);
6359 }
6360
6361 if (TREE_READONLY (exp))
6362 {
6363 if (GET_CODE (target) == MEM)
6364 target = copy_rtx (target);
6365
6366 RTX_UNCHANGING_P (target) = 1;
6367 }
6368
6369 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6370 int_size_in_bytes (TREE_TYPE (exp)));
6371 return target;
6372 }
6373
6374 case INDIRECT_REF:
6375 {
6376 tree exp1 = TREE_OPERAND (exp, 0);
6377 tree exp2;
6378 tree index;
6379 tree string = string_constant (exp1, &index);
6380
6381 /* Try to optimize reads from const strings. */
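	/* E.g. (illustrative): a read such as *("abc" + 1) arrives here
	   as an INDIRECT_REF whose operand string_constant recognizes,
	   and is folded directly to the character value 'b' below.  */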
6382 if (string
6383 && TREE_CODE (string) == STRING_CST
6384 && TREE_CODE (index) == INTEGER_CST
6385 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6386 && GET_MODE_CLASS (mode) == MODE_INT
6387 && GET_MODE_SIZE (mode) == 1
6388 && modifier != EXPAND_MEMORY_USE_WO)
6389 return
6390 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6391
6392 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6393 op0 = memory_address (mode, op0);
6394
6395 if (cfun && current_function_check_memory_usage
6396 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6397 {
6398 enum memory_use_mode memory_usage;
6399 memory_usage = get_memory_usage_from_modifier (modifier);
6400
6401 if (memory_usage != MEMORY_USE_DONT)
6402 {
6403 in_check_memory_usage = 1;
6404 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6405 op0, Pmode,
6406 GEN_INT (int_size_in_bytes (type)),
6407 TYPE_MODE (sizetype),
6408 GEN_INT (memory_usage),
6409 TYPE_MODE (integer_type_node));
6410 in_check_memory_usage = 0;
6411 }
6412 }
6413
6414 temp = gen_rtx_MEM (mode, op0);
6415 /* If address was computed by addition,
6416 mark this as an element of an aggregate. */
6417 if (TREE_CODE (exp1) == PLUS_EXPR
6418 || (TREE_CODE (exp1) == SAVE_EXPR
6419 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6420 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6421 || (TREE_CODE (exp1) == ADDR_EXPR
6422 && (exp2 = TREE_OPERAND (exp1, 0))
6423 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6424 MEM_SET_IN_STRUCT_P (temp, 1);
6425
6426 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6427 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6428
6429 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6430 here, because, in C and C++, the fact that a location is accessed
6431 through a pointer to const does not mean that the value there can
6432 never change. Languages where it can never change should
6433 also set TREE_STATIC. */
6434 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6435
6436 /* If we are writing to this object and its type is a record with
6437 readonly fields, we must mark it as readonly so it will
6438 conflict with readonly references to those fields. */
6439 if (modifier == EXPAND_MEMORY_USE_WO
6440 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6441 RTX_UNCHANGING_P (temp) = 1;
6442
6443 return temp;
6444 }
6445
6446 case ARRAY_REF:
6447 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6448 abort ();
6449
6450 {
6451 tree array = TREE_OPERAND (exp, 0);
6452 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6453 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6454 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6455 HOST_WIDE_INT i;
6456
6457 /* Optimize the special-case of a zero lower bound.
6458
6459 We convert the low_bound to sizetype to avoid some problems
6460 with constant folding. (E.g. suppose the lower bound is 1,
6461 and its mode is QI. Without the conversion, (ARRAY
6462 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6463 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6464
6465 if (! integer_zerop (low_bound))
6466 index = size_diffop (index, convert (sizetype, low_bound));
6467
6468 /* Fold an expression like: "foo"[2].
6469 This is not done in fold so it won't happen inside &.
6470 Don't fold if this is for wide characters since it's too
6471 difficult to do correctly and this is a very rare case. */
6472
6473 if (TREE_CODE (array) == STRING_CST
6474 && TREE_CODE (index) == INTEGER_CST
6475 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6476 && GET_MODE_CLASS (mode) == MODE_INT
6477 && GET_MODE_SIZE (mode) == 1)
6478 return
6479 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6480
6481 /* If this is a constant index into a constant array,
6482 just get the value from the array. Handle both the cases when
6483 we have an explicit constructor and when our operand is a variable
6484 that was declared const. */
6485
6486 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6487 && TREE_CODE (index) == INTEGER_CST
6488 && 0 > compare_tree_int (index,
6489 list_length (CONSTRUCTOR_ELTS
6490 (TREE_OPERAND (exp, 0)))))
6491 {
6492 tree elem;
6493
6494 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6495 i = TREE_INT_CST_LOW (index);
6496 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6497 ;
6498
6499 if (elem)
6500 return expand_expr (fold (TREE_VALUE (elem)), target,
6501 tmode, ro_modifier);
6502 }
6503
6504 else if (optimize >= 1
6505 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6506 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6507 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6508 {
6509 if (TREE_CODE (index) == INTEGER_CST)
6510 {
6511 tree init = DECL_INITIAL (array);
6512
6513 if (TREE_CODE (init) == CONSTRUCTOR)
6514 {
6515 tree elem;
6516
6517 for (elem = CONSTRUCTOR_ELTS (init);
6518 (elem
6519 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6520 elem = TREE_CHAIN (elem))
6521 ;
6522
6523 if (elem)
6524 return expand_expr (fold (TREE_VALUE (elem)), target,
6525 tmode, ro_modifier);
6526 }
6527 else if (TREE_CODE (init) == STRING_CST
6528 && 0 > compare_tree_int (index,
6529 TREE_STRING_LENGTH (init)))
6530 return (GEN_INT
6531 (TREE_STRING_POINTER
6532 (init)[TREE_INT_CST_LOW (index)]));
6533 }
6534 }
6535 }
6536
6537 /* ... fall through ... */
6538
6539 case COMPONENT_REF:
6540 case BIT_FIELD_REF:
6541 /* If the operand is a CONSTRUCTOR, we can just extract the
6542 appropriate field if it is present. Don't do this if we have
6543 already written the data since we want to refer to that copy
6544 and varasm.c assumes that's what we'll do. */
6545 if (code != ARRAY_REF
6546 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6547 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6548 {
6549 tree elt;
6550
6551 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6552 elt = TREE_CHAIN (elt))
6553 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6554 /* We can normally use the value of the field in the
6555 CONSTRUCTOR. However, if this is a bitfield in
6556 an integral mode that we can fit in a HOST_WIDE_INT,
6557 we must mask only the number of bits in the bitfield,
6558 since this is done implicitly by the constructor. If
6559 the bitfield does not meet either of those conditions,
6560 we can't do this optimization. */
6561 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6562 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6563 == MODE_INT)
6564 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6565 <= HOST_BITS_PER_WIDE_INT))))
6566 {
6567 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6568 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6569 {
6570 HOST_WIDE_INT bitsize
6571 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6572
6573 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6574 {
6575 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6576 op0 = expand_and (op0, op1, target);
6577 }
6578 else
6579 {
6580 enum machine_mode imode
6581 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6582 tree count
6583 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6584 0);
6585
6586 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6587 target, 0);
6588 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6589 target, 0);
6590 }
6591 }
6592
6593 return op0;
6594 }
6595 }
6596
6597 {
6598 enum machine_mode mode1;
6599 HOST_WIDE_INT bitsize, bitpos;
6600 tree offset;
6601 int volatilep = 0;
6602 unsigned int alignment;
6603 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6604 &mode1, &unsignedp, &volatilep,
6605 &alignment);
6606
6607 /* If we got back the original object, something is wrong. Perhaps
6608 we are evaluating an expression too early. In any event, don't
6609 infinitely recurse. */
6610 if (tem == exp)
6611 abort ();
6612
6613 	/* If TEM's type is a union of variable size, pass TARGET to the inner
6614 	   computation, since it will need a temporary and TARGET is known
6615 	   to suffice.  This occurs in unchecked conversion in Ada. */
6616
6617 op0 = expand_expr (tem,
6618 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6619 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6620 != INTEGER_CST)
6621 ? target : NULL_RTX),
6622 VOIDmode,
6623 (modifier == EXPAND_INITIALIZER
6624 || modifier == EXPAND_CONST_ADDRESS)
6625 ? modifier : EXPAND_NORMAL);
6626
6627 /* If this is a constant, put it into a register if it is a
6628 	     legitimate constant and OFFSET is 0, and into memory if it isn't. */
6629 if (CONSTANT_P (op0))
6630 {
6631 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6632 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6633 && offset == 0)
6634 op0 = force_reg (mode, op0);
6635 else
6636 op0 = validize_mem (force_const_mem (mode, op0));
6637 }
6638
6639 if (offset != 0)
6640 {
6641 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6642
6643 /* If this object is in memory, put it into a register.
6644 This case can't occur in C, but can in Ada if we have
6645 unchecked conversion of an expression from a scalar type to
6646 an array or record type. */
6647 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6648 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6649 {
6650 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6651
6652 mark_temp_addr_taken (memloc);
6653 emit_move_insn (memloc, op0);
6654 op0 = memloc;
6655 }
6656
6657 if (GET_CODE (op0) != MEM)
6658 abort ();
6659
6660 if (GET_MODE (offset_rtx) != ptr_mode)
6661 {
6662 #ifdef POINTERS_EXTEND_UNSIGNED
6663 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6664 #else
6665 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6666 #endif
6667 }
6668
6669 	  /* A constant address in OP0 can have VOIDmode; we must not try
6670 	     to call force_reg in that case, so avoid it here. */
6671 if (GET_CODE (op0) == MEM
6672 && GET_MODE (op0) == BLKmode
6673 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6674 && bitsize != 0
6675 && (bitpos % bitsize) == 0
6676 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6677 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6678 {
6679 rtx temp = change_address (op0, mode1,
6680 plus_constant (XEXP (op0, 0),
6681 (bitpos /
6682 BITS_PER_UNIT)));
6683 if (GET_CODE (XEXP (temp, 0)) == REG)
6684 op0 = temp;
6685 else
6686 op0 = change_address (op0, mode1,
6687 force_reg (GET_MODE (XEXP (temp, 0)),
6688 XEXP (temp, 0)));
6689 bitpos = 0;
6690 }
6691
6692
6693 op0 = change_address (op0, VOIDmode,
6694 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6695 force_reg (ptr_mode,
6696 offset_rtx)));
6697 }
6698
6699 /* Don't forget about volatility even if this is a bitfield. */
6700 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6701 {
6702 op0 = copy_rtx (op0);
6703 MEM_VOLATILE_P (op0) = 1;
6704 }
6705
6706 /* Check the access. */
6707 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6708 {
6709 enum memory_use_mode memory_usage;
6710 memory_usage = get_memory_usage_from_modifier (modifier);
6711
6712 if (memory_usage != MEMORY_USE_DONT)
6713 {
6714 rtx to;
6715 int size;
6716
6717 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6718 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6719
6720 /* Check the access right of the pointer. */
6721 if (size > BITS_PER_UNIT)
6722 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6723 to, Pmode,
6724 GEN_INT (size / BITS_PER_UNIT),
6725 TYPE_MODE (sizetype),
6726 GEN_INT (memory_usage),
6727 TYPE_MODE (integer_type_node));
6728 }
6729 }
6730
6731 /* In cases where an aligned union has an unaligned object
6732 as a field, we might be extracting a BLKmode value from
6733 an integer-mode (e.g., SImode) object. Handle this case
6734 by doing the extract into an object as wide as the field
6735 (which we know to be the width of a basic mode), then
6736 storing into memory, and changing the mode to BLKmode.
6737 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6738 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6739 if (mode1 == VOIDmode
6740 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6741 || (modifier != EXPAND_CONST_ADDRESS
6742 && modifier != EXPAND_INITIALIZER
6743 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6744 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6745 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6746 /* If the field isn't aligned enough to fetch as a memref,
6747 fetch it as a bit field. */
6748 || (mode1 != BLKmode
6749 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6750 && ((TYPE_ALIGN (TREE_TYPE (tem))
6751 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6752 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6753 /* If the type and the field are a constant size and the
6754 size of the type isn't the same size as the bitfield,
6755 we must use bitfield operations. */
6756 || ((bitsize >= 0
6757 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6758 == INTEGER_CST)
6759 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6760 bitsize)))))
6761 || (modifier != EXPAND_CONST_ADDRESS
6762 && modifier != EXPAND_INITIALIZER
6763 && mode == BLKmode
6764 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6765 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6766 || bitpos % TYPE_ALIGN (type) != 0)))
6767 {
6768 enum machine_mode ext_mode = mode;
6769
6770 if (ext_mode == BLKmode
6771 && ! (target != 0 && GET_CODE (op0) == MEM
6772 && GET_CODE (target) == MEM
6773 && bitpos % BITS_PER_UNIT == 0))
6774 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6775
6776 if (ext_mode == BLKmode)
6777 {
6778 /* In this case, BITPOS must start at a byte boundary and
6779 TARGET, if specified, must be a MEM. */
6780 if (GET_CODE (op0) != MEM
6781 || (target != 0 && GET_CODE (target) != MEM)
6782 || bitpos % BITS_PER_UNIT != 0)
6783 abort ();
6784
6785 op0 = change_address (op0, VOIDmode,
6786 plus_constant (XEXP (op0, 0),
6787 bitpos / BITS_PER_UNIT));
6788 if (target == 0)
6789 target = assign_temp (type, 0, 1, 1);
6790
6791 emit_block_move (target, op0,
6792 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6793 / BITS_PER_UNIT),
6794 1);
6795
6796 return target;
6797 }
6798
6799 op0 = validize_mem (op0);
6800
6801 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6802 mark_reg_pointer (XEXP (op0, 0), alignment);
6803
6804 op0 = extract_bit_field (op0, bitsize, bitpos,
6805 unsignedp, target, ext_mode, ext_mode,
6806 alignment,
6807 int_size_in_bytes (TREE_TYPE (tem)));
6808
6809 /* If the result is a record type and BITSIZE is narrower than
6810 the mode of OP0, an integral mode, and this is a big endian
6811 machine, we must put the field into the high-order bits. */
6812 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6813 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6814 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6815 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6816 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6817 - bitsize),
6818 op0, 1);
6819
6820 if (mode == BLKmode)
6821 {
6822 rtx new = assign_stack_temp (ext_mode,
6823 bitsize / BITS_PER_UNIT, 0);
6824
6825 emit_move_insn (new, op0);
6826 op0 = copy_rtx (new);
6827 PUT_MODE (op0, BLKmode);
6828 MEM_SET_IN_STRUCT_P (op0, 1);
6829 }
6830
6831 return op0;
6832 }
6833
6834 /* If the result is BLKmode, use that to access the object
6835 now as well. */
6836 if (mode == BLKmode)
6837 mode1 = BLKmode;
6838
6839 /* Get a reference to just this component. */
6840 if (modifier == EXPAND_CONST_ADDRESS
6841 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6842 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6843 (bitpos / BITS_PER_UNIT)));
6844 else
6845 op0 = change_address (op0, mode1,
6846 plus_constant (XEXP (op0, 0),
6847 (bitpos / BITS_PER_UNIT)));
6848
6849 if (GET_CODE (op0) == MEM)
6850 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6851
6852 if (GET_CODE (XEXP (op0, 0)) == REG)
6853 mark_reg_pointer (XEXP (op0, 0), alignment);
6854
6855 MEM_SET_IN_STRUCT_P (op0, 1);
6856 MEM_VOLATILE_P (op0) |= volatilep;
6857 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6858 || modifier == EXPAND_CONST_ADDRESS
6859 || modifier == EXPAND_INITIALIZER)
6860 return op0;
6861 else if (target == 0)
6862 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6863
6864 convert_move (target, op0, unsignedp);
6865 return target;
6866 }
6867
6868 /* Intended for a reference to a buffer of a file-object in Pascal.
6869 But it's not certain that a special tree code will really be
6870 necessary for these. INDIRECT_REF might work for them. */
6871 case BUFFER_REF:
6872 abort ();
6873
6874 case IN_EXPR:
6875 {
6876 /* Pascal set IN expression.
6877
6878 Algorithm:
6879 rlo = set_low - (set_low%bits_per_word);
6880 the_word = set [ (index - rlo)/bits_per_word ];
6881 bit_index = index % bits_per_word;
6882 bitmask = 1 << bit_index;
6883 return !!(the_word & bitmask); */
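	/* A worked instance of the algorithm above (numbers chosen
	   purely for illustration), with bits_per_word == 8,
	   set_low == 3 and index == 13: rlo = 3 - (3 % 8) = 0;
	   the_word = set[(13 - 0) / 8] = set[1]; bit_index = 13 % 8 = 5;
	   bitmask = 1 << 5 = 0x20; the result is nonzero iff bit 5 of
	   the second byte of the set is set.  */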
6884
6885 tree set = TREE_OPERAND (exp, 0);
6886 tree index = TREE_OPERAND (exp, 1);
6887 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6888 tree set_type = TREE_TYPE (set);
6889 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6890 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6891 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6892 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6893 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6894 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6895 rtx setaddr = XEXP (setval, 0);
6896 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6897 rtx rlow;
6898 rtx diff, quo, rem, addr, bit, result;
6899
6900 preexpand_calls (exp);
6901
6902 /* If domain is empty, answer is no. Likewise if index is constant
6903 and out of bounds. */
6904 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6905 && TREE_CODE (set_low_bound) == INTEGER_CST
6906 && tree_int_cst_lt (set_high_bound, set_low_bound))
6907 || (TREE_CODE (index) == INTEGER_CST
6908 && TREE_CODE (set_low_bound) == INTEGER_CST
6909 && tree_int_cst_lt (index, set_low_bound))
6910 || (TREE_CODE (set_high_bound) == INTEGER_CST
6911 && TREE_CODE (index) == INTEGER_CST
6912 && tree_int_cst_lt (set_high_bound, index))))
6913 return const0_rtx;
6914
6915 if (target == 0)
6916 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6917
6918 /* If we get here, we have to generate the code for both cases
6919 (in range and out of range). */
6920
6921 op0 = gen_label_rtx ();
6922 op1 = gen_label_rtx ();
6923
6924 if (! (GET_CODE (index_val) == CONST_INT
6925 && GET_CODE (lo_r) == CONST_INT))
6926 {
6927 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6928 GET_MODE (index_val), iunsignedp, 0, op1);
6929 }
6930
6931 if (! (GET_CODE (index_val) == CONST_INT
6932 && GET_CODE (hi_r) == CONST_INT))
6933 {
6934 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6935 GET_MODE (index_val), iunsignedp, 0, op1);
6936 }
6937
6938 /* Calculate the element number of bit zero in the first word
6939 of the set. */
6940 if (GET_CODE (lo_r) == CONST_INT)
6941 rlow = GEN_INT (INTVAL (lo_r)
6942 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6943 else
6944 rlow = expand_binop (index_mode, and_optab, lo_r,
6945 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6946 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6947
6948 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6949 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6950
6951 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6952 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6953 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6954 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6955
6956 addr = memory_address (byte_mode,
6957 expand_binop (index_mode, add_optab, diff,
6958 setaddr, NULL_RTX, iunsignedp,
6959 OPTAB_LIB_WIDEN));
6960
6961 /* Extract the bit we want to examine */
6962 	  /* Extract the bit we want to examine.  */
6963 gen_rtx_MEM (byte_mode, addr),
6964 make_tree (TREE_TYPE (index), rem),
6965 NULL_RTX, 1);
6966 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6967 GET_MODE (target) == byte_mode ? target : 0,
6968 1, OPTAB_LIB_WIDEN);
6969
6970 if (result != target)
6971 convert_move (target, result, 1);
6972
6973 /* Output the code to handle the out-of-range case. */
6974 emit_jump (op0);
6975 emit_label (op1);
6976 emit_move_insn (target, const0_rtx);
6977 emit_label (op0);
6978 return target;
6979 }
6980
6981 case WITH_CLEANUP_EXPR:
6982 if (RTL_EXPR_RTL (exp) == 0)
6983 {
6984 RTL_EXPR_RTL (exp)
6985 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6986 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6987
6988 /* That's it for this cleanup. */
6989 TREE_OPERAND (exp, 2) = 0;
6990 }
6991 return RTL_EXPR_RTL (exp);
6992
6993 case CLEANUP_POINT_EXPR:
6994 {
6995 /* Start a new binding layer that will keep track of all cleanup
6996 actions to be performed. */
6997 expand_start_bindings (2);
6998
6999 target_temp_slot_level = temp_slot_level;
7000
7001 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7002 /* If we're going to use this value, load it up now. */
7003 if (! ignore)
7004 op0 = force_not_mem (op0);
7005 preserve_temp_slots (op0);
7006 expand_end_bindings (NULL_TREE, 0, 0);
7007 }
7008 return op0;
7009
7010 case CALL_EXPR:
7011 /* Check for a built-in function. */
7012 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7013 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7014 == FUNCTION_DECL)
7015 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7016 return expand_builtin (exp, target, subtarget, tmode, ignore);
7017
7018 /* If this call was expanded already by preexpand_calls,
7019 just return the result we got. */
7020 if (CALL_EXPR_RTL (exp) != 0)
7021 return CALL_EXPR_RTL (exp);
7022
7023 return expand_call (exp, target, ignore);
7024
7025 case NON_LVALUE_EXPR:
7026 case NOP_EXPR:
7027 case CONVERT_EXPR:
7028 case REFERENCE_EXPR:
7029 if (TREE_CODE (type) == UNION_TYPE)
7030 {
7031 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7032
7033 /* If both input and output are BLKmode, this conversion
7034 isn't actually doing anything unless we need to make the
7035 alignment stricter. */
7036 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7037 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7038 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7039 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7040 modifier);
7041
7042 if (target == 0)
7043 {
7044 if (mode != BLKmode)
7045 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7046 else
7047 target = assign_temp (type, 0, 1, 1);
7048 }
7049
7050 if (GET_CODE (target) == MEM)
7051 /* Store data into beginning of memory target. */
7052 store_expr (TREE_OPERAND (exp, 0),
7053 change_address (target, TYPE_MODE (valtype), 0), 0);
7054
7055 else if (GET_CODE (target) == REG)
7056 /* Store this field into a union of the proper type. */
7057 store_field (target,
7058 MIN ((int_size_in_bytes (TREE_TYPE
7059 (TREE_OPERAND (exp, 0)))
7060 * BITS_PER_UNIT),
7061 GET_MODE_BITSIZE (mode)),
7062 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7063 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7064 else
7065 abort ();
7066
7067 /* Return the entire union. */
7068 return target;
7069 }
7070
7071 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7072 {
7073 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7074 ro_modifier);
7075
7076 /* If the signedness of the conversion differs and OP0 is
7077 a promoted SUBREG, clear that indication since we now
7078 have to do the proper extension. */
7079 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7080 && GET_CODE (op0) == SUBREG)
7081 SUBREG_PROMOTED_VAR_P (op0) = 0;
7082
7083 return op0;
7084 }
7085
7086 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7087 if (GET_MODE (op0) == mode)
7088 return op0;
7089
7090 /* If OP0 is a constant, just convert it into the proper mode. */
7091 if (CONSTANT_P (op0))
7092 return
7093 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7094 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7095
7096 if (modifier == EXPAND_INITIALIZER)
7097 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7098
7099 if (target == 0)
7100 return
7101 convert_to_mode (mode, op0,
7102 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7103 else
7104 convert_move (target, op0,
7105 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7106 return target;
7107
7108 case PLUS_EXPR:
7109 /* We come here from MINUS_EXPR when the second operand is a
7110 constant. */
7111 plus_expr:
7112 this_optab = add_optab;
7113
7114 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7115 something else, make sure we add the register to the constant and
7116 then to the other thing. This case can occur during strength
7117 reduction and doing it this way will produce better code if the
7118 frame pointer or argument pointer is eliminated.
7119
7120 fold-const.c will ensure that the constant is always in the inner
7121 PLUS_EXPR, so the only case we need to do anything about is if
7122 sp, ap, or fp is our second argument, in which case we must swap
7123 the innermost first argument and our second argument. */
7124
7125 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7126 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7127 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7128 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7129 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7130 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7131 {
7132 tree t = TREE_OPERAND (exp, 1);
7133
7134 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7135 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7136 }
7137
7138 /* If the result is to be ptr_mode and we are adding an integer to
7139 something, we might be forming a constant. So try to use
7140 plus_constant. If it produces a sum and we can't accept it,
7141 use force_operand. This allows P = &ARR[const] to generate
7142 efficient code on machines where a SYMBOL_REF is not a valid
7143 address.
7144
7145 If this is an EXPAND_SUM call, always return the sum. */
7146 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7147 || mode == ptr_mode)
7148 {
7149 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7150 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7151 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7152 {
7153 rtx constant_part;
7154
7155 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7156 EXPAND_SUM);
7157 /* Use immed_double_const to ensure that the constant is
7158 truncated according to the mode of OP1, then sign extended
7159 to a HOST_WIDE_INT. Using the constant directly can result
7160 in non-canonical RTL in a 64x32 cross compile. */
7161 constant_part
7162 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7163 (HOST_WIDE_INT) 0,
7164 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7165 op1 = plus_constant (op1, INTVAL (constant_part));
7166 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7167 op1 = force_operand (op1, target);
7168 return op1;
7169 }
7170
7171 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7172 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7173 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7174 {
7175 rtx constant_part;
7176
7177 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7178 EXPAND_SUM);
7179 if (! CONSTANT_P (op0))
7180 {
7181 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7182 VOIDmode, modifier);
7183 /* Don't go to both_summands if modifier
7184 says it's not right to return a PLUS. */
7185 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7186 goto binop2;
7187 goto both_summands;
7188 }
7189 /* Use immed_double_const to ensure that the constant is
7190 truncated according to the mode of OP0, then sign extended
7191 to a HOST_WIDE_INT. Using the constant directly can result
7192 in non-canonical RTL in a 64x32 cross compile. */
7193 constant_part
7194 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7195 (HOST_WIDE_INT) 0,
7196 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7197 op0 = plus_constant (op0, INTVAL (constant_part));
7198 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7199 op0 = force_operand (op0, target);
7200 return op0;
7201 }
7202 }
7203
7204 /* No sense saving up arithmetic to be done
7205 if it's all in the wrong mode to form part of an address.
7206 And force_operand won't know whether to sign-extend or
7207 zero-extend. */
7208 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7209 || mode != ptr_mode)
7210 goto binop;
7211
7212 preexpand_calls (exp);
7213 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7214 subtarget = 0;
7215
7216 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7217 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7218
7219 both_summands:
7220 /* Make sure any term that's a sum with a constant comes last. */
7221 if (GET_CODE (op0) == PLUS
7222 && CONSTANT_P (XEXP (op0, 1)))
7223 {
7224 temp = op0;
7225 op0 = op1;
7226 op1 = temp;
7227 }
7228 /* If adding to a sum including a constant,
7229 associate it to put the constant outside. */
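     /* For example, (X + C1) + (Y + C2) ends up as (X + Y) + (C1 + C2),
        with the combined constant term last.  */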
7230 if (GET_CODE (op1) == PLUS
7231 && CONSTANT_P (XEXP (op1, 1)))
7232 {
7233 rtx constant_term = const0_rtx;
7234
7235 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7236 if (temp != 0)
7237 op0 = temp;
7238 /* Ensure that MULT comes first if there is one. */
7239 else if (GET_CODE (op0) == MULT)
7240 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7241 else
7242 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7243
7244 /* Let's also eliminate constants from op0 if possible. */
7245 op0 = eliminate_constant_term (op0, &constant_term);
7246
7247 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7248 their sum should be a constant. Form it into OP1, since the
7249 result we want will then be OP0 + OP1. */
7250
7251 temp = simplify_binary_operation (PLUS, mode, constant_term,
7252 XEXP (op1, 1));
7253 if (temp != 0)
7254 op1 = temp;
7255 else
7256 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7257 }
7258
7259 /* Put a constant term last and put a multiplication first. */
7260 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7261 temp = op1, op1 = op0, op0 = temp;
7262
7263 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7264 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7265
7266 case MINUS_EXPR:
7267 /* For initializers, we are allowed to return a MINUS of two
7268 symbolic constants. Here we handle all cases when both operands
7269 are constant. */
7270 /* Handle difference of two symbolic constants,
7271 for the sake of an initializer. */
7272 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7273 && really_constant_p (TREE_OPERAND (exp, 0))
7274 && really_constant_p (TREE_OPERAND (exp, 1)))
7275 {
7276 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7277 VOIDmode, ro_modifier);
7278 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7279 VOIDmode, ro_modifier);
7280
7281 /* If the last operand is a CONST_INT, use plus_constant of
7282 the negated constant. Else make the MINUS. */
7283 if (GET_CODE (op1) == CONST_INT)
7284 return plus_constant (op0, - INTVAL (op1));
7285 else
7286 return gen_rtx_MINUS (mode, op0, op1);
7287 }
7288 /* Convert A - const to A + (-const). */
7289 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7290 {
7291 tree negated = fold (build1 (NEGATE_EXPR, type,
7292 TREE_OPERAND (exp, 1)));
7293
7294 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7295 /* If we can't negate the constant in TYPE, leave it alone and
7296 expand_binop will negate it for us. We used to try to do it
7297 here in the signed version of TYPE, but that doesn't work
7298 on POINTER_TYPEs. */;
7299 else
7300 {
7301 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7302 goto plus_expr;
7303 }
7304 }
7305 this_optab = sub_optab;
7306 goto binop;
7307
7308 case MULT_EXPR:
7309 preexpand_calls (exp);
7310 /* If first operand is constant, swap them.
7311 Thus the following special case checks need only
7312 check the second operand. */
7313 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7314 {
7315 register tree t1 = TREE_OPERAND (exp, 0);
7316 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7317 TREE_OPERAND (exp, 1) = t1;
7318 }
7319
7320 /* Attempt to return something suitable for generating an
7321 indexed address, for machines that support that. */
7322
7323 if (modifier == EXPAND_SUM && mode == ptr_mode
7324 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7326 {
7327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7328 EXPAND_SUM);
7329
7330 /* Apply distributive law if OP0 is x+c. */
7331 if (GET_CODE (op0) == PLUS
7332 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7333 return
7334 gen_rtx_PLUS
7335 (mode,
7336 gen_rtx_MULT
7337 (mode, XEXP (op0, 0),
7338 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7339 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7340 * INTVAL (XEXP (op0, 1))));
7341
7342 if (GET_CODE (op0) != REG)
7343 op0 = force_operand (op0, NULL_RTX);
7344 if (GET_CODE (op0) != REG)
7345 op0 = copy_to_mode_reg (mode, op0);
7346
7347 return
7348 gen_rtx_MULT (mode, op0,
7349 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7350 }
7351
7352 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7353 subtarget = 0;
7354
7355 /* Check for multiplying things that have been extended
7356 from a narrower type. If this machine supports multiplying
7357 in that narrower type with a result in the desired type,
7358 do it that way, and avoid the explicit type-conversion. */
7359 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7360 && TREE_CODE (type) == INTEGER_TYPE
7361 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7362 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7363 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7364 && int_fits_type_p (TREE_OPERAND (exp, 1),
7365 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7366 /* Don't use a widening multiply if a shift will do. */
7367 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7368 > HOST_BITS_PER_WIDE_INT)
7369 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7370 ||
7371 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7372 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7373 ==
7374 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7375 /* If both operands are extended, they must either both
7376 be zero-extended or both be sign-extended. */
7377 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7378 ==
7379 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7380 {
7381 enum machine_mode innermode
7382 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7383 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7384 ? smul_widen_optab : umul_widen_optab);
7385 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7386 ? umul_widen_optab : smul_widen_optab);
7387 if (mode == GET_MODE_WIDER_MODE (innermode))
7388 {
7389 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7390 {
7391 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7392 NULL_RTX, VOIDmode, 0);
7393 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7394 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7395 VOIDmode, 0);
7396 else
7397 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7398 NULL_RTX, VOIDmode, 0);
7399 goto binop2;
7400 }
7401 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7402 && innermode == word_mode)
7403 {
7404 rtx htem;
7405 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7406 NULL_RTX, VOIDmode, 0);
7407 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7408 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7409 VOIDmode, 0);
7410 else
7411 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7412 NULL_RTX, VOIDmode, 0);
7413 temp = expand_binop (mode, other_optab, op0, op1, target,
7414 unsignedp, OPTAB_LIB_WIDEN);
7415 htem = expand_mult_highpart_adjust (innermode,
7416 gen_highpart (innermode, temp),
7417 op0, op1,
7418 gen_highpart (innermode, temp),
7419 unsignedp);
7420 emit_move_insn (gen_highpart (innermode, temp), htem);
7421 return temp;
7422 }
7423 }
7424 }
7425 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7426 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7427 return expand_mult (mode, op0, op1, target, unsignedp);
7428
7429 case TRUNC_DIV_EXPR:
7430 case FLOOR_DIV_EXPR:
7431 case CEIL_DIV_EXPR:
7432 case ROUND_DIV_EXPR:
7433 case EXACT_DIV_EXPR:
7434 preexpand_calls (exp);
7435 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7436 subtarget = 0;
7437 /* Possible optimization: compute the dividend with EXPAND_SUM
7438 then, if the divisor is constant, we can optimize the case
7439 where some terms of the dividend have coefficients divisible by it.  */
7440 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7441 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7442 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7443
7444 case RDIV_EXPR:
7445 this_optab = flodiv_optab;
7446 goto binop;
7447
7448 case TRUNC_MOD_EXPR:
7449 case FLOOR_MOD_EXPR:
7450 case CEIL_MOD_EXPR:
7451 case ROUND_MOD_EXPR:
7452 preexpand_calls (exp);
7453 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7454 subtarget = 0;
7455 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7456 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7457 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7458
7459 case FIX_ROUND_EXPR:
7460 case FIX_FLOOR_EXPR:
7461 case FIX_CEIL_EXPR:
7462 abort (); /* Not used for C. */
7463
7464 case FIX_TRUNC_EXPR:
7465 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7466 if (target == 0)
7467 target = gen_reg_rtx (mode);
7468 expand_fix (target, op0, unsignedp);
7469 return target;
7470
7471 case FLOAT_EXPR:
7472 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7473 if (target == 0)
7474 target = gen_reg_rtx (mode);
7475 /* expand_float can't figure out what to do if FROM has VOIDmode.
7476 So give it the correct mode. With -O, cse will optimize this. */
7477 if (GET_MODE (op0) == VOIDmode)
7478 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7479 op0);
7480 expand_float (target, op0,
7481 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7482 return target;
7483
7484 case NEGATE_EXPR:
7485 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7486 temp = expand_unop (mode, neg_optab, op0, target, 0);
7487 if (temp == 0)
7488 abort ();
7489 return temp;
7490
7491 case ABS_EXPR:
7492 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7493
7494 /* Handle complex values specially. */
7495 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7496 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7497 return expand_complex_abs (mode, op0, target, unsignedp);
7498
7499 /* Unsigned abs is simply the operand. Testing here means we don't
7500 risk generating incorrect code below. */
7501 if (TREE_UNSIGNED (type))
7502 return op0;
7503
7504 return expand_abs (mode, op0, target,
7505 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7506
7507 case MAX_EXPR:
7508 case MIN_EXPR:
7509 target = original_target;
7510 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7511 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7512 || GET_MODE (target) != mode
7513 || (GET_CODE (target) == REG
7514 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7515 target = gen_reg_rtx (mode);
7516 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7517 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7518
7519 /* First try to do it with a special MIN or MAX instruction.
7520 If that does not win, use a conditional jump to select the proper
7521 value. */
7522 this_optab = (TREE_UNSIGNED (type)
7523 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7524 : (code == MIN_EXPR ? smin_optab : smax_optab));
7525
7526 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7527 OPTAB_WIDEN);
7528 if (temp != 0)
7529 return temp;
7530
7531 /* At this point, a MEM target is no longer useful; we will get better
7532 code without it. */
7533
7534 if (GET_CODE (target) == MEM)
7535 target = gen_reg_rtx (mode);
7536
7537 if (target != op0)
7538 emit_move_insn (target, op0);
7539
7540 op0 = gen_label_rtx ();
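     /* OP0 now labels the point reached when TARGET (which holds the
        first operand) is already the MIN/MAX result; otherwise we fall
        through and copy OP1 into TARGET below.  */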
7541
7542 /* If this mode is an integer too wide to compare properly,
7543 compare word by word. Rely on cse to optimize constant cases. */
7544 if (GET_MODE_CLASS (mode) == MODE_INT
7545 && ! can_compare_p (GE, mode, ccp_jump))
7546 {
7547 if (code == MAX_EXPR)
7548 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7549 target, op1, NULL_RTX, op0);
7550 else
7551 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7552 op1, target, NULL_RTX, op0);
7553 }
7554 else
7555 {
7556 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7557 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7558 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7559 op0);
7560 }
7561 emit_move_insn (target, op1);
7562 emit_label (op0);
7563 return target;
7564
7565 case BIT_NOT_EXPR:
7566 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7567 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7568 if (temp == 0)
7569 abort ();
7570 return temp;
7571
7572 case FFS_EXPR:
7573 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7574 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7575 if (temp == 0)
7576 abort ();
7577 return temp;
7578
7579 /* ??? Can optimize bitwise operations with one arg constant.
7580 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7581 and (a bitwise1 b) bitwise2 b (etc)
7582 but that is probably not worthwhile.  */
7583
7584 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7585 boolean values when we want in all cases to compute both of them. In
7586 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7587 as actual zero-or-1 values and then bitwise anding. In cases where
7588 there cannot be any side effects, better code would be made by
7589 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7590 how to recognize those cases. */
7591
7592 case TRUTH_AND_EXPR:
7593 case BIT_AND_EXPR:
7594 this_optab = and_optab;
7595 goto binop;
7596
7597 case TRUTH_OR_EXPR:
7598 case BIT_IOR_EXPR:
7599 this_optab = ior_optab;
7600 goto binop;
7601
7602 case TRUTH_XOR_EXPR:
7603 case BIT_XOR_EXPR:
7604 this_optab = xor_optab;
7605 goto binop;
7606
7607 case LSHIFT_EXPR:
7608 case RSHIFT_EXPR:
7609 case LROTATE_EXPR:
7610 case RROTATE_EXPR:
7611 preexpand_calls (exp);
7612 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7613 subtarget = 0;
7614 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7615 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7616 unsignedp);
7617
7618 /* Could determine the answer when only additive constants differ. Also,
7619 the addition of one can be handled by changing the condition. */
7620 case LT_EXPR:
7621 case LE_EXPR:
7622 case GT_EXPR:
7623 case GE_EXPR:
7624 case EQ_EXPR:
7625 case NE_EXPR:
7626 case UNORDERED_EXPR:
7627 case ORDERED_EXPR:
7628 case UNLT_EXPR:
7629 case UNLE_EXPR:
7630 case UNGT_EXPR:
7631 case UNGE_EXPR:
7632 case UNEQ_EXPR:
7633 preexpand_calls (exp);
7634 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7635 if (temp != 0)
7636 return temp;
7637
7638 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7639 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7640 && original_target
7641 && GET_CODE (original_target) == REG
7642 && (GET_MODE (original_target)
7643 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7644 {
7645 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7646 VOIDmode, 0);
7647
7648 if (temp != original_target)
7649 temp = copy_to_reg (temp);
7650
7651 op1 = gen_label_rtx ();
7652 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7653 GET_MODE (temp), unsignedp, 0, op1);
7654 emit_move_insn (temp, const1_rtx);
7655 emit_label (op1);
7656 return temp;
7657 }
7658
7659 /* If no set-flag instruction, must generate a conditional
7660 store into a temporary variable. Drop through
7661 and handle this like && and ||. */
7662
7663 case TRUTH_ANDIF_EXPR:
7664 case TRUTH_ORIF_EXPR:
7665 if (! ignore
7666 && (target == 0 || ! safe_from_p (target, exp, 1)
7667 /* Make sure we don't have a hard reg (such as function's return
7668 value) live across basic blocks, if not optimizing. */
7669 || (!optimize && GET_CODE (target) == REG
7670 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7671 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7672
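     /* Emit: clear TARGET, evaluate the expression jumping to OP1 if it
        is false, and store 1 into TARGET on the fall-through (true) path.  */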
7673 if (target)
7674 emit_clr_insn (target);
7675
7676 op1 = gen_label_rtx ();
7677 jumpifnot (exp, op1);
7678
7679 if (target)
7680 emit_0_to_1_insn (target);
7681
7682 emit_label (op1);
7683 return ignore ? const0_rtx : target;
7684
7685 case TRUTH_NOT_EXPR:
7686 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7687 /* The parser is careful to generate TRUTH_NOT_EXPR
7688 only with operands that are always zero or one. */
7689 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7690 target, 1, OPTAB_LIB_WIDEN);
7691 if (temp == 0)
7692 abort ();
7693 return temp;
7694
7695 case COMPOUND_EXPR:
7696 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7697 emit_queue ();
7698 return expand_expr (TREE_OPERAND (exp, 1),
7699 (ignore ? const0_rtx : target),
7700 VOIDmode, 0);
7701
7702 case COND_EXPR:
7703 /* If we would have a "singleton" (see below) were it not for a
7704 conversion in each arm, bring that conversion back out. */
7705 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7706 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7707 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7708 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7709 {
7710 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7711 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7712
7713 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7714 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7715 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7716 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7717 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7718 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7719 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7720 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7721 return expand_expr (build1 (NOP_EXPR, type,
7722 build (COND_EXPR, TREE_TYPE (true),
7723 TREE_OPERAND (exp, 0),
7724 true, false)),
7725 target, tmode, modifier);
7726 }
7727
7728 {
7729 /* Note that COND_EXPRs whose type is a structure or union
7730 are required to be constructed to contain assignments of
7731 a temporary variable, so that we can evaluate them here
7732 for side effect only. If type is void, we must do likewise. */
7733
7734 /* If an arm of the branch requires a cleanup,
7735 only that cleanup is performed. */
7736
7737 tree singleton = 0;
7738 tree binary_op = 0, unary_op = 0;
7739
7740 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7741 convert it to our mode, if necessary. */
7742 if (integer_onep (TREE_OPERAND (exp, 1))
7743 && integer_zerop (TREE_OPERAND (exp, 2))
7744 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7745 {
7746 if (ignore)
7747 {
7748 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7749 ro_modifier);
7750 return const0_rtx;
7751 }
7752
7753 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7754 if (GET_MODE (op0) == mode)
7755 return op0;
7756
7757 if (target == 0)
7758 target = gen_reg_rtx (mode);
7759 convert_move (target, op0, unsignedp);
7760 return target;
7761 }
7762
7763 /* Check for X ? A + B : A. If we have this, we can copy A to the
7764 output and conditionally add B. Similarly for unary operations.
7765 Don't do this if X has side-effects because those side effects
7766 might affect A or B and the "?" operation is a sequence point in
7767 ANSI. (operand_equal_p tests for side effects.) */
7768
7769 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7770 && operand_equal_p (TREE_OPERAND (exp, 2),
7771 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7772 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7773 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7774 && operand_equal_p (TREE_OPERAND (exp, 1),
7775 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7776 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7777 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7778 && operand_equal_p (TREE_OPERAND (exp, 2),
7779 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7780 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7781 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7782 && operand_equal_p (TREE_OPERAND (exp, 1),
7783 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7784 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7785
7786 /* If we are not to produce a result, we have no target. Otherwise,
7787 if a target was specified use it; it will not be used as an
7788 intermediate target unless it is safe. If no target, use a
7789 temporary. */
7790
7791 if (ignore)
7792 temp = 0;
7793 else if (original_target
7794 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7795 || (singleton && GET_CODE (original_target) == REG
7796 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7797 && original_target == var_rtx (singleton)))
7798 && GET_MODE (original_target) == mode
7799 #ifdef HAVE_conditional_move
7800 && (! can_conditionally_move_p (mode)
7801 || GET_CODE (original_target) == REG
7802 || TREE_ADDRESSABLE (type))
7803 #endif
7804 && ! (GET_CODE (original_target) == MEM
7805 && MEM_VOLATILE_P (original_target)))
7806 temp = original_target;
7807 else if (TREE_ADDRESSABLE (type))
7808 abort ();
7809 else
7810 temp = assign_temp (type, 0, 0, 1);
7811
7812 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7813 do the test of X as a store-flag operation, do this as
7814 A + ((X != 0) << log C). Similarly for other simple binary
7815 operators. Only do for C == 1 if BRANCH_COST is low. */
7816 if (temp && singleton && binary_op
7817 && (TREE_CODE (binary_op) == PLUS_EXPR
7818 || TREE_CODE (binary_op) == MINUS_EXPR
7819 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7820 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7821 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7822 : integer_onep (TREE_OPERAND (binary_op, 1)))
7823 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7824 {
7825 rtx result;
7826 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7827 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7828 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7829 : xor_optab);
7830
7831 /* If we had X ? A : A + 1, do this as A + (X == 0).
7832
7833 We have to invert the truth value here and then put it
7834 back later if do_store_flag fails. We cannot simply copy
7835 TREE_OPERAND (exp, 0) to another variable and modify that
7836 because invert_truthvalue can modify the tree pointed to
7837 by its argument. */
7838 if (singleton == TREE_OPERAND (exp, 1))
7839 TREE_OPERAND (exp, 0)
7840 = invert_truthvalue (TREE_OPERAND (exp, 0));
7841
7842 result = do_store_flag (TREE_OPERAND (exp, 0),
7843 (safe_from_p (temp, singleton, 1)
7844 ? temp : NULL_RTX),
7845 mode, BRANCH_COST <= 1);
7846
7847 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7848 result = expand_shift (LSHIFT_EXPR, mode, result,
7849 build_int_2 (tree_log2
7850 (TREE_OPERAND
7851 (binary_op, 1)),
7852 0),
7853 (safe_from_p (temp, singleton, 1)
7854 ? temp : NULL_RTX), 0);
7855
7856 if (result)
7857 {
7858 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7859 return expand_binop (mode, boptab, op1, result, temp,
7860 unsignedp, OPTAB_LIB_WIDEN);
7861 }
7862 else if (singleton == TREE_OPERAND (exp, 1))
7863 TREE_OPERAND (exp, 0)
7864 = invert_truthvalue (TREE_OPERAND (exp, 0));
7865 }
7866
7867 do_pending_stack_adjust ();
7868 NO_DEFER_POP;
7869 op0 = gen_label_rtx ();
7870
7871 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7872 {
7873 if (temp != 0)
7874 {
7875 /* If the target conflicts with the other operand of the
7876 binary op, we can't use it. Also, we can't use the target
7877 if it is a hard register, because evaluating the condition
7878 might clobber it. */
7879 if ((binary_op
7880 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7881 || (GET_CODE (temp) == REG
7882 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7883 temp = gen_reg_rtx (mode);
7884 store_expr (singleton, temp, 0);
7885 }
7886 else
7887 expand_expr (singleton,
7888 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7889 if (singleton == TREE_OPERAND (exp, 1))
7890 jumpif (TREE_OPERAND (exp, 0), op0);
7891 else
7892 jumpifnot (TREE_OPERAND (exp, 0), op0);
7893
7894 start_cleanup_deferral ();
7895 if (binary_op && temp == 0)
7896 /* Just touch the other operand. */
7897 expand_expr (TREE_OPERAND (binary_op, 1),
7898 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7899 else if (binary_op)
7900 store_expr (build (TREE_CODE (binary_op), type,
7901 make_tree (type, temp),
7902 TREE_OPERAND (binary_op, 1)),
7903 temp, 0);
7904 else
7905 store_expr (build1 (TREE_CODE (unary_op), type,
7906 make_tree (type, temp)),
7907 temp, 0);
7908 op1 = op0;
7909 }
7910 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7911 comparison operator. If we have one of these cases, set the
7912 output to A, branch on A (cse will merge these two references),
7913 then set the output to FOO. */
7914 else if (temp
7915 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7916 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7917 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7918 TREE_OPERAND (exp, 1), 0)
7919 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7920 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7921 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7922 {
7923 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7924 temp = gen_reg_rtx (mode);
7925 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7926 jumpif (TREE_OPERAND (exp, 0), op0);
7927
7928 start_cleanup_deferral ();
7929 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7930 op1 = op0;
7931 }
7932 else if (temp
7933 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7934 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7935 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7936 TREE_OPERAND (exp, 2), 0)
7937 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7938 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7939 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7940 {
7941 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7942 temp = gen_reg_rtx (mode);
7943 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7944 jumpifnot (TREE_OPERAND (exp, 0), op0);
7945
7946 start_cleanup_deferral ();
7947 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7948 op1 = op0;
7949 }
7950 else
7951 {
7952 op1 = gen_label_rtx ();
7953 jumpifnot (TREE_OPERAND (exp, 0), op0);
7954
7955 start_cleanup_deferral ();
7956
7957 /* One branch of the cond can be void, if it never returns. For
7958 example A ? throw : E.  */
7959 if (temp != 0
7960 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7961 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7962 else
7963 expand_expr (TREE_OPERAND (exp, 1),
7964 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7965 end_cleanup_deferral ();
7966 emit_queue ();
7967 emit_jump_insn (gen_jump (op1));
7968 emit_barrier ();
7969 emit_label (op0);
7970 start_cleanup_deferral ();
7971 if (temp != 0
7972 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7973 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7974 else
7975 expand_expr (TREE_OPERAND (exp, 2),
7976 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7977 }
7978
7979 end_cleanup_deferral ();
7980
7981 emit_queue ();
7982 emit_label (op1);
7983 OK_DEFER_POP;
7984
7985 return temp;
7986 }
7987
7988 case TARGET_EXPR:
7989 {
7990 /* Something needs to be initialized, but we didn't know
7991 where that thing was when building the tree. For example,
7992 it could be the return value of a function, or a parameter
7993 to a function which is laid out on the stack, or a temporary
7994 variable which must be passed by reference.
7995
7996 We guarantee that the expression will either be constructed
7997 or copied into our original target. */
7998
7999 tree slot = TREE_OPERAND (exp, 0);
8000 tree cleanups = NULL_TREE;
8001 tree exp1;
8002
8003 if (TREE_CODE (slot) != VAR_DECL)
8004 abort ();
8005
8006 if (! ignore)
8007 target = original_target;
8008
8009 /* Set this here so that if we get a target that refers to a
8010 register variable that's already been used, put_reg_into_stack
8011 knows that it should fix up those uses. */
8012 TREE_USED (slot) = 1;
8013
8014 if (target == 0)
8015 {
8016 if (DECL_RTL (slot) != 0)
8017 {
8018 target = DECL_RTL (slot);
8019 /* If we have already expanded the slot, don't do
8020 it again.  (mrs) */
8021 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8022 return target;
8023 }
8024 else
8025 {
8026 target = assign_temp (type, 2, 0, 1);
8027 /* All temp slots at this level must not conflict. */
8028 preserve_temp_slots (target);
8029 DECL_RTL (slot) = target;
8030 if (TREE_ADDRESSABLE (slot))
8031 {
8032 TREE_ADDRESSABLE (slot) = 0;
8033 mark_addressable (slot);
8034 }
8035
8036 /* Since SLOT is not known to the called function
8037 to belong to its stack frame, we must build an explicit
8038 cleanup. This case occurs when we must build up a reference
8039 to pass the reference as an argument. In this case,
8040 it is very likely that such a reference need not be
8041 built here. */
8042
8043 if (TREE_OPERAND (exp, 2) == 0)
8044 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8045 cleanups = TREE_OPERAND (exp, 2);
8046 }
8047 }
8048 else
8049 {
8050 /* This case does occur when expanding a parameter which
8051 needs to be constructed on the stack. The target
8052 is the actual stack address that we want to initialize.
8053 The function we call will perform the cleanup in this case. */
8054
8055 /* If we have already assigned it space, use that space,
8056 not the target that we were passed in, as our target
8057 parameter is only a hint. */
8058 if (DECL_RTL (slot) != 0)
8059 {
8060 target = DECL_RTL (slot);
8061 /* If we have already expanded the slot, don't do
8062 it again.  (mrs) */
8063 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8064 return target;
8065 }
8066 else
8067 {
8068 DECL_RTL (slot) = target;
8069 /* If we must have an addressable slot, then make sure that
8070 the RTL that we just stored in slot is OK. */
8071 if (TREE_ADDRESSABLE (slot))
8072 {
8073 TREE_ADDRESSABLE (slot) = 0;
8074 mark_addressable (slot);
8075 }
8076 }
8077 }
8078
8079 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8080 /* Mark it as expanded. */
8081 TREE_OPERAND (exp, 1) = NULL_TREE;
8082
8083 store_expr (exp1, target, 0);
8084
8085 expand_decl_cleanup (NULL_TREE, cleanups);
8086
8087 return target;
8088 }
8089
8090 case INIT_EXPR:
8091 {
8092 tree lhs = TREE_OPERAND (exp, 0);
8093 tree rhs = TREE_OPERAND (exp, 1);
8094 tree noncopied_parts = 0;
8095 tree lhs_type = TREE_TYPE (lhs);
8096
8097 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8098 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8099 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8100 TYPE_NONCOPIED_PARTS (lhs_type));
8101 while (noncopied_parts != 0)
8102 {
8103 expand_assignment (TREE_VALUE (noncopied_parts),
8104 TREE_PURPOSE (noncopied_parts), 0, 0);
8105 noncopied_parts = TREE_CHAIN (noncopied_parts);
8106 }
8107 return temp;
8108 }
8109
8110 case MODIFY_EXPR:
8111 {
8112 /* If lhs is complex, expand calls in rhs before computing it.
8113 That's so we don't compute a pointer and save it over a call.
8114 If lhs is simple, compute it first so we can give it as a
8115 target if the rhs is just a call. This avoids an extra temp and copy
8116 and that prevents a partial-subsumption which makes bad code.
8117 Actually we could treat component_ref's of vars like vars. */
8118
8119 tree lhs = TREE_OPERAND (exp, 0);
8120 tree rhs = TREE_OPERAND (exp, 1);
8121 tree noncopied_parts = 0;
8122 tree lhs_type = TREE_TYPE (lhs);
8123
8124 temp = 0;
8125
8126 if (TREE_CODE (lhs) != VAR_DECL
8127 && TREE_CODE (lhs) != RESULT_DECL
8128 && TREE_CODE (lhs) != PARM_DECL
8129 && ! (TREE_CODE (lhs) == INDIRECT_REF
8130 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8131 preexpand_calls (exp);
8132
8133 /* Check for |= or &= of a bitfield of size one into another bitfield
8134 of size 1. In this case, (unless we need the result of the
8135 assignment) we can do this more efficiently with a
8136 test followed by an assignment, if necessary.
8137
8138 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8139 things change so we do, this code should be enhanced to
8140 support it. */
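     /* For a one-bit field, "x.f |= y.g" is emitted as "if (y.g) x.f = 1;"
        and "x.f &= y.g" as "if (! y.g) x.f = 0;".  */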
8141 if (ignore
8142 && TREE_CODE (lhs) == COMPONENT_REF
8143 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8144 || TREE_CODE (rhs) == BIT_AND_EXPR)
8145 && TREE_OPERAND (rhs, 0) == lhs
8146 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8147 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8148 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8149 {
8150 rtx label = gen_label_rtx ();
8151
8152 do_jump (TREE_OPERAND (rhs, 1),
8153 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8154 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8155 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8156 (TREE_CODE (rhs) == BIT_IOR_EXPR
8157 ? integer_one_node
8158 : integer_zero_node)),
8159 0, 0);
8160 do_pending_stack_adjust ();
8161 emit_label (label);
8162 return const0_rtx;
8163 }
8164
8165 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8166 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8167 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8168 TYPE_NONCOPIED_PARTS (lhs_type));
8169
8170 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8171 while (noncopied_parts != 0)
8172 {
8173 expand_assignment (TREE_PURPOSE (noncopied_parts),
8174 TREE_VALUE (noncopied_parts), 0, 0);
8175 noncopied_parts = TREE_CHAIN (noncopied_parts);
8176 }
8177 return temp;
8178 }
8179
8180 case RETURN_EXPR:
8181 if (!TREE_OPERAND (exp, 0))
8182 expand_null_return ();
8183 else
8184 expand_return (TREE_OPERAND (exp, 0));
8185 return const0_rtx;
8186
8187 case PREINCREMENT_EXPR:
8188 case PREDECREMENT_EXPR:
8189 return expand_increment (exp, 0, ignore);
8190
8191 case POSTINCREMENT_EXPR:
8192 case POSTDECREMENT_EXPR:
8193 /* Faster to treat as pre-increment if result is not used. */
8194 return expand_increment (exp, ! ignore, ignore);
8195
8196 case ADDR_EXPR:
8197 /* If nonzero, TEMP will be set to the address of something that might
8198 be a MEM corresponding to a stack slot. */
8199 temp = 0;
8200
8201 /* Are we taking the address of a nested function? */
8202 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8203 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8204 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8205 && ! TREE_STATIC (exp))
8206 {
8207 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8208 op0 = force_operand (op0, target);
8209 }
8210 /* If we are taking the address of something erroneous, just
8211 return a zero. */
8212 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8213 return const0_rtx;
8214 else
8215 {
8216 /* We make sure to pass const0_rtx down if we came in with
8217 ignore set, to avoid doing the cleanups twice for something. */
8218 op0 = expand_expr (TREE_OPERAND (exp, 0),
8219 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8220 (modifier == EXPAND_INITIALIZER
8221 ? modifier : EXPAND_CONST_ADDRESS));
8222
8223 /* If we are going to ignore the result, OP0 will have been set
8224 to const0_rtx, so just return it. Don't get confused and
8225 think we are taking the address of the constant. */
8226 if (ignore)
8227 return op0;
8228
8229 op0 = protect_from_queue (op0, 0);
8230
8231 /* We would like the object in memory. If it is a constant, we can
8232 have it be statically allocated into memory. For a non-constant,
8233 we need to allocate some memory and store the value into it. */
8234
8235 if (CONSTANT_P (op0))
8236 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8237 op0);
8238 else if (GET_CODE (op0) == MEM)
8239 {
8240 mark_temp_addr_taken (op0);
8241 temp = XEXP (op0, 0);
8242 }
8243
8244 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8245 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8246 {
8247 /* If this object is in a register, it must not
8248 be BLKmode.  */
8249 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8250 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8251
8252 mark_temp_addr_taken (memloc);
8253 emit_move_insn (memloc, op0);
8254 op0 = memloc;
8255 }
8256
8257 if (GET_CODE (op0) != MEM)
8258 abort ();
8259
8260 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8261 {
8262 temp = XEXP (op0, 0);
8263 #ifdef POINTERS_EXTEND_UNSIGNED
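     /* On targets that extend pointers unsigned, the address in TEMP may
        be in Pmode while EXP wants ptr_mode; convert it before returning.  */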
8264 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8265 && mode == ptr_mode)
8266 temp = convert_memory_address (ptr_mode, temp);
8267 #endif
8268 return temp;
8269 }
8270
8271 op0 = force_operand (XEXP (op0, 0), target);
8272 }
8273
8274 if (flag_force_addr && GET_CODE (op0) != REG)
8275 op0 = force_reg (Pmode, op0);
8276
8277 if (GET_CODE (op0) == REG
8278 && ! REG_USERVAR_P (op0))
8279 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8280
8281 /* If we might have had a temp slot, add an equivalent address
8282 for it. */
8283 if (temp != 0)
8284 update_temp_slot_address (temp, op0);
8285
8286 #ifdef POINTERS_EXTEND_UNSIGNED
8287 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8288 && mode == ptr_mode)
8289 op0 = convert_memory_address (ptr_mode, op0);
8290 #endif
8291
8292 return op0;
8293
8294 case ENTRY_VALUE_EXPR:
8295 abort ();
8296
8297 /* COMPLEX type for Extended Pascal & Fortran */
8298 case COMPLEX_EXPR:
8299 {
8300 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8301 rtx insns;
8302
8303 /* Get the rtx code of the operands. */
8304 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8305 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8306
8307 if (! target)
8308 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8309
8310 start_sequence ();
8311
8312 /* Move the real (op0) and imaginary (op1) parts to their location. */
8313 emit_move_insn (gen_realpart (mode, target), op0);
8314 emit_move_insn (gen_imagpart (mode, target), op1);
8315
8316 insns = get_insns ();
8317 end_sequence ();
8318
8319 /* Complex construction should appear as a single unit. */
8320 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8321 each with a separate pseudo as destination.
8322 It's not correct for flow to treat them as a unit. */
8323 if (GET_CODE (target) != CONCAT)
8324 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8325 else
8326 emit_insns (insns);
8327
8328 return target;
8329 }
8330
8331 case REALPART_EXPR:
8332 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8333 return gen_realpart (mode, op0);
8334
8335 case IMAGPART_EXPR:
8336 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8337 return gen_imagpart (mode, op0);
8338
8339 case CONJ_EXPR:
8340 {
8341 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8342 rtx imag_t;
8343 rtx insns;
8344
8345 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8346
8347 if (! target)
8348 target = gen_reg_rtx (mode);
8349
8350 start_sequence ();
8351
8352 /* Store the realpart and the negated imagpart to target. */
8353 emit_move_insn (gen_realpart (partmode, target),
8354 gen_realpart (partmode, op0));
8355
8356 imag_t = gen_imagpart (partmode, target);
8357 temp = expand_unop (partmode, neg_optab,
8358 gen_imagpart (partmode, op0), imag_t, 0);
8359 if (temp != imag_t)
8360 emit_move_insn (imag_t, temp);
8361
8362 insns = get_insns ();
8363 end_sequence ();
8364
8365 /* Conjugate should appear as a single unit.
8366 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8367 each with a separate pseudo as destination.
8368 It's not correct for flow to treat them as a unit. */
8369 if (GET_CODE (target) != CONCAT)
8370 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8371 else
8372 emit_insns (insns);
8373
8374 return target;
8375 }
8376
8377 case TRY_CATCH_EXPR:
8378 {
8379 tree handler = TREE_OPERAND (exp, 1);
8380
8381 expand_eh_region_start ();
8382
8383 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8384
8385 expand_eh_region_end (handler);
8386
8387 return op0;
8388 }
8389
8390 case TRY_FINALLY_EXPR:
8391 {
8392 tree try_block = TREE_OPERAND (exp, 0);
8393 tree finally_block = TREE_OPERAND (exp, 1);
8394 rtx finally_label = gen_label_rtx ();
8395 rtx done_label = gen_label_rtx ();
8396 rtx return_link = gen_reg_rtx (Pmode);
8397 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8398 (tree) finally_label, (tree) return_link);
8399 TREE_SIDE_EFFECTS (cleanup) = 1;
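     /* The finally block is emitted only once, at FINALLY_LABEL, and is
        entered like a subroutine: the GOTO_SUBROUTINE_EXPR cleanup above
        loads RETURN_LINK with the return address and jumps to it, and the
        block returns via an indirect jump through RETURN_LINK.  */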
8400
8401 /* Start a new binding layer that will keep track of all cleanup
8402 actions to be performed. */
8403 expand_start_bindings (2);
8404
8405 target_temp_slot_level = temp_slot_level;
8406
8407 expand_decl_cleanup (NULL_TREE, cleanup);
8408 op0 = expand_expr (try_block, target, tmode, modifier);
8409
8410 preserve_temp_slots (op0);
8411 expand_end_bindings (NULL_TREE, 0, 0);
8412 emit_jump (done_label);
8413 emit_label (finally_label);
8414 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8415 emit_indirect_jump (return_link);
8416 emit_label (done_label);
8417 return op0;
8418 }
8419
8420 case GOTO_SUBROUTINE_EXPR:
8421 {
8422 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8423 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8424 rtx return_address = gen_label_rtx ();
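     /* Load RETURN_LINK with the address of the label following the jump,
        jump to the subroutine, and place the return label right after.  */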
8425 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8426 emit_jump (subr);
8427 emit_label (return_address);
8428 return const0_rtx;
8429 }
8430
8431 case POPDCC_EXPR:
8432 {
8433 rtx dcc = get_dynamic_cleanup_chain ();
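     /* Pop one entry off the dynamic cleanup chain: the chain head is
        replaced by the word it currently points to.  */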
8434 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8435 return const0_rtx;
8436 }
8437
8438 case POPDHC_EXPR:
8439 {
8440 rtx dhc = get_dynamic_handler_chain ();
8441 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8442 return const0_rtx;
8443 }
8444
8445 case VA_ARG_EXPR:
8446 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8447
8448 default:
8449 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8450 }
8451
8452 /* Here to do an ordinary binary operator, generating an instruction
8453 from the optab already placed in `this_optab'. */
8454 binop:
8455 preexpand_calls (exp);
8456 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8457 subtarget = 0;
8458 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8459 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8460 binop2:
8461 temp = expand_binop (mode, this_optab, op0, op1, target,
8462 unsignedp, OPTAB_LIB_WIDEN);
8463 if (temp == 0)
8464 abort ();
8465 return temp;
8466 }
8467 \f
8468 /* Similar to expand_expr, except that we don't specify a target, target
8469 mode, or modifier and we return the alignment of the inner type. This is
8470 used in cases where it is not necessary to align the result to the
8471 alignment of its type as long as we know the alignment of the result, for
8472 example for comparisons of BLKmode values. */
8473
8474 static rtx
8475 expand_expr_unaligned (exp, palign)
8476 register tree exp;
8477 unsigned int *palign;
8478 {
8479 register rtx op0;
8480 tree type = TREE_TYPE (exp);
8481 register enum machine_mode mode = TYPE_MODE (type);
8482
8483 /* Default the alignment we return to that of the type. */
8484 *palign = TYPE_ALIGN (type);
8485
8486 /* The only cases in which we do anything special is if the resulting mode
8487 is BLKmode. */
8488 if (mode != BLKmode)
8489 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8490
8491 switch (TREE_CODE (exp))
8492 {
8493 case CONVERT_EXPR:
8494 case NOP_EXPR:
8495 case NON_LVALUE_EXPR:
8496 /* Conversions between BLKmode values don't change the underlying
8497 alignment or value. */
8498 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8499 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8500 break;
8501
8502 case ARRAY_REF:
8503 /* Much of the code for this case is copied directly from expand_expr.
8504 We need to duplicate it here because we will do something different
8505 in the fall-through case, so we need to handle the same exceptions
8506 it does. */
8507 {
8508 tree array = TREE_OPERAND (exp, 0);
8509 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8510 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8511 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8512 HOST_WIDE_INT i;
8513
8514 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8515 abort ();
8516
8517 /* Optimize the special case of a zero lower bound.
8518
8519 We convert the low_bound to sizetype to avoid some problems
8520 with constant folding. (E.g. suppose the lower bound is 1,
8521 and its mode is QI. Without the conversion, (ARRAY
8522 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8523 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8524
8525 if (! integer_zerop (low_bound))
8526 index = size_diffop (index, convert (sizetype, low_bound));
8527
8528 /* If this is a constant index into a constant array,
8529 just get the value from the array. Handle both the cases when
8530 we have an explicit constructor and when our operand is a variable
8531 that was declared const. */
8532
8533 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8534 && 0 > compare_tree_int (index,
8535 list_length (CONSTRUCTOR_ELTS
8536 (TREE_OPERAND (exp, 0)))))
8537 {
8538 tree elem;
8539
8540 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8541 i = TREE_INT_CST_LOW (index);
8542 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8543 ;
8544
8545 if (elem)
8546 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8547 }
8548
8549 else if (optimize >= 1
8550 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8551 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8552 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8553 {
8554 if (TREE_CODE (index) == INTEGER_CST)
8555 {
8556 tree init = DECL_INITIAL (array);
8557
8558 if (TREE_CODE (init) == CONSTRUCTOR)
8559 {
8560 tree elem;
8561
8562 for (elem = CONSTRUCTOR_ELTS (init);
8563 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8564 elem = TREE_CHAIN (elem))
8565 ;
8566
8567 if (elem)
8568 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8569 palign);
8570 }
8571 }
8572 }
8573 }
8574
8575 /* ... fall through ... */
8576
8577 case COMPONENT_REF:
8578 case BIT_FIELD_REF:
8579 /* If the operand is a CONSTRUCTOR, we can just extract the
8580 appropriate field if it is present. Don't do this if we have
8581 already written the data since we want to refer to that copy
8582 and varasm.c assumes that's what we'll do. */
8583 if (TREE_CODE (exp) != ARRAY_REF
8584 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8585 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8586 {
8587 tree elt;
8588
8589 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8590 elt = TREE_CHAIN (elt))
8591 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8592 /* Note that unlike the case in expand_expr, we know this is
8593 BLKmode and hence not an integer. */
8594 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8595 }
8596
8597 {
8598 enum machine_mode mode1;
8599 HOST_WIDE_INT bitsize, bitpos;
8600 tree offset;
8601 int volatilep = 0;
8602 unsigned int alignment;
8603 int unsignedp;
8604 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8605 &mode1, &unsignedp, &volatilep,
8606 &alignment);
8607
8608 /* If we got back the original object, something is wrong. Perhaps
8609 we are evaluating an expression too early. In any event, don't
8610 infinitely recurse. */
8611 if (tem == exp)
8612 abort ();
8613
8614 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8615
8616 /* If this is a constant, put it into a register if it is a
8617 legitimate constant and OFFSET is 0 and memory if it isn't. */
8618 if (CONSTANT_P (op0))
8619 {
8620 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8621
8622 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8623 && offset == 0)
8624 op0 = force_reg (inner_mode, op0);
8625 else
8626 op0 = validize_mem (force_const_mem (inner_mode, op0));
8627 }
8628
8629 if (offset != 0)
8630 {
8631 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8632
8633 /* If this object is in a register, put it into memory.
8634 This case can't occur in C, but can in Ada if we have
8635 unchecked conversion of an expression from a scalar type to
8636 an array or record type. */
8637 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8638 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8639 {
8640 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8641
8642 mark_temp_addr_taken (memloc);
8643 emit_move_insn (memloc, op0);
8644 op0 = memloc;
8645 }
8646
8647 if (GET_CODE (op0) != MEM)
8648 abort ();
8649
8650 if (GET_MODE (offset_rtx) != ptr_mode)
8651 {
8652 #ifdef POINTERS_EXTEND_UNSIGNED
8653 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8654 #else
8655 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8656 #endif
8657 }
8658
8659 op0 = change_address (op0, VOIDmode,
8660 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8661 force_reg (ptr_mode,
8662 offset_rtx)));
8663 }
8664
8665 /* Don't forget about volatility even if this is a bitfield. */
8666 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8667 {
8668 op0 = copy_rtx (op0);
8669 MEM_VOLATILE_P (op0) = 1;
8670 }
8671
8672 /* Check the access. */
8673 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8674 {
8675 rtx to;
8676 int size;
8677
8678 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8679 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8680
8681 /* Check the access right of the pointer. */
8682 if (size > BITS_PER_UNIT)
8683 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8684 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8685 TYPE_MODE (sizetype),
8686 GEN_INT (MEMORY_USE_RO),
8687 TYPE_MODE (integer_type_node));
8688 }
8689
8690 /* In cases where an aligned union has an unaligned object
8691 as a field, we might be extracting a BLKmode value from
8692 an integer-mode (e.g., SImode) object. Handle this case
8693 by doing the extract into an object as wide as the field
8694 (which we know to be the width of a basic mode), then
8695 storing into memory, and changing the mode to BLKmode.
8696 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8697 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8698 if (mode1 == VOIDmode
8699 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8700 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8701 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8702 || bitpos % TYPE_ALIGN (type) != 0)))
8703 {
8704 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8705
8706 if (ext_mode == BLKmode)
8707 {
8708 /* In this case, BITPOS must start at a byte boundary. */
8709 if (GET_CODE (op0) != MEM
8710 || bitpos % BITS_PER_UNIT != 0)
8711 abort ();
8712
8713 op0 = change_address (op0, VOIDmode,
8714 plus_constant (XEXP (op0, 0),
8715 bitpos / BITS_PER_UNIT));
8716 }
8717 else
8718 {
8719 rtx new = assign_stack_temp (ext_mode,
8720 bitsize / BITS_PER_UNIT, 0);
8721
8722 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8723 unsignedp, NULL_RTX, ext_mode,
8724 ext_mode, alignment,
8725 int_size_in_bytes (TREE_TYPE (tem)));
8726
8727 /* If the result is a record type and BITSIZE is narrower than
8728 the mode of OP0, an integral mode, and this is a big endian
8729 machine, we must put the field into the high-order bits. */
8730 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8731 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8732 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8733 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8734 size_int (GET_MODE_BITSIZE
8735 (GET_MODE (op0))
8736 - bitsize),
8737 op0, 1);
8738
8740 emit_move_insn (new, op0);
8741 op0 = copy_rtx (new);
8742 PUT_MODE (op0, BLKmode);
8743 }
8744 }
8745 else
8746 /* Get a reference to just this component. */
8747 op0 = change_address (op0, mode1,
8748 plus_constant (XEXP (op0, 0),
8749 (bitpos / BITS_PER_UNIT)));
8750
8751 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8752
8753 /* Adjust the alignment in case the bit position is not
8754 a multiple of the alignment of the inner object. */
8755 while (bitpos % alignment != 0)
8756 alignment >>= 1;
8757
8758 if (GET_CODE (XEXP (op0, 0)) == REG)
8759 mark_reg_pointer (XEXP (op0, 0), alignment);
8760
8761 MEM_IN_STRUCT_P (op0) = 1;
8762 MEM_VOLATILE_P (op0) |= volatilep;
8763
8764 *palign = alignment;
8765 return op0;
8766 }
8767
8768 default:
8769 break;
8770
8771 }
8772
8773 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8774 }
8775 \f
8776 /* Return the tree node if ARG corresponds to a string constant or zero
8777 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8778 in bytes within the string that ARG is accessing. The type of the
8779 offset will be `sizetype'. */
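/* For example, for an argument of the form `"hello" + 2' (a PLUS_EXPR of
   the address of a STRING_CST and a constant), this returns the STRING_CST
   for "hello" and sets *PTR_OFFSET to (sizetype) 2; the address of a plain
   string literal yields an offset of zero.  */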
8780
8781 tree
8782 string_constant (arg, ptr_offset)
8783 tree arg;
8784 tree *ptr_offset;
8785 {
8786 STRIP_NOPS (arg);
8787
8788 if (TREE_CODE (arg) == ADDR_EXPR
8789 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8790 {
8791 *ptr_offset = size_zero_node;
8792 return TREE_OPERAND (arg, 0);
8793 }
8794 else if (TREE_CODE (arg) == PLUS_EXPR)
8795 {
8796 tree arg0 = TREE_OPERAND (arg, 0);
8797 tree arg1 = TREE_OPERAND (arg, 1);
8798
8799 STRIP_NOPS (arg0);
8800 STRIP_NOPS (arg1);
8801
8802 if (TREE_CODE (arg0) == ADDR_EXPR
8803 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8804 {
8805 *ptr_offset = convert (sizetype, arg1);
8806 return TREE_OPERAND (arg0, 0);
8807 }
8808 else if (TREE_CODE (arg1) == ADDR_EXPR
8809 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8810 {
8811 *ptr_offset = convert (sizetype, arg0);
8812 return TREE_OPERAND (arg1, 0);
8813 }
8814 }
8815
8816 return 0;
8817 }
8818 \f
8819 /* Expand code for a post- or pre- increment or decrement
8820 and return the RTX for the result.
8821 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
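/* For example, for `i++' whose value is used, POST is 1: the value returned
   is the old value of I, while the addition itself may only be queued and
   emitted later.  For `++i' (POST is 0) the incremented value is returned.  */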
8822
8823 static rtx
8824 expand_increment (exp, post, ignore)
8825 register tree exp;
8826 int post, ignore;
8827 {
8828 register rtx op0, op1;
8829 register rtx temp, value;
8830 register tree incremented = TREE_OPERAND (exp, 0);
8831 optab this_optab = add_optab;
8832 int icode;
8833 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8834 int op0_is_copy = 0;
8835 int single_insn = 0;
8836 /* 1 means we can't store into OP0 directly,
8837 because it is a subreg narrower than a word,
8838 and we don't dare clobber the rest of the word. */
8839 int bad_subreg = 0;
8840
8841 /* Stabilize any component ref that might need to be
8842 evaluated more than once below. */
8843 if (!post
8844 || TREE_CODE (incremented) == BIT_FIELD_REF
8845 || (TREE_CODE (incremented) == COMPONENT_REF
8846 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8847 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8848 incremented = stabilize_reference (incremented);
8849 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8850 ones into save exprs so that they don't accidentally get evaluated
8851 more than once by the code below. */
8852 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8853 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8854 incremented = save_expr (incremented);
8855
8856 /* Compute the operands as RTX.
8857 Note whether OP0 is the actual lvalue or a copy of it:
8858 I believe it is a copy iff it is a register or subreg
8859 and insns were generated in computing it. */
8860
8861 temp = get_last_insn ();
8862 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8863
8864 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8865 in place but instead must do sign- or zero-extension during assignment,
8866 so we copy it into a new register and let the code below use it as
8867 a copy.
8868
8869 Note that we can safely modify this SUBREG since it is known not to be
8870 shared (it was made by the expand_expr call above). */
8871
8872 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8873 {
8874 if (post)
8875 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8876 else
8877 bad_subreg = 1;
8878 }
8879 else if (GET_CODE (op0) == SUBREG
8880 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8881 {
8882 /* We cannot increment this SUBREG in place. If we are
8883 post-incrementing, get a copy of the old value. Otherwise,
8884 just mark that we cannot increment in place. */
8885 if (post)
8886 op0 = copy_to_reg (op0);
8887 else
8888 bad_subreg = 1;
8889 }
8890
8891 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8892 && temp != get_last_insn ());
8893 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8894 EXPAND_MEMORY_USE_BAD);
8895
8896 /* Decide whether incrementing or decrementing. */
8897 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8898 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8899 this_optab = sub_optab;
8900
8901 /* Convert decrement by a constant into a negative increment. */
8902 if (this_optab == sub_optab
8903 && GET_CODE (op1) == CONST_INT)
8904 {
8905 op1 = GEN_INT (- INTVAL (op1));
8906 this_optab = add_optab;
8907 }
8908
8909 /* For a preincrement, see if we can do this with a single instruction. */
8910 if (!post)
8911 {
8912 icode = (int) this_optab->handlers[(int) mode].insn_code;
8913 if (icode != (int) CODE_FOR_nothing
8914 /* Make sure that OP0 is valid for operands 0 and 1
8915 of the insn we want to queue. */
8916 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8917 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8918 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8919 single_insn = 1;
8920 }
8921
8922 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8923 then we cannot just increment OP0. We must therefore contrive to
8924 increment the original value. Then, for postincrement, we can return
8925 OP0 since it is a copy of the old value. For preincrement, expand here
8926 unless we can do it with a single insn.
8927
8928 Likewise if storing directly into OP0 would clobber high bits
8929 we need to preserve (bad_subreg). */
8930 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8931 {
8932 /* This is the easiest way to increment the value wherever it is.
8933 Problems with multiple evaluation of INCREMENTED are prevented
8934 because either (1) it is a component_ref or preincrement,
8935 in which case it was stabilized above, or (2) it is an array_ref
8936 with constant index in an array in a register, which is
8937 safe to reevaluate. */
8938 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8939 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8940 ? MINUS_EXPR : PLUS_EXPR),
8941 TREE_TYPE (exp),
8942 incremented,
8943 TREE_OPERAND (exp, 1));
8944
8945 while (TREE_CODE (incremented) == NOP_EXPR
8946 || TREE_CODE (incremented) == CONVERT_EXPR)
8947 {
8948 newexp = convert (TREE_TYPE (incremented), newexp);
8949 incremented = TREE_OPERAND (incremented, 0);
8950 }
8951
8952 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8953 return post ? op0 : temp;
8954 }
8955
8956 if (post)
8957 {
8958 /* We have a true reference to the value in OP0.
8959 If there is an insn to add or subtract in this mode, queue it.
8960 Queueing the increment insn avoids the register shuffling
8961 that often results if we must increment now and first save
8962 the old value for subsequent use. */
8963
8964 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8965 op0 = stabilize (op0);
8966 #endif
8967
8968 icode = (int) this_optab->handlers[(int) mode].insn_code;
8969 if (icode != (int) CODE_FOR_nothing
8970 /* Make sure that OP0 is valid for operands 0 and 1
8971 of the insn we want to queue. */
8972 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8973 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8974 {
8975 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8976 op1 = force_reg (mode, op1);
8977
8978 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8979 }
8980 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8981 {
8982 rtx addr = (general_operand (XEXP (op0, 0), mode)
8983 ? force_reg (Pmode, XEXP (op0, 0))
8984 : copy_to_reg (XEXP (op0, 0)));
8985 rtx temp, result;
8986
8987 op0 = change_address (op0, VOIDmode, addr);
8988 temp = force_reg (GET_MODE (op0), op0);
8989 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8990 op1 = force_reg (mode, op1);
8991
8992 /* The increment queue is LIFO, thus we have to `queue'
8993 the instructions in reverse order. */
8994 enqueue_insn (op0, gen_move_insn (op0, temp));
8995 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8996 return result;
8997 }
8998 }
8999
9000 /* Preincrement, or we can't increment with one simple insn. */
9001 if (post)
9002 /* Save a copy of the value before inc or dec, to return it later. */
9003 temp = value = copy_to_reg (op0);
9004 else
9005 /* Arrange to return the incremented value. */
9006 /* Copy the rtx because expand_binop will protect from the queue,
9007 and the results of that would be invalid for us to return
9008 if our caller does emit_queue before using our result. */
9009 temp = copy_rtx (value = op0);
9010
9011 /* Increment however we can. */
9012 op1 = expand_binop (mode, this_optab, value, op1,
9013 current_function_check_memory_usage ? NULL_RTX : op0,
9014 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9015 /* Make sure the value is stored into OP0. */
9016 if (op1 != op0)
9017 emit_move_insn (op0, op1);
9018
9019 return temp;
9020 }
9021 \f
9022 /* Expand all function calls contained within EXP, innermost ones first.
9023 But don't look within expressions that have sequence points.
9024 For each CALL_EXPR, record the rtx for its value
9025 in the CALL_EXPR_RTL field. */
9026
9027 static void
9028 preexpand_calls (exp)
9029 tree exp;
9030 {
9031 register int nops, i;
9032 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9033
9034 if (! do_preexpand_calls)
9035 return;
9036
9037 /* Only expressions and references can contain calls. */
9038
9039 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9040 return;
9041
9042 switch (TREE_CODE (exp))
9043 {
9044 case CALL_EXPR:
9045 /* Do nothing if already expanded. */
9046 if (CALL_EXPR_RTL (exp) != 0
9047 /* Do nothing if the call returns a variable-sized object. */
9048 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9049 /* Do nothing to built-in functions. */
9050 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9051 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9052 == FUNCTION_DECL)
9053 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9054 return;
9055
9056 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9057 return;
9058
9059 case COMPOUND_EXPR:
9060 case COND_EXPR:
9061 case TRUTH_ANDIF_EXPR:
9062 case TRUTH_ORIF_EXPR:
9063 /* If we find one of these, then we can be sure
9064 the adjust will be done for it (since it makes jumps).
9065 Do it now, so that if this is inside an argument
9066 of a function, we don't get the stack adjustment
9067 after some other args have already been pushed. */
9068 do_pending_stack_adjust ();
9069 return;
9070
9071 case BLOCK:
9072 case RTL_EXPR:
9073 case WITH_CLEANUP_EXPR:
9074 case CLEANUP_POINT_EXPR:
9075 case TRY_CATCH_EXPR:
9076 return;
9077
9078 case SAVE_EXPR:
9079 if (SAVE_EXPR_RTL (exp) != 0)
9080 return;
9081
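/* ... fall through ... */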
9082 default:
9083 break;
9084 }
9085
9086 nops = tree_code_length[(int) TREE_CODE (exp)];
9087 for (i = 0; i < nops; i++)
9088 if (TREE_OPERAND (exp, i) != 0)
9089 {
9090 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9091 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9092 It doesn't happen before the call is made. */
9093 ;
9094 else
9095 {
9096 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9097 if (type == 'e' || type == '<' || type == '1' || type == '2'
9098 || type == 'r')
9099 preexpand_calls (TREE_OPERAND (exp, i));
9100 }
9101 }
9102 }
9103 \f
9104 /* At the start of a function, record that we have no previously-pushed
9105 arguments waiting to be popped. */
9106
9107 void
9108 init_pending_stack_adjust ()
9109 {
9110 pending_stack_adjust = 0;
9111 }
9112
9113 /* When exiting from function, if safe, clear out any pending stack adjust
9114 so the adjustment won't get done.
9115
9116 Note, if the current function calls alloca, then it must have a
9117 frame pointer regardless of the value of flag_omit_frame_pointer. */
9118
9119 void
9120 clear_pending_stack_adjust ()
9121 {
9122 #ifdef EXIT_IGNORE_STACK
9123 if (optimize > 0
9124 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9125 && EXIT_IGNORE_STACK
9126 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9127 && ! flag_inline_functions)
9128 pending_stack_adjust = 0;
9129 #endif
9130 }
9131
9132 /* Pop any previously-pushed arguments that have not been popped yet. */
9133
9134 void
9135 do_pending_stack_adjust ()
9136 {
9137 if (inhibit_defer_pop == 0)
9138 {
9139 if (pending_stack_adjust != 0)
9140 adjust_stack (GEN_INT (pending_stack_adjust));
9141 pending_stack_adjust = 0;
9142 }
9143 }
9144 \f
9145 /* Expand conditional expressions. */
9146
9147 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9148 LABEL is an rtx of code CODE_LABEL, in this function and all the
9149 functions here. */
9150
9151 void
9152 jumpifnot (exp, label)
9153 tree exp;
9154 rtx label;
9155 {
9156 do_jump (exp, label, NULL_RTX);
9157 }
9158
9159 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9160
9161 void
9162 jumpif (exp, label)
9163 tree exp;
9164 rtx label;
9165 {
9166 do_jump (exp, NULL_RTX, label);
9167 }
9168
9169 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9170 the result is zero, or IF_TRUE_LABEL if the result is one.
9171 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9172 meaning fall through in that case.
9173
9174 do_jump always does any pending stack adjust except when it does not
9175 actually perform a jump. An example where there is no jump
9176 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9177
9178 This function is responsible for optimizing cases such as
9179 &&, || and comparison operators in EXP. */
9180
9181 void
9182 do_jump (exp, if_false_label, if_true_label)
9183 tree exp;
9184 rtx if_false_label, if_true_label;
9185 {
9186 register enum tree_code code = TREE_CODE (exp);
9187 /* Some cases need to create a label to jump to
9188 in order to properly fall through.
9189 These cases set DROP_THROUGH_LABEL nonzero. */
9190 rtx drop_through_label = 0;
9191 rtx temp;
9192 int i;
9193 tree type;
9194 enum machine_mode mode;
9195
9196 #ifdef MAX_INTEGER_COMPUTATION_MODE
9197 check_max_integer_computation_mode (exp);
9198 #endif
9199
9200 emit_queue ();
9201
9202 switch (code)
9203 {
9204 case ERROR_MARK:
9205 break;
9206
9207 case INTEGER_CST:
9208 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9209 if (temp)
9210 emit_jump (temp);
9211 break;
9212
9213 #if 0
9214 /* This is not true with #pragma weak */
9215 case ADDR_EXPR:
9216 /* The address of something can never be zero. */
9217 if (if_true_label)
9218 emit_jump (if_true_label);
9219 break;
9220 #endif
9221
9222 case NOP_EXPR:
9223 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9224 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9225 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9226 goto normal;
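/* ... fall through ... */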
9227 case CONVERT_EXPR:
9228 /* If we are narrowing the operand, we have to do the compare in the
9229 narrower mode. */
9230 if ((TYPE_PRECISION (TREE_TYPE (exp))
9231 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9232 goto normal;
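/* ... fall through ... */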
9233 case NON_LVALUE_EXPR:
9234 case REFERENCE_EXPR:
9235 case ABS_EXPR:
9236 case NEGATE_EXPR:
9237 case LROTATE_EXPR:
9238 case RROTATE_EXPR:
9239 /* These cannot change zero->non-zero or vice versa. */
9240 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9241 break;
9242
9243 case WITH_RECORD_EXPR:
9244 /* Put the object on the placeholder list, recurse through our first
9245 operand, and pop the list. */
9246 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9247 placeholder_list);
9248 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9249 placeholder_list = TREE_CHAIN (placeholder_list);
9250 break;
9251
9252 #if 0
9253 /* This is never less insns than evaluating the PLUS_EXPR followed by
9254 a test and can be longer if the test is eliminated. */
9255 case PLUS_EXPR:
9256 /* Reduce to minus. */
9257 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9258 TREE_OPERAND (exp, 0),
9259 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9260 TREE_OPERAND (exp, 1))));
9261 /* Process as MINUS. */
9262 #endif
9263
9264 case MINUS_EXPR:
9265 /* Non-zero iff operands of minus differ. */
9266 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9267 TREE_OPERAND (exp, 0),
9268 TREE_OPERAND (exp, 1)),
9269 NE, NE, if_false_label, if_true_label);
9270 break;
9271
9272 case BIT_AND_EXPR:
9273 /* If we are AND'ing with a small constant, do this comparison in the
9274 smallest type that fits. If the machine doesn't have comparisons
9275 that small, it will be converted back to the wider comparison.
9276 This helps if we are testing the sign bit of a narrower object.
9277 combine can't do this for us because it can't know whether a
9278 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
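/* For instance, a test like `if (x & 0x80)' with X a full int can be done
   as a QImode comparison of `(unsigned char) (x & 0x80)' against zero when
   the target has QImode compare insns.  */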
9279
9280 if (! SLOW_BYTE_ACCESS
9281 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9282 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9283 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9284 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9285 && (type = type_for_mode (mode, 1)) != 0
9286 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9287 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9288 != CODE_FOR_nothing))
9289 {
9290 do_jump (convert (type, exp), if_false_label, if_true_label);
9291 break;
9292 }
9293 goto normal;
9294
9295 case TRUTH_NOT_EXPR:
9296 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9297 break;
9298
9299 case TRUTH_ANDIF_EXPR:
9300 if (if_false_label == 0)
9301 if_false_label = drop_through_label = gen_label_rtx ();
9302 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9303 start_cleanup_deferral ();
9304 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9305 end_cleanup_deferral ();
9306 break;
9307
9308 case TRUTH_ORIF_EXPR:
9309 if (if_true_label == 0)
9310 if_true_label = drop_through_label = gen_label_rtx ();
9311 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9312 start_cleanup_deferral ();
9313 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9314 end_cleanup_deferral ();
9315 break;
9316
9317 case COMPOUND_EXPR:
9318 push_temp_slots ();
9319 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9320 preserve_temp_slots (NULL_RTX);
9321 free_temp_slots ();
9322 pop_temp_slots ();
9323 emit_queue ();
9324 do_pending_stack_adjust ();
9325 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9326 break;
9327
9328 case COMPONENT_REF:
9329 case BIT_FIELD_REF:
9330 case ARRAY_REF:
9331 {
9332 HOST_WIDE_INT bitsize, bitpos;
9333 int unsignedp;
9334 enum machine_mode mode;
9335 tree type;
9336 tree offset;
9337 int volatilep = 0;
9338 unsigned int alignment;
9339
9340 /* Get description of this reference. We don't actually care
9341 about the underlying object here. */
9342 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9343 &mode, &unsignedp, &volatilep,
9344 &alignment);
9345
9346 type = type_for_size (bitsize, unsignedp);
9347 if (! SLOW_BYTE_ACCESS
9348 && type != 0 && bitsize >= 0
9349 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9350 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9351 != CODE_FOR_nothing))
9352 {
9353 do_jump (convert (type, exp), if_false_label, if_true_label);
9354 break;
9355 }
9356 goto normal;
9357 }
9358
9359 case COND_EXPR:
9360 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9361 if (integer_onep (TREE_OPERAND (exp, 1))
9362 && integer_zerop (TREE_OPERAND (exp, 2)))
9363 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9364
9365 else if (integer_zerop (TREE_OPERAND (exp, 1))
9366 && integer_onep (TREE_OPERAND (exp, 2)))
9367 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9368
9369 else
9370 {
9371 register rtx label1 = gen_label_rtx ();
9372 drop_through_label = gen_label_rtx ();
9373
9374 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9375
9376 start_cleanup_deferral ();
9377 /* Now the THEN-expression. */
9378 do_jump (TREE_OPERAND (exp, 1),
9379 if_false_label ? if_false_label : drop_through_label,
9380 if_true_label ? if_true_label : drop_through_label);
9381 /* In case the do_jump just above never jumps. */
9382 do_pending_stack_adjust ();
9383 emit_label (label1);
9384
9385 /* Now the ELSE-expression. */
9386 do_jump (TREE_OPERAND (exp, 2),
9387 if_false_label ? if_false_label : drop_through_label,
9388 if_true_label ? if_true_label : drop_through_label);
9389 end_cleanup_deferral ();
9390 }
9391 break;
9392
9393 case EQ_EXPR:
9394 {
9395 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9396
9397 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9398 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9399 {
9400 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9401 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9402 do_jump
9403 (fold
9404 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9405 fold (build (EQ_EXPR, TREE_TYPE (exp),
9406 fold (build1 (REALPART_EXPR,
9407 TREE_TYPE (inner_type),
9408 exp0)),
9409 fold (build1 (REALPART_EXPR,
9410 TREE_TYPE (inner_type),
9411 exp1)))),
9412 fold (build (EQ_EXPR, TREE_TYPE (exp),
9413 fold (build1 (IMAGPART_EXPR,
9414 TREE_TYPE (inner_type),
9415 exp0)),
9416 fold (build1 (IMAGPART_EXPR,
9417 TREE_TYPE (inner_type),
9418 exp1)))))),
9419 if_false_label, if_true_label);
9420 }
9421
9422 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9423 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9424
9425 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9426 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9427 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9428 else
9429 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9430 break;
9431 }
9432
9433 case NE_EXPR:
9434 {
9435 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9436
9437 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9438 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9439 {
9440 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9441 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9442 do_jump
9443 (fold
9444 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9445 fold (build (NE_EXPR, TREE_TYPE (exp),
9446 fold (build1 (REALPART_EXPR,
9447 TREE_TYPE (inner_type),
9448 exp0)),
9449 fold (build1 (REALPART_EXPR,
9450 TREE_TYPE (inner_type),
9451 exp1)))),
9452 fold (build (NE_EXPR, TREE_TYPE (exp),
9453 fold (build1 (IMAGPART_EXPR,
9454 TREE_TYPE (inner_type),
9455 exp0)),
9456 fold (build1 (IMAGPART_EXPR,
9457 TREE_TYPE (inner_type),
9458 exp1)))))),
9459 if_false_label, if_true_label);
9460 }
9461
9462 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9463 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9464
9465 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9466 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9467 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9468 else
9469 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9470 break;
9471 }
9472
9473 case LT_EXPR:
9474 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9475 if (GET_MODE_CLASS (mode) == MODE_INT
9476 && ! can_compare_p (LT, mode, ccp_jump))
9477 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9478 else
9479 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9480 break;
9481
9482 case LE_EXPR:
9483 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9484 if (GET_MODE_CLASS (mode) == MODE_INT
9485 && ! can_compare_p (LE, mode, ccp_jump))
9486 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9487 else
9488 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9489 break;
9490
9491 case GT_EXPR:
9492 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9493 if (GET_MODE_CLASS (mode) == MODE_INT
9494 && ! can_compare_p (GT, mode, ccp_jump))
9495 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9496 else
9497 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9498 break;
9499
9500 case GE_EXPR:
9501 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9502 if (GET_MODE_CLASS (mode) == MODE_INT
9503 && ! can_compare_p (GE, mode, ccp_jump))
9504 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9505 else
9506 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9507 break;
9508
9509 case UNORDERED_EXPR:
9510 case ORDERED_EXPR:
9511 {
9512 enum rtx_code cmp, rcmp;
9513 int do_rev;
9514
9515 if (code == UNORDERED_EXPR)
9516 cmp = UNORDERED, rcmp = ORDERED;
9517 else
9518 cmp = ORDERED, rcmp = UNORDERED;
9519 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9520
9521 do_rev = 0;
9522 if (! can_compare_p (cmp, mode, ccp_jump)
9523 && (can_compare_p (rcmp, mode, ccp_jump)
9524 /* If the target doesn't provide either UNORDERED or ORDERED
9525 comparisons, canonicalize on UNORDERED for the library. */
9526 || rcmp == UNORDERED))
9527 do_rev = 1;
9528
9529 if (! do_rev)
9530 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9531 else
9532 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9533 }
9534 break;
9535
9536 {
9537 enum rtx_code rcode1;
9538 enum tree_code tcode2;
9539
9540 case UNLT_EXPR:
9541 rcode1 = UNLT;
9542 tcode2 = LT_EXPR;
9543 goto unordered_bcc;
9544 case UNLE_EXPR:
9545 rcode1 = UNLE;
9546 tcode2 = LE_EXPR;
9547 goto unordered_bcc;
9548 case UNGT_EXPR:
9549 rcode1 = UNGT;
9550 tcode2 = GT_EXPR;
9551 goto unordered_bcc;
9552 case UNGE_EXPR:
9553 rcode1 = UNGE;
9554 tcode2 = GE_EXPR;
9555 goto unordered_bcc;
9556 case UNEQ_EXPR:
9557 rcode1 = UNEQ;
9558 tcode2 = EQ_EXPR;
9559 goto unordered_bcc;
9560
9561 unordered_bcc:
9562 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9563 if (can_compare_p (rcode1, mode, ccp_jump))
9564 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9565 if_true_label);
9566 else
9567 {
9568 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9569 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9570 tree cmp0, cmp1;
9571
9572 /* If the target doesn't support combined unordered
9573 compares, decompose into UNORDERED + comparison. */
9574 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9575 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9576 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9577 do_jump (exp, if_false_label, if_true_label);
9578 }
9579 }
9580 break;
9581
9582 default:
9583 normal:
9584 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9585 #if 0
9586 /* This is not needed any more and causes poor code since it causes
9587 comparisons and tests from non-SI objects to have different code
9588 sequences. */
9589 /* Copy to register to avoid generating bad insns by cse
9590 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9591 if (!cse_not_expected && GET_CODE (temp) == MEM)
9592 temp = copy_to_reg (temp);
9593 #endif
9594 do_pending_stack_adjust ();
9595 /* Do any postincrements in the expression that was tested. */
9596 emit_queue ();
9597
9598 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9599 {
9600 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9601 if (target)
9602 emit_jump (target);
9603 }
9604 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9605 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9606 /* Note swapping the labels gives us not-equal. */
9607 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9608 else if (GET_MODE (temp) != VOIDmode)
9609 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9610 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9611 GET_MODE (temp), NULL_RTX, 0,
9612 if_false_label, if_true_label);
9613 else
9614 abort ();
9615 }
9616
9617 if (drop_through_label)
9618 {
9619 /* If do_jump produces code that might be jumped around,
9620 do any stack adjusts from that code, before the place
9621 where control merges in. */
9622 do_pending_stack_adjust ();
9623 emit_label (drop_through_label);
9624 }
9625 }
9626 \f
9627 /* Given a comparison expression EXP for values too wide to be compared
9628 with one insn, test the comparison and jump to the appropriate label.
9629 The code of EXP is ignored; we always test GT if SWAP is 0,
9630 and LT if SWAP is 1. */
9631
9632 static void
9633 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9634 tree exp;
9635 int swap;
9636 rtx if_false_label, if_true_label;
9637 {
9638 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9639 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9640 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9641 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9642
9643 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9644 }
9645
9646 /* Compare OP0 with OP1, word at a time, in mode MODE.
9647 UNSIGNEDP says to do unsigned comparison.
9648 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
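/* For example, for two DImode values on a 32-bit target this compares the
   high-order words first (signed or unsigned as requested); only if they
   are equal does it go on to the low-order words, which are always
   compared unsigned.  */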
9649
9650 void
9651 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9652 enum machine_mode mode;
9653 int unsignedp;
9654 rtx op0, op1;
9655 rtx if_false_label, if_true_label;
9656 {
9657 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9658 rtx drop_through_label = 0;
9659 int i;
9660
9661 if (! if_true_label || ! if_false_label)
9662 drop_through_label = gen_label_rtx ();
9663 if (! if_true_label)
9664 if_true_label = drop_through_label;
9665 if (! if_false_label)
9666 if_false_label = drop_through_label;
9667
9668 /* Compare a word at a time, high order first. */
9669 for (i = 0; i < nwords; i++)
9670 {
9671 rtx op0_word, op1_word;
9672
9673 if (WORDS_BIG_ENDIAN)
9674 {
9675 op0_word = operand_subword_force (op0, i, mode);
9676 op1_word = operand_subword_force (op1, i, mode);
9677 }
9678 else
9679 {
9680 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9681 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9682 }
9683
9684 /* All but the high-order word must be compared as unsigned. */
9685 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9686 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9687 NULL_RTX, if_true_label);
9688
9689 /* Consider lower words only if these are equal. */
9690 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9691 NULL_RTX, 0, NULL_RTX, if_false_label);
9692 }
9693
9694 if (if_false_label)
9695 emit_jump (if_false_label);
9696 if (drop_through_label)
9697 emit_label (drop_through_label);
9698 }
9699
9700 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9701 with one insn, test the comparison and jump to the appropriate label. */
9702
9703 static void
9704 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9705 tree exp;
9706 rtx if_false_label, if_true_label;
9707 {
9708 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9709 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9710 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9711 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9712 int i;
9713 rtx drop_through_label = 0;
9714
9715 if (! if_false_label)
9716 drop_through_label = if_false_label = gen_label_rtx ();
9717
9718 for (i = 0; i < nwords; i++)
9719 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9720 operand_subword_force (op1, i, mode),
9721 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9722 word_mode, NULL_RTX, 0, if_false_label,
9723 NULL_RTX);
9724
9725 if (if_true_label)
9726 emit_jump (if_true_label);
9727 if (drop_through_label)
9728 emit_label (drop_through_label);
9729 }
9730 \f
9731 /* Jump according to whether OP0 is 0.
9732 We assume that OP0 has an integer mode that is too wide
9733 for the available compare insns. */
9734
9735 void
9736 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9737 rtx op0;
9738 rtx if_false_label, if_true_label;
9739 {
9740 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9741 rtx part;
9742 int i;
9743 rtx drop_through_label = 0;
9744
9745 /* The fastest way of doing this comparison on almost any machine is to
9746 "or" all the words and compare the result. If all have to be loaded
9747 from memory and this is a very wide item, it's possible this may
9748 be slower, but that's highly unlikely. */
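/* For a DImode value on a 32-bit target this ORs the two SImode words into
   one register and does a single compare of that register against zero,
   rather than two separate word compares.  */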
9749
9750 part = gen_reg_rtx (word_mode);
9751 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9752 for (i = 1; i < nwords && part != 0; i++)
9753 part = expand_binop (word_mode, ior_optab, part,
9754 operand_subword_force (op0, i, GET_MODE (op0)),
9755 part, 1, OPTAB_WIDEN);
9756
9757 if (part != 0)
9758 {
9759 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9760 NULL_RTX, 0, if_false_label, if_true_label);
9761
9762 return;
9763 }
9764
9765 /* If we couldn't do the "or" simply, do this with a series of compares. */
9766 if (! if_false_label)
9767 drop_through_label = if_false_label = gen_label_rtx ();
9768
9769 for (i = 0; i < nwords; i++)
9770 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9771 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9772 if_false_label, NULL_RTX);
9773
9774 if (if_true_label)
9775 emit_jump (if_true_label);
9776
9777 if (drop_through_label)
9778 emit_label (drop_through_label);
9779 }
9780 \f
9781 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9782 (including code to compute the values to be compared)
9783 and set (CC0) according to the result.
9784 The decision as to signed or unsigned comparison must be made by the caller.
9785
9786 We force a stack adjustment unless there are currently
9787 things pushed on the stack that aren't yet used.
9788
9789 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9790 compared.
9791
9792 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9793 size of MODE should be used. */
9794
9795 rtx
9796 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9797 register rtx op0, op1;
9798 enum rtx_code code;
9799 int unsignedp;
9800 enum machine_mode mode;
9801 rtx size;
9802 unsigned int align;
9803 {
9804 rtx tem;
9805
9806 /* If one operand is constant, make it the second one. Only do this
9807 if the other operand is not constant as well. */
9808
9809 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9810 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9811 {
9812 tem = op0;
9813 op0 = op1;
9814 op1 = tem;
9815 code = swap_condition (code);
9816 }
9817
9818 if (flag_force_mem)
9819 {
9820 op0 = force_not_mem (op0);
9821 op1 = force_not_mem (op1);
9822 }
9823
9824 do_pending_stack_adjust ();
9825
9826 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9827 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9828 return tem;
9829
9830 #if 0
9831 /* There's no need to do this now that combine.c can eliminate lots of
9832 sign extensions. This can be less efficient in certain cases on other
9833 machines. */
9834
9835 /* If this is a signed equality comparison, we can do it as an
9836 unsigned comparison since zero-extension is cheaper than sign
9837 extension and comparisons with zero are done as unsigned. This is
9838 the case even on machines that can do fast sign extension, since
9839 zero-extension is easier to combine with other operations than
9840 sign-extension is. If we are comparing against a constant, we must
9841 convert it to what it would look like unsigned. */
9842 if ((code == EQ || code == NE) && ! unsignedp
9843 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9844 {
9845 if (GET_CODE (op1) == CONST_INT
9846 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9847 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9848 unsignedp = 1;
9849 }
9850 #endif
9851
9852 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9853
9854 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9855 }
9856
9857 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9858 The decision as to signed or unsigned comparison must be made by the caller.
9859
9860 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9861 compared.
9862
9863 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9864 size of MODE should be used. */
9865
9866 void
9867 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9868 if_false_label, if_true_label)
9869 register rtx op0, op1;
9870 enum rtx_code code;
9871 int unsignedp;
9872 enum machine_mode mode;
9873 rtx size;
9874 unsigned int align;
9875 rtx if_false_label, if_true_label;
9876 {
9877 rtx tem;
9878 int dummy_true_label = 0;
9879
9880 /* Reverse the comparison if that is safe and we want to jump if it is
9881 false. */
9882 if (! if_true_label && ! FLOAT_MODE_P (mode))
9883 {
9884 if_true_label = if_false_label;
9885 if_false_label = 0;
9886 code = reverse_condition (code);
9887 }
9888
9889 /* If one operand is constant, make it the second one. Only do this
9890 if the other operand is not constant as well. */
9891
9892 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9893 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9894 {
9895 tem = op0;
9896 op0 = op1;
9897 op1 = tem;
9898 code = swap_condition (code);
9899 }
9900
9901 if (flag_force_mem)
9902 {
9903 op0 = force_not_mem (op0);
9904 op1 = force_not_mem (op1);
9905 }
9906
9907 do_pending_stack_adjust ();
9908
9909 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9910 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9911 {
9912 if (tem == const_true_rtx)
9913 {
9914 if (if_true_label)
9915 emit_jump (if_true_label);
9916 }
9917 else
9918 {
9919 if (if_false_label)
9920 emit_jump (if_false_label);
9921 }
9922 return;
9923 }
9924
9925 #if 0
9926 /* There's no need to do this now that combine.c can eliminate lots of
9927 sign extensions. This can be less efficient in certain cases on other
9928 machines. */
9929
9930 /* If this is a signed equality comparison, we can do it as an
9931 unsigned comparison since zero-extension is cheaper than sign
9932 extension and comparisons with zero are done as unsigned. This is
9933 the case even on machines that can do fast sign extension, since
9934 zero-extension is easier to combine with other operations than
9935 sign-extension is. If we are comparing against a constant, we must
9936 convert it to what it would look like unsigned. */
9937 if ((code == EQ || code == NE) && ! unsignedp
9938 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9939 {
9940 if (GET_CODE (op1) == CONST_INT
9941 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9942 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9943 unsignedp = 1;
9944 }
9945 #endif
9946
9947 if (! if_true_label)
9948 {
9949 dummy_true_label = 1;
9950 if_true_label = gen_label_rtx ();
9951 }
9952
9953 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9954 if_true_label);
9955
9956 if (if_false_label)
9957 emit_jump (if_false_label);
9958 if (dummy_true_label)
9959 emit_label (if_true_label);
9960 }
9961
9962 /* Generate code for a comparison expression EXP (including code to compute
9963 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9964 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9965 generated code will drop through.
9966 SIGNED_CODE should be the rtx operation for this comparison for
9967 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9968
9969 We force a stack adjustment unless there are currently
9970 things pushed on the stack that aren't yet used. */
9971
9972 static void
9973 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9974 if_true_label)
9975 register tree exp;
9976 enum rtx_code signed_code, unsigned_code;
9977 rtx if_false_label, if_true_label;
9978 {
9979 unsigned int align0, align1;
9980 register rtx op0, op1;
9981 register tree type;
9982 register enum machine_mode mode;
9983 int unsignedp;
9984 enum rtx_code code;
9985
9986 /* Don't crash if the comparison was erroneous. */
9987 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9988 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9989 return;
9990
9991 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9992 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9993 mode = TYPE_MODE (type);
9994 unsignedp = TREE_UNSIGNED (type);
9995 code = unsignedp ? unsigned_code : signed_code;
9996
9997 #ifdef HAVE_canonicalize_funcptr_for_compare
9998 /* If function pointers need to be "canonicalized" before they can
9999 be reliably compared, then canonicalize them. */
10000 if (HAVE_canonicalize_funcptr_for_compare
10001 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10002 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10003 == FUNCTION_TYPE))
10004 {
10005 rtx new_op0 = gen_reg_rtx (mode);
10006
10007 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10008 op0 = new_op0;
10009 }
10010
10011 if (HAVE_canonicalize_funcptr_for_compare
10012 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10013 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10014 == FUNCTION_TYPE))
10015 {
10016 rtx new_op1 = gen_reg_rtx (mode);
10017
10018 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10019 op1 = new_op1;
10020 }
10021 #endif
10022
10023 /* Do any postincrements in the expression that was tested. */
10024 emit_queue ();
10025
10026 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10027 ((mode == BLKmode)
10028 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10029 MIN (align0, align1) / BITS_PER_UNIT,
10030 if_false_label, if_true_label);
10031 }
10032 \f
10033 /* Generate code to calculate EXP using a store-flag instruction
10034 and return an rtx for the result. EXP is either a comparison
10035 or a TRUTH_NOT_EXPR whose operand is a comparison.
10036
10037 If TARGET is nonzero, store the result there if convenient.
10038
10039 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10040 cheap.
10041
10042 Return zero if there is no suitable set-flag instruction
10043 available on this machine.
10044
10045 Once expand_expr has been called on the arguments of the comparison,
10046 we are committed to doing the store flag, since it is not safe to
10047 re-evaluate the expression. We emit the store-flag insn by calling
10048 emit_store_flag, but only expand the arguments if we have a reason
10049 to believe that emit_store_flag will be successful. If we think that
10050 it will, but it isn't, we have to simulate the store-flag with a
10051 set/jump/set sequence. */
10052
10053 static rtx
10054 do_store_flag (exp, target, mode, only_cheap)
10055 tree exp;
10056 rtx target;
10057 enum machine_mode mode;
10058 int only_cheap;
10059 {
10060 enum rtx_code code;
10061 tree arg0, arg1, type;
10062 tree tem;
10063 enum machine_mode operand_mode;
10064 int invert = 0;
10065 int unsignedp;
10066 rtx op0, op1;
10067 enum insn_code icode;
10068 rtx subtarget = target;
10069 rtx result, label;
10070
10071 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10072 result at the end. We can't simply invert the test since it would
10073 have already been inverted if it were valid. This case occurs for
10074 some floating-point comparisons. */
10075
10076 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10077 invert = 1, exp = TREE_OPERAND (exp, 0);
10078
10079 arg0 = TREE_OPERAND (exp, 0);
10080 arg1 = TREE_OPERAND (exp, 1);
10081 type = TREE_TYPE (arg0);
10082 operand_mode = TYPE_MODE (type);
10083 unsignedp = TREE_UNSIGNED (type);
10084
10085 /* We won't bother with BLKmode store-flag operations because it would mean
10086 passing a lot of information to emit_store_flag. */
10087 if (operand_mode == BLKmode)
10088 return 0;
10089
10090 /* We won't bother with store-flag operations involving function pointers
10091 when function pointers must be canonicalized before comparisons. */
10092 #ifdef HAVE_canonicalize_funcptr_for_compare
10093 if (HAVE_canonicalize_funcptr_for_compare
10094 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10095 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10096 == FUNCTION_TYPE))
10097 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10098 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10099 == FUNCTION_TYPE))))
10100 return 0;
10101 #endif
10102
10103 STRIP_NOPS (arg0);
10104 STRIP_NOPS (arg1);
10105
10106 /* Get the rtx comparison code to use. We know that EXP is a comparison
10107 operation of some type. Some comparisons against 1 and -1 can be
10108 converted to comparisons with zero. Do so here so that the tests
10109 below will be aware that we have a comparison with zero. These
10110 tests will not catch constants in the first operand, but constants
10111 are rarely passed as the first operand. */
10112
10113 switch (TREE_CODE (exp))
10114 {
10115 case EQ_EXPR:
10116 code = EQ;
10117 break;
10118 case NE_EXPR:
10119 code = NE;
10120 break;
10121 case LT_EXPR:
10122 if (integer_onep (arg1))
10123 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10124 else
10125 code = unsignedp ? LTU : LT;
10126 break;
10127 case LE_EXPR:
10128 if (! unsignedp && integer_all_onesp (arg1))
10129 arg1 = integer_zero_node, code = LT;
10130 else
10131 code = unsignedp ? LEU : LE;
10132 break;
10133 case GT_EXPR:
10134 if (! unsignedp && integer_all_onesp (arg1))
10135 arg1 = integer_zero_node, code = GE;
10136 else
10137 code = unsignedp ? GTU : GT;
10138 break;
10139 case GE_EXPR:
10140 if (integer_onep (arg1))
10141 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10142 else
10143 code = unsignedp ? GEU : GE;
10144 break;
10145
10146 case UNORDERED_EXPR:
10147 code = UNORDERED;
10148 break;
10149 case ORDERED_EXPR:
10150 code = ORDERED;
10151 break;
10152 case UNLT_EXPR:
10153 code = UNLT;
10154 break;
10155 case UNLE_EXPR:
10156 code = UNLE;
10157 break;
10158 case UNGT_EXPR:
10159 code = UNGT;
10160 break;
10161 case UNGE_EXPR:
10162 code = UNGE;
10163 break;
10164 case UNEQ_EXPR:
10165 code = UNEQ;
10166 break;
10167
10168 default:
10169 abort ();
10170 }
10171
10172 /* Put a constant second. */
10173 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10174 {
10175 tem = arg0; arg0 = arg1; arg1 = tem;
10176 code = swap_condition (code);
10177 }
10178
10179 /* If this is an equality or inequality test of a single bit, we can
10180 do this by shifting the bit being tested to the low-order bit and
10181 masking the result with the constant 1. If the condition was EQ,
10182 we xor it with 1. This does not require an scc insn and is faster
10183 than an scc insn even if we have it. */
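/* For instance, the value of `(x & 8) != 0' is computed as `(x >> 3) & 1';
   for `(x & 8) == 0' the shifted value is also XORed with 1 before the
   final masking.  When the bit tested is the sign bit of its type, the
   trailing AND is omitted.  */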
10184
10185 if ((code == NE || code == EQ)
10186 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10187 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10188 {
10189 tree inner = TREE_OPERAND (arg0, 0);
10190 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10191 int ops_unsignedp;
10192
10193 /* If INNER is a right shift of a constant and it plus BITNUM does
10194 not overflow, adjust BITNUM and INNER. */
10195
10196 if (TREE_CODE (inner) == RSHIFT_EXPR
10197 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10198 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10199 && bitnum < TYPE_PRECISION (type)
10200 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10201 bitnum - TYPE_PRECISION (type)))
10202 {
10203 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10204 inner = TREE_OPERAND (inner, 0);
10205 }
10206
10207 /* If we are going to be able to omit the AND below, we must do our
10208 operations as unsigned. If we must use the AND, we have a choice.
10209 Normally unsigned is faster, but for some machines signed is. */
10210 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10211 #ifdef LOAD_EXTEND_OP
10212 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10213 #else
10214 : 1
10215 #endif
10216 );
10217
10218 if (subtarget == 0 || GET_CODE (subtarget) != REG
10219 || GET_MODE (subtarget) != operand_mode
10220 || ! safe_from_p (subtarget, inner, 1))
10221 subtarget = 0;
10222
10223 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10224
10225 if (bitnum != 0)
10226 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10227 size_int (bitnum), subtarget, ops_unsignedp);
10228
10229 if (GET_MODE (op0) != mode)
10230 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10231
10232 if ((code == EQ && ! invert) || (code == NE && invert))
10233 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10234 ops_unsignedp, OPTAB_LIB_WIDEN);
10235
10236 /* Put the AND last so it can combine with more things. */
10237 if (bitnum != TYPE_PRECISION (type) - 1)
10238 op0 = expand_and (op0, const1_rtx, subtarget);
10239
10240 return op0;
10241 }
10242
10243 /* Now see if we are likely to be able to do this. Return if not. */
10244 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10245 return 0;
10246
10247 icode = setcc_gen_code[(int) code];
10248 if (icode == CODE_FOR_nothing
10249 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10250 {
10251 /* We can only do this if it is one of the special cases that
10252 can be handled without an scc insn. */
10253 if ((code == LT && integer_zerop (arg1))
10254 || (! only_cheap && code == GE && integer_zerop (arg1)))
10255 ;
10256 else if (BRANCH_COST >= 0
10257 && ! only_cheap && (code == NE || code == EQ)
10258 && TREE_CODE (type) != REAL_TYPE
10259 && ((abs_optab->handlers[(int) operand_mode].insn_code
10260 != CODE_FOR_nothing)
10261 || (ffs_optab->handlers[(int) operand_mode].insn_code
10262 != CODE_FOR_nothing)))
10263 ;
10264 else
10265 return 0;
10266 }
10267
10268 preexpand_calls (exp);
10269 if (subtarget == 0 || GET_CODE (subtarget) != REG
10270 || GET_MODE (subtarget) != operand_mode
10271 || ! safe_from_p (subtarget, arg1, 1))
10272 subtarget = 0;
10273
10274 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10275 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10276
10277 if (target == 0)
10278 target = gen_reg_rtx (mode);
10279
10280 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10281 because, if emit_store_flag does anything, it will succeed and
10282 OP0 and OP1 will not be used subsequently. */
10283
10284 result = emit_store_flag (target, code,
10285 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10286 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10287 operand_mode, unsignedp, 1);
10288
10289 if (result)
10290 {
10291 if (invert)
10292 result = expand_binop (mode, xor_optab, result, const1_rtx,
10293 result, 0, OPTAB_LIB_WIDEN);
10294 return result;
10295 }
10296
10297 /* If this failed, we have to do this with set/compare/jump/set code. */
10298 if (GET_CODE (target) != REG
10299 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10300 target = gen_reg_rtx (GET_MODE (target));
10301
10302 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10303 result = compare_from_rtx (op0, op1, code, unsignedp,
10304 operand_mode, NULL_RTX, 0);
10305 if (GET_CODE (result) == CONST_INT)
10306 return (((result == const0_rtx && ! invert)
10307 || (result != const0_rtx && invert))
10308 ? const0_rtx : const1_rtx);
10309
10310 label = gen_label_rtx ();
10311 if (bcc_gen_fctn[(int) code] == 0)
10312 abort ();
10313
10314 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10315 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10316 emit_label (label);
10317
10318 return target;
10319 }
10320 \f
10321 /* Generate a tablejump instruction (used for switch statements). */
10322
10323 #ifdef HAVE_tablejump
10324
10325 /* INDEX is the value being switched on, with the lowest value
10326 in the table already subtracted.
10327 MODE is its expected mode (needed if INDEX is constant).
10328 RANGE is the length of the jump table.
10329 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10330
10331 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10332 index value is out of range. */
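/* Added worked example (illustrative only, not from any particular caller):
   for

	switch (x) { case 5: ... case 9: ... }

   a caller would pass INDEX = x - 5 (the lower bound already subtracted)
   and, judging from the GTU comparison below, RANGE would be the largest
   in-range index, here 4, so that any INDEX above it falls through to
   DEFAULT_LABEL.  */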
10333
10334 void
10335 do_tablejump (index, mode, range, table_label, default_label)
10336 rtx index, range, table_label, default_label;
10337 enum machine_mode mode;
10338 {
10339 register rtx temp, vector;
10340
10341 /* Do an unsigned comparison (in the proper mode) between the index
10342 expression and the value which represents the length of the range.
10343 Since we just finished subtracting the lower bound of the range
10344 from the index expression, this comparison allows us to simultaneously
10345 check that the original index expression value is both greater than
10346 or equal to the minimum value of the range and less than or equal to
10347 the maximum value of the range. */
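  /* Added worked example of the trick described above: with cases 5 .. 9
     the caller passes INDEX = x - 5 and RANGE = 4.  As an unsigned
     comparison,

	(unsigned) (x - 5) > 4

     holds both when x > 9 and when x < 5 (the subtraction wraps around to a
     huge unsigned value), so a single branch catches both out-of-range
     sides.  */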
10348
10349 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10350 0, default_label);
10351
10352 /* If index is in range, it must fit in Pmode.
10353 Convert to Pmode so we can index with it. */
10354 if (mode != Pmode)
10355 index = convert_to_mode (Pmode, index, 1);
10356
10357   /* Don't let a MEM slip through, because then the INDEX that comes
10358      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10359      and break_out_memory_refs will go to work on it and mess it up.  */
10360 #ifdef PIC_CASE_VECTOR_ADDRESS
10361 if (flag_pic && GET_CODE (index) != REG)
10362 index = copy_to_mode_reg (Pmode, index);
10363 #endif
10364
10365 /* If flag_force_addr were to affect this address
10366 it could interfere with the tricky assumptions made
10367 about addresses that contain label-refs,
10368 which may be valid only very near the tablejump itself. */
10369 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10370 GET_MODE_SIZE, because this indicates how large insns are. The other
10371 uses should all be Pmode, because they are addresses. This code
10372 could fail if addresses and insns are not the same size. */
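  /* Added illustration: the address built below has the form

	(plus:P (mult:P index (const_int entry_size))
		(label_ref:P table_label))

     where entry_size is GET_MODE_SIZE (CASE_VECTOR_MODE), i.e. the address
     is table_label + index * size-of-one-table-entry; it is then either
     rewritten for PIC or validated as an ordinary memory address.  */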
10373 index = gen_rtx_PLUS (Pmode,
10374 gen_rtx_MULT (Pmode, index,
10375 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10376 gen_rtx_LABEL_REF (Pmode, table_label));
10377 #ifdef PIC_CASE_VECTOR_ADDRESS
10378 if (flag_pic)
10379 index = PIC_CASE_VECTOR_ADDRESS (index);
10380 else
10381 #endif
10382 index = memory_address_noforce (CASE_VECTOR_MODE, index);
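  /* Descriptive comment added here: the code below loads the selected table
     entry, a MEM in CASE_VECTOR_MODE at the address just computed, into a
     fresh register and jumps through it via the tablejump pattern.  The MEM
     is marked unchanging because a dispatch table is never written at run
     time.  */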
10383 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10384 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10385 RTX_UNCHANGING_P (vector) = 1;
10386 convert_move (temp, vector, 0);
10387
10388 emit_jump_insn (gen_tablejump (temp, table_label));
10389
10390 /* If we are generating PIC code or if the table is PC-relative, the
10391 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10392 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10393 emit_barrier ();
10394 }
10395
10396 #endif /* HAVE_tablejump */