1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
47
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
50
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
53
54 #ifdef PUSH_ROUNDING
55
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
59
60 #endif
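
/* Editorial note (not in the original source): on a typical target where
   STACK_GROWS_DOWNWARD is defined but ARGS_GROW_DOWNWARD is not, the two
   defined() values above differ, so PUSH_ARGS_REVERSED is defined and
   arguments are pushed from last to first.  */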
61
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
69
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
74
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
81 int cse_not_expected;
82
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
87
 88 /* Don't check memory usage, since the code being emitted is itself a memory
 89    usage check.  Used when current_function_check_memory_usage is true, to avoid
90 infinite recursion. */
91 static int in_check_memory_usage;
92
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 static tree placeholder_list = 0;
95
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
99 {
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 int to_struct;
105 int to_readonly;
106 rtx from;
107 rtx from_addr;
108 int autinc_from;
109 int explicit_inc_from;
110 int from_struct;
111 int from_readonly;
112 int len;
113 int offset;
114 int reverse;
115 };
116
117 /* This structure is used by clear_by_pieces to describe the clear to
118 be performed. */
119
120 struct clear_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 int to_struct;
127 int len;
128 int offset;
129 int reverse;
130 };
131
132 extern struct obstack permanent_obstack;
133
134 static rtx get_push_address PARAMS ((int));
135
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
137 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
141 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
142 enum machine_mode,
143 struct clear_by_pieces *));
144 static int is_zeros_p PARAMS ((tree));
145 static int mostly_zeros_p PARAMS ((tree));
146 static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode,
147 tree, tree, unsigned int, int));
148 static void store_constructor PARAMS ((tree, rtx, unsigned int, int, int));
149 static rtx store_field PARAMS ((rtx, int, int, enum machine_mode,
150 tree, enum machine_mode, int,
151 unsigned int, int, int));
152 static enum memory_use_mode
153 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
154 static tree save_noncopied_parts PARAMS ((tree, tree));
155 static tree init_noncopied_parts PARAMS ((tree, tree));
156 static int safe_from_p PARAMS ((rtx, tree, int));
157 static int fixed_type_p PARAMS ((tree));
158 static rtx var_rtx PARAMS ((tree));
159 static int readonly_fields_p PARAMS ((tree));
160 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
161 static rtx expand_increment PARAMS ((tree, int, int));
162 static void preexpand_calls PARAMS ((tree));
163 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
164 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
165 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, rtx, rtx));
166 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
167
168 /* Record for each mode whether we can move a register directly to or
169 from an object of that mode in memory. If we can't, we won't try
170 to use that mode directly when accessing a field of that mode. */
171
172 static char direct_load[NUM_MACHINE_MODES];
173 static char direct_store[NUM_MACHINE_MODES];
174
175 /* If a memory-to-memory move would take MOVE_RATIO or more simple
176 move-instruction sequences, we will do a movstr or libcall instead. */
177
178 #ifndef MOVE_RATIO
179 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
180 #define MOVE_RATIO 2
181 #else
 182 /* If we are optimizing for space (-Os), cut down the default move ratio.  */
183 #define MOVE_RATIO (optimize_size ? 3 : 15)
184 #endif
185 #endif
186
187 /* This macro is used to determine whether move_by_pieces should be called
188 to perform a structure copy. */
189 #ifndef MOVE_BY_PIECES_P
190 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
191 (SIZE, ALIGN) < MOVE_RATIO)
192 #endif
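
/* Worked example (editorial sketch, assuming MOVE_MAX == 4 and no movstrM
   patterns, so MOVE_RATIO defaults to 15 when optimizing for speed):
   copying a word-aligned 16-byte block takes 4 SImode moves, so
   MOVE_BY_PIECES_P (16, 4) is true because 4 < 15.  Under -Os the ratio
   drops to 3, so the copy is done some other way.  */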
193
194 /* This array records the insn_code of insns to perform block moves. */
195 enum insn_code movstr_optab[NUM_MACHINE_MODES];
196
197 /* This array records the insn_code of insns to perform block clears. */
198 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
199
200 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
201
202 #ifndef SLOW_UNALIGNED_ACCESS
203 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
204 #endif
205 \f
206 /* This is run once per compilation to set up which modes can be used
207 directly in memory and to initialize the block move optab. */
208
209 void
210 init_expr_once ()
211 {
212 rtx insn, pat;
213 enum machine_mode mode;
214 int num_clobbers;
215 rtx mem, mem1;
216 char *free_point;
217
218 start_sequence ();
219
220 /* Since we are on the permanent obstack, we must be sure we save this
221 spot AFTER we call start_sequence, since it will reuse the rtl it
222 makes. */
223 free_point = (char *) oballoc (0);
224
225 /* Try indexing by frame ptr and try by stack ptr.
226 It is known that on the Convex the stack ptr isn't a valid index.
227 With luck, one or the other is valid on any machine. */
228 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
229 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
230
231 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
232 pat = PATTERN (insn);
233
234 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
235 mode = (enum machine_mode) ((int) mode + 1))
236 {
237 int regno;
238 rtx reg;
239
240 direct_load[(int) mode] = direct_store[(int) mode] = 0;
241 PUT_MODE (mem, mode);
242 PUT_MODE (mem1, mode);
243
244 /* See if there is some register that can be used in this mode and
245 directly loaded or stored from memory. */
246
247 if (mode != VOIDmode && mode != BLKmode)
248 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
249 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
250 regno++)
251 {
252 if (! HARD_REGNO_MODE_OK (regno, mode))
253 continue;
254
255 reg = gen_rtx_REG (mode, regno);
256
257 SET_SRC (pat) = mem;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
261
262 SET_SRC (pat) = mem1;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
266
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
271
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem1;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
276 }
277 }
278
279 end_sequence ();
280 obfree (free_point);
281 }
282
283 /* This is run at the start of compiling a function. */
284
285 void
286 init_expr ()
287 {
288 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
289
290 pending_chain = 0;
291 pending_stack_adjust = 0;
292 arg_space_so_far = 0;
293 inhibit_defer_pop = 0;
294 saveregs_value = 0;
295 apply_args_value = 0;
296 forced_labels = 0;
297 }
298
299 void
300 mark_expr_status (p)
301 struct expr_status *p;
302 {
303 if (p == NULL)
304 return;
305
306 ggc_mark_rtx (p->x_saveregs_value);
307 ggc_mark_rtx (p->x_apply_args_value);
308 ggc_mark_rtx (p->x_forced_labels);
309 }
310
311 void
312 free_expr_status (f)
313 struct function *f;
314 {
315 free (f->expr);
316 f->expr = NULL;
317 }
318
319 /* Small sanity check that the queue is empty at the end of a function. */
320 void
321 finish_expr_for_function ()
322 {
323 if (pending_chain)
324 abort ();
325 }
326 \f
327 /* Manage the queue of increment instructions to be output
328 for POSTINCREMENT_EXPR expressions, etc. */
329
330 /* Queue up to increment (or change) VAR later. BODY says how:
331 BODY should be the same thing you would pass to emit_insn
332 to increment right away. It will go to emit_insn later on.
333
334 The value is a QUEUED expression to be used in place of VAR
335 where you want to guarantee the pre-incrementation value of VAR. */
336
337 static rtx
338 enqueue_insn (var, body)
339 rtx var, body;
340 {
341 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
342 body, pending_chain);
343 return pending_chain;
344 }
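
/* Usage sketch (editorial addition, not original source): a caller
   expanding a POSTINCREMENT_EXPR that still needs the old value of VAR
   might do, schematically:

       rtx q = enqueue_insn (var, body);
       rtx old = protect_from_queue (q, 0);
       ... use OLD in the enclosing expression ...
       emit_queue ();

   BODY is the pattern that would otherwise go straight to emit_insn;
   emit_queue finally emits it, and protect_from_queue guarantees that
   OLD still refers to the pre-increment value.  */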
345
346 /* Use protect_from_queue to convert a QUEUED expression
347 into something that you can put immediately into an instruction.
348 If the queued incrementation has not happened yet,
349 protect_from_queue returns the variable itself.
350 If the incrementation has happened, protect_from_queue returns a temp
351 that contains a copy of the old value of the variable.
352
353 Any time an rtx which might possibly be a QUEUED is to be put
354 into an instruction, it must be passed through protect_from_queue first.
355 QUEUED expressions are not meaningful in instructions.
356
357 Do not pass a value through protect_from_queue and then hold
358 on to it for a while before putting it in an instruction!
359 If the queue is flushed in between, incorrect code will result. */
360
361 rtx
362 protect_from_queue (x, modify)
363 register rtx x;
364 int modify;
365 {
366 register RTX_CODE code = GET_CODE (x);
367
368 #if 0 /* A QUEUED can hang around after the queue is forced out. */
369 /* Shortcut for most common case. */
370 if (pending_chain == 0)
371 return x;
372 #endif
373
374 if (code != QUEUED)
375 {
376 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
377 use of autoincrement. Make a copy of the contents of the memory
378 location rather than a copy of the address, but not if the value is
379 of mode BLKmode. Don't modify X in place since it might be
380 shared. */
381 if (code == MEM && GET_MODE (x) != BLKmode
382 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
383 {
384 register rtx y = XEXP (x, 0);
385 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
386
387 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
388 MEM_COPY_ATTRIBUTES (new, x);
389 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
390
391 if (QUEUED_INSN (y))
392 {
393 register rtx temp = gen_reg_rtx (GET_MODE (new));
394 emit_insn_before (gen_move_insn (temp, new),
395 QUEUED_INSN (y));
396 return temp;
397 }
398 return new;
399 }
400 /* Otherwise, recursively protect the subexpressions of all
401 the kinds of rtx's that can contain a QUEUED. */
402 if (code == MEM)
403 {
404 rtx tem = protect_from_queue (XEXP (x, 0), 0);
405 if (tem != XEXP (x, 0))
406 {
407 x = copy_rtx (x);
408 XEXP (x, 0) = tem;
409 }
410 }
411 else if (code == PLUS || code == MULT)
412 {
413 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
414 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
415 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
416 {
417 x = copy_rtx (x);
418 XEXP (x, 0) = new0;
419 XEXP (x, 1) = new1;
420 }
421 }
422 return x;
423 }
424 /* If the increment has not happened, use the variable itself. */
425 if (QUEUED_INSN (x) == 0)
426 return QUEUED_VAR (x);
427 /* If the increment has happened and a pre-increment copy exists,
428 use that copy. */
429 if (QUEUED_COPY (x) != 0)
430 return QUEUED_COPY (x);
431 /* The increment has happened but we haven't set up a pre-increment copy.
432 Set one up now, and use it. */
433 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
434 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
435 QUEUED_INSN (x));
436 return QUEUED_COPY (x);
437 }
438
439 /* Return nonzero if X contains a QUEUED expression:
440 if it contains anything that will be altered by a queued increment.
441 We handle only combinations of MEM, PLUS, MINUS and MULT operators
442 since memory addresses generally contain only those. */
443
444 int
445 queued_subexp_p (x)
446 rtx x;
447 {
448 register enum rtx_code code = GET_CODE (x);
449 switch (code)
450 {
451 case QUEUED:
452 return 1;
453 case MEM:
454 return queued_subexp_p (XEXP (x, 0));
455 case MULT:
456 case PLUS:
457 case MINUS:
458 return (queued_subexp_p (XEXP (x, 0))
459 || queued_subexp_p (XEXP (x, 1)));
460 default:
461 return 0;
462 }
463 }
464
465 /* Perform all the pending incrementations. */
466
467 void
468 emit_queue ()
469 {
470 register rtx p;
471 while ((p = pending_chain))
472 {
473 rtx body = QUEUED_BODY (p);
474
475 if (GET_CODE (body) == SEQUENCE)
476 {
477 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
478 emit_insn (QUEUED_BODY (p));
479 }
480 else
481 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
482 pending_chain = QUEUED_NEXT (p);
483 }
484 }
485 \f
486 /* Copy data from FROM to TO, where the machine modes are not the same.
487 Both modes may be integer, or both may be floating.
488 UNSIGNEDP should be nonzero if FROM is an unsigned type.
489 This causes zero-extension instead of sign-extension. */
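
/* Example (editorial sketch): widening a signed SImode pseudo FROM into
   a fresh DImode pseudo is simply

       rtx to = gen_reg_rtx (DImode);
       convert_move (to, from, 0);

   passing 1 for UNSIGNEDP instead fills the upper bits by zero-extension
   rather than sign-extension.  */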
490
491 void
492 convert_move (to, from, unsignedp)
493 register rtx to, from;
494 int unsignedp;
495 {
496 enum machine_mode to_mode = GET_MODE (to);
497 enum machine_mode from_mode = GET_MODE (from);
498 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
499 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
500 enum insn_code code;
501 rtx libcall;
502
503 /* rtx code for making an equivalent value. */
504 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
505
506 to = protect_from_queue (to, 1);
507 from = protect_from_queue (from, 0);
508
509 if (to_real != from_real)
510 abort ();
511
512 /* If FROM is a SUBREG that indicates that we have already done at least
513 the required extension, strip it. We don't handle such SUBREGs as
514 TO here. */
515
516 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
517 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
518 >= GET_MODE_SIZE (to_mode))
519 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
520 from = gen_lowpart (to_mode, from), from_mode = to_mode;
521
522 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
523 abort ();
524
525 if (to_mode == from_mode
526 || (from_mode == VOIDmode && CONSTANT_P (from)))
527 {
528 emit_move_insn (to, from);
529 return;
530 }
531
532 if (to_real)
533 {
534 rtx value;
535
536 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
537 {
538 /* Try converting directly if the insn is supported. */
539 if ((code = can_extend_p (to_mode, from_mode, 0))
540 != CODE_FOR_nothing)
541 {
542 emit_unop_insn (code, to, from, UNKNOWN);
543 return;
544 }
545 }
546
547 #ifdef HAVE_trunchfqf2
548 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
549 {
550 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
551 return;
552 }
553 #endif
554 #ifdef HAVE_trunctqfqf2
555 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
556 {
557 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
558 return;
559 }
560 #endif
561 #ifdef HAVE_truncsfqf2
562 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
563 {
564 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
565 return;
566 }
567 #endif
568 #ifdef HAVE_truncdfqf2
569 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
570 {
571 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
572 return;
573 }
574 #endif
575 #ifdef HAVE_truncxfqf2
576 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
577 {
578 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
579 return;
580 }
581 #endif
582 #ifdef HAVE_trunctfqf2
583 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
584 {
585 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
586 return;
587 }
588 #endif
589
590 #ifdef HAVE_trunctqfhf2
591 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
592 {
593 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
594 return;
595 }
596 #endif
597 #ifdef HAVE_truncsfhf2
598 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
599 {
600 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
601 return;
602 }
603 #endif
604 #ifdef HAVE_truncdfhf2
605 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
606 {
607 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
608 return;
609 }
610 #endif
611 #ifdef HAVE_truncxfhf2
612 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
613 {
614 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
615 return;
616 }
617 #endif
618 #ifdef HAVE_trunctfhf2
619 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
620 {
621 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
622 return;
623 }
624 #endif
625
626 #ifdef HAVE_truncsftqf2
627 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
628 {
629 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
630 return;
631 }
632 #endif
633 #ifdef HAVE_truncdftqf2
634 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncxftqf2
641 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_trunctftqf2
648 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
649 {
650 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654
655 #ifdef HAVE_truncdfsf2
656 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
657 {
658 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
659 return;
660 }
661 #endif
662 #ifdef HAVE_truncxfsf2
663 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
664 {
665 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_trunctfsf2
670 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
671 {
672 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_truncxfdf2
677 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
678 {
679 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_trunctfdf2
684 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
685 {
686 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690
691 libcall = (rtx) 0;
692 switch (from_mode)
693 {
694 case SFmode:
695 switch (to_mode)
696 {
697 case DFmode:
698 libcall = extendsfdf2_libfunc;
699 break;
700
701 case XFmode:
702 libcall = extendsfxf2_libfunc;
703 break;
704
705 case TFmode:
706 libcall = extendsftf2_libfunc;
707 break;
708
709 default:
710 break;
711 }
712 break;
713
714 case DFmode:
715 switch (to_mode)
716 {
717 case SFmode:
718 libcall = truncdfsf2_libfunc;
719 break;
720
721 case XFmode:
722 libcall = extenddfxf2_libfunc;
723 break;
724
725 case TFmode:
726 libcall = extenddftf2_libfunc;
727 break;
728
729 default:
730 break;
731 }
732 break;
733
734 case XFmode:
735 switch (to_mode)
736 {
737 case SFmode:
738 libcall = truncxfsf2_libfunc;
739 break;
740
741 case DFmode:
742 libcall = truncxfdf2_libfunc;
743 break;
744
745 default:
746 break;
747 }
748 break;
749
750 case TFmode:
751 switch (to_mode)
752 {
753 case SFmode:
754 libcall = trunctfsf2_libfunc;
755 break;
756
757 case DFmode:
758 libcall = trunctfdf2_libfunc;
759 break;
760
761 default:
762 break;
763 }
764 break;
765
766 default:
767 break;
768 }
769
770 if (libcall == (rtx) 0)
771 /* This conversion is not implemented yet. */
772 abort ();
773
774 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
775 1, from, from_mode);
776 emit_move_insn (to, value);
777 return;
778 }
779
780 /* Now both modes are integers. */
781
782 /* Handle expanding beyond a word. */
783 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
784 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
785 {
786 rtx insns;
787 rtx lowpart;
788 rtx fill_value;
789 rtx lowfrom;
790 int i;
791 enum machine_mode lowpart_mode;
792 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
793
794 /* Try converting directly if the insn is supported. */
795 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
796 != CODE_FOR_nothing)
797 {
798 /* If FROM is a SUBREG, put it into a register. Do this
799 so that we always generate the same set of insns for
800 better cse'ing; if an intermediate assignment occurred,
801 we won't be doing the operation directly on the SUBREG. */
802 if (optimize > 0 && GET_CODE (from) == SUBREG)
803 from = force_reg (from_mode, from);
804 emit_unop_insn (code, to, from, equiv_code);
805 return;
806 }
807 /* Next, try converting via full word. */
808 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
809 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
810 != CODE_FOR_nothing))
811 {
812 if (GET_CODE (to) == REG)
813 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
814 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
815 emit_unop_insn (code, to,
816 gen_lowpart (word_mode, to), equiv_code);
817 return;
818 }
819
820 /* No special multiword conversion insn; do it by hand. */
821 start_sequence ();
822
823 /* Since we will turn this into a no conflict block, we must ensure
824 that the source does not overlap the target. */
825
826 if (reg_overlap_mentioned_p (to, from))
827 from = force_reg (from_mode, from);
828
829 /* Get a copy of FROM widened to a word, if necessary. */
830 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
831 lowpart_mode = word_mode;
832 else
833 lowpart_mode = from_mode;
834
835 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
836
837 lowpart = gen_lowpart (lowpart_mode, to);
838 emit_move_insn (lowpart, lowfrom);
839
840 /* Compute the value to put in each remaining word. */
841 if (unsignedp)
842 fill_value = const0_rtx;
843 else
844 {
845 #ifdef HAVE_slt
846 if (HAVE_slt
847 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
848 && STORE_FLAG_VALUE == -1)
849 {
850 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
851 lowpart_mode, 0, 0);
852 fill_value = gen_reg_rtx (word_mode);
853 emit_insn (gen_slt (fill_value));
854 }
855 else
856 #endif
857 {
858 fill_value
859 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
860 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
861 NULL_RTX, 0);
862 fill_value = convert_to_mode (word_mode, fill_value, 1);
863 }
864 }
865
866 /* Fill the remaining words. */
867 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
868 {
869 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
870 rtx subword = operand_subword (to, index, 1, to_mode);
871
872 if (subword == 0)
873 abort ();
874
875 if (fill_value != subword)
876 emit_move_insn (subword, fill_value);
877 }
878
879 insns = get_insns ();
880 end_sequence ();
881
882 emit_no_conflict_block (insns, to, from, NULL_RTX,
883 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
884 return;
885 }
886
887 /* Truncating multi-word to a word or less. */
888 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
889 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
890 {
891 if (!((GET_CODE (from) == MEM
892 && ! MEM_VOLATILE_P (from)
893 && direct_load[(int) to_mode]
894 && ! mode_dependent_address_p (XEXP (from, 0)))
895 || GET_CODE (from) == REG
896 || GET_CODE (from) == SUBREG))
897 from = force_reg (from_mode, from);
898 convert_move (to, gen_lowpart (word_mode, from), 0);
899 return;
900 }
901
 902   /* Handle pointer conversion.  */			/* SPEE 900220 */
903 if (to_mode == PQImode)
904 {
905 if (from_mode != QImode)
906 from = convert_to_mode (QImode, from, unsignedp);
907
908 #ifdef HAVE_truncqipqi2
909 if (HAVE_truncqipqi2)
910 {
911 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
912 return;
913 }
914 #endif /* HAVE_truncqipqi2 */
915 abort ();
916 }
917
918 if (from_mode == PQImode)
919 {
920 if (to_mode != QImode)
921 {
922 from = convert_to_mode (QImode, from, unsignedp);
923 from_mode = QImode;
924 }
925 else
926 {
927 #ifdef HAVE_extendpqiqi2
928 if (HAVE_extendpqiqi2)
929 {
930 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
931 return;
932 }
933 #endif /* HAVE_extendpqiqi2 */
934 abort ();
935 }
936 }
937
938 if (to_mode == PSImode)
939 {
940 if (from_mode != SImode)
941 from = convert_to_mode (SImode, from, unsignedp);
942
943 #ifdef HAVE_truncsipsi2
944 if (HAVE_truncsipsi2)
945 {
946 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
947 return;
948 }
949 #endif /* HAVE_truncsipsi2 */
950 abort ();
951 }
952
953 if (from_mode == PSImode)
954 {
955 if (to_mode != SImode)
956 {
957 from = convert_to_mode (SImode, from, unsignedp);
958 from_mode = SImode;
959 }
960 else
961 {
962 #ifdef HAVE_extendpsisi2
963 if (HAVE_extendpsisi2)
964 {
965 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
966 return;
967 }
968 #endif /* HAVE_extendpsisi2 */
969 abort ();
970 }
971 }
972
973 if (to_mode == PDImode)
974 {
975 if (from_mode != DImode)
976 from = convert_to_mode (DImode, from, unsignedp);
977
978 #ifdef HAVE_truncdipdi2
979 if (HAVE_truncdipdi2)
980 {
981 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
982 return;
983 }
984 #endif /* HAVE_truncdipdi2 */
985 abort ();
986 }
987
988 if (from_mode == PDImode)
989 {
990 if (to_mode != DImode)
991 {
992 from = convert_to_mode (DImode, from, unsignedp);
993 from_mode = DImode;
994 }
995 else
996 {
997 #ifdef HAVE_extendpdidi2
998 if (HAVE_extendpdidi2)
999 {
1000 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1001 return;
1002 }
1003 #endif /* HAVE_extendpdidi2 */
1004 abort ();
1005 }
1006 }
1007
1008 /* Now follow all the conversions between integers
1009 no more than a word long. */
1010
1011 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1012 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1013 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1014 GET_MODE_BITSIZE (from_mode)))
1015 {
1016 if (!((GET_CODE (from) == MEM
1017 && ! MEM_VOLATILE_P (from)
1018 && direct_load[(int) to_mode]
1019 && ! mode_dependent_address_p (XEXP (from, 0)))
1020 || GET_CODE (from) == REG
1021 || GET_CODE (from) == SUBREG))
1022 from = force_reg (from_mode, from);
1023 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1024 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1025 from = copy_to_reg (from);
1026 emit_move_insn (to, gen_lowpart (to_mode, from));
1027 return;
1028 }
1029
1030 /* Handle extension. */
1031 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1032 {
1033 /* Convert directly if that works. */
1034 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1035 != CODE_FOR_nothing)
1036 {
1037 emit_unop_insn (code, to, from, equiv_code);
1038 return;
1039 }
1040 else
1041 {
1042 enum machine_mode intermediate;
1043 rtx tmp;
1044 tree shift_amount;
1045
1046 /* Search for a mode to convert via. */
1047 for (intermediate = from_mode; intermediate != VOIDmode;
1048 intermediate = GET_MODE_WIDER_MODE (intermediate))
1049 if (((can_extend_p (to_mode, intermediate, unsignedp)
1050 != CODE_FOR_nothing)
1051 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1052 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1053 GET_MODE_BITSIZE (intermediate))))
1054 && (can_extend_p (intermediate, from_mode, unsignedp)
1055 != CODE_FOR_nothing))
1056 {
1057 convert_move (to, convert_to_mode (intermediate, from,
1058 unsignedp), unsignedp);
1059 return;
1060 }
1061
1062 /* No suitable intermediate mode.
1063 Generate what we need with shifts. */
1064 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1065 - GET_MODE_BITSIZE (from_mode), 0);
1066 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1067 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1068 to, unsignedp);
1069 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1070 to, unsignedp);
1071 if (tmp != to)
1072 emit_move_insn (to, tmp);
1073 return;
1074 }
1075 }
1076
1077 /* Support special truncate insns for certain modes. */
1078
1079 if (from_mode == DImode && to_mode == SImode)
1080 {
1081 #ifdef HAVE_truncdisi2
1082 if (HAVE_truncdisi2)
1083 {
1084 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1085 return;
1086 }
1087 #endif
1088 convert_move (to, force_reg (from_mode, from), unsignedp);
1089 return;
1090 }
1091
1092 if (from_mode == DImode && to_mode == HImode)
1093 {
1094 #ifdef HAVE_truncdihi2
1095 if (HAVE_truncdihi2)
1096 {
1097 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1098 return;
1099 }
1100 #endif
1101 convert_move (to, force_reg (from_mode, from), unsignedp);
1102 return;
1103 }
1104
1105 if (from_mode == DImode && to_mode == QImode)
1106 {
1107 #ifdef HAVE_truncdiqi2
1108 if (HAVE_truncdiqi2)
1109 {
1110 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1111 return;
1112 }
1113 #endif
1114 convert_move (to, force_reg (from_mode, from), unsignedp);
1115 return;
1116 }
1117
1118 if (from_mode == SImode && to_mode == HImode)
1119 {
1120 #ifdef HAVE_truncsihi2
1121 if (HAVE_truncsihi2)
1122 {
1123 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1124 return;
1125 }
1126 #endif
1127 convert_move (to, force_reg (from_mode, from), unsignedp);
1128 return;
1129 }
1130
1131 if (from_mode == SImode && to_mode == QImode)
1132 {
1133 #ifdef HAVE_truncsiqi2
1134 if (HAVE_truncsiqi2)
1135 {
1136 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1137 return;
1138 }
1139 #endif
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 return;
1142 }
1143
1144 if (from_mode == HImode && to_mode == QImode)
1145 {
1146 #ifdef HAVE_trunchiqi2
1147 if (HAVE_trunchiqi2)
1148 {
1149 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1150 return;
1151 }
1152 #endif
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 return;
1155 }
1156
1157 if (from_mode == TImode && to_mode == DImode)
1158 {
1159 #ifdef HAVE_trunctidi2
1160 if (HAVE_trunctidi2)
1161 {
1162 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1163 return;
1164 }
1165 #endif
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 return;
1168 }
1169
1170 if (from_mode == TImode && to_mode == SImode)
1171 {
1172 #ifdef HAVE_trunctisi2
1173 if (HAVE_trunctisi2)
1174 {
1175 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1176 return;
1177 }
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1181 }
1182
1183 if (from_mode == TImode && to_mode == HImode)
1184 {
1185 #ifdef HAVE_trunctihi2
1186 if (HAVE_trunctihi2)
1187 {
1188 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1189 return;
1190 }
1191 #endif
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 return;
1194 }
1195
1196 if (from_mode == TImode && to_mode == QImode)
1197 {
1198 #ifdef HAVE_trunctiqi2
1199 if (HAVE_trunctiqi2)
1200 {
1201 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1202 return;
1203 }
1204 #endif
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 return;
1207 }
1208
1209 /* Handle truncation of volatile memrefs, and so on;
1210 the things that couldn't be truncated directly,
1211 and for which there was no special instruction. */
1212 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1213 {
1214 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1215 emit_move_insn (to, temp);
1216 return;
1217 }
1218
1219 /* Mode combination is not recognized. */
1220 abort ();
1221 }
1222
1223 /* Return an rtx for a value that would result
1224 from converting X to mode MODE.
1225 Both X and MODE may be floating, or both integer.
1226 UNSIGNEDP is nonzero if X is an unsigned value.
1227 This can be done by referring to a part of X in place
1228 or by copying to a new temporary with conversion.
1229
1230 This function *must not* call protect_from_queue
1231 except when putting X into an insn (in which case convert_move does it). */
1232
1233 rtx
1234 convert_to_mode (mode, x, unsignedp)
1235 enum machine_mode mode;
1236 rtx x;
1237 int unsignedp;
1238 {
1239 return convert_modes (mode, VOIDmode, x, unsignedp);
1240 }
1241
1242 /* Return an rtx for a value that would result
1243 from converting X from mode OLDMODE to mode MODE.
1244 Both modes may be floating, or both integer.
1245 UNSIGNEDP is nonzero if X is an unsigned value.
1246
1247 This can be done by referring to a part of X in place
1248 or by copying to a new temporary with conversion.
1249
1250 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1251
1252 This function *must not* call protect_from_queue
1253 except when putting X into an insn (in which case convert_move does it). */
1254
1255 rtx
1256 convert_modes (mode, oldmode, x, unsignedp)
1257 enum machine_mode mode, oldmode;
1258 rtx x;
1259 int unsignedp;
1260 {
1261 register rtx temp;
1262
1263 /* If FROM is a SUBREG that indicates that we have already done at least
1264 the required extension, strip it. */
1265
1266 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1267 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1268 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1269 x = gen_lowpart (mode, x);
1270
1271 if (GET_MODE (x) != VOIDmode)
1272 oldmode = GET_MODE (x);
1273
1274 if (mode == oldmode)
1275 return x;
1276
1277 /* There is one case that we must handle specially: If we are converting
1278 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1279 we are to interpret the constant as unsigned, gen_lowpart will do
 1280      the wrong thing if the constant appears negative.  What we want to do is
1281 make the high-order word of the constant zero, not all ones. */
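
  /* Worked example (editorial): on a host where HOST_BITS_PER_WIDE_INT is
     32, converting (const_int -1) to an unsigned 64-bit DImode must yield
     a CONST_DOUBLE whose high word is zero, i.e. 0x00000000ffffffff.
     gen_lowpart would sign-extend and produce all ones in the high word,
     so we build the value with immed_double_const (val, 0, mode) below.  */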
1282
1283 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1284 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1285 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1286 {
1287 HOST_WIDE_INT val = INTVAL (x);
1288
1289 if (oldmode != VOIDmode
1290 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1291 {
1292 int width = GET_MODE_BITSIZE (oldmode);
1293
1294 /* We need to zero extend VAL. */
1295 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1296 }
1297
1298 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1299 }
1300
1301 /* We can do this with a gen_lowpart if both desired and current modes
1302 are integer, and this is either a constant integer, a register, or a
1303 non-volatile MEM. Except for the constant case where MODE is no
1304 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1305
1306 if ((GET_CODE (x) == CONST_INT
1307 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1308 || (GET_MODE_CLASS (mode) == MODE_INT
1309 && GET_MODE_CLASS (oldmode) == MODE_INT
1310 && (GET_CODE (x) == CONST_DOUBLE
1311 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1312 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1313 && direct_load[(int) mode])
1314 || (GET_CODE (x) == REG
1315 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1316 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1317 {
1318 /* ?? If we don't know OLDMODE, we have to assume here that
1319 X does not need sign- or zero-extension. This may not be
1320 the case, but it's the best we can do. */
1321 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1322 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1323 {
1324 HOST_WIDE_INT val = INTVAL (x);
1325 int width = GET_MODE_BITSIZE (oldmode);
1326
1327 /* We must sign or zero-extend in this case. Start by
1328 zero-extending, then sign extend if we need to. */
1329 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1330 if (! unsignedp
1331 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1332 val |= (HOST_WIDE_INT) (-1) << width;
1333
1334 return GEN_INT (val);
1335 }
1336
1337 return gen_lowpart (mode, x);
1338 }
1339
1340 temp = gen_reg_rtx (mode);
1341 convert_move (temp, x, unsignedp);
1342 return temp;
1343 }
1344 \f
1345
 1346 /* This macro determines the largest unit size that
 1347    move_by_pieces can use.  */
1348
1349 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1350 move efficiently, as opposed to MOVE_MAX which is the maximum
 1351    number of bytes we can move with a single instruction. */
1352
1353 #ifndef MOVE_MAX_PIECES
1354 #define MOVE_MAX_PIECES MOVE_MAX
1355 #endif
1356
1357 /* Generate several move instructions to copy LEN bytes
1358 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1359 The caller must pass FROM and TO
1360 through protect_from_queue before calling.
1361 ALIGN (in bytes) is maximum alignment we can assume. */
1362
1363 void
1364 move_by_pieces (to, from, len, align)
1365 rtx to, from;
1366 int len;
1367 unsigned int align;
1368 {
1369 struct move_by_pieces data;
1370 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1371 int max_size = MOVE_MAX_PIECES + 1;
1372 enum machine_mode mode = VOIDmode, tmode;
1373 enum insn_code icode;
1374
1375 data.offset = 0;
1376 data.to_addr = to_addr;
1377 data.from_addr = from_addr;
1378 data.to = to;
1379 data.from = from;
1380 data.autinc_to
1381 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1382 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1383 data.autinc_from
1384 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1385 || GET_CODE (from_addr) == POST_INC
1386 || GET_CODE (from_addr) == POST_DEC);
1387
1388 data.explicit_inc_from = 0;
1389 data.explicit_inc_to = 0;
1390 data.reverse
1391 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1392 if (data.reverse) data.offset = len;
1393 data.len = len;
1394
1395 data.to_struct = MEM_IN_STRUCT_P (to);
1396 data.from_struct = MEM_IN_STRUCT_P (from);
1397 data.to_readonly = RTX_UNCHANGING_P (to);
1398 data.from_readonly = RTX_UNCHANGING_P (from);
1399
1400 /* If copying requires more than two move insns,
1401 copy addresses to registers (to make displacements shorter)
1402 and use post-increment if available. */
1403 if (!(data.autinc_from && data.autinc_to)
1404 && move_by_pieces_ninsns (len, align) > 2)
1405 {
1406 /* Find the mode of the largest move... */
1407 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1408 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1409 if (GET_MODE_SIZE (tmode) < max_size)
1410 mode = tmode;
1411
1412 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1413 {
1414 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1415 data.autinc_from = 1;
1416 data.explicit_inc_from = -1;
1417 }
1418 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1419 {
1420 data.from_addr = copy_addr_to_reg (from_addr);
1421 data.autinc_from = 1;
1422 data.explicit_inc_from = 1;
1423 }
1424 if (!data.autinc_from && CONSTANT_P (from_addr))
1425 data.from_addr = copy_addr_to_reg (from_addr);
1426 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1427 {
1428 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1429 data.autinc_to = 1;
1430 data.explicit_inc_to = -1;
1431 }
1432 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1433 {
1434 data.to_addr = copy_addr_to_reg (to_addr);
1435 data.autinc_to = 1;
1436 data.explicit_inc_to = 1;
1437 }
1438 if (!data.autinc_to && CONSTANT_P (to_addr))
1439 data.to_addr = copy_addr_to_reg (to_addr);
1440 }
1441
1442 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1443 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1444 align = MOVE_MAX;
1445
1446 /* First move what we can in the largest integer mode, then go to
1447 successively smaller modes. */
1448
1449 while (max_size > 1)
1450 {
1451 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1452 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1453 if (GET_MODE_SIZE (tmode) < max_size)
1454 mode = tmode;
1455
1456 if (mode == VOIDmode)
1457 break;
1458
1459 icode = mov_optab->handlers[(int) mode].insn_code;
1460 if (icode != CODE_FOR_nothing
1461 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1462 (unsigned int) GET_MODE_SIZE (mode)))
1463 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1464
1465 max_size = GET_MODE_SIZE (mode);
1466 }
1467
1468 /* The code above should have handled everything. */
1469 if (data.len > 0)
1470 abort ();
1471 }
1472
1473 /* Return number of insns required to move L bytes by pieces.
1474 ALIGN (in bytes) is maximum alignment we can assume. */
1475
1476 static int
1477 move_by_pieces_ninsns (l, align)
1478 unsigned int l;
1479 unsigned int align;
1480 {
1481 register int n_insns = 0;
1482 int max_size = MOVE_MAX + 1;
1483
1484 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1485 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1486 align = MOVE_MAX;
1487
1488 while (max_size > 1)
1489 {
1490 enum machine_mode mode = VOIDmode, tmode;
1491 enum insn_code icode;
1492
1493 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1494 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1495 if (GET_MODE_SIZE (tmode) < max_size)
1496 mode = tmode;
1497
1498 if (mode == VOIDmode)
1499 break;
1500
1501 icode = mov_optab->handlers[(int) mode].insn_code;
1502 if (icode != CODE_FOR_nothing
1503 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1504 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1505
1506 max_size = GET_MODE_SIZE (mode);
1507 }
1508
1509 return n_insns;
1510 }
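
/* Worked example (editorial sketch): with MOVE_MAX == 4 and adequate
   alignment, L == 7 costs one SImode move (7/4, leaving 3), one HImode
   move (3/2, leaving 1) and one QImode move, so the function returns 3.  */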
1511
1512 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1513 with move instructions for mode MODE. GENFUN is the gen_... function
1514 to make a move insn for that mode. DATA has all the other info. */
1515
1516 static void
1517 move_by_pieces_1 (genfun, mode, data)
1518 rtx (*genfun) PARAMS ((rtx, ...));
1519 enum machine_mode mode;
1520 struct move_by_pieces *data;
1521 {
1522 register int size = GET_MODE_SIZE (mode);
1523 register rtx to1, from1;
1524
1525 while (data->len >= size)
1526 {
1527 if (data->reverse) data->offset -= size;
1528
1529 to1 = (data->autinc_to
1530 ? gen_rtx_MEM (mode, data->to_addr)
1531 : copy_rtx (change_address (data->to, mode,
1532 plus_constant (data->to_addr,
1533 data->offset))));
1534 MEM_IN_STRUCT_P (to1) = data->to_struct;
1535 RTX_UNCHANGING_P (to1) = data->to_readonly;
1536
1537 from1
1538 = (data->autinc_from
1539 ? gen_rtx_MEM (mode, data->from_addr)
1540 : copy_rtx (change_address (data->from, mode,
1541 plus_constant (data->from_addr,
1542 data->offset))));
1543 MEM_IN_STRUCT_P (from1) = data->from_struct;
1544 RTX_UNCHANGING_P (from1) = data->from_readonly;
1545
1546 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1548 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1549 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1550
1551 emit_insn ((*genfun) (to1, from1));
1552 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1553 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1554 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1555 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1556
1557 if (! data->reverse) data->offset += size;
1558
1559 data->len -= size;
1560 }
1561 }
1562 \f
1563 /* Emit code to move a block Y to a block X.
1564 This may be done with string-move instructions,
1565 with multiple scalar move instructions, or with a library call.
1566
1567 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1568 with mode BLKmode.
1569 SIZE is an rtx that says how long they are.
1570 ALIGN is the maximum alignment we can assume they have,
1571 measured in bytes.
1572
1573 Return the address of the new block, if memcpy is called and returns it,
1574 0 otherwise. */
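
/* Editorial note on strategy: the body below first tries move_by_pieces
   for small constant sizes, then any movstrM pattern the target provides
   (narrowest mode first), and finally falls back to a real call to memcpy
   (or to bcopy without TARGET_MEM_FUNCTIONS), e.g.

       emit_block_move (x, y, GEN_INT (32), 4);

   for a 32-byte copy known to be 4-byte aligned.  */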
1575
1576 rtx
1577 emit_block_move (x, y, size, align)
1578 rtx x, y;
1579 rtx size;
1580 unsigned int align;
1581 {
1582 rtx retval = 0;
1583 #ifdef TARGET_MEM_FUNCTIONS
1584 static tree fn;
1585 tree call_expr, arg_list;
1586 #endif
1587
1588 if (GET_MODE (x) != BLKmode)
1589 abort ();
1590
1591 if (GET_MODE (y) != BLKmode)
1592 abort ();
1593
1594 x = protect_from_queue (x, 1);
1595 y = protect_from_queue (y, 0);
1596 size = protect_from_queue (size, 0);
1597
1598 if (GET_CODE (x) != MEM)
1599 abort ();
1600 if (GET_CODE (y) != MEM)
1601 abort ();
1602 if (size == 0)
1603 abort ();
1604
1605 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1606 move_by_pieces (x, y, INTVAL (size), align);
1607 else
1608 {
1609 /* Try the most limited insn first, because there's no point
1610 including more than one in the machine description unless
1611 the more limited one has some advantage. */
1612
1613 rtx opalign = GEN_INT (align);
1614 enum machine_mode mode;
1615
1616 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1617 mode = GET_MODE_WIDER_MODE (mode))
1618 {
1619 enum insn_code code = movstr_optab[(int) mode];
1620 insn_operand_predicate_fn pred;
1621
1622 if (code != CODE_FOR_nothing
 1623	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1624 here because if SIZE is less than the mode mask, as it is
1625 returned by the macro, it will definitely be less than the
1626 actual mode mask. */
1627 && ((GET_CODE (size) == CONST_INT
1628 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1629 <= (GET_MODE_MASK (mode) >> 1)))
1630 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1631 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1632 || (*pred) (x, BLKmode))
1633 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1634 || (*pred) (y, BLKmode))
1635 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1636 || (*pred) (opalign, VOIDmode)))
1637 {
1638 rtx op2;
1639 rtx last = get_last_insn ();
1640 rtx pat;
1641
1642 op2 = convert_to_mode (mode, size, 1);
1643 pred = insn_data[(int) code].operand[2].predicate;
1644 if (pred != 0 && ! (*pred) (op2, mode))
1645 op2 = copy_to_mode_reg (mode, op2);
1646
1647 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1648 if (pat)
1649 {
1650 emit_insn (pat);
1651 return 0;
1652 }
1653 else
1654 delete_insns_since (last);
1655 }
1656 }
1657
1658 /* X, Y, or SIZE may have been passed through protect_from_queue.
1659
1660 It is unsafe to save the value generated by protect_from_queue
1661 and reuse it later. Consider what happens if emit_queue is
1662 called before the return value from protect_from_queue is used.
1663
1664 Expansion of the CALL_EXPR below will call emit_queue before
1665 we are finished emitting RTL for argument setup. So if we are
1666 not careful we could get the wrong value for an argument.
1667
1668 To avoid this problem we go ahead and emit code to copy X, Y &
1669 SIZE into new pseudos. We can then place those new pseudos
1670 into an RTL_EXPR and use them later, even after a call to
1671 emit_queue.
1672
1673 Note this is not strictly needed for library calls since they
1674 do not call emit_queue before loading their arguments. However,
1675 we may need to have library calls call emit_queue in the future
1676 since failing to do so could cause problems for targets which
1677 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1678 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1679 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1680
1681 #ifdef TARGET_MEM_FUNCTIONS
1682 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1683 #else
1684 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1685 TREE_UNSIGNED (integer_type_node));
1686 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1687 #endif
1688
1689 #ifdef TARGET_MEM_FUNCTIONS
1690 /* It is incorrect to use the libcall calling conventions to call
1691 memcpy in this context.
1692
1693 This could be a user call to memcpy and the user may wish to
1694 examine the return value from memcpy.
1695
1696 For targets where libcalls and normal calls have different conventions
1697 for returning pointers, we could end up generating incorrect code.
1698
1699 So instead of using a libcall sequence we build up a suitable
1700 CALL_EXPR and expand the call in the normal fashion. */
1701 if (fn == NULL_TREE)
1702 {
1703 tree fntype;
1704
1705 /* This was copied from except.c, I don't know if all this is
1706 necessary in this context or not. */
1707 fn = get_identifier ("memcpy");
1708 push_obstacks_nochange ();
1709 end_temporary_allocation ();
1710 fntype = build_pointer_type (void_type_node);
1711 fntype = build_function_type (fntype, NULL_TREE);
1712 fn = build_decl (FUNCTION_DECL, fn, fntype);
1713 ggc_add_tree_root (&fn, 1);
1714 DECL_EXTERNAL (fn) = 1;
1715 TREE_PUBLIC (fn) = 1;
1716 DECL_ARTIFICIAL (fn) = 1;
1717 make_decl_rtl (fn, NULL_PTR, 1);
1718 assemble_external (fn);
1719 pop_obstacks ();
1720 }
1721
1722 /* We need to make an argument list for the function call.
1723
1724 memcpy has three arguments, the first two are void * addresses and
1725 the last is a size_t byte count for the copy. */
1726 arg_list
1727 = build_tree_list (NULL_TREE,
1728 make_tree (build_pointer_type (void_type_node), x));
1729 TREE_CHAIN (arg_list)
1730 = build_tree_list (NULL_TREE,
1731 make_tree (build_pointer_type (void_type_node), y));
1732 TREE_CHAIN (TREE_CHAIN (arg_list))
1733 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1734 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1735
1736 /* Now we have to build up the CALL_EXPR itself. */
1737 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1738 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1739 call_expr, arg_list, NULL_TREE);
1740 TREE_SIDE_EFFECTS (call_expr) = 1;
1741
1742 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1743 #else
1744 emit_library_call (bcopy_libfunc, 0,
1745 VOIDmode, 3, y, Pmode, x, Pmode,
1746 convert_to_mode (TYPE_MODE (integer_type_node), size,
1747 TREE_UNSIGNED (integer_type_node)),
1748 TYPE_MODE (integer_type_node));
1749 #endif
1750 }
1751
1752 return retval;
1753 }
1754 \f
1755 /* Copy all or part of a value X into registers starting at REGNO.
1756 The number of registers to be filled is NREGS. */
1757
1758 void
1759 move_block_to_reg (regno, x, nregs, mode)
1760 int regno;
1761 rtx x;
1762 int nregs;
1763 enum machine_mode mode;
1764 {
1765 int i;
1766 #ifdef HAVE_load_multiple
1767 rtx pat;
1768 rtx last;
1769 #endif
1770
1771 if (nregs == 0)
1772 return;
1773
1774 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1775 x = validize_mem (force_const_mem (mode, x));
1776
1777 /* See if the machine can do this with a load multiple insn. */
1778 #ifdef HAVE_load_multiple
1779 if (HAVE_load_multiple)
1780 {
1781 last = get_last_insn ();
1782 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1783 GEN_INT (nregs));
1784 if (pat)
1785 {
1786 emit_insn (pat);
1787 return;
1788 }
1789 else
1790 delete_insns_since (last);
1791 }
1792 #endif
1793
1794 for (i = 0; i < nregs; i++)
1795 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1796 operand_subword_force (x, i, mode));
1797 }
1798
1799 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1800 The number of registers to be filled is NREGS. SIZE indicates the number
1801 of bytes in the object X. */
1802
1803
1804 void
1805 move_block_from_reg (regno, x, nregs, size)
1806 int regno;
1807 rtx x;
1808 int nregs;
1809 int size;
1810 {
1811 int i;
1812 #ifdef HAVE_store_multiple
1813 rtx pat;
1814 rtx last;
1815 #endif
1816 enum machine_mode mode;
1817
1818 /* If SIZE is that of a mode no bigger than a word, just use that
1819 mode's store operation. */
1820 if (size <= UNITS_PER_WORD
1821 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1822 {
1823 emit_move_insn (change_address (x, mode, NULL),
1824 gen_rtx_REG (mode, regno));
1825 return;
1826 }
1827
1828 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1829 to the left before storing to memory. Note that the previous test
1830 doesn't handle all cases (e.g. SIZE == 3). */
1831 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1832 {
1833 rtx tem = operand_subword (x, 0, 1, BLKmode);
1834 rtx shift;
1835
1836 if (tem == 0)
1837 abort ();
1838
1839 shift = expand_shift (LSHIFT_EXPR, word_mode,
1840 gen_rtx_REG (word_mode, regno),
1841 build_int_2 ((UNITS_PER_WORD - size)
1842 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1843 emit_move_insn (tem, shift);
1844 return;
1845 }
1846
1847 /* See if the machine can do this with a store multiple insn. */
1848 #ifdef HAVE_store_multiple
1849 if (HAVE_store_multiple)
1850 {
1851 last = get_last_insn ();
1852 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1853 GEN_INT (nregs));
1854 if (pat)
1855 {
1856 emit_insn (pat);
1857 return;
1858 }
1859 else
1860 delete_insns_since (last);
1861 }
1862 #endif
1863
1864 for (i = 0; i < nregs; i++)
1865 {
1866 rtx tem = operand_subword (x, i, 1, BLKmode);
1867
1868 if (tem == 0)
1869 abort ();
1870
1871 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1872 }
1873 }
1874
1875 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1876 registers represented by a PARALLEL. SSIZE represents the total size of
1877 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1878 SRC in bits. */
 1879 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1880 the balance will be in what would be the low-order memory addresses, i.e.
1881 left justified for big endian, right justified for little endian. This
1882 happens to be true for the targets currently using this support. If this
1883 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1884 would be needed. */
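
/* Editorial example of the PARALLEL shape handled here: a 16-byte
   structure returned in two DImode registers might be described as

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   where each CONST_INT is the byte offset of that register's piece
   within the block.  The register numbers are illustrative only.  */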
1885
1886 void
1887 emit_group_load (dst, orig_src, ssize, align)
1888 rtx dst, orig_src;
1889 unsigned int align;
1890 int ssize;
1891 {
1892 rtx *tmps, src;
1893 int start, i;
1894
1895 if (GET_CODE (dst) != PARALLEL)
1896 abort ();
1897
1898 /* Check for a NULL entry, used to indicate that the parameter goes
1899 both on the stack and in registers. */
1900 if (XEXP (XVECEXP (dst, 0, 0), 0))
1901 start = 0;
1902 else
1903 start = 1;
1904
 1905   tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1906
1907 /* If we won't be loading directly from memory, protect the real source
1908 from strange tricks we might play. */
1909 src = orig_src;
1910 if (GET_CODE (src) != MEM)
1911 {
1912 if (GET_MODE (src) == VOIDmode)
1913 src = gen_reg_rtx (GET_MODE (dst));
1914 else
1915 src = gen_reg_rtx (GET_MODE (orig_src));
1916 emit_move_insn (src, orig_src);
1917 }
1918
1919 /* Process the pieces. */
1920 for (i = start; i < XVECLEN (dst, 0); i++)
1921 {
1922 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1923 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1924 int bytelen = GET_MODE_SIZE (mode);
1925 int shift = 0;
1926
1927 /* Handle trailing fragments that run over the size of the struct. */
1928 if (ssize >= 0 && bytepos + bytelen > ssize)
1929 {
1930 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1931 bytelen = ssize - bytepos;
1932 if (bytelen <= 0)
1933 abort ();
1934 }
1935
1936 /* Optimize the access just a bit. */
1937 if (GET_CODE (src) == MEM
1938 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1939 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1940 && bytelen == GET_MODE_SIZE (mode))
1941 {
1942 tmps[i] = gen_reg_rtx (mode);
1943 emit_move_insn (tmps[i],
1944 change_address (src, mode,
1945 plus_constant (XEXP (src, 0),
1946 bytepos)));
1947 }
1948 else if (GET_CODE (src) == CONCAT)
1949 {
1950 if (bytepos == 0
1951 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1952 tmps[i] = XEXP (src, 0);
1953 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1954 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1955 tmps[i] = XEXP (src, 1);
1956 else
1957 abort ();
1958 }
1959 else
1960 {
1961 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1962 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1963 mode, mode, align, ssize);
1964 }
1965
1966 if (BYTES_BIG_ENDIAN && shift)
1967 {
1968 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1969 tmps[i], 0, OPTAB_WIDEN);
1970 }
1971 }
1972 emit_queue ();
1973
1974 /* Copy the extracted pieces into the proper (probable) hard regs. */
1975 for (i = start; i < XVECLEN (dst, 0); i++)
1976 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1977 }
1978
1979 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1980 registers represented by a PARALLEL. SSIZE represents the total size of
1981 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1982
1983 void
1984 emit_group_store (orig_dst, src, ssize, align)
1985 rtx orig_dst, src;
1986 int ssize;
1987 unsigned int align;
1988 {
1989 rtx *tmps, dst;
1990 int start, i;
1991
1992 if (GET_CODE (src) != PARALLEL)
1993 abort ();
1994
1995 /* Check for a NULL entry, used to indicate that the parameter goes
1996 both on the stack and in registers. */
1997 if (XEXP (XVECEXP (src, 0, 0), 0))
1998 start = 0;
1999 else
2000 start = 1;
2001
2002 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2003
2004 /* Copy the (probable) hard regs into pseudos. */
2005 for (i = start; i < XVECLEN (src, 0); i++)
2006 {
2007 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2008 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2009 emit_move_insn (tmps[i], reg);
2010 }
2011 emit_queue ();
2012
2013 /* If we won't be storing directly into memory, protect the real destination
2014 from strange tricks we might play. */
2015 dst = orig_dst;
2016 if (GET_CODE (dst) == PARALLEL)
2017 {
2018 rtx temp;
2019
2020 /* We can get a PARALLEL dst if there is a conditional expression in
2021 a return statement. In that case, the dst and src are the same,
2022 so no action is necessary. */
2023 if (rtx_equal_p (dst, src))
2024 return;
2025
2026 /* It is unclear if we can ever reach here, but we may as well handle
2027 it. Allocate a temporary, and split this into a store/load to/from
2028 the temporary. */
2029
2030 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2031 emit_group_store (temp, src, ssize, align);
2032 emit_group_load (dst, temp, ssize, align);
2033 return;
2034 }
2035 else if (GET_CODE (dst) != MEM)
2036 {
2037 dst = gen_reg_rtx (GET_MODE (orig_dst));
2038 /* Make life a bit easier for combine. */
2039 emit_move_insn (dst, const0_rtx);
2040 }
2041 else if (! MEM_IN_STRUCT_P (dst))
2042 {
2043 /* store_bit_field requires that memory operations have
2044 mem_in_struct_p set; we might not. */
2045
2046 dst = copy_rtx (orig_dst);
2047 MEM_SET_IN_STRUCT_P (dst, 1);
2048 }
2049
2050 /* Process the pieces. */
2051 for (i = start; i < XVECLEN (src, 0); i++)
2052 {
2053 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2054 enum machine_mode mode = GET_MODE (tmps[i]);
2055 int bytelen = GET_MODE_SIZE (mode);
2056
2057 /* Handle trailing fragments that run over the size of the struct. */
2058 if (ssize >= 0 && bytepos + bytelen > ssize)
2059 {
2060 if (BYTES_BIG_ENDIAN)
2061 {
2062 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2063 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2064 tmps[i], 0, OPTAB_WIDEN);
2065 }
2066 bytelen = ssize - bytepos;
2067 }
2068
2069 /* Optimize the access just a bit. */
2070 if (GET_CODE (dst) == MEM
2071 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2072 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2073 && bytelen == GET_MODE_SIZE (mode))
2074 emit_move_insn (change_address (dst, mode,
2075 plus_constant (XEXP (dst, 0),
2076 bytepos)),
2077 tmps[i]);
2078 else
2079 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2080 mode, tmps[i], align, ssize);
2081 }
2082
2083 emit_queue ();
2084
2085 /* Copy from the pseudo into the (probable) hard reg. */
2086 if (GET_CODE (dst) == REG)
2087 emit_move_insn (orig_dst, dst);
2088 }
2089
2090 /* Generate code to copy a BLKmode object of TYPE out of a
2091 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2092 is null, a stack temporary is created. TGTBLK is returned.
2093
2094 The primary purpose of this routine is to handle functions
2095 that return BLKmode structures in registers. Some machines
2096 (the PA for example) want to return all small structures
2097 in registers regardless of the structure's alignment. */
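/* Usage sketch, with hypothetical operands: expand_call copies a
   BLKmode value out of the return register with roughly

       target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));

   passing a null TARGET when a stack temporary should be allocated
   and returned.  */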
2098
2099 rtx
2100 copy_blkmode_from_reg (tgtblk, srcreg, type)
2101 rtx tgtblk;
2102 rtx srcreg;
2103 tree type;
2104 {
2105 int bytes = int_size_in_bytes (type);
2106 rtx src = NULL, dst = NULL;
2107 int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2108 int bitpos, xbitpos, big_endian_correction = 0;
2109
2110 if (tgtblk == 0)
2111 {
2112 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2113 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2114 preserve_temp_slots (tgtblk);
2115 }
2116
2117 /* This code assumes srcreg is at least a full word. If it isn't,
2118 copy it into a new pseudo which is a full word. */
2119 if (GET_MODE (srcreg) != BLKmode
2120 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2121 srcreg = convert_to_mode (word_mode, srcreg,
2122 TREE_UNSIGNED (type));
2123
2124 /* Structures whose size is not a multiple of a word are aligned
2125 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2126 machine, this means we must skip the empty high order bytes when
2127 calculating the bit offset. */
2128 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2129 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2130 * BITS_PER_UNIT));
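/* Example: with BITS_PER_WORD == 32 and BYTES == 6, the second word
   holds only 6 % 4 == 2 meaningful bytes, so the correction is
   32 - 2 * 8 == 16 bits of padding skipped at the high end.  */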
2131
2132 /* Copy the structure BITSIZE bits at a time.
2133
2134 We could probably emit more efficient code for machines
2135 which do not use strict alignment, but it doesn't seem
2136 worth the effort at the current time. */
2137 for (bitpos = 0, xbitpos = big_endian_correction;
2138 bitpos < bytes * BITS_PER_UNIT;
2139 bitpos += bitsize, xbitpos += bitsize)
2140 {
2141
2142 /* We need a new source operand each time xbitpos is on a
2143 word boundary and when xbitpos == big_endian_correction
2144 (the first time through). */
2145 if (xbitpos % BITS_PER_WORD == 0
2146 || xbitpos == big_endian_correction)
2147 src = operand_subword_force (srcreg,
2148 xbitpos / BITS_PER_WORD,
2149 BLKmode);
2150
2151 /* We need a new destination operand each time bitpos is on
2152 a word boundary. */
2153 if (bitpos % BITS_PER_WORD == 0)
2154 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2155
2156 /* Use xbitpos for the source extraction (right justified) and
2157 bitpos for the destination store (left justified). */
2158 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2159 extract_bit_field (src, bitsize,
2160 xbitpos % BITS_PER_WORD, 1,
2161 NULL_RTX, word_mode,
2162 word_mode,
2163 bitsize / BITS_PER_UNIT,
2164 BITS_PER_WORD),
2165 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2166 }
2167 return tgtblk;
2168 }
2169
2170
2171 /* Add a USE expression for REG to the (possibly empty) list pointed
2172 to by CALL_FUSAGE. REG must denote a hard register. */
2173
2174 void
2175 use_reg (call_fusage, reg)
2176 rtx *call_fusage, reg;
2177 {
2178 if (GET_CODE (reg) != REG
2179 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2180 abort ();
2181
2182 *call_fusage
2183 = gen_rtx_EXPR_LIST (VOIDmode,
2184 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2185 }
2186
2187 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2188 starting at REGNO. All of these registers must be hard registers. */
2189
2190 void
2191 use_regs (call_fusage, regno, nregs)
2192 rtx *call_fusage;
2193 int regno;
2194 int nregs;
2195 {
2196 int i;
2197
2198 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2199 abort ();
2200
2201 for (i = 0; i < nregs; i++)
2202 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2203 }
2204
2205 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2206 PARALLEL REGS. This is for calls that pass values in multiple
2207 non-contiguous locations. The Irix 6 ABI has examples of this. */
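/* Illustrative sketch, with hypothetical operands: given

       (parallel [(expr_list (reg:DI 4) (const_int 0))
                  (expr_list (mem:DI ...) (const_int 8))])

   only the REG entry produces a USE; the MEM entry and any null
   entries are skipped by the loop below.  */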
2208
2209 void
2210 use_group_regs (call_fusage, regs)
2211 rtx *call_fusage;
2212 rtx regs;
2213 {
2214 int i;
2215
2216 for (i = 0; i < XVECLEN (regs, 0); i++)
2217 {
2218 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2219
2220 /* A NULL entry means the parameter goes both on the stack and in
2221 registers. This can also be a MEM for targets that pass values
2222 partially on the stack and partially in registers. */
2223 if (reg != 0 && GET_CODE (reg) == REG)
2224 use_reg (call_fusage, reg);
2225 }
2226 }
2227 \f
2228 /* Generate several move instructions to clear LEN bytes of block TO.
2229 (A MEM rtx with BLKmode). The caller must pass TO through
2230 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2231 we can assume. */
2232
2233 static void
2234 clear_by_pieces (to, len, align)
2235 rtx to;
2236 int len;
2237 unsigned int align;
2238 {
2239 struct clear_by_pieces data;
2240 rtx to_addr = XEXP (to, 0);
2241 int max_size = MOVE_MAX_PIECES + 1;
2242 enum machine_mode mode = VOIDmode, tmode;
2243 enum insn_code icode;
2244
2245 data.offset = 0;
2246 data.to_addr = to_addr;
2247 data.to = to;
2248 data.autinc_to
2249 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2250 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2251
2252 data.explicit_inc_to = 0;
2253 data.reverse
2254 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2255 if (data.reverse) data.offset = len;
2256 data.len = len;
2257
2258 data.to_struct = MEM_IN_STRUCT_P (to);
2259
2260 /* If clearing requires more than two move insns,
2261 copy the address to a register (to make displacements shorter)
2262 and use post-increment if available. */
2263 if (!data.autinc_to
2264 && move_by_pieces_ninsns (len, align) > 2)
2265 {
2266 /* Determine the main mode we'll be using */
2267 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2268 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2269 if (GET_MODE_SIZE (tmode) < max_size)
2270 mode = tmode;
2271
2272 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2273 {
2274 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2275 data.autinc_to = 1;
2276 data.explicit_inc_to = -1;
2277 }
2278 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2279 {
2280 data.to_addr = copy_addr_to_reg (to_addr);
2281 data.autinc_to = 1;
2282 data.explicit_inc_to = 1;
2283 }
2284 if (!data.autinc_to && CONSTANT_P (to_addr))
2285 data.to_addr = copy_addr_to_reg (to_addr);
2286 }
2287
2288 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2289 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2290 align = MOVE_MAX;
2291
2292 /* First clear what we can in the widest integer mode, then go to
2293 successively smaller modes. */
2294
2295 while (max_size > 1)
2296 {
2297 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2298 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2299 if (GET_MODE_SIZE (tmode) < max_size)
2300 mode = tmode;
2301
2302 if (mode == VOIDmode)
2303 break;
2304
2305 icode = mov_optab->handlers[(int) mode].insn_code;
2306 if (icode != CODE_FOR_nothing
2307 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2308 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2309
2310 max_size = GET_MODE_SIZE (mode);
2311 }
2312
2313 /* The code above should have handled everything. */
2314 if (data.len != 0)
2315 abort ();
2316 }
2317
2318 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2319 with move instructions for mode MODE. GENFUN is the gen_... function
2320 to make a move insn for that mode. DATA has all the other info. */
2321
2322 static void
2323 clear_by_pieces_1 (genfun, mode, data)
2324 rtx (*genfun) PARAMS ((rtx, ...));
2325 enum machine_mode mode;
2326 struct clear_by_pieces *data;
2327 {
2328 register int size = GET_MODE_SIZE (mode);
2329 register rtx to1;
2330
2331 while (data->len >= size)
2332 {
2333 if (data->reverse) data->offset -= size;
2334
2335 to1 = (data->autinc_to
2336 ? gen_rtx_MEM (mode, data->to_addr)
2337 : copy_rtx (change_address (data->to, mode,
2338 plus_constant (data->to_addr,
2339 data->offset))));
2340 MEM_IN_STRUCT_P (to1) = data->to_struct;
2341
2342 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2343 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2344
2345 emit_insn ((*genfun) (to1, const0_rtx));
2346 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2347 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2348
2349 if (! data->reverse) data->offset += size;
2350
2351 data->len -= size;
2352 }
2353 }
2354 \f
2355 /* Write zeros through the storage of OBJECT.
2356 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2357 the maximum alignment we can assume, measured in bytes.
2358
2359 If we call a function that returns the length of the block, return it. */
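/* Usage sketch, with hypothetical operands: zeroing a BLKmode
   temporary TEM of known size would look like

       clear_storage (tem, GEN_INT (int_size_in_bytes (type)),
                      TYPE_ALIGN (type) / BITS_PER_UNIT);

   Small constant sizes go through clear_by_pieces; larger or
   variable sizes use a clrstr pattern or a call to memset/bzero.  */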
2360
2361 rtx
2362 clear_storage (object, size, align)
2363 rtx object;
2364 rtx size;
2365 unsigned int align;
2366 {
2367 #ifdef TARGET_MEM_FUNCTIONS
2368 static tree fn;
2369 tree call_expr, arg_list;
2370 #endif
2371 rtx retval = 0;
2372
2373 if (GET_MODE (object) == BLKmode)
2374 {
2375 object = protect_from_queue (object, 1);
2376 size = protect_from_queue (size, 0);
2377
2378 if (GET_CODE (size) == CONST_INT
2379 && MOVE_BY_PIECES_P (INTVAL (size), align))
2380 clear_by_pieces (object, INTVAL (size), align);
2381
2382 else
2383 {
2384 /* Try the most limited insn first, because there's no point
2385 including more than one in the machine description unless
2386 the more limited one has some advantage. */
2387
2388 rtx opalign = GEN_INT (align);
2389 enum machine_mode mode;
2390
2391 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2392 mode = GET_MODE_WIDER_MODE (mode))
2393 {
2394 enum insn_code code = clrstr_optab[(int) mode];
2395 insn_operand_predicate_fn pred;
2396
2397 if (code != CODE_FOR_nothing
2398 /* We don't need MODE to be narrower than
2399 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2400 the mode mask, as it is returned by the macro, it will
2401 definitely be less than the actual mode mask. */
2402 && ((GET_CODE (size) == CONST_INT
2403 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2404 <= (GET_MODE_MASK (mode) >> 1)))
2405 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2406 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2407 || (*pred) (object, BLKmode))
2408 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2409 || (*pred) (opalign, VOIDmode)))
2410 {
2411 rtx op1;
2412 rtx last = get_last_insn ();
2413 rtx pat;
2414
2415 op1 = convert_to_mode (mode, size, 1);
2416 pred = insn_data[(int) code].operand[1].predicate;
2417 if (pred != 0 && ! (*pred) (op1, mode))
2418 op1 = copy_to_mode_reg (mode, op1);
2419
2420 pat = GEN_FCN ((int) code) (object, op1, opalign);
2421 if (pat)
2422 {
2423 emit_insn (pat);
2424 return 0;
2425 }
2426 else
2427 delete_insns_since (last);
2428 }
2429 }
2430
2431 /* OBJECT or SIZE may have been passed through protect_from_queue.
2432
2433 It is unsafe to save the value generated by protect_from_queue
2434 and reuse it later. Consider what happens if emit_queue is
2435 called before the return value from protect_from_queue is used.
2436
2437 Expansion of the CALL_EXPR below will call emit_queue before
2438 we are finished emitting RTL for argument setup. So if we are
2439 not careful we could get the wrong value for an argument.
2440
2441 To avoid this problem we go ahead and emit code to copy OBJECT
2442 and SIZE into new pseudos. We can then place those new pseudos
2443 into an RTL_EXPR and use them later, even after a call to
2444 emit_queue.
2445
2446 Note this is not strictly needed for library calls since they
2447 do not call emit_queue before loading their arguments. However,
2448 we may need to have library calls call emit_queue in the future
2449 since failing to do so could cause problems for targets which
2450 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2451 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2452
2453 #ifdef TARGET_MEM_FUNCTIONS
2454 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2455 #else
2456 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2457 TREE_UNSIGNED (integer_type_node));
2458 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2459 #endif
2460
2461
2462 #ifdef TARGET_MEM_FUNCTIONS
2463 /* It is incorrect to use the libcall calling conventions to call
2464 memset in this context.
2465
2466 This could be a user call to memset and the user may wish to
2467 examine the return value from memset.
2468
2469 For targets where libcalls and normal calls have different
2470 conventions for returning pointers, we could end up generating
2471 incorrect code.
2472
2473 So instead of using a libcall sequence we build up a suitable
2474 CALL_EXPR and expand the call in the normal fashion. */
2475 if (fn == NULL_TREE)
2476 {
2477 tree fntype;
2478
2479 /* This was copied from except.c, I don't know if all this is
2480 necessary in this context or not. */
2481 fn = get_identifier ("memset");
2482 push_obstacks_nochange ();
2483 end_temporary_allocation ();
2484 fntype = build_pointer_type (void_type_node);
2485 fntype = build_function_type (fntype, NULL_TREE);
2486 fn = build_decl (FUNCTION_DECL, fn, fntype);
2487 ggc_add_tree_root (&fn, 1);
2488 DECL_EXTERNAL (fn) = 1;
2489 TREE_PUBLIC (fn) = 1;
2490 DECL_ARTIFICIAL (fn) = 1;
2491 make_decl_rtl (fn, NULL_PTR, 1);
2492 assemble_external (fn);
2493 pop_obstacks ();
2494 }
2495
2496 /* We need to make an argument list for the function call.
2497
2498 memset has three arguments: the first is a void * address, the
2499 second an integer with the initialization value, and the last is a
2500 size_t count of the bytes to be set. */
2501 arg_list
2502 = build_tree_list (NULL_TREE,
2503 make_tree (build_pointer_type (void_type_node),
2504 object));
2505 TREE_CHAIN (arg_list)
2506 = build_tree_list (NULL_TREE,
2507 make_tree (integer_type_node, const0_rtx));
2508 TREE_CHAIN (TREE_CHAIN (arg_list))
2509 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2510 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2511
2512 /* Now we have to build up the CALL_EXPR itself. */
2513 call_expr = build1 (ADDR_EXPR,
2514 build_pointer_type (TREE_TYPE (fn)), fn);
2515 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2516 call_expr, arg_list, NULL_TREE);
2517 TREE_SIDE_EFFECTS (call_expr) = 1;
2518
2519 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2520 #else
2521 emit_library_call (bzero_libfunc, 0,
2522 VOIDmode, 2, object, Pmode, size,
2523 TYPE_MODE (integer_type_node));
2524 #endif
2525 }
2526 }
2527 else
2528 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2529
2530 return retval;
2531 }
2532
2533 /* Generate code to copy Y into X.
2534 Both Y and X must have the same mode, except that
2535 Y can be a constant with VOIDmode.
2536 This mode cannot be BLKmode; use emit_block_move for that.
2537
2538 Return the last instruction emitted. */
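/* Example: loading a constant into a fresh pseudo is simply

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, GEN_INT (42));

   A CONST_INT has VOIDmode, which the mode check below allows.  */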
2539
2540 rtx
2541 emit_move_insn (x, y)
2542 rtx x, y;
2543 {
2544 enum machine_mode mode = GET_MODE (x);
2545
2546 x = protect_from_queue (x, 1);
2547 y = protect_from_queue (y, 0);
2548
2549 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2550 abort ();
2551
2552 /* Never force constant_p_rtx to memory. */
2553 if (GET_CODE (y) == CONSTANT_P_RTX)
2554 ;
2555 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2556 y = force_const_mem (mode, y);
2557
2558 /* If X or Y are memory references, verify that their addresses are valid
2559 for the machine. */
2560 if (GET_CODE (x) == MEM
2561 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2562 && ! push_operand (x, GET_MODE (x)))
2563 || (flag_force_addr
2564 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2565 x = change_address (x, VOIDmode, XEXP (x, 0));
2566
2567 if (GET_CODE (y) == MEM
2568 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2569 || (flag_force_addr
2570 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2571 y = change_address (y, VOIDmode, XEXP (y, 0));
2572
2573 if (mode == BLKmode)
2574 abort ();
2575
2576 return emit_move_insn_1 (x, y);
2577 }
2578
2579 /* Low level part of emit_move_insn.
2580 Called just like emit_move_insn, but assumes X and Y
2581 are basically valid. */
2582
2583 rtx
2584 emit_move_insn_1 (x, y)
2585 rtx x, y;
2586 {
2587 enum machine_mode mode = GET_MODE (x);
2588 enum machine_mode submode;
2589 enum mode_class class = GET_MODE_CLASS (mode);
2590 int i;
2591
2592 if (mode >= MAX_MACHINE_MODE)
2593 abort ();
2594
2595 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2596 return
2597 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2598
2599 /* Expand complex moves by moving real part and imag part, if possible. */
2600 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2601 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2602 * BITS_PER_UNIT),
2603 (class == MODE_COMPLEX_INT
2604 ? MODE_INT : MODE_FLOAT),
2605 0))
2606 && (mov_optab->handlers[(int) submode].insn_code
2607 != CODE_FOR_nothing))
2608 {
2609 /* Don't split destination if it is a stack push. */
2610 int stack = push_operand (x, GET_MODE (x));
2611
2612 /* If this is a stack push, push the highpart first, so it
2613 will be in the argument order.
2614
2615 In that case, change_address is used only to convert
2616 the mode, not to change the address. */
2617 if (stack)
2618 {
2619 /* Note that the real part always precedes the imag part in memory
2620 regardless of the machine's endianness. */
2621 #ifdef STACK_GROWS_DOWNWARD
2622 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2623 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2624 gen_imagpart (submode, y)));
2625 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2626 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2627 gen_realpart (submode, y)));
2628 #else
2629 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2630 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2631 gen_realpart (submode, y)));
2632 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2633 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2634 gen_imagpart (submode, y)));
2635 #endif
2636 }
2637 else
2638 {
2639 rtx realpart_x, realpart_y;
2640 rtx imagpart_x, imagpart_y;
2641
2642 /* If this is a complex value with each part being smaller than a
2643 word, the usual calling sequence will likely pack the pieces into
2644 a single register. Unfortunately, SUBREG of hard registers only
2645 deals in terms of words, so we have a problem converting input
2646 arguments to the CONCAT of two registers that is used elsewhere
2647 for complex values. If this is before reload, we can copy it into
2648 memory and reload. FIXME, we should see about using extract and
2649 insert on integer registers, but complex short and complex char
2650 variables should be rarely used. */
2651 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2652 && (reload_in_progress | reload_completed) == 0)
2653 {
2654 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2655 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2656
2657 if (packed_dest_p || packed_src_p)
2658 {
2659 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2660 ? MODE_FLOAT : MODE_INT);
2661
2662 enum machine_mode reg_mode =
2663 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2664
2665 if (reg_mode != BLKmode)
2666 {
2667 rtx mem = assign_stack_temp (reg_mode,
2668 GET_MODE_SIZE (mode), 0);
2669
2670 rtx cmem = change_address (mem, mode, NULL_RTX);
2671
2672 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2673
2674 if (packed_dest_p)
2675 {
2676 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2677 emit_move_insn_1 (cmem, y);
2678 return emit_move_insn_1 (sreg, mem);
2679 }
2680 else
2681 {
2682 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2683 emit_move_insn_1 (mem, sreg);
2684 return emit_move_insn_1 (x, cmem);
2685 }
2686 }
2687 }
2688 }
2689
2690 realpart_x = gen_realpart (submode, x);
2691 realpart_y = gen_realpart (submode, y);
2692 imagpart_x = gen_imagpart (submode, x);
2693 imagpart_y = gen_imagpart (submode, y);
2694
2695 /* Show the output dies here. This is necessary for SUBREGs
2696 of pseudos since we cannot track their lifetimes correctly;
2697 hard regs shouldn't appear here except as return values.
2698 We never want to emit such a clobber after reload. */
2699 if (x != y
2700 && ! (reload_in_progress || reload_completed)
2701 && (GET_CODE (realpart_x) == SUBREG
2702 || GET_CODE (imagpart_x) == SUBREG))
2703 {
2704 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2705 }
2706
2707 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2708 (realpart_x, realpart_y));
2709 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2710 (imagpart_x, imagpart_y));
2711 }
2712
2713 return get_last_insn ();
2714 }
2715
2716 /* This will handle any multi-word mode that lacks a move_insn pattern.
2717 However, you will get better code if you define such patterns,
2718 even if they must turn into multiple assembler instructions. */
2719 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2720 {
2721 rtx last_insn = 0;
2722 rtx seq;
2723 int need_clobber;
2724
2725 #ifdef PUSH_ROUNDING
2726
2727 /* If X is a push on the stack, do the push now and replace
2728 X with a reference to the stack pointer. */
2729 if (push_operand (x, GET_MODE (x)))
2730 {
2731 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2732 x = change_address (x, VOIDmode, stack_pointer_rtx);
2733 }
2734 #endif
2735
2736 start_sequence ();
2737
2738 need_clobber = 0;
2739 for (i = 0;
2740 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2741 i++)
2742 {
2743 rtx xpart = operand_subword (x, i, 1, mode);
2744 rtx ypart = operand_subword (y, i, 1, mode);
2745
2746 /* If we can't get a part of Y, put Y into memory if it is a
2747 constant. Otherwise, force it into a register. If we still
2748 can't get a part of Y, abort. */
2749 if (ypart == 0 && CONSTANT_P (y))
2750 {
2751 y = force_const_mem (mode, y);
2752 ypart = operand_subword (y, i, 1, mode);
2753 }
2754 else if (ypart == 0)
2755 ypart = operand_subword_force (y, i, mode);
2756
2757 if (xpart == 0 || ypart == 0)
2758 abort ();
2759
2760 need_clobber |= (GET_CODE (xpart) == SUBREG);
2761
2762 last_insn = emit_move_insn (xpart, ypart);
2763 }
2764
2765 seq = gen_sequence ();
2766 end_sequence ();
2767
2768 /* Show the output dies here. This is necessary for SUBREGs
2769 of pseudos since we cannot track their lifetimes correctly;
2770 hard regs shouldn't appear here except as return values.
2771 We never want to emit such a clobber after reload. */
2772 if (x != y
2773 && ! (reload_in_progress || reload_completed)
2774 && need_clobber != 0)
2775 {
2776 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2777 }
2778
2779 emit_insn (seq);
2780
2781 return last_insn;
2782 }
2783 else
2784 abort ();
2785 }
2786 \f
2787 /* Pushing data onto the stack. */
2788
2789 /* Push a block of length SIZE (perhaps variable)
2790 and return an rtx to address the beginning of the block.
2791 Note that it is not possible for the value returned to be a QUEUED.
2792 The value may be virtual_outgoing_args_rtx.
2793
2794 EXTRA is the number of bytes of padding to push in addition to SIZE.
2795 BELOW nonzero means this padding comes at low addresses;
2796 otherwise, the padding comes at high addresses. */
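/* Usage sketch, with a hypothetical size: reserving 16 bytes of
   argument space and getting its address would be

       rtx addr = push_block (GEN_INT (16), 0, 0);

   The returned address points at the beginning of the new block
   whichever way the stack grows.  */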
2797
2798 rtx
2799 push_block (size, extra, below)
2800 rtx size;
2801 int extra, below;
2802 {
2803 register rtx temp;
2804
2805 size = convert_modes (Pmode, ptr_mode, size, 1);
2806 if (CONSTANT_P (size))
2807 anti_adjust_stack (plus_constant (size, extra));
2808 else if (GET_CODE (size) == REG && extra == 0)
2809 anti_adjust_stack (size);
2810 else
2811 {
2812 rtx temp = copy_to_mode_reg (Pmode, size);
2813 if (extra != 0)
2814 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2815 temp, 0, OPTAB_LIB_WIDEN);
2816 anti_adjust_stack (temp);
2817 }
2818
2819 #if defined (STACK_GROWS_DOWNWARD) \
2820 || (defined (ARGS_GROW_DOWNWARD) \
2821 && !defined (ACCUMULATE_OUTGOING_ARGS))
2822
2823 /* Return the lowest stack address when STACK or ARGS grow downward and
2824 we are not accumulating outgoing arguments (the c4x port uses such
2825 conventions). */
2826 temp = virtual_outgoing_args_rtx;
2827 if (extra != 0 && below)
2828 temp = plus_constant (temp, extra);
2829 #else
2830 if (GET_CODE (size) == CONST_INT)
2831 temp = plus_constant (virtual_outgoing_args_rtx,
2832 - INTVAL (size) - (below ? 0 : extra));
2833 else if (extra != 0 && !below)
2834 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2835 negate_rtx (Pmode, plus_constant (size, extra)));
2836 else
2837 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2838 negate_rtx (Pmode, size));
2839 #endif
2840
2841 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2842 }
2843
2844 rtx
2845 gen_push_operand ()
2846 {
2847 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2848 }
2849
2850 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2851 block of SIZE bytes. */
2852
2853 static rtx
2854 get_push_address (size)
2855 int size;
2856 {
2857 register rtx temp;
2858
2859 if (STACK_PUSH_CODE == POST_DEC)
2860 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2861 else if (STACK_PUSH_CODE == POST_INC)
2862 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2863 else
2864 temp = stack_pointer_rtx;
2865
2866 return copy_to_reg (temp);
2867 }
2868
2869 /* Generate code to push X onto the stack, assuming it has mode MODE and
2870 type TYPE.
2871 MODE is redundant except when X is a CONST_INT (since they don't
2872 carry mode info).
2873 SIZE is an rtx for the size of data to be copied (in bytes),
2874 needed only if X is BLKmode.
2875
2876 ALIGN (in bytes) is the maximum alignment we can assume.
2877
2878 If PARTIAL and REG are both nonzero, then copy that many of the first
2879 words of X into registers starting with REG, and push the rest of X.
2880 The amount of space pushed is decreased by PARTIAL words,
2881 rounded *down* to a multiple of PARM_BOUNDARY.
2882 REG must be a hard register in this case.
2883 If REG is zero but PARTIAL is not, take all other actions for an
2884 argument partially in registers, but do not actually load any
2885 registers.
2886
2887 EXTRA is the amount in bytes of extra space to leave next to this arg.
2888 This is ignored if an argument block has already been allocated.
2889
2890 On a machine that lacks real push insns, ARGS_ADDR is the address of
2891 the bottom of the argument block for this call. We use indexing off there
2892 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2893 argument block has not been preallocated.
2894
2895 ARGS_SO_FAR is the size of args previously pushed for this call.
2896
2897 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2898 for arguments passed in registers. If nonzero, it will be the number
2899 of bytes required. */
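/* Example of the PARTIAL convention, with hypothetical numbers: if
   UNITS_PER_WORD == 4, PARM_BOUNDARY == 64 and PARTIAL == 3, then
   three words are loaded into registers starting at REG, while the
   space pushed shrinks by 3 words rounded down to the two-word
   boundary, i.e. by 8 bytes.  */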
2900
2901 void
2902 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2903 args_addr, args_so_far, reg_parm_stack_space,
2904 alignment_pad)
2905 register rtx x;
2906 enum machine_mode mode;
2907 tree type;
2908 rtx size;
2909 unsigned int align;
2910 int partial;
2911 rtx reg;
2912 int extra;
2913 rtx args_addr;
2914 rtx args_so_far;
2915 int reg_parm_stack_space;
2916 rtx alignment_pad;
2917 {
2918 rtx xinner;
2919 enum direction stack_direction
2920 #ifdef STACK_GROWS_DOWNWARD
2921 = downward;
2922 #else
2923 = upward;
2924 #endif
2925
2926 /* Decide where to pad the argument: `downward' for below,
2927 `upward' for above, or `none' for don't pad it.
2928 Default is below for small data on big-endian machines; else above. */
2929 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2930
2931 /* Invert direction if stack is post-update. */
2932 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2933 if (where_pad != none)
2934 where_pad = (where_pad == downward ? upward : downward);
2935
2936 xinner = x = protect_from_queue (x, 0);
2937
2938 if (mode == BLKmode)
2939 {
2940 /* Copy a block into the stack, entirely or partially. */
2941
2942 register rtx temp;
2943 int used = partial * UNITS_PER_WORD;
2944 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2945 int skip;
2946
2947 if (size == 0)
2948 abort ();
2949
2950 used -= offset;
2951
2952 /* USED is now the # of bytes we need not copy to the stack
2953 because registers will take care of them. */
2954
2955 if (partial != 0)
2956 xinner = change_address (xinner, BLKmode,
2957 plus_constant (XEXP (xinner, 0), used));
2958
2959 /* If the partial register-part of the arg counts in its stack size,
2960 skip the part of stack space corresponding to the registers.
2961 Otherwise, start copying to the beginning of the stack space,
2962 by setting SKIP to 0. */
2963 skip = (reg_parm_stack_space == 0) ? 0 : used;
2964
2965 #ifdef PUSH_ROUNDING
2966 /* Do it with several push insns if that doesn't take lots of insns
2967 and if there is no difficulty with push insns that skip bytes
2968 on the stack for alignment purposes. */
2969 if (args_addr == 0
2970 && GET_CODE (size) == CONST_INT
2971 && skip == 0
2972 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2973 /* Here we avoid the case of a structure whose weak alignment
2974 forces many pushes of a small amount of data,
2975 and such small pushes do rounding that causes trouble. */
2976 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
2977 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2978 || PUSH_ROUNDING (align) == align)
2979 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2980 {
2981 /* Push padding now if padding above and stack grows down,
2982 or if padding below and stack grows up.
2983 But if space already allocated, this has already been done. */
2984 if (extra && args_addr == 0
2985 && where_pad != none && where_pad != stack_direction)
2986 anti_adjust_stack (GEN_INT (extra));
2987
2988 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2989 INTVAL (size) - used, align);
2990
2991 if (current_function_check_memory_usage && ! in_check_memory_usage)
2992 {
2993 rtx temp;
2994
2995 in_check_memory_usage = 1;
2996 temp = get_push_address (INTVAL (size) - used);
2997 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2998 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2999 temp, Pmode,
3000 XEXP (xinner, 0), Pmode,
3001 GEN_INT (INTVAL (size) - used),
3002 TYPE_MODE (sizetype));
3003 else
3004 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3005 temp, Pmode,
3006 GEN_INT (INTVAL (size) - used),
3007 TYPE_MODE (sizetype),
3008 GEN_INT (MEMORY_USE_RW),
3009 TYPE_MODE (integer_type_node));
3010 in_check_memory_usage = 0;
3011 }
3012 }
3013 else
3014 #endif /* PUSH_ROUNDING */
3015 {
3016 /* Otherwise make space on the stack and copy the data
3017 to the address of that space. */
3018
3019 /* Deduct words put into registers from the size we must copy. */
3020 if (partial != 0)
3021 {
3022 if (GET_CODE (size) == CONST_INT)
3023 size = GEN_INT (INTVAL (size) - used);
3024 else
3025 size = expand_binop (GET_MODE (size), sub_optab, size,
3026 GEN_INT (used), NULL_RTX, 0,
3027 OPTAB_LIB_WIDEN);
3028 }
3029
3030 /* Get the address of the stack space.
3031 In this case, we do not deal with EXTRA separately.
3032 A single stack adjust will do. */
3033 if (! args_addr)
3034 {
3035 temp = push_block (size, extra, where_pad == downward);
3036 extra = 0;
3037 }
3038 else if (GET_CODE (args_so_far) == CONST_INT)
3039 temp = memory_address (BLKmode,
3040 plus_constant (args_addr,
3041 skip + INTVAL (args_so_far)));
3042 else
3043 temp = memory_address (BLKmode,
3044 plus_constant (gen_rtx_PLUS (Pmode,
3045 args_addr,
3046 args_so_far),
3047 skip));
3048 if (current_function_check_memory_usage && ! in_check_memory_usage)
3049 {
3050 rtx target;
3051
3052 in_check_memory_usage = 1;
3053 target = copy_to_reg (temp);
3054 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3055 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3056 target, Pmode,
3057 XEXP (xinner, 0), Pmode,
3058 size, TYPE_MODE (sizetype));
3059 else
3060 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3061 target, Pmode,
3062 size, TYPE_MODE (sizetype),
3063 GEN_INT (MEMORY_USE_RW),
3064 TYPE_MODE (integer_type_node));
3065 in_check_memory_usage = 0;
3066 }
3067
3068 /* TEMP is the address of the block. Copy the data there. */
3069 if (GET_CODE (size) == CONST_INT
3070 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3071 {
3072 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3073 INTVAL (size), align);
3074 goto ret;
3075 }
3076 else
3077 {
3078 rtx opalign = GEN_INT (align);
3079 enum machine_mode mode;
3080 rtx target = gen_rtx_MEM (BLKmode, temp);
3081
3082 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3083 mode != VOIDmode;
3084 mode = GET_MODE_WIDER_MODE (mode))
3085 {
3086 enum insn_code code = movstr_optab[(int) mode];
3087 insn_operand_predicate_fn pred;
3088
3089 if (code != CODE_FOR_nothing
3090 && ((GET_CODE (size) == CONST_INT
3091 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3092 <= (GET_MODE_MASK (mode) >> 1)))
3093 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3094 && (!(pred = insn_data[(int) code].operand[0].predicate)
3095 || ((*pred) (target, BLKmode)))
3096 && (!(pred = insn_data[(int) code].operand[1].predicate)
3097 || ((*pred) (xinner, BLKmode)))
3098 && (!(pred = insn_data[(int) code].operand[3].predicate)
3099 || ((*pred) (opalign, VOIDmode))))
3100 {
3101 rtx op2 = convert_to_mode (mode, size, 1);
3102 rtx last = get_last_insn ();
3103 rtx pat;
3104
3105 pred = insn_data[(int) code].operand[2].predicate;
3106 if (pred != 0 && ! (*pred) (op2, mode))
3107 op2 = copy_to_mode_reg (mode, op2);
3108
3109 pat = GEN_FCN ((int) code) (target, xinner,
3110 op2, opalign);
3111 if (pat)
3112 {
3113 emit_insn (pat);
3114 goto ret;
3115 }
3116 else
3117 delete_insns_since (last);
3118 }
3119 }
3120 }
3121
3122 #ifndef ACCUMULATE_OUTGOING_ARGS
3123 /* If the source is referenced relative to the stack pointer,
3124 copy it to another register to stabilize it. We do not need
3125 to do this if we know that we won't be changing sp. */
3126
3127 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3128 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3129 temp = copy_to_reg (temp);
3130 #endif
3131
3132 /* Make inhibit_defer_pop nonzero around the library call
3133 to force it to pop the bcopy-arguments right away. */
3134 NO_DEFER_POP;
3135 #ifdef TARGET_MEM_FUNCTIONS
3136 emit_library_call (memcpy_libfunc, 0,
3137 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3138 convert_to_mode (TYPE_MODE (sizetype),
3139 size, TREE_UNSIGNED (sizetype)),
3140 TYPE_MODE (sizetype));
3141 #else
3142 emit_library_call (bcopy_libfunc, 0,
3143 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3144 convert_to_mode (TYPE_MODE (integer_type_node),
3145 size,
3146 TREE_UNSIGNED (integer_type_node)),
3147 TYPE_MODE (integer_type_node));
3148 #endif
3149 OK_DEFER_POP;
3150 }
3151 }
3152 else if (partial > 0)
3153 {
3154 /* Scalar partly in registers. */
3155
3156 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3157 int i;
3158 int not_stack;
3159 /* # words of start of argument
3160 that we must make space for but need not store. */
3161 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3162 int args_offset = INTVAL (args_so_far);
3163 int skip;
3164
3165 /* Push padding now if padding above and stack grows down,
3166 or if padding below and stack grows up.
3167 But if space already allocated, this has already been done. */
3168 if (extra && args_addr == 0
3169 && where_pad != none && where_pad != stack_direction)
3170 anti_adjust_stack (GEN_INT (extra));
3171
3172 /* If we make space by pushing it, we might as well push
3173 the real data. Otherwise, we can leave OFFSET nonzero
3174 and leave the space uninitialized. */
3175 if (args_addr == 0)
3176 offset = 0;
3177
3178 /* Now NOT_STACK gets the number of words that we don't need to
3179 allocate on the stack. */
3180 not_stack = partial - offset;
3181
3182 /* If the partial register-part of the arg counts in its stack size,
3183 skip the part of stack space corresponding to the registers.
3184 Otherwise, start copying to the beginning of the stack space,
3185 by setting SKIP to 0. */
3186 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3187
3188 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3189 x = validize_mem (force_const_mem (mode, x));
3190
3191 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3192 SUBREGs of such registers are not allowed. */
3193 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3194 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3195 x = copy_to_reg (x);
3196
3197 /* Loop over all the words allocated on the stack for this arg. */
3198 /* We can do it by words, because any scalar bigger than a word
3199 has a size a multiple of a word. */
3200 #ifndef PUSH_ARGS_REVERSED
3201 for (i = not_stack; i < size; i++)
3202 #else
3203 for (i = size - 1; i >= not_stack; i--)
3204 #endif
3205 if (i >= not_stack + offset)
3206 emit_push_insn (operand_subword_force (x, i, mode),
3207 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3208 0, args_addr,
3209 GEN_INT (args_offset + ((i - not_stack + skip)
3210 * UNITS_PER_WORD)),
3211 reg_parm_stack_space, alignment_pad);
3212 }
3213 else
3214 {
3215 rtx addr;
3216 rtx target = NULL_RTX;
3217
3218 /* Push padding now if padding above and stack grows down,
3219 or if padding below and stack grows up.
3220 But if space already allocated, this has already been done. */
3221 if (extra && args_addr == 0
3222 && where_pad != none && where_pad != stack_direction)
3223 anti_adjust_stack (GEN_INT (extra));
3224
3225 #ifdef PUSH_ROUNDING
3226 if (args_addr == 0)
3227 addr = gen_push_operand ();
3228 else
3229 #endif
3230 {
3231 if (GET_CODE (args_so_far) == CONST_INT)
3232 addr
3233 = memory_address (mode,
3234 plus_constant (args_addr,
3235 INTVAL (args_so_far)));
3236 else
3237 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3238 args_so_far));
3239 target = addr;
3240 }
3241
3242 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3243
3244 if (current_function_check_memory_usage && ! in_check_memory_usage)
3245 {
3246 in_check_memory_usage = 1;
3247 if (target == 0)
3248 target = get_push_address (GET_MODE_SIZE (mode));
3249
3250 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3251 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3252 target, Pmode,
3253 XEXP (x, 0), Pmode,
3254 GEN_INT (GET_MODE_SIZE (mode)),
3255 TYPE_MODE (sizetype));
3256 else
3257 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3258 target, Pmode,
3259 GEN_INT (GET_MODE_SIZE (mode)),
3260 TYPE_MODE (sizetype),
3261 GEN_INT (MEMORY_USE_RW),
3262 TYPE_MODE (integer_type_node));
3263 in_check_memory_usage = 0;
3264 }
3265 }
3266
3267 ret:
3268 /* If part should go in registers, copy that part
3269 into the appropriate registers. Do this now, at the end,
3270 since mem-to-mem copies above may do function calls. */
3271 if (partial > 0 && reg != 0)
3272 {
3273 /* Handle calls that pass values in multiple non-contiguous locations.
3274 The Irix 6 ABI has examples of this. */
3275 if (GET_CODE (reg) == PARALLEL)
3276 emit_group_load (reg, x, -1, align); /* ??? size? */
3277 else
3278 move_block_to_reg (REGNO (reg), x, partial, mode);
3279 }
3280
3281 if (extra && args_addr == 0 && where_pad == stack_direction)
3282 anti_adjust_stack (GEN_INT (extra));
3283
3284 if (alignment_pad)
3285 anti_adjust_stack (alignment_pad);
3286 }
3287 \f
3288 /* Expand an assignment that stores the value of FROM into TO.
3289 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3290 (This may contain a QUEUED rtx;
3291 if the value is constant, this rtx is a constant.)
3292 Otherwise, the returned value is NULL_RTX.
3293
3294 SUGGEST_REG is no longer actually used.
3295 It used to mean, copy the value through a register
3296 and return that register, if that is possible.
3297 We now use WANT_VALUE to decide whether to do this. */
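/* Usage sketch: a front end expands the statement `x = y' essentially
   as

       expand_assignment (lhs, rhs, 0, 0);

   and passes WANT_VALUE == 1 instead when the assignment's value is
   itself used, as in `a = b = c'.  */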
3298
3299 rtx
3300 expand_assignment (to, from, want_value, suggest_reg)
3301 tree to, from;
3302 int want_value;
3303 int suggest_reg ATTRIBUTE_UNUSED;
3304 {
3305 register rtx to_rtx = 0;
3306 rtx result;
3307
3308 /* Don't crash if the lhs of the assignment was erroneous. */
3309
3310 if (TREE_CODE (to) == ERROR_MARK)
3311 {
3312 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3313 return want_value ? result : NULL_RTX;
3314 }
3315
3316 /* Assignment of a structure component needs special treatment
3317 if the structure component's rtx is not simply a MEM.
3318 Assignment of an array element at a constant index, and assignment of
3319 an array element in an unaligned packed structure field, has the same
3320 problem. */
3321
3322 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3323 || TREE_CODE (to) == ARRAY_REF)
3324 {
3325 enum machine_mode mode1;
3326 int bitsize;
3327 int bitpos;
3328 tree offset;
3329 int unsignedp;
3330 int volatilep = 0;
3331 tree tem;
3332 unsigned int alignment;
3333
3334 push_temp_slots ();
3335 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3336 &unsignedp, &volatilep, &alignment);
3337
3338 /* If we are going to use store_bit_field and extract_bit_field,
3339 make sure to_rtx will be safe for multiple use. */
3340
3341 if (mode1 == VOIDmode && want_value)
3342 tem = stabilize_reference (tem);
3343
3344 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3345 if (offset != 0)
3346 {
3347 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3348
3349 if (GET_CODE (to_rtx) != MEM)
3350 abort ();
3351
3352 if (GET_MODE (offset_rtx) != ptr_mode)
3353 {
3354 #ifdef POINTERS_EXTEND_UNSIGNED
3355 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3356 #else
3357 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3358 #endif
3359 }
3360
3361 /* A constant address in TO_RTX can have VOIDmode; we must not
3362 try to call force_reg in that case. */
3363 if (GET_CODE (to_rtx) == MEM
3364 && GET_MODE (to_rtx) == BLKmode
3365 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3366 && bitsize
3367 && (bitpos % bitsize) == 0
3368 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3369 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3370 {
3371 rtx temp = change_address (to_rtx, mode1,
3372 plus_constant (XEXP (to_rtx, 0),
3373 (bitpos /
3374 BITS_PER_UNIT)));
3375 if (GET_CODE (XEXP (temp, 0)) == REG)
3376 to_rtx = temp;
3377 else
3378 to_rtx = change_address (to_rtx, mode1,
3379 force_reg (GET_MODE (XEXP (temp, 0)),
3380 XEXP (temp, 0)));
3381 bitpos = 0;
3382 }
3383
3384 to_rtx = change_address (to_rtx, VOIDmode,
3385 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3386 force_reg (ptr_mode,
3387 offset_rtx)));
3388 }
3389
3390 if (volatilep)
3391 {
3392 if (GET_CODE (to_rtx) == MEM)
3393 {
3394 /* When the offset is zero, to_rtx is the address of the
3395 structure we are storing into, and hence may be shared.
3396 We must make a new MEM before setting the volatile bit. */
3397 if (offset == 0)
3398 to_rtx = copy_rtx (to_rtx);
3399
3400 MEM_VOLATILE_P (to_rtx) = 1;
3401 }
3402 #if 0 /* This was turned off because, when a field is volatile
3403 in an object which is not volatile, the object may be in a register,
3404 and then we would abort over here. */
3405 else
3406 abort ();
3407 #endif
3408 }
3409
3410 if (TREE_CODE (to) == COMPONENT_REF
3411 && TREE_READONLY (TREE_OPERAND (to, 1)))
3412 {
3413 if (offset == 0)
3414 to_rtx = copy_rtx (to_rtx);
3415
3416 RTX_UNCHANGING_P (to_rtx) = 1;
3417 }
3418
3419 /* Check the access. */
3420 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3421 {
3422 rtx to_addr;
3423 int size;
3424 int best_mode_size;
3425 enum machine_mode best_mode;
3426
3427 best_mode = get_best_mode (bitsize, bitpos,
3428 TYPE_ALIGN (TREE_TYPE (tem)),
3429 mode1, volatilep);
3430 if (best_mode == VOIDmode)
3431 best_mode = QImode;
3432
3433 best_mode_size = GET_MODE_BITSIZE (best_mode);
3434 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3435 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3436 size *= GET_MODE_SIZE (best_mode);
3437
3438 /* Check the access right of the pointer. */
3439 if (size)
3440 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3441 to_addr, Pmode,
3442 GEN_INT (size), TYPE_MODE (sizetype),
3443 GEN_INT (MEMORY_USE_WO),
3444 TYPE_MODE (integer_type_node));
3445 }
3446
3447 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3448 (want_value
3449 /* Spurious cast makes HPUX compiler happy. */
3450 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3451 : VOIDmode),
3452 unsignedp,
3453 /* Required alignment of containing datum. */
3454 alignment,
3455 int_size_in_bytes (TREE_TYPE (tem)),
3456 get_alias_set (to));
3457 preserve_temp_slots (result);
3458 free_temp_slots ();
3459 pop_temp_slots ();
3460
3461 /* If the value is meaningful, convert RESULT to the proper mode.
3462 Otherwise, return nothing. */
3463 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3464 TYPE_MODE (TREE_TYPE (from)),
3465 result,
3466 TREE_UNSIGNED (TREE_TYPE (to)))
3467 : NULL_RTX);
3468 }
3469
3470 /* If the rhs is a function call and its value is not an aggregate,
3471 call the function before we start to compute the lhs.
3472 This is needed for correct code for cases such as
3473 val = setjmp (buf) on machines where reference to val
3474 requires loading up part of an address in a separate insn.
3475
3476 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3477 since it might be a promoted variable where the zero- or sign- extension
3478 needs to be done. Handling this in the normal way is safe because no
3479 computation is done before the call. */
3480 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3481 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3482 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3483 && GET_CODE (DECL_RTL (to)) == REG))
3484 {
3485 rtx value;
3486
3487 push_temp_slots ();
3488 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3489 if (to_rtx == 0)
3490 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3491
3492 /* Handle calls that return values in multiple non-contiguous locations.
3493 The Irix 6 ABI has examples of this. */
3494 if (GET_CODE (to_rtx) == PARALLEL)
3495 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3496 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3497 else if (GET_MODE (to_rtx) == BLKmode)
3498 emit_block_move (to_rtx, value, expr_size (from),
3499 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3500 else
3501 {
3502 #ifdef POINTERS_EXTEND_UNSIGNED
3503 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3504 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3505 value = convert_memory_address (GET_MODE (to_rtx), value);
3506 #endif
3507 emit_move_insn (to_rtx, value);
3508 }
3509 preserve_temp_slots (to_rtx);
3510 free_temp_slots ();
3511 pop_temp_slots ();
3512 return want_value ? to_rtx : NULL_RTX;
3513 }
3514
3515 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3516 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3517
3518 if (to_rtx == 0)
3519 {
3520 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3521 if (GET_CODE (to_rtx) == MEM)
3522 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3523 }
3524
3525 /* Don't move directly into a return register. */
3526 if (TREE_CODE (to) == RESULT_DECL
3527 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3528 {
3529 rtx temp;
3530
3531 push_temp_slots ();
3532 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3533
3534 if (GET_CODE (to_rtx) == PARALLEL)
3535 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3536 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3537 else
3538 emit_move_insn (to_rtx, temp);
3539
3540 preserve_temp_slots (to_rtx);
3541 free_temp_slots ();
3542 pop_temp_slots ();
3543 return want_value ? to_rtx : NULL_RTX;
3544 }
3545
3546 /* In case we are returning the contents of an object which overlaps
3547 the place the value is being stored, use a safe function when copying
3548 a value through a pointer into a structure value return block. */
3549 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3550 && current_function_returns_struct
3551 && !current_function_returns_pcc_struct)
3552 {
3553 rtx from_rtx, size;
3554
3555 push_temp_slots ();
3556 size = expr_size (from);
3557 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3558 EXPAND_MEMORY_USE_DONT);
3559
3560 /* Copy the rights of the bitmap. */
3561 if (current_function_check_memory_usage)
3562 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3563 XEXP (to_rtx, 0), Pmode,
3564 XEXP (from_rtx, 0), Pmode,
3565 convert_to_mode (TYPE_MODE (sizetype),
3566 size, TREE_UNSIGNED (sizetype)),
3567 TYPE_MODE (sizetype));
3568
3569 #ifdef TARGET_MEM_FUNCTIONS
3570 emit_library_call (memcpy_libfunc, 0,
3571 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3572 XEXP (from_rtx, 0), Pmode,
3573 convert_to_mode (TYPE_MODE (sizetype),
3574 size, TREE_UNSIGNED (sizetype)),
3575 TYPE_MODE (sizetype));
3576 #else
3577 emit_library_call (bcopy_libfunc, 0,
3578 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3579 XEXP (to_rtx, 0), Pmode,
3580 convert_to_mode (TYPE_MODE (integer_type_node),
3581 size, TREE_UNSIGNED (integer_type_node)),
3582 TYPE_MODE (integer_type_node));
3583 #endif
3584
3585 preserve_temp_slots (to_rtx);
3586 free_temp_slots ();
3587 pop_temp_slots ();
3588 return want_value ? to_rtx : NULL_RTX;
3589 }
3590
3591 /* Compute FROM and store the value in the rtx we got. */
3592
3593 push_temp_slots ();
3594 result = store_expr (from, to_rtx, want_value);
3595 preserve_temp_slots (result);
3596 free_temp_slots ();
3597 pop_temp_slots ();
3598 return want_value ? result : NULL_RTX;
3599 }
3600
3601 /* Generate code for computing expression EXP,
3602 and storing the value into TARGET.
3603 TARGET may contain a QUEUED rtx.
3604
3605 If WANT_VALUE is nonzero, return a copy of the value
3606 not in TARGET, so that we can be sure to use the proper
3607 value in a containing expression even if TARGET has something
3608 else stored in it. If possible, we copy the value through a pseudo
3609 and return that pseudo. Or, if the value is constant, we try to
3610 return the constant. In some cases, we return a pseudo
3611 copied *from* TARGET.
3612
3613 If the mode is BLKmode then we may return TARGET itself.
3614 It turns out that in BLKmode it doesn't cause a problem,
3615 because C has no operators that could combine two different
3616 assignments into the same BLKmode object with different values
3617 with no sequence point. Will other languages need this to
3618 be more thorough?
3619
3620 If WANT_VALUE is 0, we return NULL, to make sure
3621 to catch quickly any cases where the caller uses the value
3622 and fails to set WANT_VALUE. */
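/* For example, in `x = y = z;' the inner assignment is expanded with
WANT_VALUE nonzero so its result can feed the outer store, while a
plain `y = z;' statement passes WANT_VALUE == 0. */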
3623
3624 rtx
3625 store_expr (exp, target, want_value)
3626 register tree exp;
3627 register rtx target;
3628 int want_value;
3629 {
3630 register rtx temp;
3631 int dont_return_target = 0;
3632
3633 if (TREE_CODE (exp) == COMPOUND_EXPR)
3634 {
3635 /* Perform first part of compound expression, then assign from second
3636 part. */
3637 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3638 emit_queue ();
3639 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3640 }
3641 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3642 {
3643 /* For a conditional expression, get a safe form of the target. Then
3644 test the condition, doing the appropriate assignment on either
3645 side. This avoids the creation of unnecessary temporaries.
3646 For non-BLKmode, it is more efficient not to do this. */
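/* For instance, given `struct S s; s = flag ? a : b;' with S in
BLKmode, we expand a test of `flag' and store `a' or `b' directly
into `s', instead of building the chosen value in a temporary. */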
3647
3648 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3649
3650 emit_queue ();
3651 target = protect_from_queue (target, 1);
3652
3653 do_pending_stack_adjust ();
3654 NO_DEFER_POP;
3655 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3656 start_cleanup_deferral ();
3657 store_expr (TREE_OPERAND (exp, 1), target, 0);
3658 end_cleanup_deferral ();
3659 emit_queue ();
3660 emit_jump_insn (gen_jump (lab2));
3661 emit_barrier ();
3662 emit_label (lab1);
3663 start_cleanup_deferral ();
3664 store_expr (TREE_OPERAND (exp, 2), target, 0);
3665 end_cleanup_deferral ();
3666 emit_queue ();
3667 emit_label (lab2);
3668 OK_DEFER_POP;
3669
3670 return want_value ? target : NULL_RTX;
3671 }
3672 else if (queued_subexp_p (target))
3673 /* If target contains a postincrement, let's not risk
3674 using it as the place to generate the rhs. */
3675 {
3676 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3677 {
3678 /* Expand EXP into a new pseudo. */
3679 temp = gen_reg_rtx (GET_MODE (target));
3680 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3681 }
3682 else
3683 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3684
3685 /* If target is volatile, ANSI requires accessing the value
3686 *from* the target, if it is accessed. So make that happen.
3687 In no case return the target itself. */
3688 if (! MEM_VOLATILE_P (target) && want_value)
3689 dont_return_target = 1;
3690 }
3691 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3692 && GET_MODE (target) != BLKmode)
3693 /* If target is in memory and caller wants value in a register instead,
3694 arrange that. Pass TARGET as target for expand_expr so that,
3695 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3696 We know expand_expr will not use the target in that case.
3697 Don't do this if TARGET is volatile because we are supposed
3698 to write it and then read it. */
3699 {
3700 temp = expand_expr (exp, target, GET_MODE (target), 0);
3701 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3702 temp = copy_to_reg (temp);
3703 dont_return_target = 1;
3704 }
3705 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3706 /* If this is a scalar in a register that is stored in a wider mode
3707 than the declared mode, compute the result into its declared mode
3708 and then convert to the wider mode. Our value is the computed
3709 expression. */
3710 {
3711 /* If we don't want a value, we can do the conversion inside EXP,
3712 which will often result in some optimizations. Do the conversion
3713 in two steps: first change the signedness, if needed, then
3714 the extend. But don't do this if the type of EXP is a subtype
3715 of something else since then the conversion might involve
3716 more than just converting modes. */
3717 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3718 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3719 {
3720 if (TREE_UNSIGNED (TREE_TYPE (exp))
3721 != SUBREG_PROMOTED_UNSIGNED_P (target))
3722 exp
3723 = convert
3724 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3725 TREE_TYPE (exp)),
3726 exp);
3727
3728 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3729 SUBREG_PROMOTED_UNSIGNED_P (target)),
3730 exp);
3731 }
3732
3733 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3734
3735 /* If TEMP is a volatile MEM and we want a result value, make
3736 the access now so it gets done only once. Likewise if
3737 it contains TARGET. */
3738 if (GET_CODE (temp) == MEM && want_value
3739 && (MEM_VOLATILE_P (temp)
3740 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3741 temp = copy_to_reg (temp);
3742
3743 /* If TEMP is a VOIDmode constant, use convert_modes to make
3744 sure that we properly convert it. */
3745 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3746 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3747 TYPE_MODE (TREE_TYPE (exp)), temp,
3748 SUBREG_PROMOTED_UNSIGNED_P (target));
3749
3750 convert_move (SUBREG_REG (target), temp,
3751 SUBREG_PROMOTED_UNSIGNED_P (target));
3752
3753 /* If we promoted a constant, change the mode back down to match
3754 target. Otherwise, the caller might get confused by a result whose
3755 mode is larger than expected. */
3756
3757 if (want_value && GET_MODE (temp) != GET_MODE (target)
3758 && GET_MODE (temp) != VOIDmode)
3759 {
3760 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3761 SUBREG_PROMOTED_VAR_P (temp) = 1;
3762 SUBREG_PROMOTED_UNSIGNED_P (temp)
3763 = SUBREG_PROMOTED_UNSIGNED_P (target);
3764 }
3765
3766 return want_value ? temp : NULL_RTX;
3767 }
3768 else
3769 {
3770 temp = expand_expr (exp, target, GET_MODE (target), 0);
3771 /* Return TARGET if it's a specified hardware register.
3772 If TARGET is a volatile mem ref, either return TARGET
3773 or return a reg copied *from* TARGET; ANSI requires this.
3774
3775 Otherwise, if TEMP is not TARGET, return TEMP
3776 if it is constant (for efficiency),
3777 or if we really want the correct value. */
3778 if (!(target && GET_CODE (target) == REG
3779 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3780 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3781 && ! rtx_equal_p (temp, target)
3782 && (CONSTANT_P (temp) || want_value))
3783 dont_return_target = 1;
3784 }
3785
3786 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3787 the same as that of TARGET, adjust the constant. This is needed, for
3788 example, in case it is a CONST_DOUBLE and we want only a word-sized
3789 value. */
3790 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3791 && TREE_CODE (exp) != ERROR_MARK
3792 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3793 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3794 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3795
3796 if (current_function_check_memory_usage
3797 && GET_CODE (target) == MEM
3798 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3799 {
3800 if (GET_CODE (temp) == MEM)
3801 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3802 XEXP (target, 0), Pmode,
3803 XEXP (temp, 0), Pmode,
3804 expr_size (exp), TYPE_MODE (sizetype));
3805 else
3806 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3807 XEXP (target, 0), Pmode,
3808 expr_size (exp), TYPE_MODE (sizetype),
3809 GEN_INT (MEMORY_USE_WO),
3810 TYPE_MODE (integer_type_node));
3811 }
3812
3813 /* If value was not generated in the target, store it there.
3814 Convert the value to TARGET's type first if necessary. */
3815 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3816 one or both of them are volatile memory refs, we have to distinguish
3817 two cases:
3818 - expand_expr has used TARGET. In this case, we must not generate
3819 another copy. This can be detected by TARGET being equal according
3820 to == .
3821 - expand_expr has not used TARGET - that means that the source just
3822 happens to have the same RTX form. Since temp will have been created
3823 by expand_expr, it will compare unequal according to == .
3824 We must generate a copy in this case, to reach the correct number
3825 of volatile memory references. */
3826
3827 if ((! rtx_equal_p (temp, target)
3828 || (temp != target && (side_effects_p (temp)
3829 || side_effects_p (target))))
3830 && TREE_CODE (exp) != ERROR_MARK)
3831 {
3832 target = protect_from_queue (target, 1);
3833 if (GET_MODE (temp) != GET_MODE (target)
3834 && GET_MODE (temp) != VOIDmode)
3835 {
3836 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3837 if (dont_return_target)
3838 {
3839 /* In this case, we will return TEMP,
3840 so make sure it has the proper mode.
3841 But don't forget to store the value into TARGET. */
3842 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3843 emit_move_insn (target, temp);
3844 }
3845 else
3846 convert_move (target, temp, unsignedp);
3847 }
3848
3849 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3850 {
3851 /* Handle copying a string constant into an array.
3852 The string constant may be shorter than the array.
3853 So copy just the string's actual length, and clear the rest. */
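/* E.g. for `char buf[8] = "hi";' the string occupies 3 bytes
(including the terminating null), so we copy 3 bytes into BUF and
clear the remaining 5. */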
3854 rtx size;
3855 rtx addr;
3856
3857 /* Get the size of the data type of the string,
3858 which is actually the size of the target. */
3859 size = expr_size (exp);
3860 if (GET_CODE (size) == CONST_INT
3861 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3862 emit_block_move (target, temp, size,
3863 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3864 else
3865 {
3866 /* Compute the size of the data to copy from the string. */
3867 tree copy_size
3868 = size_binop (MIN_EXPR,
3869 make_tree (sizetype, size),
3870 size_int (TREE_STRING_LENGTH (exp)));
3871 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3872 VOIDmode, 0);
3873 rtx label = 0;
3874
3875 /* Copy that much. */
3876 emit_block_move (target, temp, copy_size_rtx,
3877 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3878
3879 /* Figure out how much is left in TARGET that we have to clear.
3880 Do all calculations in ptr_mode. */
3881
3882 addr = XEXP (target, 0);
3883 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3884
3885 if (GET_CODE (copy_size_rtx) == CONST_INT)
3886 {
3887 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3888 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3889 }
3890 else
3891 {
3892 addr = force_reg (ptr_mode, addr);
3893 addr = expand_binop (ptr_mode, add_optab, addr,
3894 copy_size_rtx, NULL_RTX, 0,
3895 OPTAB_LIB_WIDEN);
3896
3897 size = expand_binop (ptr_mode, sub_optab, size,
3898 copy_size_rtx, NULL_RTX, 0,
3899 OPTAB_LIB_WIDEN);
3900
3901 label = gen_label_rtx ();
3902 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3903 GET_MODE (size), 0, 0, label);
3904 }
3905
3906 if (size != const0_rtx)
3907 {
3908 /* Be sure we can write on ADDR. */
3909 if (current_function_check_memory_usage)
3910 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3911 addr, Pmode,
3912 size, TYPE_MODE (sizetype),
3913 GEN_INT (MEMORY_USE_WO),
3914 TYPE_MODE (integer_type_node));
3915 #ifdef TARGET_MEM_FUNCTIONS
3916 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3917 addr, ptr_mode,
3918 const0_rtx, TYPE_MODE (integer_type_node),
3919 convert_to_mode (TYPE_MODE (sizetype),
3920 size,
3921 TREE_UNSIGNED (sizetype)),
3922 TYPE_MODE (sizetype));
3923 #else
3924 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3925 addr, ptr_mode,
3926 convert_to_mode (TYPE_MODE (integer_type_node),
3927 size,
3928 TREE_UNSIGNED (integer_type_node)),
3929 TYPE_MODE (integer_type_node));
3930 #endif
3931 }
3932
3933 if (label)
3934 emit_label (label);
3935 }
3936 }
3937 /* Handle calls that return values in multiple non-contiguous locations.
3938 The Irix 6 ABI has examples of this. */
3939 else if (GET_CODE (target) == PARALLEL)
3940 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3941 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3942 else if (GET_MODE (temp) == BLKmode)
3943 emit_block_move (target, temp, expr_size (exp),
3944 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3945 else
3946 emit_move_insn (target, temp);
3947 }
3948
3949 /* If we don't want a value, return NULL_RTX. */
3950 if (! want_value)
3951 return NULL_RTX;
3952
3953 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3954 ??? The latter test doesn't seem to make sense. */
3955 else if (dont_return_target && GET_CODE (temp) != MEM)
3956 return temp;
3957
3958 /* Return TARGET itself if it is a hard register. */
3959 else if (want_value && GET_MODE (target) != BLKmode
3960 && ! (GET_CODE (target) == REG
3961 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3962 return copy_to_reg (target);
3963
3964 else
3965 return target;
3966 }
3967 \f
3968 /* Return 1 if EXP just contains zeros. */
3969
3970 static int
3971 is_zeros_p (exp)
3972 tree exp;
3973 {
3974 tree elt;
3975
3976 switch (TREE_CODE (exp))
3977 {
3978 case CONVERT_EXPR:
3979 case NOP_EXPR:
3980 case NON_LVALUE_EXPR:
3981 return is_zeros_p (TREE_OPERAND (exp, 0));
3982
3983 case INTEGER_CST:
3984 return integer_zerop (exp);
3985
3986 case COMPLEX_CST:
3987 return
3988 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3989
3990 case REAL_CST:
3991 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3992
3993 case CONSTRUCTOR:
3994 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3995 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3996 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3997 if (! is_zeros_p (TREE_VALUE (elt)))
3998 return 0;
3999
4000 return 1;
4001
4002 default:
4003 return 0;
4004 }
4005 }
4006
4007 /* Return 1 if EXP contains mostly (3/4) zeros. */
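/* E.g. the initializer `{ 0, 0, 0, 5 }' has three zero elements out
of four, so it qualifies; `{ 1, 2, 0, 0 }' does not. */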
4008
4009 static int
4010 mostly_zeros_p (exp)
4011 tree exp;
4012 {
4013 if (TREE_CODE (exp) == CONSTRUCTOR)
4014 {
4015 int elts = 0, zeros = 0;
4016 tree elt = CONSTRUCTOR_ELTS (exp);
4017 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4018 {
4019 /* If there are no ranges of true bits, it is all zero. */
4020 return elt == NULL_TREE;
4021 }
4022 for (; elt; elt = TREE_CHAIN (elt))
4023 {
4024 /* We do not handle the case where the index is a RANGE_EXPR,
4025 so the statistic will be somewhat inaccurate.
4026 We do make a more accurate count in store_constructor itself,
4027 and since this function is only used for nested array elements,
4028 this should be close enough. */
4029 if (mostly_zeros_p (TREE_VALUE (elt)))
4030 zeros++;
4031 elts++;
4032 }
4033
4034 return 4 * zeros >= 3 * elts;
4035 }
4036
4037 return is_zeros_p (exp);
4038 }
4039 \f
4040 /* Helper function for store_constructor.
4041 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4042 TYPE is the type of the CONSTRUCTOR, not the element type.
4043 ALIGN and CLEARED are as for store_constructor.
4044
4045 This provides a recursive shortcut back to store_constructor when it isn't
4046 necessary to go through store_field. This is so that we can pass through
4047 the cleared field to let store_constructor know that we may not have to
4048 clear a substructure if the outer structure has already been cleared. */
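/* E.g. for `struct { struct { int x, y; } p; int z; } s = { { 0, 0 }, 1 };'
the inner constructor need not clear P again when all of S has
already been cleared. */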
4049
4050 static void
4051 store_constructor_field (target, bitsize, bitpos,
4052 mode, exp, type, align, cleared)
4053 rtx target;
4054 int bitsize, bitpos;
4055 enum machine_mode mode;
4056 tree exp, type;
4057 unsigned int align;
4058 int cleared;
4059 {
4060 if (TREE_CODE (exp) == CONSTRUCTOR
4061 && bitpos % BITS_PER_UNIT == 0
4062 /* If we have a non-zero bitpos for a register target, then we just
4063 let store_field do the bitfield handling. This is unlikely to
4064 generate unnecessary clear instructions anyway. */
4065 && (bitpos == 0 || GET_CODE (target) == MEM))
4066 {
4067 if (bitpos != 0)
4068 target
4069 = change_address (target,
4070 GET_MODE (target) == BLKmode
4071 || 0 != (bitpos
4072 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4073 ? BLKmode : VOIDmode,
4074 plus_constant (XEXP (target, 0),
4075 bitpos / BITS_PER_UNIT));
4076 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4077 }
4078 else
4079 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4080 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4081 int_size_in_bytes (type), 0);
4082 }
4083
4084 /* Store the value of constructor EXP into the rtx TARGET.
4085 TARGET is either a REG or a MEM.
4086 ALIGN is the maximum known alignment for TARGET, in bits.
4087 CLEARED is true if TARGET is known to have been zero'd.
4088 SIZE is the number of bytes of TARGET we are allowed to modify: this
4089 may not be the same as the size of EXP if we are assigning to a field
4090 which has been packed to exclude padding bits. */
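/* E.g. for `struct { int a, b, c; } s = { 1 };' the constructor has
fewer elements than the structure has fields, so the code below
clears all of S first and then stores 1 into S.a. */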
4091
4092 static void
4093 store_constructor (exp, target, align, cleared, size)
4094 tree exp;
4095 rtx target;
4096 unsigned int align;
4097 int cleared;
4098 int size;
4099 {
4100 tree type = TREE_TYPE (exp);
4101 #ifdef WORD_REGISTER_OPERATIONS
4102 rtx exp_size = expr_size (exp);
4103 #endif
4104
4105 /* We know our target cannot conflict, since safe_from_p has been called. */
4106 #if 0
4107 /* Don't try copying piece by piece into a hard register
4108 since that is vulnerable to being clobbered by EXP.
4109 Instead, construct in a pseudo register and then copy it all. */
4110 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4111 {
4112 rtx temp = gen_reg_rtx (GET_MODE (target));
4113 store_constructor (exp, temp, align, cleared, size);
4114 emit_move_insn (target, temp);
4115 return;
4116 }
4117 #endif
4118
4119 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4120 || TREE_CODE (type) == QUAL_UNION_TYPE)
4121 {
4122 register tree elt;
4123
4124 /* Inform later passes that the whole union value is dead. */
4125 if ((TREE_CODE (type) == UNION_TYPE
4126 || TREE_CODE (type) == QUAL_UNION_TYPE)
4127 && ! cleared)
4128 {
4129 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4130
4131 /* If the constructor is empty, clear the union. */
4132 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4133 clear_storage (target, expr_size (exp),
4134 TYPE_ALIGN (type) / BITS_PER_UNIT);
4135 }
4136
4137 /* If we are building a static constructor into a register,
4138 set the initial value as zero so we can fold the value into
4139 a constant. But if more than one register is involved,
4140 this probably loses. */
4141 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4142 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4143 {
4144 if (! cleared)
4145 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4146
4147 cleared = 1;
4148 }
4149
4150 /* If the constructor has fewer fields than the structure
4151 or if we are initializing the structure to mostly zeros,
4152 clear the whole structure first. */
4153 else if (size > 0
4154 && ((list_length (CONSTRUCTOR_ELTS (exp))
4155 != fields_length (type))
4156 || mostly_zeros_p (exp)))
4157 {
4158 if (! cleared)
4159 clear_storage (target, GEN_INT (size),
4160 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4161
4162 cleared = 1;
4163 }
4164 else if (! cleared)
4165 /* Inform later passes that the old value is dead. */
4166 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4167
4168 /* Store each element of the constructor into
4169 the corresponding field of TARGET. */
4170
4171 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4172 {
4173 register tree field = TREE_PURPOSE (elt);
4174 #ifdef WORD_REGISTER_OPERATIONS
4175 tree value = TREE_VALUE (elt);
4176 #endif
4177 register enum machine_mode mode;
4178 int bitsize;
4179 int bitpos = 0;
4180 int unsignedp;
4181 tree pos, constant = 0, offset = 0;
4182 rtx to_rtx = target;
4183
4184 /* Just ignore missing fields.
4185 We cleared the whole structure, above,
4186 if any fields are missing. */
4187 if (field == 0)
4188 continue;
4189
4190 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4191 continue;
4192
4193 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4194 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4195 else
4196 bitsize = -1;
4197
4198 unsignedp = TREE_UNSIGNED (field);
4199 mode = DECL_MODE (field);
4200 if (DECL_BIT_FIELD (field))
4201 mode = VOIDmode;
4202
4203 pos = DECL_FIELD_BITPOS (field);
4204 if (TREE_CODE (pos) == INTEGER_CST)
4205 constant = pos;
4206 else if (TREE_CODE (pos) == PLUS_EXPR
4207 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4208 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4209 else
4210 offset = pos;
4211
4212 if (constant)
4213 bitpos = TREE_INT_CST_LOW (constant);
4214
4215 if (offset)
4216 {
4217 rtx offset_rtx;
4218
4219 if (contains_placeholder_p (offset))
4220 offset = build (WITH_RECORD_EXPR, bitsizetype,
4221 offset, make_tree (TREE_TYPE (exp), target));
4222
4223 offset = size_binop (EXACT_DIV_EXPR, offset,
4224 bitsize_int (BITS_PER_UNIT));
4225 offset = convert (sizetype, offset);
4226
4227 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4228 if (GET_CODE (to_rtx) != MEM)
4229 abort ();
4230
4231 if (GET_MODE (offset_rtx) != ptr_mode)
4232 {
4233 #ifdef POINTERS_EXTEND_UNSIGNED
4234 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4235 #else
4236 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4237 #endif
4238 }
4239
4240 to_rtx
4241 = change_address (to_rtx, VOIDmode,
4242 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4243 force_reg (ptr_mode,
4244 offset_rtx)));
4245 }
4246
4247 if (TREE_READONLY (field))
4248 {
4249 if (GET_CODE (to_rtx) == MEM)
4250 to_rtx = copy_rtx (to_rtx);
4251
4252 RTX_UNCHANGING_P (to_rtx) = 1;
4253 }
4254
4255 #ifdef WORD_REGISTER_OPERATIONS
4256 /* If this initializes a field that is smaller than a word, at the
4257 start of a word, try to widen it to a full word.
4258 This special case allows us to output C++ member function
4259 initializations in a form that the optimizers can understand. */
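/* E.g. initializing a `short' member at bit position 0 of a struct
held in a register with a constant becomes a full-word store, with
the constant shifted into place on big-endian targets. */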
4260 if (constant
4261 && GET_CODE (target) == REG
4262 && bitsize < BITS_PER_WORD
4263 && bitpos % BITS_PER_WORD == 0
4264 && GET_MODE_CLASS (mode) == MODE_INT
4265 && TREE_CODE (value) == INTEGER_CST
4266 && GET_CODE (exp_size) == CONST_INT
4267 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4268 {
4269 tree type = TREE_TYPE (value);
4270 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4271 {
4272 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4273 value = convert (type, value);
4274 }
4275 if (BYTES_BIG_ENDIAN)
4276 value
4277 = fold (build (LSHIFT_EXPR, type, value,
4278 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4279 bitsize = BITS_PER_WORD;
4280 mode = word_mode;
4281 }
4282 #endif
4283 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4284 TREE_VALUE (elt), type,
4285 MIN (align,
4286 DECL_ALIGN (TREE_PURPOSE (elt))),
4287 cleared);
4288 }
4289 }
4290 else if (TREE_CODE (type) == ARRAY_TYPE)
4291 {
4292 register tree elt;
4293 register int i;
4294 int need_to_clear;
4295 tree domain = TYPE_DOMAIN (type);
4296 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4297 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4298 tree elttype = TREE_TYPE (type);
4299
4300 /* If the constructor has fewer elements than the array,
4301 clear the whole array first. Similarly if this is a
4302 static constructor of a non-BLKmode object. */
4303 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4304 need_to_clear = 1;
4305 else
4306 {
4307 HOST_WIDE_INT count = 0, zero_count = 0;
4308 need_to_clear = 0;
4309 /* This loop is a more accurate version of the loop in
4310 mostly_zeros_p (it handles RANGE_EXPR in an index).
4311 It is also needed to check for missing elements. */
4312 for (elt = CONSTRUCTOR_ELTS (exp);
4313 elt != NULL_TREE;
4314 elt = TREE_CHAIN (elt))
4315 {
4316 tree index = TREE_PURPOSE (elt);
4317 HOST_WIDE_INT this_node_count;
4318 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4319 {
4320 tree lo_index = TREE_OPERAND (index, 0);
4321 tree hi_index = TREE_OPERAND (index, 1);
4322
4323 if (TREE_CODE (lo_index) != INTEGER_CST
4324 || TREE_CODE (hi_index) != INTEGER_CST)
4325 {
4326 need_to_clear = 1;
4327 break;
4328 }
4329 this_node_count = (TREE_INT_CST_LOW (hi_index)
4330 - TREE_INT_CST_LOW (lo_index) + 1);
4331 }
4332 else
4333 this_node_count = 1;
4334 count += this_node_count;
4335 if (mostly_zeros_p (TREE_VALUE (elt)))
4336 zero_count += this_node_count;
4337 }
4338 /* Clear the entire array first if there are any missing elements,
4339 or if the incidence of zero elements is >= 75%. */
4340 if (count < maxelt - minelt + 1
4341 || 4 * zero_count >= 3 * count)
4342 need_to_clear = 1;
4343 }
4344 if (need_to_clear && size > 0)
4345 {
4346 if (! cleared)
4347 clear_storage (target, GEN_INT (size),
4348 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4349 cleared = 1;
4350 }
4351 else
4352 /* Inform later passes that the old value is dead. */
4353 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4354
4355 /* Store each element of the constructor into
4356 the corresponding element of TARGET, determined
4357 by counting the elements. */
4358 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4359 elt;
4360 elt = TREE_CHAIN (elt), i++)
4361 {
4362 register enum machine_mode mode;
4363 int bitsize;
4364 int bitpos;
4365 int unsignedp;
4366 tree value = TREE_VALUE (elt);
4367 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4368 tree index = TREE_PURPOSE (elt);
4369 rtx xtarget = target;
4370
4371 if (cleared && is_zeros_p (value))
4372 continue;
4373
4374 unsignedp = TREE_UNSIGNED (elttype);
4375 mode = TYPE_MODE (elttype);
4376 if (mode == BLKmode)
4377 {
4378 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4379 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4380 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4381 else
4382 bitsize = -1;
4383 }
4384 else
4385 bitsize = GET_MODE_BITSIZE (mode);
4386
4387 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4388 {
4389 tree lo_index = TREE_OPERAND (index, 0);
4390 tree hi_index = TREE_OPERAND (index, 1);
4391 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4392 struct nesting *loop;
4393 HOST_WIDE_INT lo, hi, count;
4394 tree position;
4395
4396 /* If the range is constant and "small", unroll the loop. */
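/* E.g. the GNU C range initializer `int a[8] = { [0 ... 3] = 7 };'
has a constant range of four word-sized elements, so it is stored
as four individual assignments rather than a runtime loop. */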
4397 if (TREE_CODE (lo_index) == INTEGER_CST
4398 && TREE_CODE (hi_index) == INTEGER_CST
4399 && (lo = TREE_INT_CST_LOW (lo_index),
4400 hi = TREE_INT_CST_LOW (hi_index),
4401 count = hi - lo + 1,
4402 (GET_CODE (target) != MEM
4403 || count <= 2
4404 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4405 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4406 <= 40 * 8))))
4407 {
4408 lo -= minelt; hi -= minelt;
4409 for (; lo <= hi; lo++)
4410 {
4411 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4412 store_constructor_field (target, bitsize, bitpos, mode,
4413 value, type, align, cleared);
4414 }
4415 }
4416 else
4417 {
4418 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4419 loop_top = gen_label_rtx ();
4420 loop_end = gen_label_rtx ();
4421
4422 unsignedp = TREE_UNSIGNED (domain);
4423
4424 index = build_decl (VAR_DECL, NULL_TREE, domain);
4425
4426 DECL_RTL (index) = index_r
4427 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4428 &unsignedp, 0));
4429
4430 if (TREE_CODE (value) == SAVE_EXPR
4431 && SAVE_EXPR_RTL (value) == 0)
4432 {
4433 /* Make sure value gets expanded once before the
4434 loop. */
4435 expand_expr (value, const0_rtx, VOIDmode, 0);
4436 emit_queue ();
4437 }
4438 store_expr (lo_index, index_r, 0);
4439 loop = expand_start_loop (0);
4440
4441 /* Assign value to element index. */
4442 position
4443 = convert (ssizetype,
4444 fold (build (MINUS_EXPR, TREE_TYPE (index),
4445 index, TYPE_MIN_VALUE (domain))));
4446 position = size_binop (MULT_EXPR, position,
4447 convert (ssizetype,
4448 TYPE_SIZE_UNIT (elttype)));
4449
4450 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4451 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4452 xtarget = change_address (target, mode, addr);
4453 if (TREE_CODE (value) == CONSTRUCTOR)
4454 store_constructor (value, xtarget, align, cleared,
4455 bitsize / BITS_PER_UNIT);
4456 else
4457 store_expr (value, xtarget, 0);
4458
4459 expand_exit_loop_if_false (loop,
4460 build (LT_EXPR, integer_type_node,
4461 index, hi_index));
4462
4463 expand_increment (build (PREINCREMENT_EXPR,
4464 TREE_TYPE (index),
4465 index, integer_one_node), 0, 0);
4466 expand_end_loop ();
4467 emit_label (loop_end);
4468 }
4469 }
4470 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4471 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4472 {
4473 rtx pos_rtx, addr;
4474 tree position;
4475
4476 if (index == 0)
4477 index = ssize_int (i);
4478
4479 if (minelt)
4480 index = convert (ssizetype,
4481 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4482 TYPE_MIN_VALUE (domain))));
4483 position = size_binop (MULT_EXPR, index,
4484 convert (ssizetype,
4485 TYPE_SIZE_UNIT (elttype)));
4486 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4487 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4488 xtarget = change_address (target, mode, addr);
4489 store_expr (value, xtarget, 0);
4490 }
4491 else
4492 {
4493 if (index != 0)
4494 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4495 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4496 else
4497 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4498 store_constructor_field (target, bitsize, bitpos, mode, value,
4499 type, align, cleared);
4500 }
4501 }
4502 }
4503 /* Assignments from a SET_TYPE constructor. */
4504 else if (TREE_CODE (type) == SET_TYPE)
4505 {
4506 tree elt = CONSTRUCTOR_ELTS (exp);
4507 int nbytes = int_size_in_bytes (type), nbits;
4508 tree domain = TYPE_DOMAIN (type);
4509 tree domain_min, domain_max, bitlength;
4510
4511 /* The default implementation strategy is to extract the constant
4512 parts of the constructor, use that to initialize the target,
4513 and then "or" in whatever non-constant ranges we need in addition.
4514
4515 If a large set is all zero or all ones, it is
4516 probably better to set it using memset (if available) or bzero.
4517 Also, if a large set has just a single range, it may also be
4518 better to first clear the whole set (using bzero/memset)
4519 and then set the bits we want. */
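/* Such constructors come from front ends with set types, e.g. Chill
or Pascal, where `[1, 3..5]' yields one single-bit element and one
range element in CONSTRUCTOR_ELTS. */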
4520
4521 /* Check for all zeros. */
4522 if (elt == NULL_TREE && size > 0)
4523 {
4524 if (!cleared)
4525 clear_storage (target, GEN_INT (size),
4526 TYPE_ALIGN (type) / BITS_PER_UNIT);
4527 return;
4528 }
4529
4530 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4531 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4532 bitlength = size_binop (PLUS_EXPR,
4533 size_diffop (domain_max, domain_min),
4534 ssize_int (1));
4535
4536 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4537 abort ();
4538 nbits = TREE_INT_CST_LOW (bitlength);
4539
4540 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4541 are "complicated" (more than one range), initialize (the
4542 constant parts) by copying from a constant. */
4543 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4544 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4545 {
4546 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4547 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4548 char *bit_buffer = (char *) alloca (nbits);
4549 HOST_WIDE_INT word = 0;
4550 int bit_pos = 0;
4551 int ibit = 0;
4552 int offset = 0; /* In bytes from beginning of set. */
4553 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4554 for (;;)
4555 {
4556 if (bit_buffer[ibit])
4557 {
4558 if (BYTES_BIG_ENDIAN)
4559 word |= (1 << (set_word_size - 1 - bit_pos));
4560 else
4561 word |= 1 << bit_pos;
4562 }
4563 bit_pos++; ibit++;
4564 if (bit_pos >= set_word_size || ibit == nbits)
4565 {
4566 if (word != 0 || ! cleared)
4567 {
4568 rtx datum = GEN_INT (word);
4569 rtx to_rtx;
4570 /* The assumption here is that it is safe to use
4571 XEXP if the set is multi-word, but not if
4572 it's single-word. */
4573 if (GET_CODE (target) == MEM)
4574 {
4575 to_rtx = plus_constant (XEXP (target, 0), offset);
4576 to_rtx = change_address (target, mode, to_rtx);
4577 }
4578 else if (offset == 0)
4579 to_rtx = target;
4580 else
4581 abort ();
4582 emit_move_insn (to_rtx, datum);
4583 }
4584 if (ibit == nbits)
4585 break;
4586 word = 0;
4587 bit_pos = 0;
4588 offset += set_word_size / BITS_PER_UNIT;
4589 }
4590 }
4591 }
4592 else if (!cleared)
4593 {
4594 /* Don't bother clearing storage if the set is all ones. */
4595 if (TREE_CHAIN (elt) != NULL_TREE
4596 || (TREE_PURPOSE (elt) == NULL_TREE
4597 ? nbits != 1
4598 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4599 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4600 || ((HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_VALUE (elt))
4601 - (HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4602 != nbits))))
4603 clear_storage (target, expr_size (exp),
4604 TYPE_ALIGN (type) / BITS_PER_UNIT);
4605 }
4606
4607 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4608 {
4609 /* Start of range of element, or NULL. */
4610 tree startbit = TREE_PURPOSE (elt);
4611 /* End of range of element, or element value. */
4612 tree endbit = TREE_VALUE (elt);
4613 #ifdef TARGET_MEM_FUNCTIONS
4614 HOST_WIDE_INT startb, endb;
4615 #endif
4616 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4617
4618 bitlength_rtx = expand_expr (bitlength,
4619 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4620
4621 /* Handle a non-range tuple element like [ expr ]. */
4622 if (startbit == NULL_TREE)
4623 {
4624 startbit = save_expr (endbit);
4625 endbit = startbit;
4626 }
4627 startbit = convert (sizetype, startbit);
4628 endbit = convert (sizetype, endbit);
4629 if (! integer_zerop (domain_min))
4630 {
4631 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4632 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4633 }
4634 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4635 EXPAND_CONST_ADDRESS);
4636 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4637 EXPAND_CONST_ADDRESS);
4638
4639 if (REG_P (target))
4640 {
4641 targetx = assign_stack_temp (GET_MODE (target),
4642 GET_MODE_SIZE (GET_MODE (target)),
4643 0);
4644 emit_move_insn (targetx, target);
4645 }
4646 else if (GET_CODE (target) == MEM)
4647 targetx = target;
4648 else
4649 abort ();
4650
4651 #ifdef TARGET_MEM_FUNCTIONS
4652 /* Optimization: If startbit and endbit are
4653 constants divisible by BITS_PER_UNIT,
4654 call memset instead. */
4655 if (TREE_CODE (startbit) == INTEGER_CST
4656 && TREE_CODE (endbit) == INTEGER_CST
4657 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4658 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4659 {
4660 emit_library_call (memset_libfunc, 0,
4661 VOIDmode, 3,
4662 plus_constant (XEXP (targetx, 0),
4663 startb / BITS_PER_UNIT),
4664 Pmode,
4665 constm1_rtx, TYPE_MODE (integer_type_node),
4666 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4667 TYPE_MODE (sizetype));
4668 }
4669 else
4670 #endif
4671 {
4672 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4673 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4674 bitlength_rtx, TYPE_MODE (sizetype),
4675 startbit_rtx, TYPE_MODE (sizetype),
4676 endbit_rtx, TYPE_MODE (sizetype));
4677 }
4678 if (REG_P (target))
4679 emit_move_insn (target, targetx);
4680 }
4681 }
4682
4683 else
4684 abort ();
4685 }
4686
4687 /* Store the value of EXP (an expression tree)
4688 into a subfield of TARGET which has mode MODE and occupies
4689 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4690 If MODE is VOIDmode, it means that we are storing into a bit-field.
4691
4692 If VALUE_MODE is VOIDmode, return nothing in particular.
4693 UNSIGNEDP is not used in this case.
4694
4695 Otherwise, return an rtx for the value stored. This rtx
4696 has mode VALUE_MODE if that is convenient to do.
4697 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4698
4699 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4700 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4701
4702 ALIAS_SET is the alias set for the destination. This value will
4703 (in general) be different from that for TARGET, since TARGET is a
4704 reference to the containing structure. */
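/* E.g. for `struct { int f : 3; } s; s.f = v;' we are called with
BITSIZE == 3, BITPOS == 0 and MODE == VOIDmode, and V is stored
with the bit-field routines below. */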
4705
4706 static rtx
4707 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4708 unsignedp, align, total_size, alias_set)
4709 rtx target;
4710 int bitsize, bitpos;
4711 enum machine_mode mode;
4712 tree exp;
4713 enum machine_mode value_mode;
4714 int unsignedp;
4715 unsigned int align;
4716 int total_size;
4717 int alias_set;
4718 {
4719 HOST_WIDE_INT width_mask = 0;
4720
4721 if (TREE_CODE (exp) == ERROR_MARK)
4722 return const0_rtx;
4723
4724 if (bitsize < HOST_BITS_PER_WIDE_INT)
4725 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4726
4727 /* If we are storing into an unaligned field of an aligned union that is
4728 in a register, we may have the mode of TARGET being an integer mode but
4729 MODE == BLKmode. In that case, get an aligned object whose size and
4730 alignment are the same as TARGET and store TARGET into it (we can avoid
4731 the store if the field being stored is the entire width of TARGET). Then
4732 call ourselves recursively to store the field into a BLKmode version of
4733 that object. Finally, load from the object into TARGET. This is not
4734 very efficient in general, but should only be slightly more expensive
4735 than the otherwise-required unaligned accesses. Perhaps this can be
4736 cleaned up later. */
4737
4738 if (mode == BLKmode
4739 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4740 {
4741 rtx object = assign_stack_temp (GET_MODE (target),
4742 GET_MODE_SIZE (GET_MODE (target)), 0);
4743 rtx blk_object = copy_rtx (object);
4744
4745 MEM_SET_IN_STRUCT_P (object, 1);
4746 MEM_SET_IN_STRUCT_P (blk_object, 1);
4747 PUT_MODE (blk_object, BLKmode);
4748
4749 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4750 emit_move_insn (object, target);
4751
4752 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4753 align, total_size, alias_set);
4754
4755 /* Even though we aren't returning target, we need to
4756 give it the updated value. */
4757 emit_move_insn (target, object);
4758
4759 return blk_object;
4760 }
4761
4762 if (GET_CODE (target) == CONCAT)
4763 {
4764 /* We're storing into a struct containing a single __complex. */
4765
4766 if (bitpos != 0)
4767 abort ();
4768 return store_expr (exp, target, 0);
4769 }
4770
4771 /* If the structure is in a register or if the component
4772 is a bit field, we cannot use addressing to access it.
4773 Use bit-field techniques or SUBREG to store in it. */
4774
4775 if (mode == VOIDmode
4776 || (mode != BLKmode && ! direct_store[(int) mode]
4777 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4778 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4779 || GET_CODE (target) == REG
4780 || GET_CODE (target) == SUBREG
4781 /* If the field isn't aligned enough to store as an ordinary memref,
4782 store it as a bit field. */
4783 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4784 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4785 || bitpos % GET_MODE_ALIGNMENT (mode)))
4786 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4787 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4788 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4789 /* If the RHS and field are a constant size and the size of the
4790 RHS isn't the same size as the bitfield, we must use bitfield
4791 operations. */
4792 || (bitsize >= 0
4793 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4794 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4795 {
4796 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4797
4798 /* If BITSIZE is narrower than the size of the type of EXP
4799 we will be narrowing TEMP. Normally, what's wanted are the
4800 low-order bits. However, if EXP's type is a record and this is a
4801 big-endian machine, we want the upper BITSIZE bits. */
4802 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4803 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4804 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4805 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4806 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4807 - bitsize),
4808 temp, 1);
4809
4810 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4811 MODE. */
4812 if (mode != VOIDmode && mode != BLKmode
4813 && mode != TYPE_MODE (TREE_TYPE (exp)))
4814 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4815
4816 /* If the modes of TARGET and TEMP are both BLKmode, both
4817 must be in memory and BITPOS must be aligned on a byte
4818 boundary. If so, we simply do a block copy. */
4819 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4820 {
4821 unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
4822
4823 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4824 || bitpos % BITS_PER_UNIT != 0)
4825 abort ();
4826
4827 target = change_address (target, VOIDmode,
4828 plus_constant (XEXP (target, 0),
4829 bitpos / BITS_PER_UNIT));
4830
4831 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4832 align = MIN (exp_align, align);
4833
4834 /* Find an alignment that is consistent with the bit position. */
4835 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4836 align >>= 1;
4837
4838 emit_block_move (target, temp,
4839 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4840 / BITS_PER_UNIT),
4841 align);
4842
4843 return value_mode == VOIDmode ? const0_rtx : target;
4844 }
4845
4846 /* Store the value in the bitfield. */
4847 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4848 if (value_mode != VOIDmode)
4849 {
4850 /* The caller wants an rtx for the value. */
4851 /* If possible, avoid refetching from the bitfield itself. */
4852 if (width_mask != 0
4853 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4854 {
4855 tree count;
4856 enum machine_mode tmode;
4857
4858 if (unsignedp)
4859 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4860 tmode = GET_MODE (temp);
4861 if (tmode == VOIDmode)
4862 tmode = value_mode;
4863 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4864 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4865 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4866 }
4867 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4868 NULL_RTX, value_mode, 0, align,
4869 total_size);
4870 }
4871 return const0_rtx;
4872 }
4873 else
4874 {
4875 rtx addr = XEXP (target, 0);
4876 rtx to_rtx;
4877
4878 /* If a value is wanted, it must be the lhs;
4879 so make the address stable for multiple use. */
4880
4881 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4882 && ! CONSTANT_ADDRESS_P (addr)
4883 /* A frame-pointer reference is already stable. */
4884 && ! (GET_CODE (addr) == PLUS
4885 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4886 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4887 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4888 addr = copy_to_reg (addr);
4889
4890 /* Now build a reference to just the desired component. */
4891
4892 to_rtx = copy_rtx (change_address (target, mode,
4893 plus_constant (addr,
4894 (bitpos
4895 / BITS_PER_UNIT))));
4896 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4897 MEM_ALIAS_SET (to_rtx) = alias_set;
4898
4899 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4900 }
4901 }
4902 \f
4903 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4904 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4905 ARRAY_REFs and find the ultimate containing object, which we return.
4906
4907 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4908 bit position, and *PUNSIGNEDP to the signedness of the field.
4909 If the position of the field is variable, we store a tree
4910 giving the variable offset (in units) in *POFFSET.
4911 This offset is in addition to the bit position.
4912 If the position is not variable, we store 0 in *POFFSET.
4913 We set *PALIGNMENT to the alignment in bytes of the address that will be
4914 computed. This is the alignment of the thing we return if *POFFSET
4915 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4916
4917 If any of the extraction expressions is volatile,
4918 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4919
4920 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4921 is a mode that can be used to access the field. In that case, *PBITSIZE
4922 is redundant.
4923
4924 If the field describes a variable-sized object, *PMODE is set to
4925 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4926 this case, but the address of the object can be found. */
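/* E.g. for `s.a[i].b' this returns the object `s', with *PBITPOS the
constant part of the bit offset, *POFFSET a tree for the variable
byte offset contributed by the index I, and *PMODE the mode of the
field B (or VOIDmode if B is a bit-field). */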
4927
4928 tree
4929 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4930 punsignedp, pvolatilep, palignment)
4931 tree exp;
4932 int *pbitsize;
4933 int *pbitpos;
4934 tree *poffset;
4935 enum machine_mode *pmode;
4936 int *punsignedp;
4937 int *pvolatilep;
4938 unsigned int *palignment;
4939 {
4940 tree orig_exp = exp;
4941 tree size_tree = 0;
4942 enum machine_mode mode = VOIDmode;
4943 tree offset = size_zero_node;
4944 unsigned int alignment = BIGGEST_ALIGNMENT;
4945
4946 if (TREE_CODE (exp) == COMPONENT_REF)
4947 {
4948 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4949 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4950 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4951 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4952 }
4953 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4954 {
4955 size_tree = TREE_OPERAND (exp, 1);
4956 *punsignedp = TREE_UNSIGNED (exp);
4957 }
4958 else
4959 {
4960 mode = TYPE_MODE (TREE_TYPE (exp));
4961 if (mode == BLKmode)
4962 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4963
4964 *pbitsize = GET_MODE_BITSIZE (mode);
4965 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4966 }
4967
4968 if (size_tree)
4969 {
4970 if (TREE_CODE (size_tree) != INTEGER_CST)
4971 mode = BLKmode, *pbitsize = -1;
4972 else
4973 *pbitsize = TREE_INT_CST_LOW (size_tree);
4974 }
4975
4976 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4977 and find the ultimate containing object. */
4978
4979 *pbitpos = 0;
4980
4981 while (1)
4982 {
4983 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4984 {
4985 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4986 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4987 : TREE_OPERAND (exp, 2));
4988 tree constant = bitsize_int (0), var = pos;
4989
4990 /* If this field hasn't been filled in yet, don't go
4991 past it. This should only happen when folding expressions
4992 made during type construction. */
4993 if (pos == 0)
4994 break;
4995
4996 /* Assume here that the offset is a multiple of a unit.
4997 If not, there should be an explicitly added constant. */
4998 if (TREE_CODE (pos) == PLUS_EXPR
4999 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
5000 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
5001 else if (TREE_CODE (pos) == INTEGER_CST)
5002 constant = pos, var = bitsize_int (0);
5003
5004 *pbitpos += TREE_INT_CST_LOW (constant);
5005 offset
5006 = size_binop (PLUS_EXPR, offset,
5007 convert (sizetype,
5008 size_binop (EXACT_DIV_EXPR, var,
5009 bitsize_int (BITS_PER_UNIT))));
5010 }
5011
5012 else if (TREE_CODE (exp) == ARRAY_REF)
5013 {
5014 /* This code is based on the code in case ARRAY_REF in expand_expr
5015 below. We assume here that the size of an array element is
5016 always an integral multiple of BITS_PER_UNIT. */
5017
5018 tree index = TREE_OPERAND (exp, 1);
5019 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5020 tree low_bound
5021 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5022 tree index_type = TREE_TYPE (index);
5023 tree xindex;
5024
5025 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5026 {
5027 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5028 index);
5029 index_type = TREE_TYPE (index);
5030 }
5031
5032 /* Optimize the special-case of a zero lower bound.
5033
5034 We convert the low_bound to sizetype to avoid some problems
5035 with constant folding. (E.g. suppose the lower bound is 1,
5036 and its mode is QI. Without the conversion, (ARRAY
5037 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5038 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5039
5040 But sizetype isn't quite right either (especially if
5041 the lowbound is negative). FIXME */
5042
5043 if (! integer_zerop (low_bound))
5044 index = fold (build (MINUS_EXPR, index_type, index,
5045 convert (sizetype, low_bound)));
5046
5047 if (TREE_CODE (index) == INTEGER_CST)
5048 {
5049 index = convert (sbitsizetype, index);
5050 index_type = TREE_TYPE (index);
5051 }
5052
5053 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5054 convert (sbitsizetype,
5055 TYPE_SIZE (TREE_TYPE (exp)))));
5056
5057 if (TREE_CODE (xindex) == INTEGER_CST
5058 && TREE_INT_CST_HIGH (xindex) == 0)
5059 *pbitpos += TREE_INT_CST_LOW (xindex);
5060 else
5061 {
5062 /* Either the bit offset calculated above is not constant, or
5063 it overflowed. In either case, redo the multiplication
5064 against the size in units. This is especially important
5065 in the non-constant case to avoid a division at runtime. */
5066 xindex
5067 = fold (build (MULT_EXPR, ssizetype, index,
5068 convert (ssizetype,
5069 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5070
5071 if (contains_placeholder_p (xindex))
5072 xindex = build (WITH_RECORD_EXPR, ssizetype, xindex, exp);
5073
5074 offset
5075 = size_binop (PLUS_EXPR, offset, convert (sizetype, xindex));
5076 }
5077 }
5078 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5079 && ! ((TREE_CODE (exp) == NOP_EXPR
5080 || TREE_CODE (exp) == CONVERT_EXPR)
5081 && (TYPE_MODE (TREE_TYPE (exp))
5082 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5083 break;
5084
5085 /* If any reference in the chain is volatile, the effect is volatile. */
5086 if (TREE_THIS_VOLATILE (exp))
5087 *pvolatilep = 1;
5088
5089 /* If the offset is non-constant already, then we can't assume any
5090 alignment more than the alignment here. */
5091 if (! integer_zerop (offset))
5092 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5093
5094 exp = TREE_OPERAND (exp, 0);
5095 }
5096
5097 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5098 alignment = MIN (alignment, DECL_ALIGN (exp));
5099 else if (TREE_TYPE (exp) != 0)
5100 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5101
5102 if (integer_zerop (offset))
5103 offset = 0;
5104
5105 if (offset != 0 && contains_placeholder_p (offset))
5106 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5107
5108 *pmode = mode;
5109 *poffset = offset;
5110 *palignment = alignment / BITS_PER_UNIT;
5111 return exp;
5112 }
5113
5114 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5115 static enum memory_use_mode
5116 get_memory_usage_from_modifier (modifier)
5117 enum expand_modifier modifier;
5118 {
5119 switch (modifier)
5120 {
5121 case EXPAND_NORMAL:
5122 case EXPAND_SUM:
5123 return MEMORY_USE_RO;
5124 break;
5125 case EXPAND_MEMORY_USE_WO:
5126 return MEMORY_USE_WO;
5127 break;
5128 case EXPAND_MEMORY_USE_RW:
5129 return MEMORY_USE_RW;
5130 break;
5131 case EXPAND_MEMORY_USE_DONT:
5132 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5133 MEMORY_USE_DONT, because they are modifiers to a call of
5134 expand_expr in the ADDR_EXPR case of expand_expr. */
5135 case EXPAND_CONST_ADDRESS:
5136 case EXPAND_INITIALIZER:
5137 return MEMORY_USE_DONT;
5138 case EXPAND_MEMORY_USE_BAD:
5139 default:
5140 abort ();
5141 }
5142 }
5143 \f
5144 /* Given an rtx VALUE that may contain additions and multiplications,
5145 return an equivalent value that just refers to a register or memory.
5146 This is done by generating instructions to perform the arithmetic
5147 and returning a pseudo-register containing the value.
5148
5149 The returned value may be a REG, SUBREG, MEM or constant. */
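/* E.g. given `(plus:SI (reg:SI 100) (const_int 4))' this emits an add
insn and returns a pseudo register holding the sum. */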
5150
5151 rtx
5152 force_operand (value, target)
5153 rtx value, target;
5154 {
5155 register optab binoptab = 0;
5156 /* Use a temporary to force order of execution of calls to
5157 `force_operand'. */
5158 rtx tmp;
5159 register rtx op2;
5160 /* Use subtarget as the target for operand 0 of a binary operation. */
5161 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5162
5163 /* Check for a PIC address load. */
5164 if (flag_pic
5165 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5166 && XEXP (value, 0) == pic_offset_table_rtx
5167 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5168 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5169 || GET_CODE (XEXP (value, 1)) == CONST))
5170 {
5171 if (!subtarget)
5172 subtarget = gen_reg_rtx (GET_MODE (value));
5173 emit_move_insn (subtarget, value);
5174 return subtarget;
5175 }
5176
5177 if (GET_CODE (value) == PLUS)
5178 binoptab = add_optab;
5179 else if (GET_CODE (value) == MINUS)
5180 binoptab = sub_optab;
5181 else if (GET_CODE (value) == MULT)
5182 {
5183 op2 = XEXP (value, 1);
5184 if (!CONSTANT_P (op2)
5185 && !(GET_CODE (op2) == REG && op2 != subtarget))
5186 subtarget = 0;
5187 tmp = force_operand (XEXP (value, 0), subtarget);
5188 return expand_mult (GET_MODE (value), tmp,
5189 force_operand (op2, NULL_RTX),
5190 target, 0);
5191 }
5192
5193 if (binoptab)
5194 {
5195 op2 = XEXP (value, 1);
5196 if (!CONSTANT_P (op2)
5197 && !(GET_CODE (op2) == REG && op2 != subtarget))
5198 subtarget = 0;
5199 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5200 {
5201 binoptab = add_optab;
5202 op2 = negate_rtx (GET_MODE (value), op2);
5203 }
5204
5205 /* Check for an addition with OP2 a constant integer and our first
5206 operand a PLUS of a virtual register and something else. In that
5207 case, we want to emit the sum of the virtual register and the
5208 constant first and then add the other value. This allows virtual
5209 register instantiation to simply modify the constant rather than
5210 creating another one around this addition. */
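/* E.g. for `(plus (plus (reg virtual-stack-vars) (reg 101)) (const_int 8))'
we first form `virtual-stack-vars + 8', which instantiation can fold
into a single frame offset, and only then add (reg 101). */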
5211 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5212 && GET_CODE (XEXP (value, 0)) == PLUS
5213 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5214 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5215 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5216 {
5217 rtx temp = expand_binop (GET_MODE (value), binoptab,
5218 XEXP (XEXP (value, 0), 0), op2,
5219 subtarget, 0, OPTAB_LIB_WIDEN);
5220 return expand_binop (GET_MODE (value), binoptab, temp,
5221 force_operand (XEXP (XEXP (value, 0), 1), 0),
5222 target, 0, OPTAB_LIB_WIDEN);
5223 }
5224
5225 tmp = force_operand (XEXP (value, 0), subtarget);
5226 return expand_binop (GET_MODE (value), binoptab, tmp,
5227 force_operand (op2, NULL_RTX),
5228 target, 0, OPTAB_LIB_WIDEN);
5229 /* We give UNSIGNEDP = 0 to expand_binop
5230 because the only operations we are expanding here are signed ones. */
5231 }
5232 return value;
5233 }
5234 \f
5235 /* Subroutine of expand_expr:
5236 save the non-copied parts (LIST) of an expr (LHS), and return a list
5237 which can restore these values to their previous values,
5238 should something modify their storage. */
5239
5240 static tree
5241 save_noncopied_parts (lhs, list)
5242 tree lhs;
5243 tree list;
5244 {
5245 tree tail;
5246 tree parts = 0;
5247
5248 for (tail = list; tail; tail = TREE_CHAIN (tail))
5249 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5250 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5251 else
5252 {
5253 tree part = TREE_VALUE (tail);
5254 tree part_type = TREE_TYPE (part);
5255 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5256 rtx target = assign_temp (part_type, 0, 1, 1);
5257 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5258 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5259 parts = tree_cons (to_be_saved,
5260 build (RTL_EXPR, part_type, NULL_TREE,
5261 (tree) target),
5262 parts);
5263 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5264 }
5265 return parts;
5266 }
5267
5268 /* Subroutine of expand_expr:
5269 record the non-copied parts (LIST) of an expr (LHS), and return a list
5270 which specifies the initial values of these parts. */
5271
5272 static tree
5273 init_noncopied_parts (lhs, list)
5274 tree lhs;
5275 tree list;
5276 {
5277 tree tail;
5278 tree parts = 0;
5279
5280 for (tail = list; tail; tail = TREE_CHAIN (tail))
5281 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5282 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5283 else if (TREE_PURPOSE (tail))
5284 {
5285 tree part = TREE_VALUE (tail);
5286 tree part_type = TREE_TYPE (part);
5287 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5288 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5289 }
5290 return parts;
5291 }
5292
5293 /* Subroutine of expand_expr: return nonzero iff there is no way that
5294 EXP can reference X, which is being modified. TOP_P is nonzero if this
5295 call is going to be used to determine whether we need a temporary
5296 for EXP, as opposed to a recursive call to this function.
5297
5298 It is always safe for this routine to return zero since it merely
5299 searches for optimization opportunities. */
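/* For instance (an illustrative case, not from any particular test):
   if X is a MEM and EXP is a CALL_EXPR whose result rtx is not yet
   known, the code below answers 0, because the call must be assumed
   to clobber all of memory; returning 0 merely forgoes an
   optimization opportunity.  */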
5300
5301 static int
5302 safe_from_p (x, exp, top_p)
5303 rtx x;
5304 tree exp;
5305 int top_p;
5306 {
5307 rtx exp_rtl = 0;
5308 int i, nops;
5309 static int save_expr_count;
5310 static int save_expr_size = 0;
5311 static tree *save_expr_rewritten;
5312 static tree save_expr_trees[256];
5313
5314 if (x == 0
5315 /* If EXP has varying size, we MUST use a target since we currently
5316 have no way of allocating temporaries of variable size
5317 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5318 So we assume here that something at a higher level has prevented a
5319 clash. This is somewhat bogus, but the best we can do. Only
5320 do this when X is BLKmode and when we are at the top level. */
5321 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5322 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5323 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5324 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5325 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5326 != INTEGER_CST)
5327 && GET_MODE (x) == BLKmode))
5328 return 1;
5329
5330 if (top_p && save_expr_size == 0)
5331 {
5332 int rtn;
5333
5334 save_expr_count = 0;
5335 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5336 save_expr_rewritten = &save_expr_trees[0];
5337
5338 rtn = safe_from_p (x, exp, 1);
5339
5340 for (i = 0; i < save_expr_count; ++i)
5341 {
5342 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5343 abort ();
5344 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5345 }
5346
5347 save_expr_size = 0;
5348
5349 return rtn;
5350 }
5351
5352 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5353 find the underlying pseudo. */
5354 if (GET_CODE (x) == SUBREG)
5355 {
5356 x = SUBREG_REG (x);
5357 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5358 return 0;
5359 }
5360
5361 /* If X is a location in the outgoing argument area, it is always safe. */
5362 if (GET_CODE (x) == MEM
5363 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5364 || (GET_CODE (XEXP (x, 0)) == PLUS
5365 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5366 return 1;
5367
5368 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5369 {
5370 case 'd':
5371 exp_rtl = DECL_RTL (exp);
5372 break;
5373
5374 case 'c':
5375 return 1;
5376
5377 case 'x':
5378 if (TREE_CODE (exp) == TREE_LIST)
5379 return ((TREE_VALUE (exp) == 0
5380 || safe_from_p (x, TREE_VALUE (exp), 0))
5381 && (TREE_CHAIN (exp) == 0
5382 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5383 else if (TREE_CODE (exp) == ERROR_MARK)
5384 return 1; /* An already-visited SAVE_EXPR? */
5385 else
5386 return 0;
5387
5388 case '1':
5389 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5390
5391 case '2':
5392 case '<':
5393 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5394 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5395
5396 case 'e':
5397 case 'r':
5398 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5399 the expression. If it is set, we conflict iff we are that rtx or
5400 both are in memory. Otherwise, we check all operands of the
5401 expression recursively. */
5402
5403 switch (TREE_CODE (exp))
5404 {
5405 case ADDR_EXPR:
5406 return (staticp (TREE_OPERAND (exp, 0))
5407 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5408 || TREE_STATIC (exp));
5409
5410 case INDIRECT_REF:
5411 if (GET_CODE (x) == MEM)
5412 return 0;
5413 break;
5414
5415 case CALL_EXPR:
5416 exp_rtl = CALL_EXPR_RTL (exp);
5417 if (exp_rtl == 0)
5418 {
5419 /* Assume that the call will clobber all hard registers and
5420 all of memory. */
5421 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5422 || GET_CODE (x) == MEM)
5423 return 0;
5424 }
5425
5426 break;
5427
5428 case RTL_EXPR:
5429 /* If a sequence exists, we would have to scan every instruction
5430 in the sequence to see if it was safe. This is probably not
5431 worthwhile. */
5432 if (RTL_EXPR_SEQUENCE (exp))
5433 return 0;
5434
5435 exp_rtl = RTL_EXPR_RTL (exp);
5436 break;
5437
5438 case WITH_CLEANUP_EXPR:
5439 exp_rtl = RTL_EXPR_RTL (exp);
5440 break;
5441
5442 case CLEANUP_POINT_EXPR:
5443 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5444
5445 case SAVE_EXPR:
5446 exp_rtl = SAVE_EXPR_RTL (exp);
5447 if (exp_rtl)
5448 break;
5449
5450 /* This SAVE_EXPR might appear many times in the top-level
5451 safe_from_p() expression, and if it has a complex
5452 subexpression, examining it multiple times could result
5453 in a combinatorial explosion. E.g. on an Alpha
5454 running at least 200MHz, a Fortran test case compiled with
5455 optimization took about 28 minutes to compile -- even though
5456 it was only a few lines long, and the complicated line causing
5457 so much time to be spent in the earlier version of safe_from_p()
5458 had only 293 or so unique nodes.
5459
5460 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5461 where it is so we can turn it back in the top-level safe_from_p()
5462 when we're done. */
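/* Sketch of the rewrite performed below: while operand I of this
   SAVE_EXPR is being scanned, the node's code is temporarily set to
   ERROR_MARK, so any other reference to the same SAVE_EXPR reached
   during the recursion falls into the 'x'/ERROR_MARK case above and
   returns 1 instead of being re-scanned; the top-level call restores
   SAVE_EXPR codes from save_expr_rewritten when it finishes.  */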
5463
5464 /* For now, don't bother re-sizing the array. */
5465 if (save_expr_count >= save_expr_size)
5466 return 0;
5467 save_expr_rewritten[save_expr_count++] = exp;
5468
5469 nops = tree_code_length[(int) SAVE_EXPR];
5470 for (i = 0; i < nops; i++)
5471 {
5472 tree operand = TREE_OPERAND (exp, i);
5473 if (operand == NULL_TREE)
5474 continue;
5475 TREE_SET_CODE (exp, ERROR_MARK);
5476 if (!safe_from_p (x, operand, 0))
5477 return 0;
5478 TREE_SET_CODE (exp, SAVE_EXPR);
5479 }
5480 TREE_SET_CODE (exp, ERROR_MARK);
5481 return 1;
5482
5483 case BIND_EXPR:
5484 /* The only operand we look at is operand 1. The rest aren't
5485 part of the expression. */
5486 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5487
5488 case METHOD_CALL_EXPR:
5489 /* This takes an rtx argument, but shouldn't appear here. */
5490 abort ();
5491
5492 default:
5493 break;
5494 }
5495
5496 /* If we have an rtx, we do not need to scan our operands. */
5497 if (exp_rtl)
5498 break;
5499
5500 nops = tree_code_length[(int) TREE_CODE (exp)];
5501 for (i = 0; i < nops; i++)
5502 if (TREE_OPERAND (exp, i) != 0
5503 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5504 return 0;
5505 }
5506
5507 /* If we have an rtl, find any enclosed object. Then see if we conflict
5508 with it. */
5509 if (exp_rtl)
5510 {
5511 if (GET_CODE (exp_rtl) == SUBREG)
5512 {
5513 exp_rtl = SUBREG_REG (exp_rtl);
5514 if (GET_CODE (exp_rtl) == REG
5515 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5516 return 0;
5517 }
5518
5519 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5520 are memory and EXP is not readonly. */
5521 return ! (rtx_equal_p (x, exp_rtl)
5522 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5523 && ! TREE_READONLY (exp)));
5524 }
5525
5526 /* If we reach here, it is safe. */
5527 return 1;
5528 }
5529
5530 /* Subroutine of expand_expr: return nonzero iff EXP is an
5531 expression whose type is statically determinable. */
5532
5533 static int
5534 fixed_type_p (exp)
5535 tree exp;
5536 {
5537 if (TREE_CODE (exp) == PARM_DECL
5538 || TREE_CODE (exp) == VAR_DECL
5539 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5540 || TREE_CODE (exp) == COMPONENT_REF
5541 || TREE_CODE (exp) == ARRAY_REF)
5542 return 1;
5543 return 0;
5544 }
5545
5546 /* Subroutine of expand_expr: return rtx if EXP is a
5547 variable or parameter; else return 0. */
5548
5549 static rtx
5550 var_rtx (exp)
5551 tree exp;
5552 {
5553 STRIP_NOPS (exp);
5554 switch (TREE_CODE (exp))
5555 {
5556 case PARM_DECL:
5557 case VAR_DECL:
5558 return DECL_RTL (exp);
5559 default:
5560 return 0;
5561 }
5562 }
5563
5564 #ifdef MAX_INTEGER_COMPUTATION_MODE
5565 void
5566 check_max_integer_computation_mode (exp)
5567 tree exp;
5568 {
5569 enum tree_code code;
5570 enum machine_mode mode;
5571
5572 /* Strip any NOPs that don't change the mode. */
5573 STRIP_NOPS (exp);
5574 code = TREE_CODE (exp);
5575
5576 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5577 if (code == NOP_EXPR
5578 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5579 return;
5580
5581 /* First check the type of the overall operation. We need only look at
5582 unary, binary and relational operations. */
5583 if (TREE_CODE_CLASS (code) == '1'
5584 || TREE_CODE_CLASS (code) == '2'
5585 || TREE_CODE_CLASS (code) == '<')
5586 {
5587 mode = TYPE_MODE (TREE_TYPE (exp));
5588 if (GET_MODE_CLASS (mode) == MODE_INT
5589 && mode > MAX_INTEGER_COMPUTATION_MODE)
5590 fatal ("unsupported wide integer operation");
5591 }
5592
5593 /* Check operand of a unary op. */
5594 if (TREE_CODE_CLASS (code) == '1')
5595 {
5596 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5597 if (GET_MODE_CLASS (mode) == MODE_INT
5598 && mode > MAX_INTEGER_COMPUTATION_MODE)
5599 fatal ("unsupported wide integer operation");
5600 }
5601
5602 /* Check operands of a binary/comparison op. */
5603 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5604 {
5605 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5606 if (GET_MODE_CLASS (mode) == MODE_INT
5607 && mode > MAX_INTEGER_COMPUTATION_MODE)
5608 fatal ("unsupported wide integer operation");
5609
5610 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5611 if (GET_MODE_CLASS (mode) == MODE_INT
5612 && mode > MAX_INTEGER_COMPUTATION_MODE)
5613 fatal ("unsupported wide integer operation");
5614 }
5615 }
5616 #endif
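/* Illustrative use (hypothetical target): if a port defined
   MAX_INTEGER_COMPUTATION_MODE as SImode, expanding a DImode
   addition (e.g. `long long' arithmetic) would make the checks
   above report "unsupported wide integer operation", since the
   operand mode compares greater than SImode.  */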
5617
5618 \f
5619 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5620 has any readonly fields. If any of the fields have types that
5621 contain readonly fields, return true as well. */
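/* Hypothetical example:

	struct inner { const int a; };
	struct outer { struct inner i; int b; };

   readonly_fields_p returns 1 for both record types: for `inner'
   because of the const field, and for `outer' because the type of
   field `i' itself contains a readonly field.  */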
5622
5623 static int
5624 readonly_fields_p (type)
5625 tree type;
5626 {
5627 tree field;
5628
5629 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5630 if (TREE_CODE (field) == FIELD_DECL
5631 && (TREE_READONLY (field)
5632 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5633 && readonly_fields_p (TREE_TYPE (field)))))
5634 return 1;
5635
5636 return 0;
5637 }
5638 \f
5639 /* expand_expr: generate code for computing expression EXP.
5640 An rtx for the computed value is returned. The value is never null.
5641 In the case of a void EXP, const0_rtx is returned.
5642
5643 The value may be stored in TARGET if TARGET is nonzero.
5644 TARGET is just a suggestion; callers must assume that
5645 the rtx returned may not be the same as TARGET.
5646
5647 If TARGET is CONST0_RTX, it means that the value will be ignored.
5648
5649 If TMODE is not VOIDmode, it suggests generating the
5650 result in mode TMODE. But this is done only when convenient.
5651 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5652 TMODE is just a suggestion; callers must assume that
5653 the rtx returned may not have mode TMODE.
5654
5655 Note that TARGET may have neither TMODE nor MODE. In that case, it
5656 probably will not be used.
5657
5658 If MODIFIER is EXPAND_SUM then when EXP is an addition
5659 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5660 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5661 products as above, or REG or MEM, or constant.
5662 Ordinarily in such cases we would output mul or add instructions
5663 and then return a pseudo reg containing the sum.
5664
5665 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5666 it also marks a label as absolutely required (it can't be dead).
5667 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5668 This is used for outputting expressions used in initializers.
5669
5670 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5671 with a constant address even if that address is not normally legitimate.
5672 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
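/* Typical use (a minimal sketch, not taken from any particular
   caller):

	temp = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
	if (temp != target)
	  emit_move_insn (target, temp);

   Since TARGET and TMODE are only suggestions, a caller that insists
   on a particular location must copy the result itself.  */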
5673
5674 rtx
5675 expand_expr (exp, target, tmode, modifier)
5676 register tree exp;
5677 rtx target;
5678 enum machine_mode tmode;
5679 enum expand_modifier modifier;
5680 {
5681 register rtx op0, op1, temp;
5682 tree type = TREE_TYPE (exp);
5683 int unsignedp = TREE_UNSIGNED (type);
5684 register enum machine_mode mode;
5685 register enum tree_code code = TREE_CODE (exp);
5686 optab this_optab;
5687 rtx subtarget, original_target;
5688 int ignore;
5689 tree context;
5690 /* Used by check-memory-usage to make modifier read only. */
5691 enum expand_modifier ro_modifier;
5692
5693 /* Handle ERROR_MARK before anybody tries to access its type. */
5694 if (TREE_CODE (exp) == ERROR_MARK)
5695 {
5696 op0 = CONST0_RTX (tmode);
5697 if (op0 != 0)
5698 return op0;
5699 return const0_rtx;
5700 }
5701
5702 mode = TYPE_MODE (type);
5703 /* Use subtarget as the target for operand 0 of a binary operation. */
5704 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5705 original_target = target;
5706 ignore = (target == const0_rtx
5707 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5708 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5709 || code == COND_EXPR)
5710 && TREE_CODE (type) == VOID_TYPE));
5711
5712 /* Make a read-only version of the modifier. */
5713 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5714 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5715 ro_modifier = modifier;
5716 else
5717 ro_modifier = EXPAND_NORMAL;
5718
5719 /* Don't use hard regs as subtargets, because the combiner
5720 can only handle pseudo regs. */
5721 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5722 subtarget = 0;
5723 /* Avoid subtargets inside loops,
5724 since they hide some invariant expressions. */
5725 if (preserve_subexpressions_p ())
5726 subtarget = 0;
5727
5728 /* If we are going to ignore this result, we need only do something
5729 if there is a side-effect somewhere in the expression. If there
5730 is, short-circuit the most common cases here. Note that we must
5731 not call expand_expr with anything but const0_rtx in case this
5732 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
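/* For example (illustrative): expanding the statement `(void) (a + b);'
   reaches this block with IGNORE set; the addition has no side
   effects, so we return const0_rtx at once and emit no code at all.  */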
5733
5734 if (ignore)
5735 {
5736 if (! TREE_SIDE_EFFECTS (exp))
5737 return const0_rtx;
5738
5739 /* Ensure we reference a volatile object even if value is ignored, but
5740 don't do this if all we are doing is taking its address. */
5741 if (TREE_THIS_VOLATILE (exp)
5742 && TREE_CODE (exp) != FUNCTION_DECL
5743 && mode != VOIDmode && mode != BLKmode
5744 && modifier != EXPAND_CONST_ADDRESS)
5745 {
5746 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5747 if (GET_CODE (temp) == MEM)
5748 temp = copy_to_reg (temp);
5749 return const0_rtx;
5750 }
5751
5752 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5753 || code == INDIRECT_REF || code == BUFFER_REF)
5754 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5755 VOIDmode, ro_modifier);
5756 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5757 || code == ARRAY_REF)
5758 {
5759 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5760 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5761 return const0_rtx;
5762 }
5763 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5764 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5765 /* If the second operand has no side effects, just evaluate
5766 the first. */
5767 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5768 VOIDmode, ro_modifier);
5769 else if (code == BIT_FIELD_REF)
5770 {
5771 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5772 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5773 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5774 return const0_rtx;
5775 }
5777 target = 0;
5778 }
5779
5780 #ifdef MAX_INTEGER_COMPUTATION_MODE
5781 /* Only check stuff here if the mode we want is different from the mode
5782 of the expression; if it's the same, check_max_integer_computation_mode
5783 will handle it. Do we really need to check this stuff at all? */
5784
5785 if (target
5786 && GET_MODE (target) != mode
5787 && TREE_CODE (exp) != INTEGER_CST
5788 && TREE_CODE (exp) != PARM_DECL
5789 && TREE_CODE (exp) != ARRAY_REF
5790 && TREE_CODE (exp) != COMPONENT_REF
5791 && TREE_CODE (exp) != BIT_FIELD_REF
5792 && TREE_CODE (exp) != INDIRECT_REF
5793 && TREE_CODE (exp) != CALL_EXPR
5794 && TREE_CODE (exp) != VAR_DECL
5795 && TREE_CODE (exp) != RTL_EXPR)
5796 {
5797 enum machine_mode mode = GET_MODE (target);
5798
5799 if (GET_MODE_CLASS (mode) == MODE_INT
5800 && mode > MAX_INTEGER_COMPUTATION_MODE)
5801 fatal ("unsupported wide integer operation");
5802 }
5803
5804 if (tmode != mode
5805 && TREE_CODE (exp) != INTEGER_CST
5806 && TREE_CODE (exp) != PARM_DECL
5807 && TREE_CODE (exp) != ARRAY_REF
5808 && TREE_CODE (exp) != COMPONENT_REF
5809 && TREE_CODE (exp) != BIT_FIELD_REF
5810 && TREE_CODE (exp) != INDIRECT_REF
5811 && TREE_CODE (exp) != VAR_DECL
5812 && TREE_CODE (exp) != CALL_EXPR
5813 && TREE_CODE (exp) != RTL_EXPR
5814 && GET_MODE_CLASS (tmode) == MODE_INT
5815 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5816 fatal ("unsupported wide integer operation");
5817
5818 check_max_integer_computation_mode (exp);
5819 #endif
5820
5821 /* If will do cse, generate all results into pseudo registers
5822 since 1) that allows cse to find more things
5823 and 2) otherwise cse could produce an insn the machine
5824 cannot support. */
5825
5826 if (! cse_not_expected && mode != BLKmode && target
5827 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5828 target = subtarget;
5829
5830 switch (code)
5831 {
5832 case LABEL_DECL:
5833 {
5834 tree function = decl_function_context (exp);
5835 /* Handle using a label in a containing function. */
5836 if (function != current_function_decl
5837 && function != inline_function_decl && function != 0)
5838 {
5839 struct function *p = find_function_data (function);
5840 /* Allocate in the memory associated with the function
5841 that the label is in. */
5842 push_obstacks (p->function_obstack,
5843 p->function_maybepermanent_obstack);
5844
5845 p->expr->x_forced_labels
5846 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5847 p->expr->x_forced_labels);
5848 pop_obstacks ();
5849 }
5850 else
5851 {
5852 if (modifier == EXPAND_INITIALIZER)
5853 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5854 label_rtx (exp),
5855 forced_labels);
5856 }
5857
5858 temp = gen_rtx_MEM (FUNCTION_MODE,
5859 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5860 if (function != current_function_decl
5861 && function != inline_function_decl && function != 0)
5862 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5863 return temp;
5864 }
5865
5866 case PARM_DECL:
5867 if (DECL_RTL (exp) == 0)
5868 {
5869 error_with_decl (exp, "prior parameter's size depends on `%s'");
5870 return CONST0_RTX (mode);
5871 }
5872
5873 /* ... fall through ... */
5874
5875 case VAR_DECL:
5876 /* If a static var's type was incomplete when the decl was written,
5877 but the type is complete now, lay out the decl now. */
5878 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5879 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5880 {
5881 push_obstacks_nochange ();
5882 end_temporary_allocation ();
5883 layout_decl (exp, 0);
5884 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5885 pop_obstacks ();
5886 }
5887
5888 /* Although static-storage variables start off initialized, according to
5889 ANSI C, a memcpy could overwrite them with uninitialized values. So
5890 we check them too. This also lets us check for read-only variables
5891 accessed via a non-const declaration, in case it won't be detected
5892 any other way (e.g., in an embedded system or OS kernel without
5893 memory protection).
5894
5895 Aggregates are not checked here; they're handled elsewhere. */
5896 if (cfun && current_function_check_memory_usage
5897 && code == VAR_DECL
5898 && GET_CODE (DECL_RTL (exp)) == MEM
5899 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5900 {
5901 enum memory_use_mode memory_usage;
5902 memory_usage = get_memory_usage_from_modifier (modifier);
5903
5904 if (memory_usage != MEMORY_USE_DONT)
5905 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5906 XEXP (DECL_RTL (exp), 0), Pmode,
5907 GEN_INT (int_size_in_bytes (type)),
5908 TYPE_MODE (sizetype),
5909 GEN_INT (memory_usage),
5910 TYPE_MODE (integer_type_node));
5911 }
5912
5913 /* ... fall through ... */
5914
5915 case FUNCTION_DECL:
5916 case RESULT_DECL:
5917 if (DECL_RTL (exp) == 0)
5918 abort ();
5919
5920 /* Ensure the variable is marked as used even if it doesn't go through
5921 a parser. If it hasn't been used yet, write out an external
5922 definition. */
5923 if (! TREE_USED (exp))
5924 {
5925 assemble_external (exp);
5926 TREE_USED (exp) = 1;
5927 }
5928
5929 /* Show we haven't gotten RTL for this yet. */
5930 temp = 0;
5931
5932 /* Handle variables inherited from containing functions. */
5933 context = decl_function_context (exp);
5934
5935 /* We treat inline_function_decl as an alias for the current function
5936 because that is the inline function whose vars, types, etc.
5937 are being merged into the current function.
5938 See expand_inline_function. */
5939
5940 if (context != 0 && context != current_function_decl
5941 && context != inline_function_decl
5942 /* If var is static, we don't need a static chain to access it. */
5943 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5944 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5945 {
5946 rtx addr;
5947
5948 /* Mark as non-local and addressable. */
5949 DECL_NONLOCAL (exp) = 1;
5950 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5951 abort ();
5952 mark_addressable (exp);
5953 if (GET_CODE (DECL_RTL (exp)) != MEM)
5954 abort ();
5955 addr = XEXP (DECL_RTL (exp), 0);
5956 if (GET_CODE (addr) == MEM)
5957 addr = gen_rtx_MEM (Pmode,
5958 fix_lexical_addr (XEXP (addr, 0), exp));
5959 else
5960 addr = fix_lexical_addr (addr, exp);
5961 temp = change_address (DECL_RTL (exp), mode, addr);
5962 }
5963
5964 /* This is the case of an array whose size is to be determined
5965 from its initializer, while the initializer is still being parsed.
5966 See expand_decl. */
5967
5968 else if (GET_CODE (DECL_RTL (exp)) == MEM
5969 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5970 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5971 XEXP (DECL_RTL (exp), 0));
5972
5973 /* If DECL_RTL is memory, we are in the normal case and either
5974 the address is not valid or it is not a register and -fforce-addr
5975 is specified, get the address into a register. */
5976
5977 else if (GET_CODE (DECL_RTL (exp)) == MEM
5978 && modifier != EXPAND_CONST_ADDRESS
5979 && modifier != EXPAND_SUM
5980 && modifier != EXPAND_INITIALIZER
5981 && (! memory_address_p (DECL_MODE (exp),
5982 XEXP (DECL_RTL (exp), 0))
5983 || (flag_force_addr
5984 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5985 temp = change_address (DECL_RTL (exp), VOIDmode,
5986 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5987
5988 /* If we got something, return it. But first, set the alignment
5989 if the address is a register. */
5990 if (temp != 0)
5991 {
5992 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5993 mark_reg_pointer (XEXP (temp, 0),
5994 DECL_ALIGN (exp) / BITS_PER_UNIT);
5995
5996 return temp;
5997 }
5998
5999 /* If the mode of DECL_RTL does not match that of the decl, it
6000 must be a promoted value. We return a SUBREG of the wanted mode,
6001 but mark it so that we know that it was already extended. */
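/* Hypothetical illustration: on a target whose PROMOTE_MODE widens
   HImode variables to SImode registers, a `short' decl has DECL_RTL
   (reg:SI n) while MODE is HImode; the code below then returns
   (subreg:HI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set, so later
   passes know the upper bits are already extended.  */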
6002
6003 if (GET_CODE (DECL_RTL (exp)) == REG
6004 && GET_MODE (DECL_RTL (exp)) != mode)
6005 {
6006 /* Get the signedness used for this variable. Ensure we get the
6007 same mode we got when the variable was declared. */
6008 if (GET_MODE (DECL_RTL (exp))
6009 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6010 abort ();
6011
6012 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6013 SUBREG_PROMOTED_VAR_P (temp) = 1;
6014 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6015 return temp;
6016 }
6017
6018 return DECL_RTL (exp);
6019
6020 case INTEGER_CST:
6021 return immed_double_const (TREE_INT_CST_LOW (exp),
6022 TREE_INT_CST_HIGH (exp), mode);
6023
6024 case CONST_DECL:
6025 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6026 EXPAND_MEMORY_USE_BAD);
6027
6028 case REAL_CST:
6029 /* If optimized, generate immediate CONST_DOUBLE
6030 which will be turned into memory by reload if necessary.
6031
6032 We used to force a register so that loop.c could see it. But
6033 this does not allow gen_* patterns to perform optimizations with
6034 the constants. It also produces two insns in cases like "x = 1.0;".
6035 On most machines, floating-point constants are not permitted in
6036 many insns, so we'd end up copying it to a register in any case.
6037
6038 Now, we do the copying in expand_binop, if appropriate. */
6039 return immed_real_const (exp);
6040
6041 case COMPLEX_CST:
6042 case STRING_CST:
6043 if (! TREE_CST_RTL (exp))
6044 output_constant_def (exp);
6045
6046 /* TREE_CST_RTL probably contains a constant address.
6047 On RISC machines where a constant address isn't valid,
6048 make some insns to get that address into a register. */
6049 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6050 && modifier != EXPAND_CONST_ADDRESS
6051 && modifier != EXPAND_INITIALIZER
6052 && modifier != EXPAND_SUM
6053 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6054 || (flag_force_addr
6055 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6056 return change_address (TREE_CST_RTL (exp), VOIDmode,
6057 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6058 return TREE_CST_RTL (exp);
6059
6060 case EXPR_WITH_FILE_LOCATION:
6061 {
6062 rtx to_return;
6063 char *saved_input_filename = input_filename;
6064 int saved_lineno = lineno;
6065 input_filename = EXPR_WFL_FILENAME (exp);
6066 lineno = EXPR_WFL_LINENO (exp);
6067 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6068 emit_line_note (input_filename, lineno);
6069 /* Possibly avoid switching back and forth here. */
6070 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6071 input_filename = saved_input_filename;
6072 lineno = saved_lineno;
6073 return to_return;
6074 }
6075
6076 case SAVE_EXPR:
6077 context = decl_function_context (exp);
6078
6079 /* If this SAVE_EXPR was at global context, assume we are an
6080 initialization function and move it into our context. */
6081 if (context == 0)
6082 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6083
6084 /* We treat inline_function_decl as an alias for the current function
6085 because that is the inline function whose vars, types, etc.
6086 are being merged into the current function.
6087 See expand_inline_function. */
6088 if (context == current_function_decl || context == inline_function_decl)
6089 context = 0;
6090
6091 /* If this is non-local, handle it. */
6092 if (context)
6093 {
6094 /* The following call just exists to abort if the context is
6095 not of a containing function. */
6096 find_function_data (context);
6097
6098 temp = SAVE_EXPR_RTL (exp);
6099 if (temp && GET_CODE (temp) == REG)
6100 {
6101 put_var_into_stack (exp);
6102 temp = SAVE_EXPR_RTL (exp);
6103 }
6104 if (temp == 0 || GET_CODE (temp) != MEM)
6105 abort ();
6106 return change_address (temp, mode,
6107 fix_lexical_addr (XEXP (temp, 0), exp));
6108 }
6109 if (SAVE_EXPR_RTL (exp) == 0)
6110 {
6111 if (mode == VOIDmode)
6112 temp = const0_rtx;
6113 else
6114 temp = assign_temp (type, 3, 0, 0);
6115
6116 SAVE_EXPR_RTL (exp) = temp;
6117 if (!optimize && GET_CODE (temp) == REG)
6118 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6119 save_expr_regs);
6120
6121 /* If the mode of TEMP does not match that of the expression, it
6122 must be a promoted value. We pass store_expr a SUBREG of the
6123 wanted mode but mark it so that we know that it was already
6124 extended. Note that `unsignedp' was modified above in
6125 this case. */
6126
6127 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6128 {
6129 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6130 SUBREG_PROMOTED_VAR_P (temp) = 1;
6131 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6132 }
6133
6134 if (temp == const0_rtx)
6135 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6136 EXPAND_MEMORY_USE_BAD);
6137 else
6138 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6139
6140 TREE_USED (exp) = 1;
6141 }
6142
6143 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6144 must be a promoted value. We return a SUBREG of the wanted mode,
6145 but mark it so that we know that it was already extended. */
6146
6147 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6148 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6149 {
6150 /* Compute the signedness and make the proper SUBREG. */
6151 promote_mode (type, mode, &unsignedp, 0);
6152 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6153 SUBREG_PROMOTED_VAR_P (temp) = 1;
6154 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6155 return temp;
6156 }
6157
6158 return SAVE_EXPR_RTL (exp);
6159
6160 case UNSAVE_EXPR:
6161 {
6162 rtx temp;
6163 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6164 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6165 return temp;
6166 }
6167
6168 case PLACEHOLDER_EXPR:
6169 {
6170 tree placeholder_expr;
6171
6172 /* If there is an object on the head of the placeholder list,
6173 see if some object in it is of type TYPE or a pointer to it. For
6174 further information, see tree.def. */
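/* Sketch (hypothetical, Ada-style): a size expression inside a
   variable-sized record type can contain a PLACEHOLDER_EXPR standing
   for "the record object itself"; the WITH_RECORD_EXPR case below
   pushes the actual object on placeholder_list, and the loops that
   follow pick the outermost reference of the right type to
   substitute for this node.  */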
6175 for (placeholder_expr = placeholder_list;
6176 placeholder_expr != 0;
6177 placeholder_expr = TREE_CHAIN (placeholder_expr))
6178 {
6179 tree need_type = TYPE_MAIN_VARIANT (type);
6180 tree object = 0;
6181 tree old_list = placeholder_list;
6182 tree elt;
6183
6184 /* Find the outermost reference that is of the type we want.
6185 If none, see if any object has a type that is a pointer to
6186 the type we want. */
6187 for (elt = TREE_PURPOSE (placeholder_expr);
6188 elt != 0 && object == 0;
6189 elt
6190 = ((TREE_CODE (elt) == COMPOUND_EXPR
6191 || TREE_CODE (elt) == COND_EXPR)
6192 ? TREE_OPERAND (elt, 1)
6193 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6194 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6195 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6196 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6197 ? TREE_OPERAND (elt, 0) : 0))
6198 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6199 object = elt;
6200
6201 for (elt = TREE_PURPOSE (placeholder_expr);
6202 elt != 0 && object == 0;
6203 elt
6204 = ((TREE_CODE (elt) == COMPOUND_EXPR
6205 || TREE_CODE (elt) == COND_EXPR)
6206 ? TREE_OPERAND (elt, 1)
6207 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6208 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6209 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6210 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6211 ? TREE_OPERAND (elt, 0) : 0))
6212 if (POINTER_TYPE_P (TREE_TYPE (elt))
6213 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6214 == need_type))
6215 object = build1 (INDIRECT_REF, need_type, elt);
6216
6217 if (object != 0)
6218 {
6219 /* Expand this object, skipping the list entries before
6220 the one where it was found, in case it is also a PLACEHOLDER_EXPR.
6221 In that case, we want to translate it using subsequent
6222 entries. */
6223 placeholder_list = TREE_CHAIN (placeholder_expr);
6224 temp = expand_expr (object, original_target, tmode,
6225 ro_modifier);
6226 placeholder_list = old_list;
6227 return temp;
6228 }
6229 }
6230 }
6231
6232 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6233 abort ();
6234
6235 case WITH_RECORD_EXPR:
6236 /* Put the object on the placeholder list, expand our first operand,
6237 and pop the list. */
6238 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6239 placeholder_list);
6240 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6241 tmode, ro_modifier);
6242 placeholder_list = TREE_CHAIN (placeholder_list);
6243 return target;
6244
6245 case GOTO_EXPR:
6246 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6247 expand_goto (TREE_OPERAND (exp, 0));
6248 else
6249 expand_computed_goto (TREE_OPERAND (exp, 0));
6250 return const0_rtx;
6251
6252 case EXIT_EXPR:
6253 expand_exit_loop_if_false (NULL_PTR,
6254 invert_truthvalue (TREE_OPERAND (exp, 0)));
6255 return const0_rtx;
6256
6257 case LABELED_BLOCK_EXPR:
6258 if (LABELED_BLOCK_BODY (exp))
6259 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6260 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6261 return const0_rtx;
6262
6263 case EXIT_BLOCK_EXPR:
6264 if (EXIT_BLOCK_RETURN (exp))
6265 sorry ("returned value in block_exit_expr");
6266 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6267 return const0_rtx;
6268
6269 case LOOP_EXPR:
6270 push_temp_slots ();
6271 expand_start_loop (1);
6272 expand_expr_stmt (TREE_OPERAND (exp, 0));
6273 expand_end_loop ();
6274 pop_temp_slots ();
6275
6276 return const0_rtx;
6277
6278 case BIND_EXPR:
6279 {
6280 tree vars = TREE_OPERAND (exp, 0);
6281 int vars_need_expansion = 0;
6282
6283 /* Need to open a binding contour here because
6284 if there are any cleanups they must be contained here. */
6285 expand_start_bindings (2);
6286
6287 /* Mark the corresponding BLOCK for output in its proper place. */
6288 if (TREE_OPERAND (exp, 2) != 0
6289 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6290 insert_block (TREE_OPERAND (exp, 2));
6291
6292 /* If VARS have not yet been expanded, expand them now. */
6293 while (vars)
6294 {
6295 if (DECL_RTL (vars) == 0)
6296 {
6297 vars_need_expansion = 1;
6298 expand_decl (vars);
6299 }
6300 expand_decl_init (vars);
6301 vars = TREE_CHAIN (vars);
6302 }
6303
6304 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6305
6306 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6307
6308 return temp;
6309 }
6310
6311 case RTL_EXPR:
6312 if (RTL_EXPR_SEQUENCE (exp))
6313 {
6314 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6315 abort ();
6316 emit_insns (RTL_EXPR_SEQUENCE (exp));
6317 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6318 }
6319 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6320 free_temps_for_rtl_expr (exp);
6321 return RTL_EXPR_RTL (exp);
6322
6323 case CONSTRUCTOR:
6324 /* If we don't need the result, just ensure we evaluate any
6325 subexpressions. */
6326 if (ignore)
6327 {
6328 tree elt;
6329 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6330 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6331 EXPAND_MEMORY_USE_BAD);
6332 return const0_rtx;
6333 }
6334
6335 /* All elts simple constants => refer to a constant in memory. But
6336 if this is a non-BLKmode mode, let it store a field at a time
6337 since that should make a CONST_INT or CONST_DOUBLE when we
6338 fold. Likewise, if we have a target we can use, it is best to
6339 store directly into the target unless the type is large enough
6340 that memcpy will be used. If we are making an initializer and
6341 all operands are constant, put it in memory as well. */
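/* Illustrative example: a constructor such as { 1, 2 } for a
   `struct { short a, b; }' has a non-BLKmode mode (say SImode) on
   many targets, so it is stored a field at a time and can fold to a
   single CONST_INT, whereas a large static constructor takes the
   branch below and becomes a constant pool object via
   output_constant_def.  */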
6342 else if ((TREE_STATIC (exp)
6343 && ((mode == BLKmode
6344 && ! (target != 0 && safe_from_p (target, exp, 1)))
6345 || TREE_ADDRESSABLE (exp)
6346 || (TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST
6347 && TREE_INT_CST_HIGH (TYPE_SIZE_UNIT (type)) == 0
6348 && (! MOVE_BY_PIECES_P
6349 (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type)),
6350 TYPE_ALIGN (type) / BITS_PER_UNIT))
6351 && ! mostly_zeros_p (exp))))
6352 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6353 {
6354 rtx constructor = output_constant_def (exp);
6355 if (modifier != EXPAND_CONST_ADDRESS
6356 && modifier != EXPAND_INITIALIZER
6357 && modifier != EXPAND_SUM
6358 && (! memory_address_p (GET_MODE (constructor),
6359 XEXP (constructor, 0))
6360 || (flag_force_addr
6361 && GET_CODE (XEXP (constructor, 0)) != REG)))
6362 constructor = change_address (constructor, VOIDmode,
6363 XEXP (constructor, 0));
6364 return constructor;
6365 }
6366
6367 else
6368 {
6369 /* Handle calls that pass values in multiple non-contiguous
6370 locations. The Irix 6 ABI has examples of this. */
6371 if (target == 0 || ! safe_from_p (target, exp, 1)
6372 || GET_CODE (target) == PARALLEL)
6373 {
6374 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6375 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6376 else
6377 target = assign_temp (type, 0, 1, 1);
6378 }
6379
6380 if (TREE_READONLY (exp))
6381 {
6382 if (GET_CODE (target) == MEM)
6383 target = copy_rtx (target);
6384
6385 RTX_UNCHANGING_P (target) = 1;
6386 }
6387
6388 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6389 int_size_in_bytes (TREE_TYPE (exp)));
6390 return target;
6391 }
6392
6393 case INDIRECT_REF:
6394 {
6395 tree exp1 = TREE_OPERAND (exp, 0);
6396 tree exp2;
6397 tree index;
6398 tree string = string_constant (exp1, &index);
6399
6400 /* Try to optimize reads from const strings. */
6401 if (string
6402 && TREE_CODE (string) == STRING_CST
6403 && TREE_CODE (index) == INTEGER_CST
6404 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6405 && GET_MODE_CLASS (mode) == MODE_INT
6406 && GET_MODE_SIZE (mode) == 1
6407 && modifier != EXPAND_MEMORY_USE_WO)
6408 return
6409 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6410
6411 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6412 op0 = memory_address (mode, op0);
6413
6414 if (cfun && current_function_check_memory_usage
6415 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6416 {
6417 enum memory_use_mode memory_usage;
6418 memory_usage = get_memory_usage_from_modifier (modifier);
6419
6420 if (memory_usage != MEMORY_USE_DONT)
6421 {
6422 in_check_memory_usage = 1;
6423 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6424 op0, Pmode,
6425 GEN_INT (int_size_in_bytes (type)),
6426 TYPE_MODE (sizetype),
6427 GEN_INT (memory_usage),
6428 TYPE_MODE (integer_type_node));
6429 in_check_memory_usage = 0;
6430 }
6431 }
6432
6433 temp = gen_rtx_MEM (mode, op0);
6434 /* If address was computed by addition,
6435 mark this as an element of an aggregate. */
6436 if (TREE_CODE (exp1) == PLUS_EXPR
6437 || (TREE_CODE (exp1) == SAVE_EXPR
6438 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6439 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6440 || (TREE_CODE (exp1) == ADDR_EXPR
6441 && (exp2 = TREE_OPERAND (exp1, 0))
6442 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6443 MEM_SET_IN_STRUCT_P (temp, 1);
6444
6445 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6446 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6447
6448 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6449 here, because, in C and C++, the fact that a location is accessed
6450 through a pointer to const does not mean that the value there can
6451 never change. Languages where it can never change should
6452 also set TREE_STATIC. */
6453 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6454
6455 /* If we are writing to this object and its type is a record with
6456 readonly fields, we must mark it as readonly so it will
6457 conflict with readonly references to those fields. */
6458 if (modifier == EXPAND_MEMORY_USE_WO
6459 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6460 RTX_UNCHANGING_P (temp) = 1;
6461
6462 return temp;
6463 }
6464
6465 case ARRAY_REF:
6466 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6467 abort ();
6468
6469 {
6470 tree array = TREE_OPERAND (exp, 0);
6471 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6472 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6473 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6474 HOST_WIDE_INT i;
6475
6476 /* Optimize the special-case of a zero lower bound.
6477
6478 We convert the low_bound to sizetype to avoid some problems
6479 with constant folding. (E.g. suppose the lower bound is 1,
6480 and its mode is QI. Without the conversion, (ARRAY
6481 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6482 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6483
6484 if (! integer_zerop (low_bound))
6485 index = size_diffop (index, convert (sizetype, low_bound));
6486
6487 /* Fold an expression like: "foo"[2].
6488 This is not done in fold so it won't happen inside &.
6489 Don't fold if this is for wide characters since it's too
6490 difficult to do correctly and this is a very rare case. */
6491
6492 if (TREE_CODE (array) == STRING_CST
6493 && TREE_CODE (index) == INTEGER_CST
6494 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6495 && GET_MODE_CLASS (mode) == MODE_INT
6496 && GET_MODE_SIZE (mode) == 1)
6497 return
6498 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6499
6500 /* If this is a constant index into a constant array,
6501 just get the value from the array. Handle both the cases when
6502 we have an explicit constructor and when our operand is a variable
6503 that was declared const. */
6504
6505 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6506 && TREE_CODE (index) == INTEGER_CST
6507 && 0 > compare_tree_int (index,
6508 list_length (CONSTRUCTOR_ELTS
6509 (TREE_OPERAND (exp, 0)))))
6510 {
6511 tree elem;
6512
6513 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6514 i = TREE_INT_CST_LOW (index);
6515 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6516 ;
6517
6518 if (elem)
6519 return expand_expr (fold (TREE_VALUE (elem)), target,
6520 tmode, ro_modifier);
6521 }
6522
6523 else if (optimize >= 1
6524 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6525 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6526 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6527 {
6528 if (TREE_CODE (index) == INTEGER_CST)
6529 {
6530 tree init = DECL_INITIAL (array);
6531
6532 if (TREE_CODE (init) == CONSTRUCTOR)
6533 {
6534 tree elem;
6535
6536 for (elem = CONSTRUCTOR_ELTS (init);
6537 (elem
6538 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6539 elem = TREE_CHAIN (elem))
6540 ;
6541
6542 if (elem)
6543 return expand_expr (fold (TREE_VALUE (elem)), target,
6544 tmode, ro_modifier);
6545 }
6546 else if (TREE_CODE (init) == STRING_CST
6547 && 0 > compare_tree_int (index,
6548 TREE_STRING_LENGTH (init)))
6549 return (GEN_INT
6550 (TREE_STRING_POINTER
6551 (init)[TREE_INT_CST_LOW (index)]));
6552 }
6553 }
6554 }
6555
6556 /* ... fall through ... */
6557
6558 case COMPONENT_REF:
6559 case BIT_FIELD_REF:
6560 /* If the operand is a CONSTRUCTOR, we can just extract the
6561 appropriate field if it is present. Don't do this if we have
6562 already written the data since we want to refer to that copy
6563 and varasm.c assumes that's what we'll do. */
6564 if (code != ARRAY_REF
6565 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6566 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6567 {
6568 tree elt;
6569
6570 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6571 elt = TREE_CHAIN (elt))
6572 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6573 /* We can normally use the value of the field in the
6574 CONSTRUCTOR. However, if this is a bitfield in
6575 an integral mode that we can fit in a HOST_WIDE_INT,
6576 we must mask only the number of bits in the bitfield,
6577 since this is done implicitly by the constructor. If
6578 the bitfield does not meet either of those conditions,
6579 we can't do this optimization. */
6580 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6581 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6582 == MODE_INT)
6583 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6584 <= HOST_BITS_PER_WIDE_INT))))
6585 {
6586 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6587 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6588 {
6589 HOST_WIDE_INT bitsize
6590 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6591
6592 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6593 {
6594 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6595 op0 = expand_and (op0, op1, target);
6596 }
6597 else
6598 {
6599 enum machine_mode imode
6600 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6601 tree count
6602 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6603 0);
6604
6605 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6606 target, 0);
6607 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6608 target, 0);
6609 }
6610 }
6611
6612 return op0;
6613 }
6614 }
6615
6616 {
6617 enum machine_mode mode1;
6618 int bitsize;
6619 int bitpos;
6620 tree offset;
6621 int volatilep = 0;
6622 unsigned int alignment;
6623 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6624 &mode1, &unsignedp, &volatilep,
6625 &alignment);
6626
6627 /* If we got back the original object, something is wrong. Perhaps
6628 we are evaluating an expression too early. In any event, don't
6629 infinitely recurse. */
6630 if (tem == exp)
6631 abort ();
6632
6633 /* If TEM's type is a union of variable size, pass TARGET to the inner
6634 computation, since it will need a temporary and TARGET is known
6635 to suffice. This occurs in unchecked conversion in Ada. */
6636
6637 op0 = expand_expr (tem,
6638 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6639 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6640 != INTEGER_CST)
6641 ? target : NULL_RTX),
6642 VOIDmode,
6643 (modifier == EXPAND_INITIALIZER
6644 || modifier == EXPAND_CONST_ADDRESS)
6645 ? modifier : EXPAND_NORMAL);
6646
6647 /* If this is a constant, put it into a register if it is a
6648 legitimate constant and OFFSET is 0; otherwise put it into memory. */
6649 if (CONSTANT_P (op0))
6650 {
6651 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6652 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6653 && offset == 0)
6654 op0 = force_reg (mode, op0);
6655 else
6656 op0 = validize_mem (force_const_mem (mode, op0));
6657 }
6658
6659 if (offset != 0)
6660 {
6661 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6662
6663 /* If this object is in memory, put it into a register.
6664 This case can't occur in C, but can in Ada if we have
6665 unchecked conversion of an expression from a scalar type to
6666 an array or record type. */
6667 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6668 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6669 {
6670 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6671
6672 mark_temp_addr_taken (memloc);
6673 emit_move_insn (memloc, op0);
6674 op0 = memloc;
6675 }
6676
6677 if (GET_CODE (op0) != MEM)
6678 abort ();
6679
6680 if (GET_MODE (offset_rtx) != ptr_mode)
6681 {
6682 #ifdef POINTERS_EXTEND_UNSIGNED
6683 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6684 #else
6685 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6686 #endif
6687 }
6688
6689 /* A constant address in OP0 can have VOIDmode; we must not try
6690 to call force_reg in that case. */
6691 if (GET_CODE (op0) == MEM
6692 && GET_MODE (op0) == BLKmode
6693 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6694 && bitsize != 0
6695 && (bitpos % bitsize) == 0
6696 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6697 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6698 {
6699 rtx temp = change_address (op0, mode1,
6700 plus_constant (XEXP (op0, 0),
6701 (bitpos /
6702 BITS_PER_UNIT)));
6703 if (GET_CODE (XEXP (temp, 0)) == REG)
6704 op0 = temp;
6705 else
6706 op0 = change_address (op0, mode1,
6707 force_reg (GET_MODE (XEXP (temp, 0)),
6708 XEXP (temp, 0)));
6709 bitpos = 0;
6710 }
6711
6713 op0 = change_address (op0, VOIDmode,
6714 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6715 force_reg (ptr_mode,
6716 offset_rtx)));
6717 }
6718
6719 /* Don't forget about volatility even if this is a bitfield. */
6720 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6721 {
6722 op0 = copy_rtx (op0);
6723 MEM_VOLATILE_P (op0) = 1;
6724 }
6725
6726 /* Check the access. */
6727 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6728 {
6729 enum memory_use_mode memory_usage;
6730 memory_usage = get_memory_usage_from_modifier (modifier);
6731
6732 if (memory_usage != MEMORY_USE_DONT)
6733 {
6734 rtx to;
6735 int size;
6736
6737 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6738 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6739
6740 /* Check the access right of the pointer. */
6741 if (size > BITS_PER_UNIT)
6742 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6743 to, Pmode,
6744 GEN_INT (size / BITS_PER_UNIT),
6745 TYPE_MODE (sizetype),
6746 GEN_INT (memory_usage),
6747 TYPE_MODE (integer_type_node));
6748 }
6749 }
6750
6751 /* In cases where an aligned union has an unaligned object
6752 as a field, we might be extracting a BLKmode value from
6753 an integer-mode (e.g., SImode) object. Handle this case
6754 by doing the extract into an object as wide as the field
6755 (which we know to be the width of a basic mode), then
6756 storing into memory, and changing the mode to BLKmode.
6757 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6758 EXPAND_INITIALIZER), then we must not copy to a temporary. */
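/* Worked sketch (hypothetical layout): an aligned union held in an
   SImode object may contain a 3-byte BLKmode field; we then extract
   the bits into an integer-mode temporary via extract_bit_field,
   spill that temporary to a stack slot, and relabel the slot as
   BLKmode, as done at the end of this block.  */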
6759 if (mode1 == VOIDmode
6760 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6761 || (modifier != EXPAND_CONST_ADDRESS
6762 && modifier != EXPAND_INITIALIZER
6763 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6764 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6765 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6766 /* If the field isn't aligned enough to fetch as a memref,
6767 fetch it as a bit field. */
6768 || (mode1 != BLKmode
6769 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6770 && ((TYPE_ALIGN (TREE_TYPE (tem))
6771 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6772 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6773 /* If the type and the field are a constant size and the
6774 size of the type isn't the same size as the bitfield,
6775 we must use bitfield operations. */
6776 || ((bitsize >= 0
6777 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6778 == INTEGER_CST)
6779 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6780 bitsize)))))
6781 || (modifier != EXPAND_CONST_ADDRESS
6782 && modifier != EXPAND_INITIALIZER
6783 && mode == BLKmode
6784 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6785 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6786 || bitpos % TYPE_ALIGN (type) != 0)))
6787 {
6788 enum machine_mode ext_mode = mode;
6789
6790 if (ext_mode == BLKmode
6791 && ! (target != 0 && GET_CODE (op0) == MEM
6792 && GET_CODE (target) == MEM
6793 && bitpos % BITS_PER_UNIT == 0))
6794 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6795
6796 if (ext_mode == BLKmode)
6797 {
6798 /* In this case, BITPOS must start at a byte boundary and
6799 TARGET, if specified, must be a MEM. */
6800 if (GET_CODE (op0) != MEM
6801 || (target != 0 && GET_CODE (target) != MEM)
6802 || bitpos % BITS_PER_UNIT != 0)
6803 abort ();
6804
6805 op0 = change_address (op0, VOIDmode,
6806 plus_constant (XEXP (op0, 0),
6807 bitpos / BITS_PER_UNIT));
6808 if (target == 0)
6809 target = assign_temp (type, 0, 1, 1);
6810
6811 emit_block_move (target, op0,
6812 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6813 / BITS_PER_UNIT),
6814 1);
6815
6816 return target;
6817 }
6818
6819 op0 = validize_mem (op0);
6820
6821 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6822 mark_reg_pointer (XEXP (op0, 0), alignment);
6823
6824 op0 = extract_bit_field (op0, bitsize, bitpos,
6825 unsignedp, target, ext_mode, ext_mode,
6826 alignment,
6827 int_size_in_bytes (TREE_TYPE (tem)));
6828
6829 /* If the result is a record type and BITSIZE is narrower than
6830 the mode of OP0, an integral mode, and this is a big endian
6831 machine, we must put the field into the high-order bits. */
6832 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6833 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6834 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6835 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6836 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6837 - bitsize),
6838 op0, 1);
6839
6840 if (mode == BLKmode)
6841 {
6842 rtx new = assign_stack_temp (ext_mode,
6843 bitsize / BITS_PER_UNIT, 0);
6844
6845 emit_move_insn (new, op0);
6846 op0 = copy_rtx (new);
6847 PUT_MODE (op0, BLKmode);
6848 MEM_SET_IN_STRUCT_P (op0, 1);
6849 }
6850
6851 return op0;
6852 }
6853
6854 /* If the result is BLKmode, use that to access the object
6855 now as well. */
6856 if (mode == BLKmode)
6857 mode1 = BLKmode;
6858
6859 /* Get a reference to just this component. */
6860 if (modifier == EXPAND_CONST_ADDRESS
6861 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6862 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6863 (bitpos / BITS_PER_UNIT)));
6864 else
6865 op0 = change_address (op0, mode1,
6866 plus_constant (XEXP (op0, 0),
6867 (bitpos / BITS_PER_UNIT)));
6868
6869 if (GET_CODE (op0) == MEM)
6870 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6871
6872 if (GET_CODE (XEXP (op0, 0)) == REG)
6873 mark_reg_pointer (XEXP (op0, 0), alignment);
6874
6875 MEM_SET_IN_STRUCT_P (op0, 1);
6876 MEM_VOLATILE_P (op0) |= volatilep;
6877 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6878 || modifier == EXPAND_CONST_ADDRESS
6879 || modifier == EXPAND_INITIALIZER)
6880 return op0;
6881 else if (target == 0)
6882 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6883
6884 convert_move (target, op0, unsignedp);
6885 return target;
6886 }
6887
6888 /* Intended for a reference to a buffer of a file-object in Pascal.
6889 But it's not certain that a special tree code will really be
6890 necessary for these. INDIRECT_REF might work for them. */
6891 case BUFFER_REF:
6892 abort ();
6893
6894 case IN_EXPR:
6895 {
6896 /* Pascal set IN expression.
6897
6898 Algorithm:
6899 rlo = set_low - (set_low%bits_per_word);
6900 the_word = set [ (index - rlo)/bits_per_word ];
6901 bit_index = index % bits_per_word;
6902 bitmask = 1 << bit_index;
6903 return !!(the_word & bitmask); */
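/* Worked example (illustrative, assuming 8-bit storage units):
   with set_low = 3 and index = 11,
	rlo       = 3 - (3 % 8) = 0
	the_word  = set[(11 - 0) / 8] = set[1]
	bit_index = 11 % 8 = 3
	bitmask   = 1 << 3 = 8
   so the result is nonzero iff bit 3 of the second byte is set.  */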
6904
6905 tree set = TREE_OPERAND (exp, 0);
6906 tree index = TREE_OPERAND (exp, 1);
6907 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6908 tree set_type = TREE_TYPE (set);
6909 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6910 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6911 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6912 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6913 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6914 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6915 rtx setaddr = XEXP (setval, 0);
6916 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6917 rtx rlow;
6918 rtx diff, quo, rem, addr, bit, result;
6919
6920 preexpand_calls (exp);
6921
6922 /* If domain is empty, answer is no. Likewise if index is constant
6923 and out of bounds. */
6924 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6925 && TREE_CODE (set_low_bound) == INTEGER_CST
6926 && tree_int_cst_lt (set_high_bound, set_low_bound))
6927 || (TREE_CODE (index) == INTEGER_CST
6928 && TREE_CODE (set_low_bound) == INTEGER_CST
6929 && tree_int_cst_lt (index, set_low_bound))
6930 || (TREE_CODE (set_high_bound) == INTEGER_CST
6931 && TREE_CODE (index) == INTEGER_CST
6932 && tree_int_cst_lt (set_high_bound, index))))
6933 return const0_rtx;
6934
6935 if (target == 0)
6936 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6937
6938 /* If we get here, we have to generate the code for both cases
6939 (in range and out of range). */
6940
6941 op0 = gen_label_rtx ();
6942 op1 = gen_label_rtx ();
6943
6944 if (! (GET_CODE (index_val) == CONST_INT
6945 && GET_CODE (lo_r) == CONST_INT))
6946 {
6947 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6948 GET_MODE (index_val), iunsignedp, 0, op1);
6949 }
6950
6951 if (! (GET_CODE (index_val) == CONST_INT
6952 && GET_CODE (hi_r) == CONST_INT))
6953 {
6954 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6955 GET_MODE (index_val), iunsignedp, 0, op1);
6956 }
6957
6958 /* Calculate the element number of bit zero in the first word
6959 of the set. */
6960 if (GET_CODE (lo_r) == CONST_INT)
6961 rlow = GEN_INT (INTVAL (lo_r)
6962 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6963 else
6964 rlow = expand_binop (index_mode, and_optab, lo_r,
6965 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6966 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6967
6968 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6969 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6970
6971 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6972 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6973 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6974 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6975
6976 addr = memory_address (byte_mode,
6977 expand_binop (index_mode, add_optab, diff,
6978 setaddr, NULL_RTX, iunsignedp,
6979 OPTAB_LIB_WIDEN));
6980
6981 /* Extract the bit we want to examine.  */
6982 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6983 gen_rtx_MEM (byte_mode, addr),
6984 make_tree (TREE_TYPE (index), rem),
6985 NULL_RTX, 1);
6986 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6987 GET_MODE (target) == byte_mode ? target : 0,
6988 1, OPTAB_LIB_WIDEN);
6989
6990 if (result != target)
6991 convert_move (target, result, 1);
6992
6993 /* Output the code to handle the out-of-range case. */
6994 emit_jump (op0);
6995 emit_label (op1);
6996 emit_move_insn (target, const0_rtx);
6997 emit_label (op0);
6998 return target;
6999 }
7000
7001 case WITH_CLEANUP_EXPR:
7002 if (RTL_EXPR_RTL (exp) == 0)
7003 {
7004 RTL_EXPR_RTL (exp)
7005 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7006 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7007
7008 /* That's it for this cleanup. */
7009 TREE_OPERAND (exp, 2) = 0;
7010 }
7011 return RTL_EXPR_RTL (exp);
7012
7013 case CLEANUP_POINT_EXPR:
7014 {
7015 /* Start a new binding layer that will keep track of all cleanup
7016 actions to be performed. */
7017 expand_start_bindings (2);
7018
7019 target_temp_slot_level = temp_slot_level;
7020
7021 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7022 /* If we're going to use this value, load it up now. */
7023 if (! ignore)
7024 op0 = force_not_mem (op0);
7025 preserve_temp_slots (op0);
7026 expand_end_bindings (NULL_TREE, 0, 0);
7027 }
7028 return op0;
7029
7030 case CALL_EXPR:
7031 /* Check for a built-in function. */
7032 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7033 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7034 == FUNCTION_DECL)
7035 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7036 return expand_builtin (exp, target, subtarget, tmode, ignore);
7037
7038 /* If this call was expanded already by preexpand_calls,
7039 just return the result we got. */
7040 if (CALL_EXPR_RTL (exp) != 0)
7041 return CALL_EXPR_RTL (exp);
7042
7043 return expand_call (exp, target, ignore);
7044
7045 case NON_LVALUE_EXPR:
7046 case NOP_EXPR:
7047 case CONVERT_EXPR:
7048 case REFERENCE_EXPR:
7049 if (TREE_CODE (type) == UNION_TYPE)
7050 {
7051 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7052
7053 /* If both input and output are BLKmode, this conversion
7054 isn't actually doing anything unless we need to make the
7055 alignment stricter. */
7056 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7057 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7058 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7059 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7060 modifier);
7061
7062 if (target == 0)
7063 {
7064 if (mode != BLKmode)
7065 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7066 else
7067 target = assign_temp (type, 0, 1, 1);
7068 }
7069
7070 if (GET_CODE (target) == MEM)
7071 /* Store data into beginning of memory target. */
7072 store_expr (TREE_OPERAND (exp, 0),
7073 change_address (target, TYPE_MODE (valtype), 0), 0);
7074
7075 else if (GET_CODE (target) == REG)
7076 /* Store this field into a union of the proper type. */
7077 store_field (target,
7078 MIN ((int_size_in_bytes (TREE_TYPE
7079 (TREE_OPERAND (exp, 0)))
7080 * BITS_PER_UNIT),
7081 GET_MODE_BITSIZE (mode)),
7082 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7083 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7084 else
7085 abort ();
7086
7087 /* Return the entire union. */
7088 return target;
7089 }
7090
7091 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7092 {
7093 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7094 ro_modifier);
7095
7096 /* If the signedness of the conversion differs and OP0 is
7097 a promoted SUBREG, clear that indication since we now
7098 have to do the proper extension. */
7099 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7100 && GET_CODE (op0) == SUBREG)
7101 SUBREG_PROMOTED_VAR_P (op0) = 0;
7102
7103 return op0;
7104 }
7105
7106 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7107 if (GET_MODE (op0) == mode)
7108 return op0;
7109
7110 /* If OP0 is a constant, just convert it into the proper mode. */
7111 if (CONSTANT_P (op0))
7112 return
7113 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7114 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7115
7116 if (modifier == EXPAND_INITIALIZER)
7117 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7118
7119 if (target == 0)
7120 return
7121 convert_to_mode (mode, op0,
7122 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7123 else
7124 convert_move (target, op0,
7125 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7126 return target;
7127
7128 case PLUS_EXPR:
7129 /* We come here from MINUS_EXPR when the second operand is a
7130 constant. */
7131 plus_expr:
7132 this_optab = add_optab;
7133
7134 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7135 something else, make sure we add the register to the constant and
7136 then to the other thing. This case can occur during strength
7137 reduction and doing it this way will produce better code if the
7138 frame pointer or argument pointer is eliminated.
7139
7140 fold-const.c will ensure that the constant is always in the inner
7141 PLUS_EXPR, so the only case we need to do anything about is if
7142 sp, ap, or fp is our second argument, in which case we must swap
7143 the innermost first argument and our second argument. */
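/* For example, (A + CST) + FP is rearranged here into (FP + CST) + A,
   so that eliminating FP can fold FP + CST into a single constant
   displacement (illustrative operands).  */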
7144
7145 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7146 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7147 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7148 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7149 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7150 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7151 {
7152 tree t = TREE_OPERAND (exp, 1);
7153
7154 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7155 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7156 }
7157
7158 /* If the result is to be ptr_mode and we are adding an integer to
7159 something, we might be forming a constant. So try to use
7160 plus_constant. If it produces a sum and we can't accept it,
7161 use force_operand. This allows P = &ARR[const] to generate
7162 efficient code on machines where a SYMBOL_REF is not a valid
7163 address.
7164
7165 If this is an EXPAND_SUM call, always return the sum. */
7166 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7167 || mode == ptr_mode)
7168 {
7169 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7170 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7171 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7172 {
7173 rtx constant_part;
7174
7175 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7176 EXPAND_SUM);
7177 /* Use immed_double_const to ensure that the constant is
7178 truncated according to the mode of OP1, then sign extended
7179 to a HOST_WIDE_INT. Using the constant directly can result
7180 in non-canonical RTL in a 64x32 cross compile. */
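/* For example, with a 64-bit HOST_WIDE_INT and a 32-bit OP1, a low
   part of 0xffffffff must become the CONST_INT -1, not a 64-bit
   0xffffffff (illustrative values).  */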
7181 constant_part
7182 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7183 (HOST_WIDE_INT) 0,
7184 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7185 op1 = plus_constant (op1, INTVAL (constant_part));
7186 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7187 op1 = force_operand (op1, target);
7188 return op1;
7189 }
7190
7191 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7192 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7193 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7194 {
7195 rtx constant_part;
7196
7197 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7198 EXPAND_SUM);
7199 if (! CONSTANT_P (op0))
7200 {
7201 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7202 VOIDmode, modifier);
7203 /* Don't go to both_summands if modifier
7204 says it's not right to return a PLUS. */
7205 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7206 goto binop2;
7207 goto both_summands;
7208 }
7209 /* Use immed_double_const to ensure that the constant is
7210 truncated according to the mode of OP0, then sign extended
7211 to a HOST_WIDE_INT. Using the constant directly can result
7212 in non-canonical RTL in a 64x32 cross compile. */
7213 constant_part
7214 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7215 (HOST_WIDE_INT) 0,
7216 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7217 op0 = plus_constant (op0, INTVAL (constant_part));
7218 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7219 op0 = force_operand (op0, target);
7220 return op0;
7221 }
7222 }
7223
7224 /* No sense saving up arithmetic to be done
7225 if it's all in the wrong mode to form part of an address.
7226 And force_operand won't know whether to sign-extend or
7227 zero-extend. */
7228 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7229 || mode != ptr_mode)
7230 goto binop;
7231
7232 preexpand_calls (exp);
7233 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7234 subtarget = 0;
7235
7236 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7237 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7238
7239 both_summands:
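/* For example, given OP0 = (REG1 + 8) and OP1 = (REG2 + 4), the code
   below first swaps them so that the sum with a constant comes last,
   then reassociates to yield (REG1 + REG2) + 12 (illustrative
   operands).  */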
7240 /* Make sure any term that's a sum with a constant comes last. */
7241 if (GET_CODE (op0) == PLUS
7242 && CONSTANT_P (XEXP (op0, 1)))
7243 {
7244 temp = op0;
7245 op0 = op1;
7246 op1 = temp;
7247 }
7248 /* If adding to a sum including a constant,
7249 associate it to put the constant outside. */
7250 if (GET_CODE (op1) == PLUS
7251 && CONSTANT_P (XEXP (op1, 1)))
7252 {
7253 rtx constant_term = const0_rtx;
7254
7255 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7256 if (temp != 0)
7257 op0 = temp;
7258 /* Ensure that MULT comes first if there is one. */
7259 else if (GET_CODE (op0) == MULT)
7260 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7261 else
7262 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7263
7264 /* Let's also eliminate constants from op0 if possible. */
7265 op0 = eliminate_constant_term (op0, &constant_term);
7266
7267 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7268 their sum should be a constant. Form it into OP1, since the
7269 result we want will then be OP0 + OP1. */
7270
7271 temp = simplify_binary_operation (PLUS, mode, constant_term,
7272 XEXP (op1, 1));
7273 if (temp != 0)
7274 op1 = temp;
7275 else
7276 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7277 }
7278
7279 /* Put a constant term last and put a multiplication first. */
7280 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7281 temp = op1, op1 = op0, op0 = temp;
7282
7283 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7284 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7285
7286 case MINUS_EXPR:
7287 /* For initializers, we are allowed to return a MINUS of two
7288 symbolic constants. Here we handle all cases when both operands
7289 are constant. */
7290 /* Handle difference of two symbolic constants,
7291 for the sake of an initializer. */
7292 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7293 && really_constant_p (TREE_OPERAND (exp, 0))
7294 && really_constant_p (TREE_OPERAND (exp, 1)))
7295 {
7296 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7297 VOIDmode, ro_modifier);
7298 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7299 VOIDmode, ro_modifier);
7300
7301 /* If the last operand is a CONST_INT, use plus_constant of
7302 the negated constant. Else make the MINUS. */
7303 if (GET_CODE (op1) == CONST_INT)
7304 return plus_constant (op0, - INTVAL (op1));
7305 else
7306 return gen_rtx_MINUS (mode, op0, op1);
7307 }
7308 /* Convert A - const to A + (-const). */
7309 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7310 {
7311 tree negated = fold (build1 (NEGATE_EXPR, type,
7312 TREE_OPERAND (exp, 1)));
7313
7314 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7315 /* If we can't negate the constant in TYPE, leave it alone and
7316 expand_binop will negate it for us. We used to try to do it
7317 here in the signed version of TYPE, but that doesn't work
7318 on POINTER_TYPEs. */;
7319 else
7320 {
7321 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7322 goto plus_expr;
7323 }
7324 }
7325 this_optab = sub_optab;
7326 goto binop;
7327
7328 case MULT_EXPR:
7329 preexpand_calls (exp);
7330 /* If first operand is constant, swap them.
7331 Thus the following special case checks need only
7332 check the second operand. */
7333 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7334 {
7335 register tree t1 = TREE_OPERAND (exp, 0);
7336 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7337 TREE_OPERAND (exp, 1) = t1;
7338 }
7339
7340 /* Attempt to return something suitable for generating an
7341 indexed address, for machines that support that. */
7342
7343 if (modifier == EXPAND_SUM && mode == ptr_mode
7344 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7345 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7346 {
7347 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7348 EXPAND_SUM);
7349
7350 /* Apply distributive law if OP0 is x+c. */
7351 if (GET_CODE (op0) == PLUS
7352 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7353 return
7354 gen_rtx_PLUS
7355 (mode,
7356 gen_rtx_MULT
7357 (mode, XEXP (op0, 0),
7358 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7359 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7360 * INTVAL (XEXP (op0, 1))));
7361
7362 if (GET_CODE (op0) != REG)
7363 op0 = force_operand (op0, NULL_RTX);
7364 if (GET_CODE (op0) != REG)
7365 op0 = copy_to_mode_reg (mode, op0);
7366
7367 return
7368 gen_rtx_MULT (mode, op0,
7369 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7370 }
7371
7372 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7373 subtarget = 0;
7374
7375 /* Check for multiplying things that have been extended
7376 from a narrower type. If this machine supports multiplying
7377 in that narrower type with a result in the desired type,
7378 do it that way, and avoid the explicit type-conversion. */
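/* For example, a product of two HImode values each extended to
   SImode can use an HImode-to-SImode widening multiply directly,
   if the machine provides one (illustrative modes).  */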
7379 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7380 && TREE_CODE (type) == INTEGER_TYPE
7381 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7382 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7383 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7384 && int_fits_type_p (TREE_OPERAND (exp, 1),
7385 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7386 /* Don't use a widening multiply if a shift will do. */
7387 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7388 > HOST_BITS_PER_WIDE_INT)
7389 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7390 ||
7391 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7392 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7393 ==
7394 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7395 /* If both operands are extended, they must either both
7396 be zero-extended or both be sign-extended. */
7397 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7398 ==
7399 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7400 {
7401 enum machine_mode innermode
7402 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7403 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7404 ? smul_widen_optab : umul_widen_optab);
7405 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7406 ? umul_widen_optab : smul_widen_optab);
7407 if (mode == GET_MODE_WIDER_MODE (innermode))
7408 {
7409 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7410 {
7411 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7412 NULL_RTX, VOIDmode, 0);
7413 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7414 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7415 VOIDmode, 0);
7416 else
7417 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7418 NULL_RTX, VOIDmode, 0);
7419 goto binop2;
7420 }
7421 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7422 && innermode == word_mode)
7423 {
7424 rtx htem;
7425 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7426 NULL_RTX, VOIDmode, 0);
7427 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7428 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7429 VOIDmode, 0);
7430 else
7431 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7432 NULL_RTX, VOIDmode, 0);
7433 temp = expand_binop (mode, other_optab, op0, op1, target,
7434 unsignedp, OPTAB_LIB_WIDEN);
7435 htem = expand_mult_highpart_adjust (innermode,
7436 gen_highpart (innermode, temp),
7437 op0, op1,
7438 gen_highpart (innermode, temp),
7439 unsignedp);
7440 emit_move_insn (gen_highpart (innermode, temp), htem);
7441 return temp;
7442 }
7443 }
7444 }
7445 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7446 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7447 return expand_mult (mode, op0, op1, target, unsignedp);
7448
7449 case TRUNC_DIV_EXPR:
7450 case FLOOR_DIV_EXPR:
7451 case CEIL_DIV_EXPR:
7452 case ROUND_DIV_EXPR:
7453 case EXACT_DIV_EXPR:
7454 preexpand_calls (exp);
7455 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7456 subtarget = 0;
7457 /* Possible optimization: compute the dividend with EXPAND_SUM
7458 then, if the divisor is constant, we can optimize the case
7459 where some terms of the dividend have coefficients divisible by it. */
7460 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7461 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7462 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7463
7464 case RDIV_EXPR:
7465 this_optab = flodiv_optab;
7466 goto binop;
7467
7468 case TRUNC_MOD_EXPR:
7469 case FLOOR_MOD_EXPR:
7470 case CEIL_MOD_EXPR:
7471 case ROUND_MOD_EXPR:
7472 preexpand_calls (exp);
7473 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7474 subtarget = 0;
7475 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7476 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7477 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7478
7479 case FIX_ROUND_EXPR:
7480 case FIX_FLOOR_EXPR:
7481 case FIX_CEIL_EXPR:
7482 abort (); /* Not used for C. */
7483
7484 case FIX_TRUNC_EXPR:
7485 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7486 if (target == 0)
7487 target = gen_reg_rtx (mode);
7488 expand_fix (target, op0, unsignedp);
7489 return target;
7490
7491 case FLOAT_EXPR:
7492 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7493 if (target == 0)
7494 target = gen_reg_rtx (mode);
7495 /* expand_float can't figure out what to do if FROM has VOIDmode.
7496 So give it the correct mode. With -O, cse will optimize this. */
7497 if (GET_MODE (op0) == VOIDmode)
7498 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7499 op0);
7500 expand_float (target, op0,
7501 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7502 return target;
7503
7504 case NEGATE_EXPR:
7505 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7506 temp = expand_unop (mode, neg_optab, op0, target, 0);
7507 if (temp == 0)
7508 abort ();
7509 return temp;
7510
7511 case ABS_EXPR:
7512 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7513
7514 /* Handle complex values specially. */
7515 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7516 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7517 return expand_complex_abs (mode, op0, target, unsignedp);
7518
7519 /* Unsigned abs is simply the operand. Testing here means we don't
7520 risk generating incorrect code below. */
7521 if (TREE_UNSIGNED (type))
7522 return op0;
7523
7524 return expand_abs (mode, op0, target,
7525 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7526
7527 case MAX_EXPR:
7528 case MIN_EXPR:
7529 target = original_target;
7530 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7531 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7532 || GET_MODE (target) != mode
7533 || (GET_CODE (target) == REG
7534 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7535 target = gen_reg_rtx (mode);
7536 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7537 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7538
7539 /* First try to do it with a special MIN or MAX instruction.
7540 If that does not win, use a conditional jump to select the proper
7541 value. */
7542 this_optab = (TREE_UNSIGNED (type)
7543 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7544 : (code == MIN_EXPR ? smin_optab : smax_optab));
7545
7546 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7547 OPTAB_WIDEN);
7548 if (temp != 0)
7549 return temp;
7550
7551 /* At this point, a MEM target is no longer useful; we will get better
7552 code without it. */
7553
7554 if (GET_CODE (target) == MEM)
7555 target = gen_reg_rtx (mode);
7556
7557 if (target != op0)
7558 emit_move_insn (target, op0);
7559
7560 op0 = gen_label_rtx ();
7561
7562 /* If this mode is an integer too wide to compare properly,
7563 compare word by word. Rely on cse to optimize constant cases. */
7564 if (GET_MODE_CLASS (mode) == MODE_INT
7565 && ! can_compare_p (GE, mode, ccp_jump))
7566 {
7567 if (code == MAX_EXPR)
7568 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7569 target, op1, NULL_RTX, op0);
7570 else
7571 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7572 op1, target, NULL_RTX, op0);
7573 }
7574 else
7575 {
7576 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7577 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7578 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7579 op0);
7580 }
7581 emit_move_insn (target, op1);
7582 emit_label (op0);
7583 return target;
7584
7585 case BIT_NOT_EXPR:
7586 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7587 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7588 if (temp == 0)
7589 abort ();
7590 return temp;
7591
7592 case FFS_EXPR:
7593 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7594 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7595 if (temp == 0)
7596 abort ();
7597 return temp;
7598
7599 /* ??? Can optimize bitwise operations with one arg constant.
7600 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7601 and (a bitwise1 b) bitwise2 b (etc)
7602 but that is probably not worthwhile. */
7603
7604 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7605 boolean values when we want in all cases to compute both of them. In
7606 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7607 as actual zero-or-1 values and then bitwise anding. In cases where
7608 there cannot be any side effects, better code would be made by
7609 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7610 how to recognize those cases. */
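/* For example, (a < b) TRUTH_AND (c < d) evaluates both comparisons
   to 0-or-1 values and ands them, without the short-circuit jump
   that TRUTH_ANDIF_EXPR would emit (illustrative operands).  */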
7611
7612 case TRUTH_AND_EXPR:
7613 case BIT_AND_EXPR:
7614 this_optab = and_optab;
7615 goto binop;
7616
7617 case TRUTH_OR_EXPR:
7618 case BIT_IOR_EXPR:
7619 this_optab = ior_optab;
7620 goto binop;
7621
7622 case TRUTH_XOR_EXPR:
7623 case BIT_XOR_EXPR:
7624 this_optab = xor_optab;
7625 goto binop;
7626
7627 case LSHIFT_EXPR:
7628 case RSHIFT_EXPR:
7629 case LROTATE_EXPR:
7630 case RROTATE_EXPR:
7631 preexpand_calls (exp);
7632 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7633 subtarget = 0;
7634 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7635 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7636 unsignedp);
7637
7638 /* Could determine the answer when only additive constants differ. Also,
7639 the addition of one can be handled by changing the condition. */
7640 case LT_EXPR:
7641 case LE_EXPR:
7642 case GT_EXPR:
7643 case GE_EXPR:
7644 case EQ_EXPR:
7645 case NE_EXPR:
7646 case UNORDERED_EXPR:
7647 case ORDERED_EXPR:
7648 case UNLT_EXPR:
7649 case UNLE_EXPR:
7650 case UNGT_EXPR:
7651 case UNGE_EXPR:
7652 case UNEQ_EXPR:
7653 preexpand_calls (exp);
7654 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7655 if (temp != 0)
7656 return temp;
7657
7658 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7659 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7660 && original_target
7661 && GET_CODE (original_target) == REG
7662 && (GET_MODE (original_target)
7663 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7664 {
7665 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7666 VOIDmode, 0);
7667
7668 if (temp != original_target)
7669 temp = copy_to_reg (temp);
7670
7671 op1 = gen_label_rtx ();
7672 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7673 GET_MODE (temp), unsignedp, 0, op1);
7674 emit_move_insn (temp, const1_rtx);
7675 emit_label (op1);
7676 return temp;
7677 }
7678
7679 /* If no set-flag instruction, must generate a conditional
7680 store into a temporary variable. Drop through
7681 and handle this like && and ||. */
7682
7683 case TRUTH_ANDIF_EXPR:
7684 case TRUTH_ORIF_EXPR:
7685 if (! ignore
7686 && (target == 0 || ! safe_from_p (target, exp, 1)
7687 /* Make sure we don't have a hard reg (such as function's return
7688 value) live across basic blocks, if not optimizing. */
7689 || (!optimize && GET_CODE (target) == REG
7690 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7691 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7692
7693 if (target)
7694 emit_clr_insn (target);
7695
7696 op1 = gen_label_rtx ();
7697 jumpifnot (exp, op1);
7698
7699 if (target)
7700 emit_0_to_1_insn (target);
7701
7702 emit_label (op1);
7703 return ignore ? const0_rtx : target;
7704
7705 case TRUTH_NOT_EXPR:
7706 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7707 /* The parser is careful to generate TRUTH_NOT_EXPR
7708 only with operands that are always zero or one. */
7709 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7710 target, 1, OPTAB_LIB_WIDEN);
7711 if (temp == 0)
7712 abort ();
7713 return temp;
7714
7715 case COMPOUND_EXPR:
7716 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7717 emit_queue ();
7718 return expand_expr (TREE_OPERAND (exp, 1),
7719 (ignore ? const0_rtx : target),
7720 VOIDmode, 0);
7721
7722 case COND_EXPR:
7723 /* If we would have a "singleton" (see below) were it not for a
7724 conversion in each arm, bring that conversion back out. */
7725 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7726 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7727 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7728 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7729 {
7730 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7731 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7732
7733 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7734 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7735 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7736 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7737 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7738 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7739 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7740 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7741 return expand_expr (build1 (NOP_EXPR, type,
7742 build (COND_EXPR, TREE_TYPE (true),
7743 TREE_OPERAND (exp, 0),
7744 true, false)),
7745 target, tmode, modifier);
7746 }
7747
7748 {
7749 /* Note that COND_EXPRs whose type is a structure or union
7750 are required to be constructed to contain assignments to
7751 a temporary variable, so that we can evaluate them here
7752 for side effect only. If type is void, we must do likewise. */
7753
7754 /* If an arm of the branch requires a cleanup,
7755 only that cleanup is performed. */
7756
7757 tree singleton = 0;
7758 tree binary_op = 0, unary_op = 0;
7759
7760 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7761 convert it to our mode, if necessary. */
7762 if (integer_onep (TREE_OPERAND (exp, 1))
7763 && integer_zerop (TREE_OPERAND (exp, 2))
7764 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7765 {
7766 if (ignore)
7767 {
7768 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7769 ro_modifier);
7770 return const0_rtx;
7771 }
7772
7773 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7774 if (GET_MODE (op0) == mode)
7775 return op0;
7776
7777 if (target == 0)
7778 target = gen_reg_rtx (mode);
7779 convert_move (target, op0, unsignedp);
7780 return target;
7781 }
7782
7783 /* Check for X ? A + B : A. If we have this, we can copy A to the
7784 output and conditionally add B. Similarly for unary operations.
7785 Don't do this if X has side-effects because those side effects
7786 might affect A or B and the "?" operation is a sequence point in
7787 ANSI. (operand_equal_p tests for side effects.) */
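/* For example, x ? a + 3 : a can be compiled by copying A to the
   output and adding 3 only when X is true (illustrative source).  */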
7788
7789 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7790 && operand_equal_p (TREE_OPERAND (exp, 2),
7791 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7792 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7793 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7794 && operand_equal_p (TREE_OPERAND (exp, 1),
7795 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7796 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7797 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7798 && operand_equal_p (TREE_OPERAND (exp, 2),
7799 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7800 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7801 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7802 && operand_equal_p (TREE_OPERAND (exp, 1),
7803 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7804 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7805
7806 /* If we are not to produce a result, we have no target. Otherwise,
7807 if a target was specified use it; it will not be used as an
7808 intermediate target unless it is safe. If no target, use a
7809 temporary. */
7810
7811 if (ignore)
7812 temp = 0;
7813 else if (original_target
7814 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7815 || (singleton && GET_CODE (original_target) == REG
7816 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7817 && original_target == var_rtx (singleton)))
7818 && GET_MODE (original_target) == mode
7819 #ifdef HAVE_conditional_move
7820 && (! can_conditionally_move_p (mode)
7821 || GET_CODE (original_target) == REG
7822 || TREE_ADDRESSABLE (type))
7823 #endif
7824 && ! (GET_CODE (original_target) == MEM
7825 && MEM_VOLATILE_P (original_target)))
7826 temp = original_target;
7827 else if (TREE_ADDRESSABLE (type))
7828 abort ();
7829 else
7830 temp = assign_temp (type, 0, 0, 1);
7831
7832 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7833 do the test of X as a store-flag operation, do this as
7834 A + ((X != 0) << log C). Similarly for other simple binary
7835 operators. When BRANCH_COST is low, do this only for C == 1. */
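/* For example, with BRANCH_COST >= 3, x ? a + 4 : a becomes
   a + ((x != 0) << 2) (illustrative source).  */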
7836 if (temp && singleton && binary_op
7837 && (TREE_CODE (binary_op) == PLUS_EXPR
7838 || TREE_CODE (binary_op) == MINUS_EXPR
7839 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7840 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7841 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7842 : integer_onep (TREE_OPERAND (binary_op, 1)))
7843 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7844 {
7845 rtx result;
7846 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7847 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7848 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7849 : xor_optab);
7850
7851 /* If we had X ? A : A + 1, do this as A + (X == 0).
7852
7853 We have to invert the truth value here and then put it
7854 back later if do_store_flag fails. We cannot simply copy
7855 TREE_OPERAND (exp, 0) to another variable and modify that
7856 because invert_truthvalue can modify the tree pointed to
7857 by its argument. */
7858 if (singleton == TREE_OPERAND (exp, 1))
7859 TREE_OPERAND (exp, 0)
7860 = invert_truthvalue (TREE_OPERAND (exp, 0));
7861
7862 result = do_store_flag (TREE_OPERAND (exp, 0),
7863 (safe_from_p (temp, singleton, 1)
7864 ? temp : NULL_RTX),
7865 mode, BRANCH_COST <= 1);
7866
7867 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7868 result = expand_shift (LSHIFT_EXPR, mode, result,
7869 build_int_2 (tree_log2
7870 (TREE_OPERAND
7871 (binary_op, 1)),
7872 0),
7873 (safe_from_p (temp, singleton, 1)
7874 ? temp : NULL_RTX), 0);
7875
7876 if (result)
7877 {
7878 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7879 return expand_binop (mode, boptab, op1, result, temp,
7880 unsignedp, OPTAB_LIB_WIDEN);
7881 }
7882 else if (singleton == TREE_OPERAND (exp, 1))
7883 TREE_OPERAND (exp, 0)
7884 = invert_truthvalue (TREE_OPERAND (exp, 0));
7885 }
7886
7887 do_pending_stack_adjust ();
7888 NO_DEFER_POP;
7889 op0 = gen_label_rtx ();
7890
7891 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7892 {
7893 if (temp != 0)
7894 {
7895 /* If the target conflicts with the other operand of the
7896 binary op, we can't use it. Also, we can't use the target
7897 if it is a hard register, because evaluating the condition
7898 might clobber it. */
7899 if ((binary_op
7900 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7901 || (GET_CODE (temp) == REG
7902 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7903 temp = gen_reg_rtx (mode);
7904 store_expr (singleton, temp, 0);
7905 }
7906 else
7907 expand_expr (singleton,
7908 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7909 if (singleton == TREE_OPERAND (exp, 1))
7910 jumpif (TREE_OPERAND (exp, 0), op0);
7911 else
7912 jumpifnot (TREE_OPERAND (exp, 0), op0);
7913
7914 start_cleanup_deferral ();
7915 if (binary_op && temp == 0)
7916 /* Just touch the other operand. */
7917 expand_expr (TREE_OPERAND (binary_op, 1),
7918 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7919 else if (binary_op)
7920 store_expr (build (TREE_CODE (binary_op), type,
7921 make_tree (type, temp),
7922 TREE_OPERAND (binary_op, 1)),
7923 temp, 0);
7924 else
7925 store_expr (build1 (TREE_CODE (unary_op), type,
7926 make_tree (type, temp)),
7927 temp, 0);
7928 op1 = op0;
7929 }
7930 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7931 comparison operator. If we have one of these cases, set the
7932 output to A, branch on A (cse will merge these two references),
7933 then set the output to FOO. */
7934 else if (temp
7935 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7936 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7937 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7938 TREE_OPERAND (exp, 1), 0)
7939 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7940 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7941 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7942 {
7943 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7944 temp = gen_reg_rtx (mode);
7945 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7946 jumpif (TREE_OPERAND (exp, 0), op0);
7947
7948 start_cleanup_deferral ();
7949 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7950 op1 = op0;
7951 }
7952 else if (temp
7953 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7954 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7955 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7956 TREE_OPERAND (exp, 2), 0)
7957 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7958 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7959 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7960 {
7961 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7962 temp = gen_reg_rtx (mode);
7963 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7964 jumpifnot (TREE_OPERAND (exp, 0), op0);
7965
7966 start_cleanup_deferral ();
7967 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7968 op1 = op0;
7969 }
7970 else
7971 {
7972 op1 = gen_label_rtx ();
7973 jumpifnot (TREE_OPERAND (exp, 0), op0);
7974
7975 start_cleanup_deferral ();
7976
7977 /* One branch of the cond can be void, if it never returns. For
7978 example A ? throw : E.  */
7979 if (temp != 0
7980 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7981 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7982 else
7983 expand_expr (TREE_OPERAND (exp, 1),
7984 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7985 end_cleanup_deferral ();
7986 emit_queue ();
7987 emit_jump_insn (gen_jump (op1));
7988 emit_barrier ();
7989 emit_label (op0);
7990 start_cleanup_deferral ();
7991 if (temp != 0
7992 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7993 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7994 else
7995 expand_expr (TREE_OPERAND (exp, 2),
7996 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7997 }
7998
7999 end_cleanup_deferral ();
8000
8001 emit_queue ();
8002 emit_label (op1);
8003 OK_DEFER_POP;
8004
8005 return temp;
8006 }
8007
8008 case TARGET_EXPR:
8009 {
8010 /* Something needs to be initialized, but we didn't know
8011 where that thing was when building the tree. For example,
8012 it could be the return value of a function, or a parameter
8013 to a function which is laid down on the stack, or a temporary
8014 variable which must be passed by reference.
8015
8016 We guarantee that the expression will either be constructed
8017 or copied into our original target. */
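/* For example, for a C++ declaration `T t = f ();', the call may
   construct its result directly in t's storage; SLOT then stands
   for t (illustrative source).  */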
8018
8019 tree slot = TREE_OPERAND (exp, 0);
8020 tree cleanups = NULL_TREE;
8021 tree exp1;
8022
8023 if (TREE_CODE (slot) != VAR_DECL)
8024 abort ();
8025
8026 if (! ignore)
8027 target = original_target;
8028
8029 /* Set this here so that if we get a target that refers to a
8030 register variable that's already been used, put_reg_into_stack
8031 knows that it should fix up those uses. */
8032 TREE_USED (slot) = 1;
8033
8034 if (target == 0)
8035 {
8036 if (DECL_RTL (slot) != 0)
8037 {
8038 target = DECL_RTL (slot);
8039 /* If we have already expanded the slot, don't do
8040 it again. (mrs) */
8041 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8042 return target;
8043 }
8044 else
8045 {
8046 target = assign_temp (type, 2, 0, 1);
8047 /* All temp slots at this level must not conflict. */
8048 preserve_temp_slots (target);
8049 DECL_RTL (slot) = target;
8050 if (TREE_ADDRESSABLE (slot))
8051 {
8052 TREE_ADDRESSABLE (slot) = 0;
8053 mark_addressable (slot);
8054 }
8055
8056 /* Since SLOT is not known to the called function
8057 to belong to its stack frame, we must build an explicit
8058 cleanup. This case occurs when we must build up a reference
8059 to pass the reference as an argument. In this case,
8060 it is very likely that such a reference need not be
8061 built here. */
8062
8063 if (TREE_OPERAND (exp, 2) == 0)
8064 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8065 cleanups = TREE_OPERAND (exp, 2);
8066 }
8067 }
8068 else
8069 {
8070 /* This case does occur when expanding a parameter which
8071 needs to be constructed on the stack. The target
8072 is the actual stack address that we want to initialize.
8073 The function we call will perform the cleanup in this case. */
8074
8075 /* If we have already assigned it space, use that space,
8076 not the target that we were passed in, as our target
8077 parameter is only a hint. */
8078 if (DECL_RTL (slot) != 0)
8079 {
8080 target = DECL_RTL (slot);
8081 /* If we have already expanded the slot, don't do
8082 it again. (mrs) */
8083 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8084 return target;
8085 }
8086 else
8087 {
8088 DECL_RTL (slot) = target;
8089 /* If we must have an addressable slot, then make sure that
8090 the RTL that we just stored in slot is OK. */
8091 if (TREE_ADDRESSABLE (slot))
8092 {
8093 TREE_ADDRESSABLE (slot) = 0;
8094 mark_addressable (slot);
8095 }
8096 }
8097 }
8098
8099 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8100 /* Mark it as expanded. */
8101 TREE_OPERAND (exp, 1) = NULL_TREE;
8102
8103 store_expr (exp1, target, 0);
8104
8105 expand_decl_cleanup (NULL_TREE, cleanups);
8106
8107 return target;
8108 }
8109
8110 case INIT_EXPR:
8111 {
8112 tree lhs = TREE_OPERAND (exp, 0);
8113 tree rhs = TREE_OPERAND (exp, 1);
8114 tree noncopied_parts = 0;
8115 tree lhs_type = TREE_TYPE (lhs);
8116
8117 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8118 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8119 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8120 TYPE_NONCOPIED_PARTS (lhs_type));
8121 while (noncopied_parts != 0)
8122 {
8123 expand_assignment (TREE_VALUE (noncopied_parts),
8124 TREE_PURPOSE (noncopied_parts), 0, 0);
8125 noncopied_parts = TREE_CHAIN (noncopied_parts);
8126 }
8127 return temp;
8128 }
8129
8130 case MODIFY_EXPR:
8131 {
8132 /* If lhs is complex, expand calls in rhs before computing it.
8133 That's so we don't compute a pointer and save it over a call.
8134 If lhs is simple, compute it first so we can give it as a
8135 target if the rhs is just a call. This avoids an extra temp and copy,
8136 and prevents a partial subsumption which makes bad code.
8137 Actually we could treat component_ref's of vars like vars. */
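/* For example, for `a[i] = f ();' the call is expanded before the
   address of a[i] is computed, while for `x = f ();' the simple
   lhs X can be offered to the call as its target (illustrative
   sources).  */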
8138
8139 tree lhs = TREE_OPERAND (exp, 0);
8140 tree rhs = TREE_OPERAND (exp, 1);
8141 tree noncopied_parts = 0;
8142 tree lhs_type = TREE_TYPE (lhs);
8143
8144 temp = 0;
8145
8146 if (TREE_CODE (lhs) != VAR_DECL
8147 && TREE_CODE (lhs) != RESULT_DECL
8148 && TREE_CODE (lhs) != PARM_DECL
8149 && ! (TREE_CODE (lhs) == INDIRECT_REF
8150 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8151 preexpand_calls (exp);
8152
8153 /* Check for |= or &= of a bitfield of size one into another bitfield
8154 of size 1. In this case, (unless we need the result of the
8155 assignment) we can do this more efficiently with a
8156 test followed by an assignment, if necessary.
8157
8158 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8159 things change so we do, this code should be enhanced to
8160 support it. */
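/* For example, with one-bit fields, `s.a |= s.b;' whose value is
   not needed becomes roughly `if (s.b) s.a = 1;' (illustrative
   source).  */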
8161 if (ignore
8162 && TREE_CODE (lhs) == COMPONENT_REF
8163 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8164 || TREE_CODE (rhs) == BIT_AND_EXPR)
8165 && TREE_OPERAND (rhs, 0) == lhs
8166 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8167 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8168 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8169 {
8170 rtx label = gen_label_rtx ();
8171
8172 do_jump (TREE_OPERAND (rhs, 1),
8173 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8174 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8175 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8176 (TREE_CODE (rhs) == BIT_IOR_EXPR
8177 ? integer_one_node
8178 : integer_zero_node)),
8179 0, 0);
8180 do_pending_stack_adjust ();
8181 emit_label (label);
8182 return const0_rtx;
8183 }
8184
8185 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8186 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8187 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8188 TYPE_NONCOPIED_PARTS (lhs_type));
8189
8190 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8191 while (noncopied_parts != 0)
8192 {
8193 expand_assignment (TREE_PURPOSE (noncopied_parts),
8194 TREE_VALUE (noncopied_parts), 0, 0);
8195 noncopied_parts = TREE_CHAIN (noncopied_parts);
8196 }
8197 return temp;
8198 }
8199
8200 case RETURN_EXPR:
8201 if (!TREE_OPERAND (exp, 0))
8202 expand_null_return ();
8203 else
8204 expand_return (TREE_OPERAND (exp, 0));
8205 return const0_rtx;
8206
8207 case PREINCREMENT_EXPR:
8208 case PREDECREMENT_EXPR:
8209 return expand_increment (exp, 0, ignore);
8210
8211 case POSTINCREMENT_EXPR:
8212 case POSTDECREMENT_EXPR:
8213 /* Faster to treat as pre-increment if result is not used. */
8214 return expand_increment (exp, ! ignore, ignore);
8215
8216 case ADDR_EXPR:
8217 /* If nonzero, TEMP will be set to the address of something that might
8218 be a MEM corresponding to a stack slot. */
8219 temp = 0;
8220
8221 /* Are we taking the address of a nested function? */
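/* If so, the address must be that of a trampoline which loads the
   static chain and jumps to the function, not the bare entry point
   (a brief summary of what trampoline_address provides).  */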
8222 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8223 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8224 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8225 && ! TREE_STATIC (exp))
8226 {
8227 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8228 op0 = force_operand (op0, target);
8229 }
8230 /* If we are taking the address of something erroneous, just
8231 return a zero. */
8232 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8233 return const0_rtx;
8234 else
8235 {
8236 /* We make sure to pass const0_rtx down if we came in with
8237 ignore set, to avoid doing the cleanups twice. */
8238 op0 = expand_expr (TREE_OPERAND (exp, 0),
8239 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8240 (modifier == EXPAND_INITIALIZER
8241 ? modifier : EXPAND_CONST_ADDRESS));
8242
8243 /* If we are going to ignore the result, OP0 will have been set
8244 to const0_rtx, so just return it. Don't get confused and
8245 think we are taking the address of the constant. */
8246 if (ignore)
8247 return op0;
8248
8249 op0 = protect_from_queue (op0, 0);
8250
8251 /* We would like the object in memory. If it is a constant, we can
8252 have it be statically allocated into memory. For a non-constant,
8253 we need to allocate some memory and store the value into it. */
8254
8255 if (CONSTANT_P (op0))
8256 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8257 op0);
8258 else if (GET_CODE (op0) == MEM)
8259 {
8260 mark_temp_addr_taken (op0);
8261 temp = XEXP (op0, 0);
8262 }
8263
8264 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8265 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8266 {
8267 /* If this object is in a register, it must not
8268 be BLKmode. */
8269 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8270 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8271
8272 mark_temp_addr_taken (memloc);
8273 emit_move_insn (memloc, op0);
8274 op0 = memloc;
8275 }
8276
8277 if (GET_CODE (op0) != MEM)
8278 abort ();
8279
8280 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8281 {
8282 temp = XEXP (op0, 0);
8283 #ifdef POINTERS_EXTEND_UNSIGNED
8284 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8285 && mode == ptr_mode)
8286 temp = convert_memory_address (ptr_mode, temp);
8287 #endif
8288 return temp;
8289 }
8290
8291 op0 = force_operand (XEXP (op0, 0), target);
8292 }
8293
8294 if (flag_force_addr && GET_CODE (op0) != REG)
8295 op0 = force_reg (Pmode, op0);
8296
8297 if (GET_CODE (op0) == REG
8298 && ! REG_USERVAR_P (op0))
8299 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8300
8301 /* If we might have had a temp slot, add an equivalent address
8302 for it. */
8303 if (temp != 0)
8304 update_temp_slot_address (temp, op0);
8305
8306 #ifdef POINTERS_EXTEND_UNSIGNED
8307 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8308 && mode == ptr_mode)
8309 op0 = convert_memory_address (ptr_mode, op0);
8310 #endif
8311
8312 return op0;
8313
8314 case ENTRY_VALUE_EXPR:
8315 abort ();
8316
8317 /* COMPLEX type for Extended Pascal & Fortran.  */
8318 case COMPLEX_EXPR:
8319 {
8320 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8321 rtx insns;
8322
8323 /* Get the rtx code of the operands. */
8324 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8325 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8326
8327 if (! target)
8328 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8329
8330 start_sequence ();
8331
8332 /* Move the real (op0) and imaginary (op1) parts to their location. */
8333 emit_move_insn (gen_realpart (mode, target), op0);
8334 emit_move_insn (gen_imagpart (mode, target), op1);
8335
8336 insns = get_insns ();
8337 end_sequence ();
8338
8339 /* Complex construction should appear as a single unit. */
8340 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8341 each with a separate pseudo as destination.
8342 It's not correct for flow to treat them as a unit. */
8343 if (GET_CODE (target) != CONCAT)
8344 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8345 else
8346 emit_insns (insns);
8347
8348 return target;
8349 }
8350
8351 case REALPART_EXPR:
8352 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8353 return gen_realpart (mode, op0);
8354
8355 case IMAGPART_EXPR:
8356 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8357 return gen_imagpart (mode, op0);
8358
8359 case CONJ_EXPR:
8360 {
8361 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8362 rtx imag_t;
8363 rtx insns;
8364
8365 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8366
8367 if (! target)
8368 target = gen_reg_rtx (mode);
8369
8370 start_sequence ();
8371
8372 /* Store the realpart and the negated imagpart to target. */
8373 emit_move_insn (gen_realpart (partmode, target),
8374 gen_realpart (partmode, op0));
8375
8376 imag_t = gen_imagpart (partmode, target);
8377 temp = expand_unop (partmode, neg_optab,
8378 gen_imagpart (partmode, op0), imag_t, 0);
8379 if (temp != imag_t)
8380 emit_move_insn (imag_t, temp);
8381
8382 insns = get_insns ();
8383 end_sequence ();
8384
8385 /* Conjugate should appear as a single unit.
8386 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8387 each with a separate pseudo as destination.
8388 It's not correct for flow to treat them as a unit. */
8389 if (GET_CODE (target) != CONCAT)
8390 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8391 else
8392 emit_insns (insns);
8393
8394 return target;
8395 }
8396
8397 case TRY_CATCH_EXPR:
8398 {
8399 tree handler = TREE_OPERAND (exp, 1);
8400
8401 expand_eh_region_start ();
8402
8403 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8404
8405 expand_eh_region_end (handler);
8406
8407 return op0;
8408 }
8409
8410 case TRY_FINALLY_EXPR:
8411 {
8412 tree try_block = TREE_OPERAND (exp, 0);
8413 tree finally_block = TREE_OPERAND (exp, 1);
8414 rtx finally_label = gen_label_rtx ();
8415 rtx done_label = gen_label_rtx ();
8416 rtx return_link = gen_reg_rtx (Pmode);
8417 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8418 (tree) finally_label, (tree) return_link);
8419 TREE_SIDE_EFFECTS (cleanup) = 1;
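/* Expansion sketch: FINALLY_BLOCK is emitted once as a subroutine
   entered at FINALLY_LABEL and returning through RETURN_LINK; every
   exit from TRY_BLOCK, normal or not, runs it via the cleanup
   registered below before reaching DONE_LABEL (a summary of the
   code that follows).  */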
8420
8421 /* Start a new binding layer that will keep track of all cleanup
8422 actions to be performed. */
8423 expand_start_bindings (2);
8424
8425 target_temp_slot_level = temp_slot_level;
8426
8427 expand_decl_cleanup (NULL_TREE, cleanup);
8428 op0 = expand_expr (try_block, target, tmode, modifier);
8429
8430 preserve_temp_slots (op0);
8431 expand_end_bindings (NULL_TREE, 0, 0);
8432 emit_jump (done_label);
8433 emit_label (finally_label);
8434 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8435 emit_indirect_jump (return_link);
8436 emit_label (done_label);
8437 return op0;
8438 }
8439
8440 case GOTO_SUBROUTINE_EXPR:
8441 {
8442 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8443 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8444 rtx return_address = gen_label_rtx ();
8445 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8446 emit_jump (subr);
8447 emit_label (return_address);
8448 return const0_rtx;
8449 }
8450
8451 case POPDCC_EXPR:
8452 {
8453 rtx dcc = get_dynamic_cleanup_chain ();
8454 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8455 return const0_rtx;
8456 }
8457
8458 case POPDHC_EXPR:
8459 {
8460 rtx dhc = get_dynamic_handler_chain ();
8461 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8462 return const0_rtx;
8463 }
8464
8465 case VA_ARG_EXPR:
8466 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8467
8468 default:
8469 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8470 }
8471
8472 /* Here to do an ordinary binary operator, generating an instruction
8473 from the optab already placed in `this_optab'. */
8474 binop:
8475 preexpand_calls (exp);
8476 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8477 subtarget = 0;
8478 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8479 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8480 binop2:
8481 temp = expand_binop (mode, this_optab, op0, op1, target,
8482 unsignedp, OPTAB_LIB_WIDEN);
8483 if (temp == 0)
8484 abort ();
8485 return temp;
8486 }
8487 \f
8488 /* Similar to expand_expr, except that we don't specify a target, target
8489 mode, or modifier and we return the alignment of the inner type. This is
8490 used in cases where it is not necessary to align the result to the
8491 alignment of its type as long as we know the alignment of the result, for
8492 example for comparisons of BLKmode values. */
8493
8494 static rtx
8495 expand_expr_unaligned (exp, palign)
8496 register tree exp;
8497 unsigned int *palign;
8498 {
8499 register rtx op0;
8500 tree type = TREE_TYPE (exp);
8501 register enum machine_mode mode = TYPE_MODE (type);
8502
8503 /* Default the alignment we return to that of the type. */
8504 *palign = TYPE_ALIGN (type);
8505
8506 /* The only cases in which we do anything special is if the resulting mode
8507 is BLKmode. */
8508 if (mode != BLKmode)
8509 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8510
8511 switch (TREE_CODE (exp))
8512 {
8513 case CONVERT_EXPR:
8514 case NOP_EXPR:
8515 case NON_LVALUE_EXPR:
8516 /* Conversions between BLKmode values don't change the underlying
8517 alignment or value. */
8518 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8519 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8520 break;
8521
8522 case ARRAY_REF:
8523 /* Much of the code for this case is copied directly from expand_expr.
8524 We need to duplicate it here because we will do something different
8525 in the fall-through case, so we need to handle the same exceptions
8526 it does. */
8527 {
8528 tree array = TREE_OPERAND (exp, 0);
8529 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8530 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8531 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8532 HOST_WIDE_INT i;
8533
8534 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8535 abort ();
8536
8537 /* Optimize the special case of a zero lower bound.
8538
8539 We convert the low_bound to sizetype to avoid some problems
8540 with constant folding. (E.g. suppose the lower bound is 1,
8541 and its mode is QI. Without the conversion, (ARRAY
8542 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8543 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8544
8545 if (! integer_zerop (low_bound))
8546 index = size_diffop (index, convert (sizetype, low_bound));
8547
8548 /* If this is a constant index into a constant array,
8549 just get the value from the array. Handle both the cases when
8550 we have an explicit constructor and when our operand is a variable
8551 that was declared const. */
8552
8553 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8554 && 0 > compare_tree_int (index,
8555 list_length (CONSTRUCTOR_ELTS
8556 (TREE_OPERAND (exp, 0)))))
8557 {
8558 tree elem;
8559
8560 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8561 i = TREE_INT_CST_LOW (index);
8562 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8563 ;
8564
8565 if (elem)
8566 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8567 }
8568
8569 else if (optimize >= 1
8570 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8571 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8572 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8573 {
8574 if (TREE_CODE (index) == INTEGER_CST)
8575 {
8576 tree init = DECL_INITIAL (array);
8577
8578 if (TREE_CODE (init) == CONSTRUCTOR)
8579 {
8580 tree elem;
8581
8582 for (elem = CONSTRUCTOR_ELTS (init);
8583 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8584 elem = TREE_CHAIN (elem))
8585 ;
8586
8587 if (elem)
8588 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8589 palign);
8590 }
8591 }
8592 }
8593 }
8594
8595 /* ... fall through ... */
8596
8597 case COMPONENT_REF:
8598 case BIT_FIELD_REF:
8599 /* If the operand is a CONSTRUCTOR, we can just extract the
8600 appropriate field if it is present. Don't do this if we have
8601 already written the data since we want to refer to that copy
8602 and varasm.c assumes that's what we'll do. */
8603 if (TREE_CODE (exp) != ARRAY_REF
8604 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8605 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8606 {
8607 tree elt;
8608
8609 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8610 elt = TREE_CHAIN (elt))
8611 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8612 /* Note that unlike the case in expand_expr, we know this is
8613 BLKmode and hence not an integer. */
8614 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8615 }
8616
8617 {
8618 enum machine_mode mode1;
8619 int bitsize;
8620 int bitpos;
8621 tree offset;
8622 int volatilep = 0;
8623 unsigned int alignment;
8624 int unsignedp;
8625 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8626 &mode1, &unsignedp, &volatilep,
8627 &alignment);
8628
8629 /* If we got back the original object, something is wrong. Perhaps
8630 we are evaluating an expression too early. In any event, don't
8631 infinitely recurse. */
8632 if (tem == exp)
8633 abort ();
8634
8635 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8636
8637 /* If this is a constant, put it into a register if it is a
8638 legitimate constant and OFFSET is 0 and memory if it isn't. */
8639 if (CONSTANT_P (op0))
8640 {
8641 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8642
8643 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8644 && offset == 0)
8645 op0 = force_reg (inner_mode, op0);
8646 else
8647 op0 = validize_mem (force_const_mem (inner_mode, op0));
8648 }
8649
8650 if (offset != 0)
8651 {
8652 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8653
8654 /* If this object is in a register, put it into memory.
8655 This case can't occur in C, but can in Ada if we have
8656 unchecked conversion of an expression from a scalar type to
8657 an array or record type. */
8658 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8659 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8660 {
8661 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8662
8663 mark_temp_addr_taken (memloc);
8664 emit_move_insn (memloc, op0);
8665 op0 = memloc;
8666 }
8667
8668 if (GET_CODE (op0) != MEM)
8669 abort ();
8670
8671 if (GET_MODE (offset_rtx) != ptr_mode)
8672 {
8673 #ifdef POINTERS_EXTEND_UNSIGNED
8674 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8675 #else
8676 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8677 #endif
8678 }
8679
8680 op0 = change_address (op0, VOIDmode,
8681 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8682 force_reg (ptr_mode,
8683 offset_rtx)));
8684 }
8685
8686 /* Don't forget about volatility even if this is a bitfield. */
8687 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8688 {
8689 op0 = copy_rtx (op0);
8690 MEM_VOLATILE_P (op0) = 1;
8691 }
8692
8693 /* Check the access. */
8694 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8695 {
8696 rtx to;
8697 int size;
8698
8699 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8700 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8701
8702 /* Check the access right of the pointer. */
8703 if (size > BITS_PER_UNIT)
8704 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8705 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8706 TYPE_MODE (sizetype),
8707 GEN_INT (MEMORY_USE_RO),
8708 TYPE_MODE (integer_type_node));
8709 }
8710
8711 /* In cases where an aligned union has an unaligned object
8712 as a field, we might be extracting a BLKmode value from
8713 an integer-mode (e.g., SImode) object. Handle this case
8714 by doing the extract into an object as wide as the field
8715 (which we know to be the width of a basic mode), then
8716 storing into memory, and changing the mode to BLKmode.
8717 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8718 EXPAND_INITIALIZER), then we must not copy to a temporary. */
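	 /* For illustration only (a hypothetical layout, not taken from
	    any particular target): given

	      union u { int i; struct { char c[3]; } s; } x;

	    aligned as an SImode word, reading x.s may be a BLKmode
	    extraction from the SImode object holding X; we then pull the
	    bits into an SImode temporary, store that into a stack slot,
	    and relabel the slot as BLKmode.  */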
8719 if (mode1 == VOIDmode
8720 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8721 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8722 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8723 || bitpos % TYPE_ALIGN (type) != 0)))
8724 {
8725 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8726
8727 if (ext_mode == BLKmode)
8728 {
8729 /* In this case, BITPOS must start at a byte boundary. */
8730 if (GET_CODE (op0) != MEM
8731 || bitpos % BITS_PER_UNIT != 0)
8732 abort ();
8733
8734 op0 = change_address (op0, VOIDmode,
8735 plus_constant (XEXP (op0, 0),
8736 bitpos / BITS_PER_UNIT));
8737 }
8738 else
8739 {
8740 rtx new = assign_stack_temp (ext_mode,
8741 bitsize / BITS_PER_UNIT, 0);
8742
8743 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8744 unsignedp, NULL_RTX, ext_mode,
8745 ext_mode, alignment,
8746 int_size_in_bytes (TREE_TYPE (tem)));
8747
8748 /* If the result is a record type and BITSIZE is narrower than
8749 the mode of OP0, an integral mode, and this is a big endian
8750 machine, we must put the field into the high-order bits. */
8751 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8752 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8753 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8754 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8755 size_int (GET_MODE_BITSIZE
8756 (GET_MODE (op0))
8757 - bitsize),
8758 op0, 1);
8759
8761 emit_move_insn (new, op0);
8762 op0 = copy_rtx (new);
8763 PUT_MODE (op0, BLKmode);
8764 }
8765 }
8766 else
8767 /* Get a reference to just this component. */
8768 op0 = change_address (op0, mode1,
8769 plus_constant (XEXP (op0, 0),
8770 (bitpos / BITS_PER_UNIT)));
8771
8772 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8773
8774 /* Adjust the alignment in case the bit position is not
8775 a multiple of the alignment of the inner object. */
8776 while (bitpos % alignment != 0)
8777 alignment >>= 1;
8778
8779 if (GET_CODE (XEXP (op0, 0)) == REG)
8780 mark_reg_pointer (XEXP (op0, 0), alignment);
8781
8782 MEM_IN_STRUCT_P (op0) = 1;
8783 MEM_VOLATILE_P (op0) |= volatilep;
8784
8785 *palign = alignment;
8786 return op0;
8787 }
8788
8789 default:
8790 break;
8791
8792 }
8793
8794 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8795 }
8796 \f
8797 /* Return the tree node if ARG corresponds to a string constant, or zero
8798 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8799 in bytes within the string that ARG is accessing. The type of the
8800 offset will be `sizetype'. */
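/* For example (a sketch of the two shapes handled below): for ARG of
   the form &"hello" we return the STRING_CST "hello" with *PTR_OFFSET
   set to zero, while for ARG of the form `"hello" + 3' (a PLUS_EXPR
   of the string's address and 3) we return the same STRING_CST with
   *PTR_OFFSET set to 3.  */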
8801
8802 tree
8803 string_constant (arg, ptr_offset)
8804 tree arg;
8805 tree *ptr_offset;
8806 {
8807 STRIP_NOPS (arg);
8808
8809 if (TREE_CODE (arg) == ADDR_EXPR
8810 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8811 {
8812 *ptr_offset = size_zero_node;
8813 return TREE_OPERAND (arg, 0);
8814 }
8815 else if (TREE_CODE (arg) == PLUS_EXPR)
8816 {
8817 tree arg0 = TREE_OPERAND (arg, 0);
8818 tree arg1 = TREE_OPERAND (arg, 1);
8819
8820 STRIP_NOPS (arg0);
8821 STRIP_NOPS (arg1);
8822
8823 if (TREE_CODE (arg0) == ADDR_EXPR
8824 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8825 {
8826 *ptr_offset = convert (sizetype, arg1);
8827 return TREE_OPERAND (arg0, 0);
8828 }
8829 else if (TREE_CODE (arg1) == ADDR_EXPR
8830 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8831 {
8832 *ptr_offset = convert (sizetype, arg0);
8833 return TREE_OPERAND (arg1, 0);
8834 }
8835 }
8836
8837 return 0;
8838 }
8839 \f
8840 /* Expand code for a post- or pre- increment or decrement
8841 and return the RTX for the result.
8842 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
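/* As a rough sketch, assuming SImode and a target with an SImode add
   insn: for a postincrement `i++' of a register I we queue

     (set (reg:SI I) (plus:SI (reg:SI I) (const_int 1)))

   and return an rtx standing for the value of I before the addition,
   whereas for a preincrement `++i' the addition is emitted
   immediately and the incremented value is returned.  */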
8843
8844 static rtx
8845 expand_increment (exp, post, ignore)
8846 register tree exp;
8847 int post, ignore;
8848 {
8849 register rtx op0, op1;
8850 register rtx temp, value;
8851 register tree incremented = TREE_OPERAND (exp, 0);
8852 optab this_optab = add_optab;
8853 int icode;
8854 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8855 int op0_is_copy = 0;
8856 int single_insn = 0;
8857 /* 1 means we can't store into OP0 directly,
8858 because it is a subreg narrower than a word,
8859 and we don't dare clobber the rest of the word. */
8860 int bad_subreg = 0;
8861
8862 /* Stabilize any component ref that might need to be
8863 evaluated more than once below. */
8864 if (!post
8865 || TREE_CODE (incremented) == BIT_FIELD_REF
8866 || (TREE_CODE (incremented) == COMPONENT_REF
8867 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8868 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8869 incremented = stabilize_reference (incremented);
8870 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8871 ones into save exprs so that they don't accidentally get evaluated
8872 more than once by the code below. */
8873 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8874 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8875 incremented = save_expr (incremented);
8876
8877 /* Compute the operands as RTX.
8878 Note whether OP0 is the actual lvalue or a copy of it:
8879 I believe it is a copy iff it is a register or subreg
8880 and insns were generated in computing it. */
8881
8882 temp = get_last_insn ();
8883 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8884
8885 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8886 in place but instead must do sign- or zero-extension during assignment,
8887 so we copy it into a new register and let the code below use it as
8888 a copy.
8889
8890    Note that we can safely modify this SUBREG since it is known not to be
8891 shared (it was made by the expand_expr call above). */
8892
8893 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8894 {
8895 if (post)
8896 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8897 else
8898 bad_subreg = 1;
8899 }
8900 else if (GET_CODE (op0) == SUBREG
8901 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8902 {
8903 /* We cannot increment this SUBREG in place. If we are
8904 post-incrementing, get a copy of the old value. Otherwise,
8905 just mark that we cannot increment in place. */
8906 if (post)
8907 op0 = copy_to_reg (op0);
8908 else
8909 bad_subreg = 1;
8910 }
8911
8912 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8913 && temp != get_last_insn ());
8914 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8915 EXPAND_MEMORY_USE_BAD);
8916
8917 /* Decide whether incrementing or decrementing. */
8918 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8919 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8920 this_optab = sub_optab;
8921
8922 /* Convert decrement by a constant into a negative increment. */
8923 if (this_optab == sub_optab
8924 && GET_CODE (op1) == CONST_INT)
8925 {
8926 op1 = GEN_INT (- INTVAL (op1));
8927 this_optab = add_optab;
8928 }
8929
8930 /* For a preincrement, see if we can do this with a single instruction. */
8931 if (!post)
8932 {
8933 icode = (int) this_optab->handlers[(int) mode].insn_code;
8934 if (icode != (int) CODE_FOR_nothing
8935 /* Make sure that OP0 is valid for operands 0 and 1
8936 of the insn we want to queue. */
8937 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8938 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8939 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8940 single_insn = 1;
8941 }
8942
8943 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8944 then we cannot just increment OP0. We must therefore contrive to
8945 increment the original value. Then, for postincrement, we can return
8946 OP0 since it is a copy of the old value. For preincrement, expand here
8947 unless we can do it with a single insn.
8948
8949 Likewise if storing directly into OP0 would clobber high bits
8950 we need to preserve (bad_subreg). */
8951 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8952 {
8953 /* This is the easiest way to increment the value wherever it is.
8954 Problems with multiple evaluation of INCREMENTED are prevented
8955 because either (1) it is a component_ref or preincrement,
8956 in which case it was stabilized above, or (2) it is an array_ref
8957 with constant index in an array in a register, which is
8958 safe to reevaluate. */
8959 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8960 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8961 ? MINUS_EXPR : PLUS_EXPR),
8962 TREE_TYPE (exp),
8963 incremented,
8964 TREE_OPERAND (exp, 1));
8965
8966 while (TREE_CODE (incremented) == NOP_EXPR
8967 || TREE_CODE (incremented) == CONVERT_EXPR)
8968 {
8969 newexp = convert (TREE_TYPE (incremented), newexp);
8970 incremented = TREE_OPERAND (incremented, 0);
8971 }
8972
8973       temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8974 return post ? op0 : temp;
8975 }
8976
8977 if (post)
8978 {
8979 /* We have a true reference to the value in OP0.
8980 If there is an insn to add or subtract in this mode, queue it.
8981 Queueing the increment insn avoids the register shuffling
8982 that often results if we must increment now and first save
8983 the old value for subsequent use. */
8984
8985 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8986 op0 = stabilize (op0);
8987 #endif
8988
8989 icode = (int) this_optab->handlers[(int) mode].insn_code;
8990 if (icode != (int) CODE_FOR_nothing
8991 /* Make sure that OP0 is valid for operands 0 and 1
8992 of the insn we want to queue. */
8993 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8994 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8995 {
8996 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8997 op1 = force_reg (mode, op1);
8998
8999 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9000 }
9001 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9002 {
9003 rtx addr = (general_operand (XEXP (op0, 0), mode)
9004 ? force_reg (Pmode, XEXP (op0, 0))
9005 : copy_to_reg (XEXP (op0, 0)));
9006 rtx temp, result;
9007
9008 op0 = change_address (op0, VOIDmode, addr);
9009 temp = force_reg (GET_MODE (op0), op0);
9010 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9011 op1 = force_reg (mode, op1);
9012
9013 /* The increment queue is LIFO, thus we have to `queue'
9014 the instructions in reverse order. */
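	  /* That is, the move back into OP0 is enqueued first so that,
	     the queue being LIFO, it is emitted after the add below:
	     the add computes TEMP+OP1 into TEMP, and the queued move
	     then stores TEMP back into the original memory location.  */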
9015 enqueue_insn (op0, gen_move_insn (op0, temp));
9016 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9017 return result;
9018 }
9019 }
9020
9021 /* Preincrement, or we can't increment with one simple insn. */
9022 if (post)
9023 /* Save a copy of the value before inc or dec, to return it later. */
9024 temp = value = copy_to_reg (op0);
9025 else
9026 /* Arrange to return the incremented value. */
9027 /* Copy the rtx because expand_binop will protect from the queue,
9028 and the results of that would be invalid for us to return
9029 if our caller does emit_queue before using our result. */
9030 temp = copy_rtx (value = op0);
9031
9032 /* Increment however we can. */
9033 op1 = expand_binop (mode, this_optab, value, op1,
9034 current_function_check_memory_usage ? NULL_RTX : op0,
9035 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9036 /* Make sure the value is stored into OP0. */
9037 if (op1 != op0)
9038 emit_move_insn (op0, op1);
9039
9040 return temp;
9041 }
9042 \f
9043 /* Expand all function calls contained within EXP, innermost ones first.
9044 But don't look within expressions that have sequence points.
9045 For each CALL_EXPR, record the rtx for its value
9046 in the CALL_EXPR_RTL field. */
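/* E.g., given EXP for `a + f (b)', the CALL_EXPR `f (b)' is expanded
   here and its value rtx recorded in CALL_EXPR_RTL, so that when the
   sum is expanded later the recorded value is simply reused instead
   of expanding the call in the middle of the addition.  */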
9047
9048 static void
9049 preexpand_calls (exp)
9050 tree exp;
9051 {
9052 register int nops, i;
9053 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9054
9055 if (! do_preexpand_calls)
9056 return;
9057
9058 /* Only expressions and references can contain calls. */
9059
9060 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9061 return;
9062
9063 switch (TREE_CODE (exp))
9064 {
9065 case CALL_EXPR:
9066 /* Do nothing if already expanded. */
9067 if (CALL_EXPR_RTL (exp) != 0
9068 /* Do nothing if the call returns a variable-sized object. */
9069 	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9070 /* Do nothing to built-in functions. */
9071 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9072 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9073 == FUNCTION_DECL)
9074 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9075 return;
9076
9077 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9078 return;
9079
9080 case COMPOUND_EXPR:
9081 case COND_EXPR:
9082 case TRUTH_ANDIF_EXPR:
9083 case TRUTH_ORIF_EXPR:
9084 /* If we find one of these, then we can be sure
9085 the adjust will be done for it (since it makes jumps).
9086 Do it now, so that if this is inside an argument
9087 of a function, we don't get the stack adjustment
9088 after some other args have already been pushed. */
9089 do_pending_stack_adjust ();
9090 return;
9091
9092 case BLOCK:
9093 case RTL_EXPR:
9094 case WITH_CLEANUP_EXPR:
9095 case CLEANUP_POINT_EXPR:
9096 case TRY_CATCH_EXPR:
9097 return;
9098
9099 case SAVE_EXPR:
9100 if (SAVE_EXPR_RTL (exp) != 0)
9101 return;
9102
9103 default:
9104 break;
9105 }
9106
9107 nops = tree_code_length[(int) TREE_CODE (exp)];
9108 for (i = 0; i < nops; i++)
9109 if (TREE_OPERAND (exp, i) != 0)
9110 {
9111 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9112 	/* We don't need to preexpand the cleanup for a TARGET_EXPR;
9113 	   the cleanup does not run before the call is made.  */
9114 ;
9115 else
9116 {
9117 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9118 if (type == 'e' || type == '<' || type == '1' || type == '2'
9119 || type == 'r')
9120 preexpand_calls (TREE_OPERAND (exp, i));
9121 }
9122 }
9123 }
9124 \f
9125 /* At the start of a function, record that we have no previously-pushed
9126 arguments waiting to be popped. */
9127
9128 void
9129 init_pending_stack_adjust ()
9130 {
9131 pending_stack_adjust = 0;
9132 }
9133
9134 /* When exiting from a function, if safe, clear out any pending stack adjust
9135 so the adjustment won't get done.
9136
9137 Note, if the current function calls alloca, then it must have a
9138 frame pointer regardless of the value of flag_omit_frame_pointer. */
9139
9140 void
9141 clear_pending_stack_adjust ()
9142 {
9143 #ifdef EXIT_IGNORE_STACK
9144 if (optimize > 0
9145 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9146 && EXIT_IGNORE_STACK
9147 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9148 && ! flag_inline_functions)
9149 pending_stack_adjust = 0;
9150 #endif
9151 }
9152
9153 /* Pop any previously-pushed arguments that have not been popped yet. */
9154
9155 void
9156 do_pending_stack_adjust ()
9157 {
9158 if (inhibit_defer_pop == 0)
9159 {
9160 if (pending_stack_adjust != 0)
9161 adjust_stack (GEN_INT (pending_stack_adjust));
9162 pending_stack_adjust = 0;
9163 }
9164 }
9165 \f
9166 /* Expand conditional expressions. */
9167
9168 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9169 LABEL is an rtx of code CODE_LABEL, in this function and all the
9170 functions here. */
9171
9172 void
9173 jumpifnot (exp, label)
9174 tree exp;
9175 rtx label;
9176 {
9177 do_jump (exp, label, NULL_RTX);
9178 }
9179
9180 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9181
9182 void
9183 jumpif (exp, label)
9184 tree exp;
9185 rtx label;
9186 {
9187 do_jump (exp, NULL_RTX, label);
9188 }
9189
9190 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9191 the result is zero, or IF_TRUE_LABEL if the result is one.
9192 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9193 meaning fall through in that case.
9194
9195 do_jump always does any pending stack adjust except when it does not
9196 actually perform a jump. An example where there is no jump
9197 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9198
9199 This function is responsible for optimizing cases such as
9200 &&, || and comparison operators in EXP. */
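/* For instance, for `if (a && b)' jumpifnot hands the
   TRUTH_ANDIF_EXPR to this function; we jump to IF_FALSE_LABEL as
   soon as A evaluates to zero and test B only when A is nonzero, so
   no boolean value is ever materialized in a register.  */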
9201
9202 void
9203 do_jump (exp, if_false_label, if_true_label)
9204 tree exp;
9205 rtx if_false_label, if_true_label;
9206 {
9207 register enum tree_code code = TREE_CODE (exp);
9208 /* Some cases need to create a label to jump to
9209 in order to properly fall through.
9210 These cases set DROP_THROUGH_LABEL nonzero. */
9211 rtx drop_through_label = 0;
9212 rtx temp;
9213 int i;
9214 tree type;
9215 enum machine_mode mode;
9216
9217 #ifdef MAX_INTEGER_COMPUTATION_MODE
9218 check_max_integer_computation_mode (exp);
9219 #endif
9220
9221 emit_queue ();
9222
9223 switch (code)
9224 {
9225 case ERROR_MARK:
9226 break;
9227
9228 case INTEGER_CST:
9229 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9230 if (temp)
9231 emit_jump (temp);
9232 break;
9233
9234 #if 0
9235 /* This is not true with #pragma weak */
9236 case ADDR_EXPR:
9237 /* The address of something can never be zero. */
9238 if (if_true_label)
9239 emit_jump (if_true_label);
9240 break;
9241 #endif
9242
9243 case NOP_EXPR:
9244 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9245 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9246 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9247 goto normal;
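      /* ... fall through ... */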
9248 case CONVERT_EXPR:
9249 /* If we are narrowing the operand, we have to do the compare in the
9250 narrower mode. */
9251 if ((TYPE_PRECISION (TREE_TYPE (exp))
9252 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9253 goto normal;
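      /* ... fall through ... */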
9254 case NON_LVALUE_EXPR:
9255 case REFERENCE_EXPR:
9256 case ABS_EXPR:
9257 case NEGATE_EXPR:
9258 case LROTATE_EXPR:
9259 case RROTATE_EXPR:
9260 /* These cannot change zero->non-zero or vice versa. */
9261 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9262 break;
9263
9264 case WITH_RECORD_EXPR:
9265 /* Put the object on the placeholder list, recurse through our first
9266 operand, and pop the list. */
9267 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9268 placeholder_list);
9269 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9270 placeholder_list = TREE_CHAIN (placeholder_list);
9271 break;
9272
9273 #if 0
9274 /* This is never less insns than evaluating the PLUS_EXPR followed by
9275 a test and can be longer if the test is eliminated. */
9276 case PLUS_EXPR:
9277 /* Reduce to minus. */
9278 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9279 TREE_OPERAND (exp, 0),
9280 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9281 TREE_OPERAND (exp, 1))));
9282 /* Process as MINUS. */
9283 #endif
9284
9285 case MINUS_EXPR:
9286 /* Non-zero iff operands of minus differ. */
9287 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9288 TREE_OPERAND (exp, 0),
9289 TREE_OPERAND (exp, 1)),
9290 NE, NE, if_false_label, if_true_label);
9291 break;
9292
9293 case BIT_AND_EXPR:
9294 /* If we are AND'ing with a small constant, do this comparison in the
9295 smallest type that fits. If the machine doesn't have comparisons
9296 that small, it will be converted back to the wider comparison.
9297 This helps if we are testing the sign bit of a narrower object.
9298 combine can't do this for us because it can't know whether a
9299 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
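      /* E.g., a test of `x & 0x80' where X has SImode can be done as
	 a QImode comparison when the target has QImode compare insns,
	 since only the low-order byte of X matters.  */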
9300
9301 if (! SLOW_BYTE_ACCESS
9302 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9303 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9304 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9305 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9306 && (type = type_for_mode (mode, 1)) != 0
9307 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9308 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9309 != CODE_FOR_nothing))
9310 {
9311 do_jump (convert (type, exp), if_false_label, if_true_label);
9312 break;
9313 }
9314 goto normal;
9315
9316 case TRUTH_NOT_EXPR:
9317 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9318 break;
9319
9320 case TRUTH_ANDIF_EXPR:
9321 if (if_false_label == 0)
9322 if_false_label = drop_through_label = gen_label_rtx ();
9323 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9324 start_cleanup_deferral ();
9325 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9326 end_cleanup_deferral ();
9327 break;
9328
9329 case TRUTH_ORIF_EXPR:
9330 if (if_true_label == 0)
9331 if_true_label = drop_through_label = gen_label_rtx ();
9332 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9333 start_cleanup_deferral ();
9334 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9335 end_cleanup_deferral ();
9336 break;
9337
9338 case COMPOUND_EXPR:
9339 push_temp_slots ();
9340 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9341 preserve_temp_slots (NULL_RTX);
9342 free_temp_slots ();
9343 pop_temp_slots ();
9344 emit_queue ();
9345 do_pending_stack_adjust ();
9346 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9347 break;
9348
9349 case COMPONENT_REF:
9350 case BIT_FIELD_REF:
9351 case ARRAY_REF:
9352 {
9353 int bitsize, bitpos, unsignedp;
9354 enum machine_mode mode;
9355 tree type;
9356 tree offset;
9357 int volatilep = 0;
9358 unsigned int alignment;
9359
9360 /* Get description of this reference. We don't actually care
9361 about the underlying object here. */
9362 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9363 &mode, &unsignedp, &volatilep,
9364 &alignment);
9365
9366 type = type_for_size (bitsize, unsignedp);
9367 if (! SLOW_BYTE_ACCESS
9368 && type != 0 && bitsize >= 0
9369 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9370 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9371 != CODE_FOR_nothing))
9372 {
9373 do_jump (convert (type, exp), if_false_label, if_true_label);
9374 break;
9375 }
9376 goto normal;
9377 }
9378
9379 case COND_EXPR:
9380 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9381 if (integer_onep (TREE_OPERAND (exp, 1))
9382 && integer_zerop (TREE_OPERAND (exp, 2)))
9383 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9384
9385 else if (integer_zerop (TREE_OPERAND (exp, 1))
9386 && integer_onep (TREE_OPERAND (exp, 2)))
9387 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9388
9389 else
9390 {
9391 register rtx label1 = gen_label_rtx ();
9392 drop_through_label = gen_label_rtx ();
9393
9394 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9395
9396 start_cleanup_deferral ();
9397 /* Now the THEN-expression. */
9398 do_jump (TREE_OPERAND (exp, 1),
9399 if_false_label ? if_false_label : drop_through_label,
9400 if_true_label ? if_true_label : drop_through_label);
9401 /* In case the do_jump just above never jumps. */
9402 do_pending_stack_adjust ();
9403 emit_label (label1);
9404
9405 /* Now the ELSE-expression. */
9406 do_jump (TREE_OPERAND (exp, 2),
9407 if_false_label ? if_false_label : drop_through_label,
9408 if_true_label ? if_true_label : drop_through_label);
9409 end_cleanup_deferral ();
9410 }
9411 break;
9412
9413 case EQ_EXPR:
9414 {
9415 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9416
9417 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9418 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9419 {
9420 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9421 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9422 do_jump
9423 (fold
9424 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9425 fold (build (EQ_EXPR, TREE_TYPE (exp),
9426 fold (build1 (REALPART_EXPR,
9427 TREE_TYPE (inner_type),
9428 exp0)),
9429 fold (build1 (REALPART_EXPR,
9430 TREE_TYPE (inner_type),
9431 exp1)))),
9432 fold (build (EQ_EXPR, TREE_TYPE (exp),
9433 fold (build1 (IMAGPART_EXPR,
9434 TREE_TYPE (inner_type),
9435 exp0)),
9436 fold (build1 (IMAGPART_EXPR,
9437 TREE_TYPE (inner_type),
9438 exp1)))))),
9439 if_false_label, if_true_label);
9440 }
9441
9442 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9443 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9444
9445 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9446 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9447 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9448 else
9449 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9450 break;
9451 }
9452
9453 case NE_EXPR:
9454 {
9455 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9456
9457 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9458 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9459 {
9460 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9461 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9462 do_jump
9463 (fold
9464 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9465 fold (build (NE_EXPR, TREE_TYPE (exp),
9466 fold (build1 (REALPART_EXPR,
9467 TREE_TYPE (inner_type),
9468 exp0)),
9469 fold (build1 (REALPART_EXPR,
9470 TREE_TYPE (inner_type),
9471 exp1)))),
9472 fold (build (NE_EXPR, TREE_TYPE (exp),
9473 fold (build1 (IMAGPART_EXPR,
9474 TREE_TYPE (inner_type),
9475 exp0)),
9476 fold (build1 (IMAGPART_EXPR,
9477 TREE_TYPE (inner_type),
9478 exp1)))))),
9479 if_false_label, if_true_label);
9480 }
9481
9482 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9483 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9484
9485 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9486 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9487 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9488 else
9489 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9490 break;
9491 }
9492
9493 case LT_EXPR:
9494 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9495 if (GET_MODE_CLASS (mode) == MODE_INT
9496 && ! can_compare_p (LT, mode, ccp_jump))
9497 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9498 else
9499 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9500 break;
9501
9502 case LE_EXPR:
9503 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9504 if (GET_MODE_CLASS (mode) == MODE_INT
9505 && ! can_compare_p (LE, mode, ccp_jump))
9506 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9507 else
9508 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9509 break;
9510
9511 case GT_EXPR:
9512 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9513 if (GET_MODE_CLASS (mode) == MODE_INT
9514 && ! can_compare_p (GT, mode, ccp_jump))
9515 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9516 else
9517 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9518 break;
9519
9520 case GE_EXPR:
9521 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9522 if (GET_MODE_CLASS (mode) == MODE_INT
9523 && ! can_compare_p (GE, mode, ccp_jump))
9524 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9525 else
9526 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9527 break;
9528
9529 case UNORDERED_EXPR:
9530 case ORDERED_EXPR:
9531 {
9532 enum rtx_code cmp, rcmp;
9533 int do_rev;
9534
9535 if (code == UNORDERED_EXPR)
9536 cmp = UNORDERED, rcmp = ORDERED;
9537 else
9538 cmp = ORDERED, rcmp = UNORDERED;
9539 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9540
9541 do_rev = 0;
9542 if (! can_compare_p (cmp, mode, ccp_jump)
9543 && (can_compare_p (rcmp, mode, ccp_jump)
9544 /* If the target doesn't provide either UNORDERED or ORDERED
9545 comparisons, canonicalize on UNORDERED for the library. */
9546 || rcmp == UNORDERED))
9547 do_rev = 1;
9548
9549 if (! do_rev)
9550 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9551 else
9552 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9553 }
9554 break;
9555
9556 {
9557 enum rtx_code rcode1;
9558 enum tree_code tcode2;
9559
9560 case UNLT_EXPR:
9561 rcode1 = UNLT;
9562 tcode2 = LT_EXPR;
9563 goto unordered_bcc;
9564 case UNLE_EXPR:
9565 rcode1 = UNLE;
9566 tcode2 = LE_EXPR;
9567 goto unordered_bcc;
9568 case UNGT_EXPR:
9569 rcode1 = UNGT;
9570 tcode2 = GT_EXPR;
9571 goto unordered_bcc;
9572 case UNGE_EXPR:
9573 rcode1 = UNGE;
9574 tcode2 = GE_EXPR;
9575 goto unordered_bcc;
9576 case UNEQ_EXPR:
9577 rcode1 = UNEQ;
9578 tcode2 = EQ_EXPR;
9579 goto unordered_bcc;
9580
9581 unordered_bcc:
9582 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9583 if (can_compare_p (rcode1, mode, ccp_jump))
9584 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9585 if_true_label);
9586 else
9587 {
9588 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9589 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9590 tree cmp0, cmp1;
9591
9592 /* If the target doesn't support combined unordered
9593 compares, decompose into UNORDERED + comparison. */
9594 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9595 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9596 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9597 do_jump (exp, if_false_label, if_true_label);
9598 }
9599 }
9600 break;
9601
9602 default:
9603 normal:
9604 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9605 #if 0
9606 /* This is not needed any more and causes poor code since it causes
9607 comparisons and tests from non-SI objects to have different code
9608 sequences. */
9609 /* Copy to register to avoid generating bad insns by cse
9610 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9611 if (!cse_not_expected && GET_CODE (temp) == MEM)
9612 temp = copy_to_reg (temp);
9613 #endif
9614 do_pending_stack_adjust ();
9615 /* Do any postincrements in the expression that was tested. */
9616 emit_queue ();
9617
9618 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9619 {
9620 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9621 if (target)
9622 emit_jump (target);
9623 }
9624 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9625 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9626 /* Note swapping the labels gives us not-equal. */
9627 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9628 else if (GET_MODE (temp) != VOIDmode)
9629 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9630 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9631 GET_MODE (temp), NULL_RTX, 0,
9632 if_false_label, if_true_label);
9633 else
9634 abort ();
9635 }
9636
9637 if (drop_through_label)
9638 {
9639 /* If do_jump produces code that might be jumped around,
9640 do any stack adjusts from that code, before the place
9641 where control merges in. */
9642 do_pending_stack_adjust ();
9643 emit_label (drop_through_label);
9644 }
9645 }
9646 \f
9647 /* Given a comparison expression EXP for values too wide to be compared
9648 with one insn, test the comparison and jump to the appropriate label.
9649 The code of EXP is ignored; we always test GT if SWAP is 0,
9650 and LT if SWAP is 1. */
9651
9652 static void
9653 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9654 tree exp;
9655 int swap;
9656 rtx if_false_label, if_true_label;
9657 {
9658 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9659 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9660 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9661 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9662
9663   do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
                                if_false_label, if_true_label);
9664 }
9665
9666 /* Compare OP0 with OP1, word at a time, in mode MODE.
9667 UNSIGNEDP says to do unsigned comparison.
9668 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
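/* A rough sketch for a two-word comparison (say DImode on a 32-bit
   target), high-order word first:

     if (op0.hi > op1.hi) goto if_true_label;    signedness of operands
     if (op0.hi != op1.hi) goto if_false_label;  i.e. op0.hi < op1.hi
     if (op0.lo > op1.lo) goto if_true_label;    always unsigned
     goto if_false_label;                        all words equal

   Only the high-order word is compared with the signedness of the
   operands; the lower words carry magnitude only.  */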
9669
9670 void
9671 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
                              if_false_label, if_true_label)
9672 enum machine_mode mode;
9673 int unsignedp;
9674 rtx op0, op1;
9675 rtx if_false_label, if_true_label;
9676 {
9677 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9678 rtx drop_through_label = 0;
9679 int i;
9680
9681 if (! if_true_label || ! if_false_label)
9682 drop_through_label = gen_label_rtx ();
9683 if (! if_true_label)
9684 if_true_label = drop_through_label;
9685 if (! if_false_label)
9686 if_false_label = drop_through_label;
9687
9688 /* Compare a word at a time, high order first. */
9689 for (i = 0; i < nwords; i++)
9690 {
9691 rtx op0_word, op1_word;
9692
9693 if (WORDS_BIG_ENDIAN)
9694 {
9695 op0_word = operand_subword_force (op0, i, mode);
9696 op1_word = operand_subword_force (op1, i, mode);
9697 }
9698 else
9699 {
9700 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9701 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9702 }
9703
9704       /* All but the high-order word must be compared as unsigned.  */
9705 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9706 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9707 NULL_RTX, if_true_label);
9708
9709 /* Consider lower words only if these are equal. */
9710 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9711 NULL_RTX, 0, NULL_RTX, if_false_label);
9712 }
9713
9714 if (if_false_label)
9715 emit_jump (if_false_label);
9716 if (drop_through_label)
9717 emit_label (drop_through_label);
9718 }
9719
9720 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9721 with one insn, test the comparison and jump to the appropriate label. */
9722
9723 static void
9724 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9725 tree exp;
9726 rtx if_false_label, if_true_label;
9727 {
9728 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9729 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9730 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9731 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9732 int i;
9733 rtx drop_through_label = 0;
9734
9735 if (! if_false_label)
9736 drop_through_label = if_false_label = gen_label_rtx ();
9737
9738 for (i = 0; i < nwords; i++)
9739 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9740 operand_subword_force (op1, i, mode),
9741 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9742 word_mode, NULL_RTX, 0, if_false_label,
9743 NULL_RTX);
9744
9745 if (if_true_label)
9746 emit_jump (if_true_label);
9747 if (drop_through_label)
9748 emit_label (drop_through_label);
9749 }
9750 \f
9751 /* Jump according to whether OP0 is 0.
9752 We assume that OP0 has an integer mode that is too wide
9753 for the available compare insns. */
9754
9755 void
9756 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9757 rtx op0;
9758 rtx if_false_label, if_true_label;
9759 {
9760 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9761 rtx part;
9762 int i;
9763 rtx drop_through_label = 0;
9764
9765 /* The fastest way of doing this comparison on almost any machine is to
9766 "or" all the words and compare the result. If all have to be loaded
9767 from memory and this is a very wide item, it's possible this may
9768 be slower, but that's highly unlikely. */
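  /* E.g., for a DImode value on a 32-bit host we compute
     PART = LOWWORD | HIGHWORD and compare PART against zero once,
     instead of branching on each word separately.  */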
9769
9770 part = gen_reg_rtx (word_mode);
9771 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9772 for (i = 1; i < nwords && part != 0; i++)
9773 part = expand_binop (word_mode, ior_optab, part,
9774 operand_subword_force (op0, i, GET_MODE (op0)),
9775 part, 1, OPTAB_WIDEN);
9776
9777 if (part != 0)
9778 {
9779 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9780 NULL_RTX, 0, if_false_label, if_true_label);
9781
9782 return;
9783 }
9784
9785 /* If we couldn't do the "or" simply, do this with a series of compares. */
9786 if (! if_false_label)
9787 drop_through_label = if_false_label = gen_label_rtx ();
9788
9789 for (i = 0; i < nwords; i++)
9790 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9791 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9792 if_false_label, NULL_RTX);
9793
9794 if (if_true_label)
9795 emit_jump (if_true_label);
9796
9797 if (drop_through_label)
9798 emit_label (drop_through_label);
9799 }
9800 \f
9801 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9802 (including code to compute the values to be compared)
9803 and set (CC0) according to the result.
9804 The decision as to signed or unsigned comparison must be made by the caller.
9805
9806 We force a stack adjustment unless there are currently
9807 things pushed on the stack that aren't yet used.
9808
9809 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9810 compared.
9811
9812 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9813 size of MODE should be used. */
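/* On cc0 targets the value built below is simply

     (CODE (cc0) (const_int 0))

   emitted after the compare insn itself; when both operands are
   CONST_INTs, simplify_relational_operation instead folds the
   comparison to a constant and that constant is returned.  */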
9814
9815 rtx
9816 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9817 register rtx op0, op1;
9818 enum rtx_code code;
9819 int unsignedp;
9820 enum machine_mode mode;
9821 rtx size;
9822 unsigned int align;
9823 {
9824 rtx tem;
9825
9826 /* If one operand is constant, make it the second one. Only do this
9827 if the other operand is not constant as well. */
9828
9829 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9830 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9831 {
9832 tem = op0;
9833 op0 = op1;
9834 op1 = tem;
9835 code = swap_condition (code);
9836 }
9837
9838 if (flag_force_mem)
9839 {
9840 op0 = force_not_mem (op0);
9841 op1 = force_not_mem (op1);
9842 }
9843
9844 do_pending_stack_adjust ();
9845
9846 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9847 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9848 return tem;
9849
9850 #if 0
9851 /* There's no need to do this now that combine.c can eliminate lots of
9852 sign extensions. This can be less efficient in certain cases on other
9853 machines. */
9854
9855 /* If this is a signed equality comparison, we can do it as an
9856 unsigned comparison since zero-extension is cheaper than sign
9857 extension and comparisons with zero are done as unsigned. This is
9858 the case even on machines that can do fast sign extension, since
9859 zero-extension is easier to combine with other operations than
9860 sign-extension is. If we are comparing against a constant, we must
9861 convert it to what it would look like unsigned. */
9862 if ((code == EQ || code == NE) && ! unsignedp
9863 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9864 {
9865 if (GET_CODE (op1) == CONST_INT
9866 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9867 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9868 unsignedp = 1;
9869 }
9870 #endif
9871
9872 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9873
9874 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9875 }
9876
9877 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9878 The decision as to signed or unsigned comparison must be made by the caller.
9879
9880 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9881 compared.
9882
9883 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9884 size of MODE should be used. */
9885
9886 void
9887 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9888 if_false_label, if_true_label)
9889 register rtx op0, op1;
9890 enum rtx_code code;
9891 int unsignedp;
9892 enum machine_mode mode;
9893 rtx size;
9894 unsigned int align;
9895 rtx if_false_label, if_true_label;
9896 {
9897 rtx tem;
9898 int dummy_true_label = 0;
9899
9900 /* Reverse the comparison if that is safe and we want to jump if it is
9901 false. */
9902 if (! if_true_label && ! FLOAT_MODE_P (mode))
9903 {
9904 if_true_label = if_false_label;
9905 if_false_label = 0;
9906 code = reverse_condition (code);
9907 }
9908
9909 /* If one operand is constant, make it the second one. Only do this
9910 if the other operand is not constant as well. */
9911
9912 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9913 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9914 {
9915 tem = op0;
9916 op0 = op1;
9917 op1 = tem;
9918 code = swap_condition (code);
9919 }
9920
9921 if (flag_force_mem)
9922 {
9923 op0 = force_not_mem (op0);
9924 op1 = force_not_mem (op1);
9925 }
9926
9927 do_pending_stack_adjust ();
9928
9929 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9930 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9931 {
9932 if (tem == const_true_rtx)
9933 {
9934 if (if_true_label)
9935 emit_jump (if_true_label);
9936 }
9937 else
9938 {
9939 if (if_false_label)
9940 emit_jump (if_false_label);
9941 }
9942 return;
9943 }
9944
9945 #if 0
9946 /* There's no need to do this now that combine.c can eliminate lots of
9947 sign extensions. This can be less efficient in certain cases on other
9948 machines. */
9949
9950 /* If this is a signed equality comparison, we can do it as an
9951 unsigned comparison since zero-extension is cheaper than sign
9952 extension and comparisons with zero are done as unsigned. This is
9953 the case even on machines that can do fast sign extension, since
9954 zero-extension is easier to combine with other operations than
9955 sign-extension is. If we are comparing against a constant, we must
9956 convert it to what it would look like unsigned. */
9957 if ((code == EQ || code == NE) && ! unsignedp
9958 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9959 {
9960 if (GET_CODE (op1) == CONST_INT
9961 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9962 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9963 unsignedp = 1;
9964 }
9965 #endif
9966
9967 if (! if_true_label)
9968 {
9969 dummy_true_label = 1;
9970 if_true_label = gen_label_rtx ();
9971 }
9972
9973 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9974 if_true_label);
9975
9976 if (if_false_label)
9977 emit_jump (if_false_label);
9978 if (dummy_true_label)
9979 emit_label (if_true_label);
9980 }
9981
9982 /* Generate code for a comparison expression EXP (including code to compute
9983 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9984 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9985 generated code will drop through.
9986 SIGNED_CODE should be the rtx operation for this comparison for
9987 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9988
9989 We force a stack adjustment unless there are currently
9990 things pushed on the stack that aren't yet used. */
9991
9992 static void
9993 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9994 if_true_label)
9995 register tree exp;
9996 enum rtx_code signed_code, unsigned_code;
9997 rtx if_false_label, if_true_label;
9998 {
9999 unsigned int align0, align1;
10000 register rtx op0, op1;
10001 register tree type;
10002 register enum machine_mode mode;
10003 int unsignedp;
10004 enum rtx_code code;
10005
10006 /* Don't crash if the comparison was erroneous. */
10007 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10008 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10009 return;
10010
10011 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10012 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10013 mode = TYPE_MODE (type);
10014 unsignedp = TREE_UNSIGNED (type);
10015 code = unsignedp ? unsigned_code : signed_code;
10016
10017 #ifdef HAVE_canonicalize_funcptr_for_compare
10018 /* If function pointers need to be "canonicalized" before they can
10019 be reliably compared, then canonicalize them. */
10020 if (HAVE_canonicalize_funcptr_for_compare
10021 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10022 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10023 == FUNCTION_TYPE))
10024 {
10025 rtx new_op0 = gen_reg_rtx (mode);
10026
10027 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10028 op0 = new_op0;
10029 }
10030
10031 if (HAVE_canonicalize_funcptr_for_compare
10032 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10033 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10034 == FUNCTION_TYPE))
10035 {
10036 rtx new_op1 = gen_reg_rtx (mode);
10037
10038 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10039 op1 = new_op1;
10040 }
10041 #endif
10042
10043 /* Do any postincrements in the expression that was tested. */
10044 emit_queue ();
10045
10046 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10047 ((mode == BLKmode)
10048 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10049 MIN (align0, align1) / BITS_PER_UNIT,
10050 if_false_label, if_true_label);
10051 }
10052 \f
10053 /* Generate code to calculate EXP using a store-flag instruction
10054 and return an rtx for the result. EXP is either a comparison
10055 or a TRUTH_NOT_EXPR whose operand is a comparison.
10056
10057 If TARGET is nonzero, store the result there if convenient.
10058
10059 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10060 cheap.
10061
10062 Return zero if there is no suitable set-flag instruction
10063 available on this machine.
10064
10065 Once expand_expr has been called on the arguments of the comparison,
10066 we are committed to doing the store flag, since it is not safe to
10067 re-evaluate the expression. We emit the store-flag insn by calling
10068 emit_store_flag, but only expand the arguments if we have a reason
10069 to believe that emit_store_flag will be successful. If we think that
10070 it will, but it isn't, we have to simulate the store-flag with a
10071 set/jump/set sequence. */
10072
10073 static rtx
10074 do_store_flag (exp, target, mode, only_cheap)
10075 tree exp;
10076 rtx target;
10077 enum machine_mode mode;
10078 int only_cheap;
10079 {
10080 enum rtx_code code;
10081 tree arg0, arg1, type;
10082 tree tem;
10083 enum machine_mode operand_mode;
10084 int invert = 0;
10085 int unsignedp;
10086 rtx op0, op1;
10087 enum insn_code icode;
10088 rtx subtarget = target;
10089 rtx result, label;
10090
10091 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10092 result at the end. We can't simply invert the test since it would
10093 have already been inverted if it were valid. This case occurs for
10094 some floating-point comparisons. */
10095
10096 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10097 invert = 1, exp = TREE_OPERAND (exp, 0);
10098
10099 arg0 = TREE_OPERAND (exp, 0);
10100 arg1 = TREE_OPERAND (exp, 1);
10101 type = TREE_TYPE (arg0);
10102 operand_mode = TYPE_MODE (type);
10103 unsignedp = TREE_UNSIGNED (type);
10104
10105 /* We won't bother with BLKmode store-flag operations because it would mean
10106 passing a lot of information to emit_store_flag. */
10107 if (operand_mode == BLKmode)
10108 return 0;
10109
10110 /* We won't bother with store-flag operations involving function pointers
10111 when function pointers must be canonicalized before comparisons. */
10112 #ifdef HAVE_canonicalize_funcptr_for_compare
10113 if (HAVE_canonicalize_funcptr_for_compare
10114 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10115 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10116 == FUNCTION_TYPE))
10117 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10118 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10119 == FUNCTION_TYPE))))
10120 return 0;
10121 #endif
10122
10123 STRIP_NOPS (arg0);
10124 STRIP_NOPS (arg1);
10125
10126 /* Get the rtx comparison code to use. We know that EXP is a comparison
10127 operation of some type. Some comparisons against 1 and -1 can be
10128 converted to comparisons with zero. Do so here so that the tests
10129 below will be aware that we have a comparison with zero. These
10130 tests will not catch constants in the first operand, but constants
10131 are rarely passed as the first operand. */
10132
10133 switch (TREE_CODE (exp))
10134 {
10135 case EQ_EXPR:
10136 code = EQ;
10137 break;
10138 case NE_EXPR:
10139 code = NE;
10140 break;
10141 case LT_EXPR:
10142 if (integer_onep (arg1))
10143 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10144 else
10145 code = unsignedp ? LTU : LT;
10146 break;
10147 case LE_EXPR:
10148 if (! unsignedp && integer_all_onesp (arg1))
10149 arg1 = integer_zero_node, code = LT;
10150 else
10151 code = unsignedp ? LEU : LE;
10152 break;
10153 case GT_EXPR:
10154 if (! unsignedp && integer_all_onesp (arg1))
10155 arg1 = integer_zero_node, code = GE;
10156 else
10157 code = unsignedp ? GTU : GT;
10158 break;
10159 case GE_EXPR:
10160 if (integer_onep (arg1))
10161 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10162 else
10163 code = unsignedp ? GEU : GE;
10164 break;
10165
10166 case UNORDERED_EXPR:
10167 code = UNORDERED;
10168 break;
10169 case ORDERED_EXPR:
10170 code = ORDERED;
10171 break;
10172 case UNLT_EXPR:
10173 code = UNLT;
10174 break;
10175 case UNLE_EXPR:
10176 code = UNLE;
10177 break;
10178 case UNGT_EXPR:
10179 code = UNGT;
10180 break;
10181 case UNGE_EXPR:
10182 code = UNGE;
10183 break;
10184 case UNEQ_EXPR:
10185 code = UNEQ;
10186 break;
10187
10188 default:
10189 abort ();
10190 }
10191
10192 /* Put a constant second. */
10193 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10194 {
10195 tem = arg0; arg0 = arg1; arg1 = tem;
10196 code = swap_condition (code);
10197 }
10198
10199 /* If this is an equality or inequality test of a single bit, we can
10200 do this by shifting the bit being tested to the low-order bit and
10201 masking the result with the constant 1. If the condition was EQ,
10202 we xor it with 1. This does not require an scc insn and is faster
10203 than an scc insn even if we have it. */
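  /* For example, `(x & 8) != 0' becomes, roughly,

       t = x >> 3;
       result = t & 1;

     and the EQ form additionally XORs with 1 before the masking AND.
     The final AND is omitted when the tested bit is already the sign
     bit of the operand.  */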
10204
10205 if ((code == NE || code == EQ)
10206 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10207 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10208 {
10209 tree inner = TREE_OPERAND (arg0, 0);
10210 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10211 int ops_unsignedp;
10212
10213 /* If INNER is a right shift of a constant and it plus BITNUM does
10214 not overflow, adjust BITNUM and INNER. */
10215
10216 if (TREE_CODE (inner) == RSHIFT_EXPR
10217 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10218 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10219 && bitnum < TYPE_PRECISION (type)
10220 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10221 bitnum - TYPE_PRECISION (type)))
10222 {
10223 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10224 inner = TREE_OPERAND (inner, 0);
10225 }
10226
10227 /* If we are going to be able to omit the AND below, we must do our
10228 operations as unsigned. If we must use the AND, we have a choice.
10229 Normally unsigned is faster, but for some machines signed is. */
10230 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10231 #ifdef LOAD_EXTEND_OP
10232 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10233 #else
10234 : 1
10235 #endif
10236 );
10237
10238 if (subtarget == 0 || GET_CODE (subtarget) != REG
10239 || GET_MODE (subtarget) != operand_mode
10240 || ! safe_from_p (subtarget, inner, 1))
10241 subtarget = 0;
10242
10243 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10244
10245 if (bitnum != 0)
10246 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10247 size_int (bitnum), subtarget, ops_unsignedp);
10248
10249 if (GET_MODE (op0) != mode)
10250 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10251
10252 if ((code == EQ && ! invert) || (code == NE && invert))
10253 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10254 ops_unsignedp, OPTAB_LIB_WIDEN);
10255
10256 /* Put the AND last so it can combine with more things. */
10257 if (bitnum != TYPE_PRECISION (type) - 1)
10258 op0 = expand_and (op0, const1_rtx, subtarget);
10259
10260 return op0;
10261 }
10262
10263 /* Now see if we are likely to be able to do this. Return if not. */
10264 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10265 return 0;
10266
10267 icode = setcc_gen_code[(int) code];
10268 if (icode == CODE_FOR_nothing
10269 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10270 {
10271 /* We can only do this if it is one of the special cases that
10272 can be handled without an scc insn. */
10273 if ((code == LT && integer_zerop (arg1))
10274 || (! only_cheap && code == GE && integer_zerop (arg1)))
10275 ;
10276 else if (BRANCH_COST >= 0
10277 && ! only_cheap && (code == NE || code == EQ)
10278 && TREE_CODE (type) != REAL_TYPE
10279 && ((abs_optab->handlers[(int) operand_mode].insn_code
10280 != CODE_FOR_nothing)
10281 || (ffs_optab->handlers[(int) operand_mode].insn_code
10282 != CODE_FOR_nothing)))
10283 ;
10284 else
10285 return 0;
10286 }
10287
10288 preexpand_calls (exp);
10289 if (subtarget == 0 || GET_CODE (subtarget) != REG
10290 || GET_MODE (subtarget) != operand_mode
10291 || ! safe_from_p (subtarget, arg1, 1))
10292 subtarget = 0;
10293
10294 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10295 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10296
10297 if (target == 0)
10298 target = gen_reg_rtx (mode);
10299
10300 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10301      because, if emit_store_flag does anything, it will succeed and
10302 OP0 and OP1 will not be used subsequently. */
10303
10304 result = emit_store_flag (target, code,
10305 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10306 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10307 operand_mode, unsignedp, 1);
10308
10309 if (result)
10310 {
10311 if (invert)
10312 result = expand_binop (mode, xor_optab, result, const1_rtx,
10313 result, 0, OPTAB_LIB_WIDEN);
10314 return result;
10315 }
10316
10317 /* If this failed, we have to do this with set/compare/jump/set code. */
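  /* Schematically, the fallback sequence emitted below is, for the
     non-inverted case (a sketch):

	 target = 1;
	 if (op0 <code> op1) goto label;
	 target = 0;
       label:

     with the roles of 0 and 1 swapped when INVERT is set.  */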
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
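
  /* Worked example (illustrative): for case values 5 ... 12, the lower
     bound 5 has already been subtracted and RANGE is 7.  An original
     index of 3 becomes (unsigned) (3 - 5), a value near the top of the
     unsigned range, which exceeds 7 just as 13 - 5 = 8 does; so the one
     GTU test below rejects values on both sides of the table.  */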

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then the INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large the dispatch-table
     entries are.  The other uses should all be Pmode, because they are
     addresses.  This code could fail if addresses and table entries are
     not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
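
  /* At this point INDEX is, schematically,
	 (plus (mult INDEX entry_size) (label_ref TABLE_LABEL))
     where entry_size is GET_MODE_SIZE (CASE_VECTOR_MODE): the byte
     address of table entry number INDEX (an illustrative reading of
     the rtx just built).  */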
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */