Bring back equal forms for libcalls
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
47
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
51
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
60
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
63
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
66
67 #ifdef PUSH_ROUNDING
68
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
72
73 #endif
74
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
82
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
87
88 /* Hook called by safe_from_p for language-specific tree codes. It is
89 up to the language front-end to install a hook if it has any such
90 codes that safe_from_p needs to know about. Since safe_from_p will
91 recursively explore the TREE_OPERANDs of an expression, this hook
92 should not reexamine those pieces. This routine may recursively
93 call safe_from_p; it should always pass `0' as the TOP_P
94 parameter. */
95 int (*lang_safe_from_p) PARAMS ((rtx, tree));
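/* Illustrative sketch (editorial addition, not part of the original
   expr.c): how a front end with language-specific tree codes might
   install this hook. The names `example_lang_safe_from_p' and
   `example_lang_init' are hypothetical. */
#if 0
static int
example_lang_safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  /* safe_from_p has already walked the TREE_OPERANDs of EXP, so only
     decide about the language-specific code itself; anything we do not
     recognize is treated conservatively. */
  return 0;
}

static void
example_lang_init ()
{
  lang_safe_from_p = example_lang_safe_from_p;
}
#endif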
96
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
104
105 /* Nonzero to generate code for all the subroutines within an
106 expression before generating the upper levels of the expression.
107 Nowadays this is never zero. */
108 int do_preexpand_calls = 1;
109
110 /* Don't check memory usage, since code is being emitted to check memory
111 usage. Used when current_function_check_memory_usage is true, to avoid
112 infinite recursion. */
113 static int in_check_memory_usage;
114
115 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
116 static tree placeholder_list = 0;
117
118 /* This structure is used by move_by_pieces to describe the move to
119 be performed. */
120 struct move_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 rtx from;
127 rtx from_addr;
128 int autinc_from;
129 int explicit_inc_from;
130 unsigned HOST_WIDE_INT len;
131 HOST_WIDE_INT offset;
132 int reverse;
133 };
134
135 /* This structure is used by clear_by_pieces to describe the clear to
136 be performed. */
137
138 struct clear_by_pieces
139 {
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 unsigned HOST_WIDE_INT len;
145 HOST_WIDE_INT offset;
146 int reverse;
147 };
148
149 extern struct obstack permanent_obstack;
150
151 static rtx get_push_address PARAMS ((int));
152
153 static rtx enqueue_insn PARAMS ((rtx, rtx));
154 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
155 PARAMS ((unsigned HOST_WIDE_INT,
156 unsigned int));
157 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
158 struct move_by_pieces *));
159 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
160 unsigned int));
161 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
162 enum machine_mode,
163 struct clear_by_pieces *));
164 static rtx get_subtarget PARAMS ((rtx));
165 static int is_zeros_p PARAMS ((tree));
166 static int mostly_zeros_p PARAMS ((tree));
167 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
168 HOST_WIDE_INT, enum machine_mode,
169 tree, tree, unsigned int, int));
170 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
171 HOST_WIDE_INT));
172 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
173 HOST_WIDE_INT, enum machine_mode,
174 tree, enum machine_mode, int,
175 unsigned int, HOST_WIDE_INT, int));
176 static enum memory_use_mode
177 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
178 static tree save_noncopied_parts PARAMS ((tree, tree));
179 static tree init_noncopied_parts PARAMS ((tree, tree));
180 static int fixed_type_p PARAMS ((tree));
181 static rtx var_rtx PARAMS ((tree));
182 static int readonly_fields_p PARAMS ((tree));
183 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
184 static rtx expand_increment PARAMS ((tree, int, int));
185 static void preexpand_calls PARAMS ((tree));
186 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
187 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
188 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
189 rtx, rtx));
190 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
191
192 /* Record for each mode whether we can move a register directly to or
193 from an object of that mode in memory. If we can't, we won't try
194 to use that mode directly when accessing a field of that mode. */
195
196 static char direct_load[NUM_MACHINE_MODES];
197 static char direct_store[NUM_MACHINE_MODES];
198
199 /* If a memory-to-memory move would take MOVE_RATIO or more simple
200 move-instruction sequences, we will do a movstr or libcall instead. */
201
202 #ifndef MOVE_RATIO
203 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
204 #define MOVE_RATIO 2
205 #else
206 /* If we are optimizing for space (-Os), cut down the default move ratio. */
207 #define MOVE_RATIO (optimize_size ? 3 : 15)
208 #endif
209 #endif
210
211 /* This macro is used to determine whether move_by_pieces should be called
212 to perform a structure copy. */
213 #ifndef MOVE_BY_PIECES_P
214 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
215 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
216 #endif
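/* Illustrative sketch (editorial addition): a target whose movstr
   pattern is cheap could lower the threshold, or supply its own
   heuristic, from its target header. The values below are
   hypothetical and not taken from any real port. */
#if 0
#define MOVE_RATIO 3
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif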
217
218 /* This array records the insn_code of insns to perform block moves. */
219 enum insn_code movstr_optab[NUM_MACHINE_MODES];
220
221 /* This array records the insn_code of insns to perform block clears. */
222 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
223
224 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
225
226 #ifndef SLOW_UNALIGNED_ACCESS
227 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
228 #endif
229 \f
230 /* This is run once per compilation to set up which modes can be used
231 directly in memory and to initialize the block move optab. */
232
233 void
234 init_expr_once ()
235 {
236 rtx insn, pat;
237 enum machine_mode mode;
238 int num_clobbers;
239 rtx mem, mem1;
240 char *free_point;
241
242 start_sequence ();
243
244 /* Since we are on the permanent obstack, we must be sure we save this
245 spot AFTER we call start_sequence, since it will reuse the rtl it
246 makes. */
247 free_point = (char *) oballoc (0);
248
249 /* Try indexing by frame ptr and try by stack ptr.
250 It is known that on the Convex the stack ptr isn't a valid index.
251 With luck, one or the other is valid on any machine. */
252 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
253 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
254
255 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
256 pat = PATTERN (insn);
257
258 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
259 mode = (enum machine_mode) ((int) mode + 1))
260 {
261 int regno;
262 rtx reg;
263
264 direct_load[(int) mode] = direct_store[(int) mode] = 0;
265 PUT_MODE (mem, mode);
266 PUT_MODE (mem1, mode);
267
268 /* See if there is some register that can be used in this mode and
269 directly loaded or stored from memory. */
270
271 if (mode != VOIDmode && mode != BLKmode)
272 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
273 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
274 regno++)
275 {
276 if (! HARD_REGNO_MODE_OK (regno, mode))
277 continue;
278
279 reg = gen_rtx_REG (mode, regno);
280
281 SET_SRC (pat) = mem;
282 SET_DEST (pat) = reg;
283 if (recog (pat, insn, &num_clobbers) >= 0)
284 direct_load[(int) mode] = 1;
285
286 SET_SRC (pat) = mem1;
287 SET_DEST (pat) = reg;
288 if (recog (pat, insn, &num_clobbers) >= 0)
289 direct_load[(int) mode] = 1;
290
291 SET_SRC (pat) = reg;
292 SET_DEST (pat) = mem;
293 if (recog (pat, insn, &num_clobbers) >= 0)
294 direct_store[(int) mode] = 1;
295
296 SET_SRC (pat) = reg;
297 SET_DEST (pat) = mem1;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_store[(int) mode] = 1;
300 }
301 }
302
303 end_sequence ();
304 obfree (free_point);
305 }
306
307 /* This is run at the start of compiling a function. */
308
309 void
310 init_expr ()
311 {
312 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
313
314 pending_chain = 0;
315 pending_stack_adjust = 0;
316 stack_pointer_delta = 0;
317 inhibit_defer_pop = 0;
318 saveregs_value = 0;
319 apply_args_value = 0;
320 forced_labels = 0;
321 }
322
323 void
324 mark_expr_status (p)
325 struct expr_status *p;
326 {
327 if (p == NULL)
328 return;
329
330 ggc_mark_rtx (p->x_saveregs_value);
331 ggc_mark_rtx (p->x_apply_args_value);
332 ggc_mark_rtx (p->x_forced_labels);
333 }
334
335 void
336 free_expr_status (f)
337 struct function *f;
338 {
339 free (f->expr);
340 f->expr = NULL;
341 }
342
343 /* Small sanity check that the queue is empty at the end of a function. */
344
345 void
346 finish_expr_for_function ()
347 {
348 if (pending_chain)
349 abort ();
350 }
351 \f
352 /* Manage the queue of increment instructions to be output
353 for POSTINCREMENT_EXPR expressions, etc. */
354
355 /* Queue up to increment (or change) VAR later. BODY says how:
356 BODY should be the same thing you would pass to emit_insn
357 to increment right away. It will go to emit_insn later on.
358
359 The value is a QUEUED expression to be used in place of VAR
360 where you want to guarantee the pre-incrementation value of VAR. */
361
362 static rtx
363 enqueue_insn (var, body)
364 rtx var, body;
365 {
366 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
367 body, pending_chain);
368 return pending_chain;
369 }
370
371 /* Use protect_from_queue to convert a QUEUED expression
372 into something that you can put immediately into an instruction.
373 If the queued incrementation has not happened yet,
374 protect_from_queue returns the variable itself.
375 If the incrementation has happened, protect_from_queue returns a temp
376 that contains a copy of the old value of the variable.
377
378 Any time an rtx which might possibly be a QUEUED is to be put
379 into an instruction, it must be passed through protect_from_queue first.
380 QUEUED expressions are not meaningful in instructions.
381
382 Do not pass a value through protect_from_queue and then hold
383 on to it for a while before putting it in an instruction!
384 If the queue is flushed in between, incorrect code will result. */
385
386 rtx
387 protect_from_queue (x, modify)
388 register rtx x;
389 int modify;
390 {
391 register RTX_CODE code = GET_CODE (x);
392
393 #if 0 /* A QUEUED can hang around after the queue is forced out. */
394 /* Shortcut for most common case. */
395 if (pending_chain == 0)
396 return x;
397 #endif
398
399 if (code != QUEUED)
400 {
401 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
402 use of autoincrement. Make a copy of the contents of the memory
403 location rather than a copy of the address, but not if the value is
404 of mode BLKmode. Don't modify X in place since it might be
405 shared. */
406 if (code == MEM && GET_MODE (x) != BLKmode
407 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
408 {
409 register rtx y = XEXP (x, 0);
410 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
411
412 MEM_COPY_ATTRIBUTES (new, x);
413
414 if (QUEUED_INSN (y))
415 {
416 register rtx temp = gen_reg_rtx (GET_MODE (new));
417 emit_insn_before (gen_move_insn (temp, new),
418 QUEUED_INSN (y));
419 return temp;
420 }
421 return new;
422 }
423 /* Otherwise, recursively protect the subexpressions of all
424 the kinds of rtx's that can contain a QUEUED. */
425 if (code == MEM)
426 {
427 rtx tem = protect_from_queue (XEXP (x, 0), 0);
428 if (tem != XEXP (x, 0))
429 {
430 x = copy_rtx (x);
431 XEXP (x, 0) = tem;
432 }
433 }
434 else if (code == PLUS || code == MULT)
435 {
436 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
437 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
438 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
439 {
440 x = copy_rtx (x);
441 XEXP (x, 0) = new0;
442 XEXP (x, 1) = new1;
443 }
444 }
445 return x;
446 }
447 /* If the increment has not happened, use the variable itself. */
448 if (QUEUED_INSN (x) == 0)
449 return QUEUED_VAR (x);
450 /* If the increment has happened and a pre-increment copy exists,
451 use that copy. */
452 if (QUEUED_COPY (x) != 0)
453 return QUEUED_COPY (x);
454 /* The increment has happened but we haven't set up a pre-increment copy.
455 Set one up now, and use it. */
456 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
457 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
458 QUEUED_INSN (x));
459 return QUEUED_COPY (x);
460 }
461
462 /* Return nonzero if X contains a QUEUED expression:
463 if it contains anything that will be altered by a queued increment.
464 We handle only combinations of MEM, PLUS, MINUS and MULT operators
465 since memory addresses generally contain only those. */
466
467 int
468 queued_subexp_p (x)
469 rtx x;
470 {
471 register enum rtx_code code = GET_CODE (x);
472 switch (code)
473 {
474 case QUEUED:
475 return 1;
476 case MEM:
477 return queued_subexp_p (XEXP (x, 0));
478 case MULT:
479 case PLUS:
480 case MINUS:
481 return (queued_subexp_p (XEXP (x, 0))
482 || queued_subexp_p (XEXP (x, 1)));
483 default:
484 return 0;
485 }
486 }
487
488 /* Perform all the pending incrementations. */
489
490 void
491 emit_queue ()
492 {
493 register rtx p;
494 while ((p = pending_chain))
495 {
496 rtx body = QUEUED_BODY (p);
497
498 if (GET_CODE (body) == SEQUENCE)
499 {
500 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
501 emit_insn (QUEUED_BODY (p));
502 }
503 else
504 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
505 pending_chain = QUEUED_NEXT (p);
506 }
507 }
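/* Illustrative sketch (editorial addition, not from the original
   source): the usual shape of queue usage when expanding a
   post-increment. `var' is the variable's rtx and `inc' the add insn
   body; both are hypothetical. */
#if 0
static rtx
example_expand_post_increment (var, inc)
     rtx var, inc;
{
  /* Queue the increment; the QUEUED rtx stands for VAR's old value. */
  rtx queued = enqueue_insn (var, inc);
  rtx temp = gen_reg_rtx (GET_MODE (var));

  /* protect_from_queue must be called right before the value goes into
     an insn; do not hold its result across a queue flush. */
  emit_move_insn (temp, protect_from_queue (queued, 0));

  /* At a sequence point, flush all pending increments. */
  emit_queue ();
  return temp;
}
#endif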
508 \f
509 /* Copy data from FROM to TO, where the machine modes are not the same.
510 Both modes may be integer, or both may be floating.
511 UNSIGNEDP should be nonzero if FROM is an unsigned type.
512 This causes zero-extension instead of sign-extension. */
513
514 void
515 convert_move (to, from, unsignedp)
516 register rtx to, from;
517 int unsignedp;
518 {
519 enum machine_mode to_mode = GET_MODE (to);
520 enum machine_mode from_mode = GET_MODE (from);
521 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
522 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
523 enum insn_code code;
524 rtx libcall;
525
526 /* rtx code for making an equivalent value. */
527 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
528
529 to = protect_from_queue (to, 1);
530 from = protect_from_queue (from, 0);
531
532 if (to_real != from_real)
533 abort ();
534
535 /* If FROM is a SUBREG that indicates that we have already done at least
536 the required extension, strip it. We don't handle such SUBREGs as
537 TO here. */
538
539 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
540 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
541 >= GET_MODE_SIZE (to_mode))
542 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
543 from = gen_lowpart (to_mode, from), from_mode = to_mode;
544
545 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
546 abort ();
547
548 if (to_mode == from_mode
549 || (from_mode == VOIDmode && CONSTANT_P (from)))
550 {
551 emit_move_insn (to, from);
552 return;
553 }
554
555 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
556 {
557 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
558 abort ();
559
560 if (VECTOR_MODE_P (to_mode))
561 from = gen_rtx_SUBREG (to_mode, from, 0);
562 else
563 to = gen_rtx_SUBREG (from_mode, to, 0);
564
565 emit_move_insn (to, from);
566 return;
567 }
568
569 if (to_real != from_real)
570 abort ();
571
572 if (to_real)
573 {
574 rtx value, insns;
575
576 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
577 {
578 /* Try converting directly if the insn is supported. */
579 if ((code = can_extend_p (to_mode, from_mode, 0))
580 != CODE_FOR_nothing)
581 {
582 emit_unop_insn (code, to, from, UNKNOWN);
583 return;
584 }
585 }
586
587 #ifdef HAVE_trunchfqf2
588 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
589 {
590 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
591 return;
592 }
593 #endif
594 #ifdef HAVE_trunctqfqf2
595 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
596 {
597 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
598 return;
599 }
600 #endif
601 #ifdef HAVE_truncsfqf2
602 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
603 {
604 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
605 return;
606 }
607 #endif
608 #ifdef HAVE_truncdfqf2
609 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
610 {
611 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615 #ifdef HAVE_truncxfqf2
616 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
617 {
618 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622 #ifdef HAVE_trunctfqf2
623 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
624 {
625 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629
630 #ifdef HAVE_trunctqfhf2
631 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
632 {
633 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
634 return;
635 }
636 #endif
637 #ifdef HAVE_truncsfhf2
638 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
639 {
640 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
641 return;
642 }
643 #endif
644 #ifdef HAVE_truncdfhf2
645 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
646 {
647 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651 #ifdef HAVE_truncxfhf2
652 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
653 {
654 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658 #ifdef HAVE_trunctfhf2
659 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
660 {
661 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665
666 #ifdef HAVE_truncsftqf2
667 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
668 {
669 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
670 return;
671 }
672 #endif
673 #ifdef HAVE_truncdftqf2
674 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
675 {
676 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_truncxftqf2
681 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_trunctftqf2
688 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
689 {
690 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694
695 #ifdef HAVE_truncdfsf2
696 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
697 {
698 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
699 return;
700 }
701 #endif
702 #ifdef HAVE_truncxfsf2
703 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
704 {
705 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
706 return;
707 }
708 #endif
709 #ifdef HAVE_trunctfsf2
710 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
711 {
712 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
713 return;
714 }
715 #endif
716 #ifdef HAVE_truncxfdf2
717 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
718 {
719 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
720 return;
721 }
722 #endif
723 #ifdef HAVE_trunctfdf2
724 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
725 {
726 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
727 return;
728 }
729 #endif
730
731 libcall = (rtx) 0;
732 switch (from_mode)
733 {
734 case SFmode:
735 switch (to_mode)
736 {
737 case DFmode:
738 libcall = extendsfdf2_libfunc;
739 break;
740
741 case XFmode:
742 libcall = extendsfxf2_libfunc;
743 break;
744
745 case TFmode:
746 libcall = extendsftf2_libfunc;
747 break;
748
749 default:
750 break;
751 }
752 break;
753
754 case DFmode:
755 switch (to_mode)
756 {
757 case SFmode:
758 libcall = truncdfsf2_libfunc;
759 break;
760
761 case XFmode:
762 libcall = extenddfxf2_libfunc;
763 break;
764
765 case TFmode:
766 libcall = extenddftf2_libfunc;
767 break;
768
769 default:
770 break;
771 }
772 break;
773
774 case XFmode:
775 switch (to_mode)
776 {
777 case SFmode:
778 libcall = truncxfsf2_libfunc;
779 break;
780
781 case DFmode:
782 libcall = truncxfdf2_libfunc;
783 break;
784
785 default:
786 break;
787 }
788 break;
789
790 case TFmode:
791 switch (to_mode)
792 {
793 case SFmode:
794 libcall = trunctfsf2_libfunc;
795 break;
796
797 case DFmode:
798 libcall = trunctfdf2_libfunc;
799 break;
800
801 default:
802 break;
803 }
804 break;
805
806 default:
807 break;
808 }
809
810 if (libcall == (rtx) 0)
811 /* This conversion is not implemented yet. */
812 abort ();
813
814 start_sequence ();
815 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
816 1, from, from_mode);
817 insns = get_insns ();
818 end_sequence ();
819 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
820 from));
821 return;
822 }
823
824 /* Now both modes are integers. */
825
826 /* Handle expanding beyond a word. */
827 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
828 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
829 {
830 rtx insns;
831 rtx lowpart;
832 rtx fill_value;
833 rtx lowfrom;
834 int i;
835 enum machine_mode lowpart_mode;
836 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
837
838 /* Try converting directly if the insn is supported. */
839 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
840 != CODE_FOR_nothing)
841 {
842 /* If FROM is a SUBREG, put it into a register. Do this
843 so that we always generate the same set of insns for
844 better cse'ing; if an intermediate assignment occurred,
845 we won't be doing the operation directly on the SUBREG. */
846 if (optimize > 0 && GET_CODE (from) == SUBREG)
847 from = force_reg (from_mode, from);
848 emit_unop_insn (code, to, from, equiv_code);
849 return;
850 }
851 /* Next, try converting via full word. */
852 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
853 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
854 != CODE_FOR_nothing))
855 {
856 if (GET_CODE (to) == REG)
857 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
858 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
859 emit_unop_insn (code, to,
860 gen_lowpart (word_mode, to), equiv_code);
861 return;
862 }
863
864 /* No special multiword conversion insn; do it by hand. */
865 start_sequence ();
866
867 /* Since we will turn this into a no conflict block, we must ensure
868 that the source does not overlap the target. */
869
870 if (reg_overlap_mentioned_p (to, from))
871 from = force_reg (from_mode, from);
872
873 /* Get a copy of FROM widened to a word, if necessary. */
874 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
875 lowpart_mode = word_mode;
876 else
877 lowpart_mode = from_mode;
878
879 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
880
881 lowpart = gen_lowpart (lowpart_mode, to);
882 emit_move_insn (lowpart, lowfrom);
883
884 /* Compute the value to put in each remaining word. */
885 if (unsignedp)
886 fill_value = const0_rtx;
887 else
888 {
889 #ifdef HAVE_slt
890 if (HAVE_slt
891 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
892 && STORE_FLAG_VALUE == -1)
893 {
894 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
895 lowpart_mode, 0, 0);
896 fill_value = gen_reg_rtx (word_mode);
897 emit_insn (gen_slt (fill_value));
898 }
899 else
900 #endif
901 {
902 fill_value
903 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
904 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
905 NULL_RTX, 0);
906 fill_value = convert_to_mode (word_mode, fill_value, 1);
907 }
908 }
909
910 /* Fill the remaining words. */
911 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
912 {
913 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
914 rtx subword = operand_subword (to, index, 1, to_mode);
915
916 if (subword == 0)
917 abort ();
918
919 if (fill_value != subword)
920 emit_move_insn (subword, fill_value);
921 }
922
923 insns = get_insns ();
924 end_sequence ();
925
926 emit_no_conflict_block (insns, to, from, NULL_RTX,
927 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
928 return;
929 }
930
931 /* Truncating multi-word to a word or less. */
932 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
933 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
934 {
935 if (!((GET_CODE (from) == MEM
936 && ! MEM_VOLATILE_P (from)
937 && direct_load[(int) to_mode]
938 && ! mode_dependent_address_p (XEXP (from, 0)))
939 || GET_CODE (from) == REG
940 || GET_CODE (from) == SUBREG))
941 from = force_reg (from_mode, from);
942 convert_move (to, gen_lowpart (word_mode, from), 0);
943 return;
944 }
945
946 /* Handle pointer conversion. */ /* SPEE 900220. */
947 if (to_mode == PQImode)
948 {
949 if (from_mode != QImode)
950 from = convert_to_mode (QImode, from, unsignedp);
951
952 #ifdef HAVE_truncqipqi2
953 if (HAVE_truncqipqi2)
954 {
955 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
956 return;
957 }
958 #endif /* HAVE_truncqipqi2 */
959 abort ();
960 }
961
962 if (from_mode == PQImode)
963 {
964 if (to_mode != QImode)
965 {
966 from = convert_to_mode (QImode, from, unsignedp);
967 from_mode = QImode;
968 }
969 else
970 {
971 #ifdef HAVE_extendpqiqi2
972 if (HAVE_extendpqiqi2)
973 {
974 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
975 return;
976 }
977 #endif /* HAVE_extendpqiqi2 */
978 abort ();
979 }
980 }
981
982 if (to_mode == PSImode)
983 {
984 if (from_mode != SImode)
985 from = convert_to_mode (SImode, from, unsignedp);
986
987 #ifdef HAVE_truncsipsi2
988 if (HAVE_truncsipsi2)
989 {
990 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
991 return;
992 }
993 #endif /* HAVE_truncsipsi2 */
994 abort ();
995 }
996
997 if (from_mode == PSImode)
998 {
999 if (to_mode != SImode)
1000 {
1001 from = convert_to_mode (SImode, from, unsignedp);
1002 from_mode = SImode;
1003 }
1004 else
1005 {
1006 #ifdef HAVE_extendpsisi2
1007 if (HAVE_extendpsisi2)
1008 {
1009 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1010 return;
1011 }
1012 #endif /* HAVE_extendpsisi2 */
1013 abort ();
1014 }
1015 }
1016
1017 if (to_mode == PDImode)
1018 {
1019 if (from_mode != DImode)
1020 from = convert_to_mode (DImode, from, unsignedp);
1021
1022 #ifdef HAVE_truncdipdi2
1023 if (HAVE_truncdipdi2)
1024 {
1025 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1026 return;
1027 }
1028 #endif /* HAVE_truncdipdi2 */
1029 abort ();
1030 }
1031
1032 if (from_mode == PDImode)
1033 {
1034 if (to_mode != DImode)
1035 {
1036 from = convert_to_mode (DImode, from, unsignedp);
1037 from_mode = DImode;
1038 }
1039 else
1040 {
1041 #ifdef HAVE_extendpdidi2
1042 if (HAVE_extendpdidi2)
1043 {
1044 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1045 return;
1046 }
1047 #endif /* HAVE_extendpdidi2 */
1048 abort ();
1049 }
1050 }
1051
1052 /* Now follow all the conversions between integers
1053 no more than a word long. */
1054
1055 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1056 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1057 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1058 GET_MODE_BITSIZE (from_mode)))
1059 {
1060 if (!((GET_CODE (from) == MEM
1061 && ! MEM_VOLATILE_P (from)
1062 && direct_load[(int) to_mode]
1063 && ! mode_dependent_address_p (XEXP (from, 0)))
1064 || GET_CODE (from) == REG
1065 || GET_CODE (from) == SUBREG))
1066 from = force_reg (from_mode, from);
1067 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1068 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1069 from = copy_to_reg (from);
1070 emit_move_insn (to, gen_lowpart (to_mode, from));
1071 return;
1072 }
1073
1074 /* Handle extension. */
1075 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1076 {
1077 /* Convert directly if that works. */
1078 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1079 != CODE_FOR_nothing)
1080 {
1081 emit_unop_insn (code, to, from, equiv_code);
1082 return;
1083 }
1084 else
1085 {
1086 enum machine_mode intermediate;
1087 rtx tmp;
1088 tree shift_amount;
1089
1090 /* Search for a mode to convert via. */
1091 for (intermediate = from_mode; intermediate != VOIDmode;
1092 intermediate = GET_MODE_WIDER_MODE (intermediate))
1093 if (((can_extend_p (to_mode, intermediate, unsignedp)
1094 != CODE_FOR_nothing)
1095 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1096 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1097 GET_MODE_BITSIZE (intermediate))))
1098 && (can_extend_p (intermediate, from_mode, unsignedp)
1099 != CODE_FOR_nothing))
1100 {
1101 convert_move (to, convert_to_mode (intermediate, from,
1102 unsignedp), unsignedp);
1103 return;
1104 }
1105
1106 /* No suitable intermediate mode.
1107 Generate what we need with shifts. */
1108 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1109 - GET_MODE_BITSIZE (from_mode), 0);
1110 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1111 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1112 to, unsignedp);
1113 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1114 to, unsignedp);
1115 if (tmp != to)
1116 emit_move_insn (to, tmp);
1117 return;
1118 }
1119 }
1120
1121 /* Support special truncate insns for certain modes. */
1122
1123 if (from_mode == DImode && to_mode == SImode)
1124 {
1125 #ifdef HAVE_truncdisi2
1126 if (HAVE_truncdisi2)
1127 {
1128 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1129 return;
1130 }
1131 #endif
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1133 return;
1134 }
1135
1136 if (from_mode == DImode && to_mode == HImode)
1137 {
1138 #ifdef HAVE_truncdihi2
1139 if (HAVE_truncdihi2)
1140 {
1141 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1142 return;
1143 }
1144 #endif
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1146 return;
1147 }
1148
1149 if (from_mode == DImode && to_mode == QImode)
1150 {
1151 #ifdef HAVE_truncdiqi2
1152 if (HAVE_truncdiqi2)
1153 {
1154 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1155 return;
1156 }
1157 #endif
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 return;
1160 }
1161
1162 if (from_mode == SImode && to_mode == HImode)
1163 {
1164 #ifdef HAVE_truncsihi2
1165 if (HAVE_truncsihi2)
1166 {
1167 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1168 return;
1169 }
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1173 }
1174
1175 if (from_mode == SImode && to_mode == QImode)
1176 {
1177 #ifdef HAVE_truncsiqi2
1178 if (HAVE_truncsiqi2)
1179 {
1180 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1181 return;
1182 }
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1186 }
1187
1188 if (from_mode == HImode && to_mode == QImode)
1189 {
1190 #ifdef HAVE_trunchiqi2
1191 if (HAVE_trunchiqi2)
1192 {
1193 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1194 return;
1195 }
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1199 }
1200
1201 if (from_mode == TImode && to_mode == DImode)
1202 {
1203 #ifdef HAVE_trunctidi2
1204 if (HAVE_trunctidi2)
1205 {
1206 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1207 return;
1208 }
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1212 }
1213
1214 if (from_mode == TImode && to_mode == SImode)
1215 {
1216 #ifdef HAVE_trunctisi2
1217 if (HAVE_trunctisi2)
1218 {
1219 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1220 return;
1221 }
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1225 }
1226
1227 if (from_mode == TImode && to_mode == HImode)
1228 {
1229 #ifdef HAVE_trunctihi2
1230 if (HAVE_trunctihi2)
1231 {
1232 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1233 return;
1234 }
1235 #endif
1236 convert_move (to, force_reg (from_mode, from), unsignedp);
1237 return;
1238 }
1239
1240 if (from_mode == TImode && to_mode == QImode)
1241 {
1242 #ifdef HAVE_trunctiqi2
1243 if (HAVE_trunctiqi2)
1244 {
1245 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1246 return;
1247 }
1248 #endif
1249 convert_move (to, force_reg (from_mode, from), unsignedp);
1250 return;
1251 }
1252
1253 /* Handle truncation of volatile memrefs, and so on;
1254 the things that couldn't be truncated directly,
1255 and for which there was no special instruction. */
1256 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1257 {
1258 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1259 emit_move_insn (to, temp);
1260 return;
1261 }
1262
1263 /* Mode combination is not recognized. */
1264 abort ();
1265 }
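/* Illustrative sketch (editorial addition): widening a QImode value
   into a fresh SImode pseudo with sign extension. `qi_val' is a
   hypothetical QImode rtx. */
#if 0
static rtx
example_widen_qi_to_si (qi_val)
     rtx qi_val;
{
  rtx target = gen_reg_rtx (SImode);

  /* UNSIGNEDP == 0 requests sign extension. */
  convert_move (target, qi_val, 0);
  return target;
}
#endif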
1266
1267 /* Return an rtx for a value that would result
1268 from converting X to mode MODE.
1269 Both X and MODE may be floating, or both integer.
1270 UNSIGNEDP is nonzero if X is an unsigned value.
1271 This can be done by referring to a part of X in place
1272 or by copying to a new temporary with conversion.
1273
1274 This function *must not* call protect_from_queue
1275 except when putting X into an insn (in which case convert_move does it). */
1276
1277 rtx
1278 convert_to_mode (mode, x, unsignedp)
1279 enum machine_mode mode;
1280 rtx x;
1281 int unsignedp;
1282 {
1283 return convert_modes (mode, VOIDmode, x, unsignedp);
1284 }
1285
1286 /* Return an rtx for a value that would result
1287 from converting X from mode OLDMODE to mode MODE.
1288 Both modes may be floating, or both integer.
1289 UNSIGNEDP is nonzero if X is an unsigned value.
1290
1291 This can be done by referring to a part of X in place
1292 or by copying to a new temporary with conversion.
1293
1294 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1295
1296 This function *must not* call protect_from_queue
1297 except when putting X into an insn (in which case convert_move does it). */
1298
1299 rtx
1300 convert_modes (mode, oldmode, x, unsignedp)
1301 enum machine_mode mode, oldmode;
1302 rtx x;
1303 int unsignedp;
1304 {
1305 register rtx temp;
1306
1307 /* If FROM is a SUBREG that indicates that we have already done at least
1308 the required extension, strip it. */
1309
1310 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1311 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1312 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1313 x = gen_lowpart (mode, x);
1314
1315 if (GET_MODE (x) != VOIDmode)
1316 oldmode = GET_MODE (x);
1317
1318 if (mode == oldmode)
1319 return x;
1320
1321 /* There is one case that we must handle specially: If we are converting
1322 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1323 we are to interpret the constant as unsigned, gen_lowpart will do
1324 the wrong thing if the constant appears negative. What we want to do is
1325 make the high-order word of the constant zero, not all ones. */
1326
1327 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1328 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1329 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1330 {
1331 HOST_WIDE_INT val = INTVAL (x);
1332
1333 if (oldmode != VOIDmode
1334 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1335 {
1336 int width = GET_MODE_BITSIZE (oldmode);
1337
1338 /* We need to zero extend VAL. */
1339 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1340 }
1341
1342 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1343 }
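/* Worked example (editorial note, not in the original source): with a
   32-bit HOST_WIDE_INT, converting (const_int -1) seen as an unsigned
   HImode value to DImode takes the branch above: VAL is masked down to
   0xffff and immed_double_const (0xffff, 0, DImode) is returned, so
   the high-order word comes out zero rather than all ones. */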
1344
1345 /* We can do this with a gen_lowpart if both desired and current modes
1346 are integer, and this is either a constant integer, a register, or a
1347 non-volatile MEM. Except for the constant case where MODE is no
1348 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1349
1350 if ((GET_CODE (x) == CONST_INT
1351 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1352 || (GET_MODE_CLASS (mode) == MODE_INT
1353 && GET_MODE_CLASS (oldmode) == MODE_INT
1354 && (GET_CODE (x) == CONST_DOUBLE
1355 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1356 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1357 && direct_load[(int) mode])
1358 || (GET_CODE (x) == REG
1359 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1360 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1361 {
1362 /* ?? If we don't know OLDMODE, we have to assume here that
1363 X does not need sign- or zero-extension. This may not be
1364 the case, but it's the best we can do. */
1365 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1366 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1367 {
1368 HOST_WIDE_INT val = INTVAL (x);
1369 int width = GET_MODE_BITSIZE (oldmode);
1370
1371 /* We must sign or zero-extend in this case. Start by
1372 zero-extending, then sign extend if we need to. */
1373 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1374 if (! unsignedp
1375 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1376 val |= (HOST_WIDE_INT) (-1) << width;
1377
1378 return GEN_INT (val);
1379 }
1380
1381 return gen_lowpart (mode, x);
1382 }
1383
1384 temp = gen_reg_rtx (mode);
1385 convert_move (temp, x, unsignedp);
1386 return temp;
1387 }
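/* Illustrative sketch (editorial addition): zero-extending an SImode
   value to DImode without forcing a copy when none is needed. `x' is
   a hypothetical SImode rtx. */
#if 0
static rtx
example_zero_extend_si_to_di (x)
     rtx x;
{
  /* convert_modes returns X itself, a constant, or a new pseudo holding
     the converted value; UNSIGNEDP == 1 requests zero extension. */
  return convert_modes (DImode, SImode, x, 1);
}
#endif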
1388 \f
1389 /* This macro is used to determine the largest unit size that
1390 move_by_pieces can use. */
1391
1392 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1393 move efficiently, as opposed to MOVE_MAX which is the maximum
1394 number of bytes we can move with a single instruction. */
1395
1396 #ifndef MOVE_MAX_PIECES
1397 #define MOVE_MAX_PIECES MOVE_MAX
1398 #endif
1399
1400 /* Generate several move instructions to copy LEN bytes
1401 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1402 The caller must pass FROM and TO
1403 through protect_from_queue before calling.
1404 ALIGN is maximum alignment we can assume. */
1405
1406 void
1407 move_by_pieces (to, from, len, align)
1408 rtx to, from;
1409 unsigned HOST_WIDE_INT len;
1410 unsigned int align;
1411 {
1412 struct move_by_pieces data;
1413 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1414 unsigned int max_size = MOVE_MAX_PIECES + 1;
1415 enum machine_mode mode = VOIDmode, tmode;
1416 enum insn_code icode;
1417
1418 data.offset = 0;
1419 data.to_addr = to_addr;
1420 data.from_addr = from_addr;
1421 data.to = to;
1422 data.from = from;
1423 data.autinc_to
1424 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1425 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1426 data.autinc_from
1427 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1428 || GET_CODE (from_addr) == POST_INC
1429 || GET_CODE (from_addr) == POST_DEC);
1430
1431 data.explicit_inc_from = 0;
1432 data.explicit_inc_to = 0;
1433 data.reverse
1434 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1435 if (data.reverse) data.offset = len;
1436 data.len = len;
1437
1438 /* If copying requires more than two move insns,
1439 copy addresses to registers (to make displacements shorter)
1440 and use post-increment if available. */
1441 if (!(data.autinc_from && data.autinc_to)
1442 && move_by_pieces_ninsns (len, align) > 2)
1443 {
1444 /* Find the mode of the largest move... */
1445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1447 if (GET_MODE_SIZE (tmode) < max_size)
1448 mode = tmode;
1449
1450 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1451 {
1452 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1453 data.autinc_from = 1;
1454 data.explicit_inc_from = -1;
1455 }
1456 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1457 {
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 data.autinc_from = 1;
1460 data.explicit_inc_from = 1;
1461 }
1462 if (!data.autinc_from && CONSTANT_P (from_addr))
1463 data.from_addr = copy_addr_to_reg (from_addr);
1464 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1465 {
1466 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1467 data.autinc_to = 1;
1468 data.explicit_inc_to = -1;
1469 }
1470 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1471 {
1472 data.to_addr = copy_addr_to_reg (to_addr);
1473 data.autinc_to = 1;
1474 data.explicit_inc_to = 1;
1475 }
1476 if (!data.autinc_to && CONSTANT_P (to_addr))
1477 data.to_addr = copy_addr_to_reg (to_addr);
1478 }
1479
1480 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1481 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1482 align = MOVE_MAX * BITS_PER_UNIT;
1483
1484 /* First move what we can in the largest integer mode, then go to
1485 successively smaller modes. */
1486
1487 while (max_size > 1)
1488 {
1489 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1490 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1491 if (GET_MODE_SIZE (tmode) < max_size)
1492 mode = tmode;
1493
1494 if (mode == VOIDmode)
1495 break;
1496
1497 icode = mov_optab->handlers[(int) mode].insn_code;
1498 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1499 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1500
1501 max_size = GET_MODE_SIZE (mode);
1502 }
1503
1504 /* The code above should have handled everything. */
1505 if (data.len > 0)
1506 abort ();
1507 }
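/* Illustrative sketch (editorial addition): how a caller typically
   chooses between the piecewise copy and the general block-move path.
   X and Y are BLKmode MEMs, SIZE an rtx byte count and ALIGN the known
   alignment in bits; all hypothetical. */
#if 0
  if (GET_CODE (size) == CONST_INT
      && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    emit_block_move (x, y, size, align);
#endif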
1508
1509 /* Return number of insns required to move L bytes by pieces.
1510 ALIGN (in bits) is the maximum alignment we can assume. */
1511
1512 static unsigned HOST_WIDE_INT
1513 move_by_pieces_ninsns (l, align)
1514 unsigned HOST_WIDE_INT l;
1515 unsigned int align;
1516 {
1517 unsigned HOST_WIDE_INT n_insns = 0;
1518 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1519
1520 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1521 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1522 align = MOVE_MAX * BITS_PER_UNIT;
1523
1524 while (max_size > 1)
1525 {
1526 enum machine_mode mode = VOIDmode, tmode;
1527 enum insn_code icode;
1528
1529 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1530 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1531 if (GET_MODE_SIZE (tmode) < max_size)
1532 mode = tmode;
1533
1534 if (mode == VOIDmode)
1535 break;
1536
1537 icode = mov_optab->handlers[(int) mode].insn_code;
1538 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1539 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1540
1541 max_size = GET_MODE_SIZE (mode);
1542 }
1543
1544 return n_insns;
1545 }
1546
1547 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1548 with move instructions for mode MODE. GENFUN is the gen_... function
1549 to make a move insn for that mode. DATA has all the other info. */
1550
1551 static void
1552 move_by_pieces_1 (genfun, mode, data)
1553 rtx (*genfun) PARAMS ((rtx, ...));
1554 enum machine_mode mode;
1555 struct move_by_pieces *data;
1556 {
1557 unsigned int size = GET_MODE_SIZE (mode);
1558 rtx to1, from1;
1559
1560 while (data->len >= size)
1561 {
1562 if (data->reverse)
1563 data->offset -= size;
1564
1565 if (data->autinc_to)
1566 {
1567 to1 = gen_rtx_MEM (mode, data->to_addr);
1568 MEM_COPY_ATTRIBUTES (to1, data->to);
1569 }
1570 else
1571 to1 = change_address (data->to, mode,
1572 plus_constant (data->to_addr, data->offset));
1573
1574 if (data->autinc_from)
1575 {
1576 from1 = gen_rtx_MEM (mode, data->from_addr);
1577 MEM_COPY_ATTRIBUTES (from1, data->from);
1578 }
1579 else
1580 from1 = change_address (data->from, mode,
1581 plus_constant (data->from_addr, data->offset));
1582
1583 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1584 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1585 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1586 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1587
1588 emit_insn ((*genfun) (to1, from1));
1589
1590 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1591 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1592 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1593 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1594
1595 if (! data->reverse)
1596 data->offset += size;
1597
1598 data->len -= size;
1599 }
1600 }
1601 \f
1602 /* Emit code to move a block Y to a block X.
1603 This may be done with string-move instructions,
1604 with multiple scalar move instructions, or with a library call.
1605
1606 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1607 with mode BLKmode.
1608 SIZE is an rtx that says how long they are.
1609 ALIGN is the maximum alignment we can assume they have.
1610
1611 Return the address of the new block, if memcpy is called and returns it,
1612 0 otherwise. */
1613
1614 rtx
1615 emit_block_move (x, y, size, align)
1616 rtx x, y;
1617 rtx size;
1618 unsigned int align;
1619 {
1620 rtx retval = 0;
1621 #ifdef TARGET_MEM_FUNCTIONS
1622 static tree fn;
1623 tree call_expr, arg_list;
1624 #endif
1625
1626 if (GET_MODE (x) != BLKmode)
1627 abort ();
1628
1629 if (GET_MODE (y) != BLKmode)
1630 abort ();
1631
1632 x = protect_from_queue (x, 1);
1633 y = protect_from_queue (y, 0);
1634 size = protect_from_queue (size, 0);
1635
1636 if (GET_CODE (x) != MEM)
1637 abort ();
1638 if (GET_CODE (y) != MEM)
1639 abort ();
1640 if (size == 0)
1641 abort ();
1642
1643 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1644 move_by_pieces (x, y, INTVAL (size), align);
1645 else
1646 {
1647 /* Try the most limited insn first, because there's no point
1648 including more than one in the machine description unless
1649 the more limited one has some advantage. */
1650
1651 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1652 enum machine_mode mode;
1653
1654 /* Since this is a move insn, we don't care about volatility. */
1655 volatile_ok = 1;
1656
1657 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1658 mode = GET_MODE_WIDER_MODE (mode))
1659 {
1660 enum insn_code code = movstr_optab[(int) mode];
1661 insn_operand_predicate_fn pred;
1662
1663 if (code != CODE_FOR_nothing
1664 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1665 here because if SIZE is less than the mode mask, as it is
1666 returned by the macro, it will definitely be less than the
1667 actual mode mask. */
1668 && ((GET_CODE (size) == CONST_INT
1669 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1670 <= (GET_MODE_MASK (mode) >> 1)))
1671 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1672 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1673 || (*pred) (x, BLKmode))
1674 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1675 || (*pred) (y, BLKmode))
1676 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1677 || (*pred) (opalign, VOIDmode)))
1678 {
1679 rtx op2;
1680 rtx last = get_last_insn ();
1681 rtx pat;
1682
1683 op2 = convert_to_mode (mode, size, 1);
1684 pred = insn_data[(int) code].operand[2].predicate;
1685 if (pred != 0 && ! (*pred) (op2, mode))
1686 op2 = copy_to_mode_reg (mode, op2);
1687
1688 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1689 if (pat)
1690 {
1691 emit_insn (pat);
1692 volatile_ok = 0;
1693 return 0;
1694 }
1695 else
1696 delete_insns_since (last);
1697 }
1698 }
1699
1700 volatile_ok = 0;
1701
1702 /* X, Y, or SIZE may have been passed through protect_from_queue.
1703
1704 It is unsafe to save the value generated by protect_from_queue
1705 and reuse it later. Consider what happens if emit_queue is
1706 called before the return value from protect_from_queue is used.
1707
1708 Expansion of the CALL_EXPR below will call emit_queue before
1709 we are finished emitting RTL for argument setup. So if we are
1710 not careful we could get the wrong value for an argument.
1711
1712 To avoid this problem we go ahead and emit code to copy X, Y &
1713 SIZE into new pseudos. We can then place those new pseudos
1714 into an RTL_EXPR and use them later, even after a call to
1715 emit_queue.
1716
1717 Note this is not strictly needed for library calls since they
1718 do not call emit_queue before loading their arguments. However,
1719 we may need to have library calls call emit_queue in the future
1720 since failing to do so could cause problems for targets which
1721 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1722 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1723 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1724
1725 #ifdef TARGET_MEM_FUNCTIONS
1726 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1727 #else
1728 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1729 TREE_UNSIGNED (integer_type_node));
1730 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1731 #endif
1732
1733 #ifdef TARGET_MEM_FUNCTIONS
1734 /* It is incorrect to use the libcall calling conventions to call
1735 memcpy in this context.
1736
1737 This could be a user call to memcpy and the user may wish to
1738 examine the return value from memcpy.
1739
1740 For targets where libcalls and normal calls have different conventions
1741 for returning pointers, we could end up generating incorrect code.
1742
1743 So instead of using a libcall sequence we build up a suitable
1744 CALL_EXPR and expand the call in the normal fashion. */
1745 if (fn == NULL_TREE)
1746 {
1747 tree fntype;
1748
1749 /* This was copied from except.c, I don't know if all this is
1750 necessary in this context or not. */
1751 fn = get_identifier ("memcpy");
1752 push_obstacks_nochange ();
1753 end_temporary_allocation ();
1754 fntype = build_pointer_type (void_type_node);
1755 fntype = build_function_type (fntype, NULL_TREE);
1756 fn = build_decl (FUNCTION_DECL, fn, fntype);
1757 ggc_add_tree_root (&fn, 1);
1758 DECL_EXTERNAL (fn) = 1;
1759 TREE_PUBLIC (fn) = 1;
1760 DECL_ARTIFICIAL (fn) = 1;
1761 make_decl_rtl (fn, NULL_PTR, 1);
1762 assemble_external (fn);
1763 pop_obstacks ();
1764 }
1765
1766 /* We need to make an argument list for the function call.
1767
1768 memcpy has three arguments, the first two are void * addresses and
1769 the last is a size_t byte count for the copy. */
1770 arg_list
1771 = build_tree_list (NULL_TREE,
1772 make_tree (build_pointer_type (void_type_node), x));
1773 TREE_CHAIN (arg_list)
1774 = build_tree_list (NULL_TREE,
1775 make_tree (build_pointer_type (void_type_node), y));
1776 TREE_CHAIN (TREE_CHAIN (arg_list))
1777 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1778 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1779
1780 /* Now we have to build up the CALL_EXPR itself. */
1781 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1782 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1783 call_expr, arg_list, NULL_TREE);
1784 TREE_SIDE_EFFECTS (call_expr) = 1;
1785
1786 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1787 #else
1788 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1789 VOIDmode, 3, y, Pmode, x, Pmode,
1790 convert_to_mode (TYPE_MODE (integer_type_node), size,
1791 TREE_UNSIGNED (integer_type_node)),
1792 TYPE_MODE (integer_type_node));
1793 #endif
1794 }
1795
1796 return retval;
1797 }
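/* Illustrative sketch (editorial addition): copying a fixed-size
   aggregate with emit_block_move. `dst' and `src' are hypothetical
   BLKmode MEMs for objects known to be 24 bytes long and word
   aligned; the alignment argument is in bits. */
#if 0
  emit_block_move (dst, src, GEN_INT (24), BITS_PER_WORD);
#endif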
1798 \f
1799 /* Copy all or part of a value X into registers starting at REGNO.
1800 The number of registers to be filled is NREGS. */
1801
1802 void
1803 move_block_to_reg (regno, x, nregs, mode)
1804 int regno;
1805 rtx x;
1806 int nregs;
1807 enum machine_mode mode;
1808 {
1809 int i;
1810 #ifdef HAVE_load_multiple
1811 rtx pat;
1812 rtx last;
1813 #endif
1814
1815 if (nregs == 0)
1816 return;
1817
1818 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1819 x = validize_mem (force_const_mem (mode, x));
1820
1821 /* See if the machine can do this with a load multiple insn. */
1822 #ifdef HAVE_load_multiple
1823 if (HAVE_load_multiple)
1824 {
1825 last = get_last_insn ();
1826 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1827 GEN_INT (nregs));
1828 if (pat)
1829 {
1830 emit_insn (pat);
1831 return;
1832 }
1833 else
1834 delete_insns_since (last);
1835 }
1836 #endif
1837
1838 for (i = 0; i < nregs; i++)
1839 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1840 operand_subword_force (x, i, mode));
1841 }
1842
1843 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1844 The number of registers to be filled is NREGS. SIZE indicates the number
1845 of bytes in the object X. */
1846
1847 void
1848 move_block_from_reg (regno, x, nregs, size)
1849 int regno;
1850 rtx x;
1851 int nregs;
1852 int size;
1853 {
1854 int i;
1855 #ifdef HAVE_store_multiple
1856 rtx pat;
1857 rtx last;
1858 #endif
1859 enum machine_mode mode;
1860
1861 /* If SIZE is that of a mode no bigger than a word, just use that
1862 mode's store operation. */
1863 if (size <= UNITS_PER_WORD
1864 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1865 {
1866 emit_move_insn (change_address (x, mode, NULL),
1867 gen_rtx_REG (mode, regno));
1868 return;
1869 }
1870
1871 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1872 to the left before storing to memory. Note that the previous test
1873 doesn't handle all cases (e.g. SIZE == 3). */
1874 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1875 {
1876 rtx tem = operand_subword (x, 0, 1, BLKmode);
1877 rtx shift;
1878
1879 if (tem == 0)
1880 abort ();
1881
1882 shift = expand_shift (LSHIFT_EXPR, word_mode,
1883 gen_rtx_REG (word_mode, regno),
1884 build_int_2 ((UNITS_PER_WORD - size)
1885 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1886 emit_move_insn (tem, shift);
1887 return;
1888 }
1889
1890 /* See if the machine can do this with a store multiple insn. */
1891 #ifdef HAVE_store_multiple
1892 if (HAVE_store_multiple)
1893 {
1894 last = get_last_insn ();
1895 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1896 GEN_INT (nregs));
1897 if (pat)
1898 {
1899 emit_insn (pat);
1900 return;
1901 }
1902 else
1903 delete_insns_since (last);
1904 }
1905 #endif
1906
1907 for (i = 0; i < nregs; i++)
1908 {
1909 rtx tem = operand_subword (x, i, 1, BLKmode);
1910
1911 if (tem == 0)
1912 abort ();
1913
1914 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1915 }
1916 }
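/* Illustrative sketch (editorial addition): spilling a three-register
   block to a stack slot and reloading it. `stack_mem' is a
   hypothetical BLKmode MEM and FIRST_ARG_REGNO a hypothetical hard
   register number. */
#if 0
  /* Registers -> memory; the last argument is the object's size in
     bytes. */
  move_block_from_reg (FIRST_ARG_REGNO, stack_mem, 3, 3 * UNITS_PER_WORD);

  /* Memory -> registers; the last argument is the mode of the value. */
  move_block_to_reg (FIRST_ARG_REGNO, stack_mem, 3, BLKmode);
#endif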
1917
1918 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1919 registers represented by a PARALLEL. SSIZE represents the total size of
1920 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1921 SRC in bits. */
1922 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1923 the balance will be in what would be the low-order memory addresses, i.e.
1924 left justified for big endian, right justified for little endian. This
1925 happens to be true for the targets currently using this support. If this
1926 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1927 would be needed. */
1928
1929 void
1930 emit_group_load (dst, orig_src, ssize, align)
1931 rtx dst, orig_src;
1932 unsigned int align;
1933 int ssize;
1934 {
1935 rtx *tmps, src;
1936 int start, i;
1937
1938 if (GET_CODE (dst) != PARALLEL)
1939 abort ();
1940
1941 /* Check for a NULL entry, used to indicate that the parameter goes
1942 both on the stack and in registers. */
1943 if (XEXP (XVECEXP (dst, 0, 0), 0))
1944 start = 0;
1945 else
1946 start = 1;
1947
1948 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1949
1950 /* If we won't be loading directly from memory, protect the real source
1951 from strange tricks we might play. */
1952 src = orig_src;
1953 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1954 {
1955 if (GET_MODE (src) == VOIDmode)
1956 src = gen_reg_rtx (GET_MODE (dst));
1957 else
1958 src = gen_reg_rtx (GET_MODE (orig_src));
1959 emit_move_insn (src, orig_src);
1960 }
1961
1962 /* Process the pieces. */
1963 for (i = start; i < XVECLEN (dst, 0); i++)
1964 {
1965 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1966 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1967 unsigned int bytelen = GET_MODE_SIZE (mode);
1968 int shift = 0;
1969
1970 /* Handle trailing fragments that run over the size of the struct. */
1971 if (ssize >= 0 && bytepos + bytelen > ssize)
1972 {
1973 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1974 bytelen = ssize - bytepos;
1975 if (bytelen <= 0)
1976 abort ();
1977 }
1978
1979 /* Optimize the access just a bit. */
1980 if (GET_CODE (src) == MEM
1981 && align >= GET_MODE_ALIGNMENT (mode)
1982 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1983 && bytelen == GET_MODE_SIZE (mode))
1984 {
1985 tmps[i] = gen_reg_rtx (mode);
1986 emit_move_insn (tmps[i],
1987 change_address (src, mode,
1988 plus_constant (XEXP (src, 0),
1989 bytepos)));
1990 }
1991 else if (GET_CODE (src) == CONCAT)
1992 {
1993 if (bytepos == 0
1994 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1995 tmps[i] = XEXP (src, 0);
1996 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1997 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1998 tmps[i] = XEXP (src, 1);
1999 else
2000 abort ();
2001 }
2002 else if ((CONSTANT_P (src)
2003 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2004 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2005 tmps[i] = src;
2006 else
2007 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2008 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2009 mode, mode, align, ssize);
2010
2011 if (BYTES_BIG_ENDIAN && shift)
2012 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2013 tmps[i], 0, OPTAB_WIDEN);
2014 }
2015
2016 emit_queue ();
2017
2018 /* Copy the extracted pieces into the proper (probable) hard regs. */
2019 for (i = start; i < XVECLEN (dst, 0); i++)
2020 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2021 }
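
/* Illustrative sketch only, not compiled: building the kind of PARALLEL
   this routine expects (two DImode registers at byte offsets 0 and 8;
   the register numbers, sizes and modes are made up) and filling it
   from a 16-byte, word-aligned BLKmode MEM called SRC.  */
#if 0
{
  rtx dst
    = gen_rtx_PARALLEL
      (VOIDmode,
       gen_rtvec (2,
		  gen_rtx_EXPR_LIST (VOIDmode,
				     gen_rtx_REG (DImode, 4), const0_rtx),
		  gen_rtx_EXPR_LIST (VOIDmode,
				     gen_rtx_REG (DImode, 5), GEN_INT (8))));

  emit_group_load (dst, src, 16, BITS_PER_WORD);
}
#endif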
2022
2023 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2024 registers represented by a PARALLEL. SSIZE represents the total size of
2025 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2026
2027 void
2028 emit_group_store (orig_dst, src, ssize, align)
2029 rtx orig_dst, src;
2030 int ssize;
2031 unsigned int align;
2032 {
2033 rtx *tmps, dst;
2034 int start, i;
2035
2036 if (GET_CODE (src) != PARALLEL)
2037 abort ();
2038
2039 /* Check for a NULL entry, used to indicate that the parameter goes
2040 both on the stack and in registers. */
2041 if (XEXP (XVECEXP (src, 0, 0), 0))
2042 start = 0;
2043 else
2044 start = 1;
2045
2046 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2047
2048 /* Copy the (probable) hard regs into pseudos. */
2049 for (i = start; i < XVECLEN (src, 0); i++)
2050 {
2051 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2052 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2053 emit_move_insn (tmps[i], reg);
2054 }
2055 emit_queue ();
2056
2057 /* If we won't be storing directly into memory, protect the real destination
2058 from strange tricks we might play. */
2059 dst = orig_dst;
2060 if (GET_CODE (dst) == PARALLEL)
2061 {
2062 rtx temp;
2063
2064 /* We can get a PARALLEL dst if there is a conditional expression in
2065 a return statement. In that case, the dst and src are the same,
2066 so no action is necessary. */
2067 if (rtx_equal_p (dst, src))
2068 return;
2069
2070 /* It is unclear if we can ever reach here, but we may as well handle
2071 it. Allocate a temporary, and split this into a store/load to/from
2072 the temporary. */
2073
2074 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2075 emit_group_store (temp, src, ssize, align);
2076 emit_group_load (dst, temp, ssize, align);
2077 return;
2078 }
2079 else if (GET_CODE (dst) != MEM)
2080 {
2081 dst = gen_reg_rtx (GET_MODE (orig_dst));
2082 /* Make life a bit easier for combine. */
2083 emit_move_insn (dst, const0_rtx);
2084 }
2085
2086 /* Process the pieces. */
2087 for (i = start; i < XVECLEN (src, 0); i++)
2088 {
2089 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2090 enum machine_mode mode = GET_MODE (tmps[i]);
2091 unsigned int bytelen = GET_MODE_SIZE (mode);
2092
2093 /* Handle trailing fragments that run over the size of the struct. */
2094 if (ssize >= 0 && bytepos + bytelen > ssize)
2095 {
2096 if (BYTES_BIG_ENDIAN)
2097 {
2098 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2099 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2100 tmps[i], 0, OPTAB_WIDEN);
2101 }
2102 bytelen = ssize - bytepos;
2103 }
2104
2105 /* Optimize the access just a bit. */
2106 if (GET_CODE (dst) == MEM
2107 && align >= GET_MODE_ALIGNMENT (mode)
2108 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2109 && bytelen == GET_MODE_SIZE (mode))
2110 emit_move_insn (change_address (dst, mode,
2111 plus_constant (XEXP (dst, 0),
2112 bytepos)),
2113 tmps[i]);
2114 else
2115 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2116 mode, tmps[i], align, ssize);
2117 }
2118
2119 emit_queue ();
2120
2121 /* Copy from the pseudo into the (probable) hard reg. */
2122 if (GET_CODE (dst) == REG)
2123 emit_move_insn (orig_dst, dst);
2124 }
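
/* Illustrative sketch only, not compiled: the reverse of the example
   above -- scattering a two-register PARALLEL (here called SRC) into a
   16-byte BLKmode stack temporary.  The size and alignment are
   assumptions made for the example.  */
#if 0
{
  rtx slot = assign_stack_temp (BLKmode, 16, 0);

  emit_group_store (slot, src, 16, BITS_PER_WORD);
}
#endif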
2125
2126 /* Generate code to copy a BLKmode object of TYPE out of a
2127 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2128 is null, a stack temporary is created. TGTBLK is returned.
2129
2130 The primary purpose of this routine is to handle functions
2131 that return BLKmode structures in registers. Some machines
2132 (the PA for example) want to return all small structures
2133 in registers regardless of the structure's alignment. */
2134
2135 rtx
2136 copy_blkmode_from_reg (tgtblk, srcreg, type)
2137 rtx tgtblk;
2138 rtx srcreg;
2139 tree type;
2140 {
2141 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2142 rtx src = NULL, dst = NULL;
2143 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2144 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2145
2146 if (tgtblk == 0)
2147 {
2148 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2149 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2150 preserve_temp_slots (tgtblk);
2151 }
2152
2153 /* This code assumes srcreg is at least a full word. If it isn't,
2154 copy it into a new pseudo which is a full word. */
2155 if (GET_MODE (srcreg) != BLKmode
2156 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2157 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2158
2159 /* Structures whose size is not a multiple of a word are aligned
2160 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2161 machine, this means we must skip the empty high order bytes when
2162 calculating the bit offset. */
2163 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2164 big_endian_correction
2165 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2166
2167 /* Copy the structure BITSIZE bits at a time.
2168
2169 We could probably emit more efficient code for machines which do not use
2170 strict alignment, but it doesn't seem worth the effort at the current
2171 time. */
2172 for (bitpos = 0, xbitpos = big_endian_correction;
2173 bitpos < bytes * BITS_PER_UNIT;
2174 bitpos += bitsize, xbitpos += bitsize)
2175 {
2176 /* We need a new source operand each time xbitpos is on a
2177 word boundary and when xbitpos == big_endian_correction
2178 (the first time through). */
2179 if (xbitpos % BITS_PER_WORD == 0
2180 || xbitpos == big_endian_correction)
2181 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2182
2183 /* We need a new destination operand each time bitpos is on
2184 a word boundary. */
2185 if (bitpos % BITS_PER_WORD == 0)
2186 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2187
2188 /* Use xbitpos for the source extraction (right justified) and
2189 bitpos for the destination store (left justified). */
2190 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2191 extract_bit_field (src, bitsize,
2192 xbitpos % BITS_PER_WORD, 1,
2193 NULL_RTX, word_mode, word_mode,
2194 bitsize, BITS_PER_WORD),
2195 bitsize, BITS_PER_WORD);
2196 }
2197
2198 return tgtblk;
2199 }
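
/* Illustrative sketch only, not compiled: spilling a small structure
   that a callee returned in registers into memory.  TYPE would be the
   tree node for the structure type and HARD_RETURN_REG the register
   holding the value; both are assumed here.  */
#if 0
{
  rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

  /* BLK is now a stack temporary holding the structure's bytes.  */
}
#endif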
2200
2201 /* Add a USE expression for REG to the (possibly empty) list pointed
2202 to by CALL_FUSAGE. REG must denote a hard register. */
2203
2204 void
2205 use_reg (call_fusage, reg)
2206 rtx *call_fusage, reg;
2207 {
2208 if (GET_CODE (reg) != REG
2209 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2210 abort ();
2211
2212 *call_fusage
2213 = gen_rtx_EXPR_LIST (VOIDmode,
2214 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2215 }
2216
2217 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2218 starting at REGNO. All of these registers must be hard registers. */
2219
2220 void
2221 use_regs (call_fusage, regno, nregs)
2222 rtx *call_fusage;
2223 int regno;
2224 int nregs;
2225 {
2226 int i;
2227
2228 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2229 abort ();
2230
2231 for (i = 0; i < nregs; i++)
2232 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2233 }
2234
2235 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2236 PARALLEL REGS. This is for calls that pass values in multiple
2237 non-contiguous locations. The Irix 6 ABI has examples of this. */
2238
2239 void
2240 use_group_regs (call_fusage, regs)
2241 rtx *call_fusage;
2242 rtx regs;
2243 {
2244 int i;
2245
2246 for (i = 0; i < XVECLEN (regs, 0); i++)
2247 {
2248 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2249
2250 /* A NULL entry means the parameter goes both on the stack and in
2251 registers. This can also be a MEM for targets that pass values
2252 partially on the stack and partially in registers. */
2253 if (reg != 0 && GET_CODE (reg) == REG)
2254 use_reg (call_fusage, reg);
2255 }
2256 }
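
/* Illustrative sketch only, not compiled: recording that a call uses an
   argument register, so the register is kept live up to the CALL_INSN.
   The register number is hypothetical.  */
#if 0
{
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, gen_rtx_REG (SImode, 4));

  /* CALL_FUSAGE would then be attached to the call via
     CALL_INSN_FUNCTION_USAGE when the call insn is emitted.  */
}
#endif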
2257 \f
2258 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2259 rtx with BLKmode). The caller must pass TO through protect_from_queue
2260 before calling. ALIGN is the maximum alignment we can assume. */
2261
2262 static void
2263 clear_by_pieces (to, len, align)
2264 rtx to;
2265 unsigned HOST_WIDE_INT len;
2266 unsigned int align;
2267 {
2268 struct clear_by_pieces data;
2269 rtx to_addr = XEXP (to, 0);
2270 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2271 enum machine_mode mode = VOIDmode, tmode;
2272 enum insn_code icode;
2273
2274 data.offset = 0;
2275 data.to_addr = to_addr;
2276 data.to = to;
2277 data.autinc_to
2278 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2279 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2280
2281 data.explicit_inc_to = 0;
2282 data.reverse
2283 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2284 if (data.reverse)
2285 data.offset = len;
2286 data.len = len;
2287
2288 /* If copying requires more than two move insns,
2289 copy addresses to registers (to make displacements shorter)
2290 and use post-increment if available. */
2291 if (!data.autinc_to
2292 && move_by_pieces_ninsns (len, align) > 2)
2293 {
2294 /* Determine the main mode we'll be using. */
2295 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2296 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2297 if (GET_MODE_SIZE (tmode) < max_size)
2298 mode = tmode;
2299
2300 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2301 {
2302 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2303 data.autinc_to = 1;
2304 data.explicit_inc_to = -1;
2305 }
2306
2307 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2308 && ! data.autinc_to)
2309 {
2310 data.to_addr = copy_addr_to_reg (to_addr);
2311 data.autinc_to = 1;
2312 data.explicit_inc_to = 1;
2313 }
2314
2315 if ( !data.autinc_to && CONSTANT_P (to_addr))
2316 data.to_addr = copy_addr_to_reg (to_addr);
2317 }
2318
2319 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2320 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2321 align = MOVE_MAX * BITS_PER_UNIT;
2322
2323 /* First move what we can in the largest integer mode, then go to
2324 successively smaller modes. */
2325
2326 while (max_size > 1)
2327 {
2328 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2329 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2330 if (GET_MODE_SIZE (tmode) < max_size)
2331 mode = tmode;
2332
2333 if (mode == VOIDmode)
2334 break;
2335
2336 icode = mov_optab->handlers[(int) mode].insn_code;
2337 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2338 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2339
2340 max_size = GET_MODE_SIZE (mode);
2341 }
2342
2343 /* The code above should have handled everything. */
2344 if (data.len != 0)
2345 abort ();
2346 }
2347
2348 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2349 with move instructions for mode MODE. GENFUN is the gen_... function
2350 to make a move insn for that mode. DATA has all the other info. */
2351
2352 static void
2353 clear_by_pieces_1 (genfun, mode, data)
2354 rtx (*genfun) PARAMS ((rtx, ...));
2355 enum machine_mode mode;
2356 struct clear_by_pieces *data;
2357 {
2358 unsigned int size = GET_MODE_SIZE (mode);
2359 rtx to1;
2360
2361 while (data->len >= size)
2362 {
2363 if (data->reverse)
2364 data->offset -= size;
2365
2366 if (data->autinc_to)
2367 {
2368 to1 = gen_rtx_MEM (mode, data->to_addr);
2369 MEM_COPY_ATTRIBUTES (to1, data->to);
2370 }
2371 else
2372 to1 = change_address (data->to, mode,
2373 plus_constant (data->to_addr, data->offset));
2374
2375 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2376 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2377
2378 emit_insn ((*genfun) (to1, const0_rtx));
2379
2380 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2381 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2382
2383 if (! data->reverse)
2384 data->offset += size;
2385
2386 data->len -= size;
2387 }
2388 }
2389 \f
2390 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2391 its length in bytes and ALIGN is the maximum alignment we can assume.
2392
2393 If we call a function that returns the length of the block, return it. */
2394
2395 rtx
2396 clear_storage (object, size, align)
2397 rtx object;
2398 rtx size;
2399 unsigned int align;
2400 {
2401 #ifdef TARGET_MEM_FUNCTIONS
2402 static tree fn;
2403 tree call_expr, arg_list;
2404 #endif
2405 rtx retval = 0;
2406
2407 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2408 just move a zero. Otherwise, do this a piece at a time. */
2409 if (GET_MODE (object) != BLKmode
2410 && GET_CODE (size) == CONST_INT
2411 && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
2412 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2413 else
2414 {
2415 object = protect_from_queue (object, 1);
2416 size = protect_from_queue (size, 0);
2417
2418 if (GET_CODE (size) == CONST_INT
2419 && MOVE_BY_PIECES_P (INTVAL (size), align))
2420 clear_by_pieces (object, INTVAL (size), align);
2421 else
2422 {
2423 /* Try the most limited insn first, because there's no point
2424 including more than one in the machine description unless
2425 the more limited one has some advantage. */
2426
2427 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2428 enum machine_mode mode;
2429
2430 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2431 mode = GET_MODE_WIDER_MODE (mode))
2432 {
2433 enum insn_code code = clrstr_optab[(int) mode];
2434 insn_operand_predicate_fn pred;
2435
2436 if (code != CODE_FOR_nothing
2437 /* We don't need MODE to be narrower than
2438 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2439 the mode mask, as it is returned by the macro, it will
2440 definitely be less than the actual mode mask. */
2441 && ((GET_CODE (size) == CONST_INT
2442 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2443 <= (GET_MODE_MASK (mode) >> 1)))
2444 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2445 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2446 || (*pred) (object, BLKmode))
2447 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2448 || (*pred) (opalign, VOIDmode)))
2449 {
2450 rtx op1;
2451 rtx last = get_last_insn ();
2452 rtx pat;
2453
2454 op1 = convert_to_mode (mode, size, 1);
2455 pred = insn_data[(int) code].operand[1].predicate;
2456 if (pred != 0 && ! (*pred) (op1, mode))
2457 op1 = copy_to_mode_reg (mode, op1);
2458
2459 pat = GEN_FCN ((int) code) (object, op1, opalign);
2460 if (pat)
2461 {
2462 emit_insn (pat);
2463 return 0;
2464 }
2465 else
2466 delete_insns_since (last);
2467 }
2468 }
2469
2470 /* OBJECT or SIZE may have been passed through protect_from_queue.
2471
2472 It is unsafe to save the value generated by protect_from_queue
2473 and reuse it later. Consider what happens if emit_queue is
2474 called before the return value from protect_from_queue is used.
2475
2476 Expansion of the CALL_EXPR below will call emit_queue before
2477 we are finished emitting RTL for argument setup. So if we are
2478 not careful we could get the wrong value for an argument.
2479
2480 To avoid this problem we go ahead and emit code to copy OBJECT
2481 and SIZE into new pseudos. We can then place those new pseudos
2482 into an RTL_EXPR and use them later, even after a call to
2483 emit_queue.
2484
2485 Note this is not strictly needed for library calls since they
2486 do not call emit_queue before loading their arguments. However,
2487 we may need to have library calls call emit_queue in the future
2488 since failing to do so could cause problems for targets which
2489 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2490 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2491
2492 #ifdef TARGET_MEM_FUNCTIONS
2493 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2494 #else
2495 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2496 TREE_UNSIGNED (integer_type_node));
2497 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2498 #endif
2499
2500 #ifdef TARGET_MEM_FUNCTIONS
2501 /* It is incorrect to use the libcall calling conventions to call
2502 memset in this context.
2503
2504 This could be a user call to memset and the user may wish to
2505 examine the return value from memset.
2506
2507 For targets where libcalls and normal calls have different
2508 conventions for returning pointers, we could end up generating
2509 incorrect code.
2510
2511 So instead of using a libcall sequence we build up a suitable
2512 CALL_EXPR and expand the call in the normal fashion. */
2513 if (fn == NULL_TREE)
2514 {
2515 tree fntype;
2516
2517 /* This was copied from except.c, I don't know if all this is
2518 necessary in this context or not. */
2519 fn = get_identifier ("memset");
2520 push_obstacks_nochange ();
2521 end_temporary_allocation ();
2522 fntype = build_pointer_type (void_type_node);
2523 fntype = build_function_type (fntype, NULL_TREE);
2524 fn = build_decl (FUNCTION_DECL, fn, fntype);
2525 ggc_add_tree_root (&fn, 1);
2526 DECL_EXTERNAL (fn) = 1;
2527 TREE_PUBLIC (fn) = 1;
2528 DECL_ARTIFICIAL (fn) = 1;
2529 make_decl_rtl (fn, NULL_PTR, 1);
2530 assemble_external (fn);
2531 pop_obstacks ();
2532 }
2533
2534 /* We need to make an argument list for the function call.
2535
2536 memset has three arguments: the first is a void * address, the
2537 second an integer with the initialization value, and the last is a
2538 size_t byte count for the copy. */
2539 arg_list
2540 = build_tree_list (NULL_TREE,
2541 make_tree (build_pointer_type (void_type_node),
2542 object));
2543 TREE_CHAIN (arg_list)
2544 = build_tree_list (NULL_TREE,
2545 make_tree (integer_type_node, const0_rtx));
2546 TREE_CHAIN (TREE_CHAIN (arg_list))
2547 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2548 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2549
2550 /* Now we have to build up the CALL_EXPR itself. */
2551 call_expr = build1 (ADDR_EXPR,
2552 build_pointer_type (TREE_TYPE (fn)), fn);
2553 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2554 call_expr, arg_list, NULL_TREE);
2555 TREE_SIDE_EFFECTS (call_expr) = 1;
2556
2557 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2558 #else
2559 emit_library_call (bzero_libfunc, LCT_NORMAL,
2560 VOIDmode, 2, object, Pmode, size,
2561 TYPE_MODE (integer_type_node));
2562 #endif
2563 }
2564 }
2565
2566 return retval;
2567 }
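
/* Illustrative sketch only, not compiled: zeroing a 32-byte BLKmode
   stack temporary with word alignment.  The size and alignment are
   made up for the example; ALIGN is in bits.  */
#if 0
{
  rtx object = assign_stack_temp (BLKmode, 32, 0);

  clear_storage (object, GEN_INT (32), BITS_PER_WORD);
}
#endif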
2568
2569 /* Generate code to copy Y into X.
2570 Both Y and X must have the same mode, except that
2571 Y can be a constant with VOIDmode.
2572 This mode cannot be BLKmode; use emit_block_move for that.
2573
2574 Return the last instruction emitted. */
2575
2576 rtx
2577 emit_move_insn (x, y)
2578 rtx x, y;
2579 {
2580 enum machine_mode mode = GET_MODE (x);
2581
2582 x = protect_from_queue (x, 1);
2583 y = protect_from_queue (y, 0);
2584
2585 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2586 abort ();
2587
2588 /* Never force constant_p_rtx to memory. */
2589 if (GET_CODE (y) == CONSTANT_P_RTX)
2590 ;
2591 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2592 y = force_const_mem (mode, y);
2593
2594 /* If X or Y are memory references, verify that their addresses are valid
2595 for the machine. */
2596 if (GET_CODE (x) == MEM
2597 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2598 && ! push_operand (x, GET_MODE (x)))
2599 || (flag_force_addr
2600 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2601 x = change_address (x, VOIDmode, XEXP (x, 0));
2602
2603 if (GET_CODE (y) == MEM
2604 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2605 || (flag_force_addr
2606 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2607 y = change_address (y, VOIDmode, XEXP (y, 0));
2608
2609 if (mode == BLKmode)
2610 abort ();
2611
2612 return emit_move_insn_1 (x, y);
2613 }
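
/* Illustrative sketch only, not compiled: the common pattern of moving
   a constant into a fresh pseudo register.  */
#if 0
{
  rtx temp = gen_reg_rtx (SImode);

  emit_move_insn (temp, GEN_INT (42));
}
#endif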
2614
2615 /* Low level part of emit_move_insn.
2616 Called just like emit_move_insn, but assumes X and Y
2617 are basically valid. */
2618
2619 rtx
2620 emit_move_insn_1 (x, y)
2621 rtx x, y;
2622 {
2623 enum machine_mode mode = GET_MODE (x);
2624 enum machine_mode submode;
2625 enum mode_class class = GET_MODE_CLASS (mode);
2626 unsigned int i;
2627
2628 if (mode >= MAX_MACHINE_MODE)
2629 abort ();
2630
2631 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2632 return
2633 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2634
2635 /* Expand complex moves by moving real part and imag part, if possible. */
2636 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2637 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2638 * BITS_PER_UNIT),
2639 (class == MODE_COMPLEX_INT
2640 ? MODE_INT : MODE_FLOAT),
2641 0))
2642 && (mov_optab->handlers[(int) submode].insn_code
2643 != CODE_FOR_nothing))
2644 {
2645 /* Don't split destination if it is a stack push. */
2646 int stack = push_operand (x, GET_MODE (x));
2647
2648 /* If this is a stack, push the highpart first, so it
2649 will be in the argument order.
2650
2651 In that case, change_address is used only to convert
2652 the mode, not to change the address. */
2653 if (stack)
2654 {
2655 /* Note that the real part always precedes the imag part in memory
2656 regardless of machine's endianness. */
2657 #ifdef STACK_GROWS_DOWNWARD
2658 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2659 (gen_rtx_MEM (submode, XEXP (x, 0)),
2660 gen_imagpart (submode, y)));
2661 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2662 (gen_rtx_MEM (submode, XEXP (x, 0)),
2663 gen_realpart (submode, y)));
2664 #else
2665 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2666 (gen_rtx_MEM (submode, XEXP (x, 0)),
2667 gen_realpart (submode, y)));
2668 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2669 (gen_rtx_MEM (submode, XEXP (x, 0)),
2670 gen_imagpart (submode, y)));
2671 #endif
2672 }
2673 else
2674 {
2675 rtx realpart_x, realpart_y;
2676 rtx imagpart_x, imagpart_y;
2677
2678 /* If this is a complex value with each part being smaller than a
2679 word, the usual calling sequence will likely pack the pieces into
2680 a single register. Unfortunately, SUBREG of hard registers only
2681 deals in terms of words, so we have a problem converting input
2682 arguments to the CONCAT of two registers that is used elsewhere
2683 for complex values. If this is before reload, we can copy it into
2684 memory and reload. FIXME, we should see about using extract and
2685 insert on integer registers, but complex short and complex char
2686 variables should be rarely used. */
2687 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2688 && (reload_in_progress | reload_completed) == 0)
2689 {
2690 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2691 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2692
2693 if (packed_dest_p || packed_src_p)
2694 {
2695 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2696 ? MODE_FLOAT : MODE_INT);
2697
2698 enum machine_mode reg_mode =
2699 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2700
2701 if (reg_mode != BLKmode)
2702 {
2703 rtx mem = assign_stack_temp (reg_mode,
2704 GET_MODE_SIZE (mode), 0);
2705
2706 rtx cmem = change_address (mem, mode, NULL_RTX);
2707
2708 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2709
2710 if (packed_dest_p)
2711 {
2712 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2713 emit_move_insn_1 (cmem, y);
2714 return emit_move_insn_1 (sreg, mem);
2715 }
2716 else
2717 {
2718 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2719 emit_move_insn_1 (mem, sreg);
2720 return emit_move_insn_1 (x, cmem);
2721 }
2722 }
2723 }
2724 }
2725
2726 realpart_x = gen_realpart (submode, x);
2727 realpart_y = gen_realpart (submode, y);
2728 imagpart_x = gen_imagpart (submode, x);
2729 imagpart_y = gen_imagpart (submode, y);
2730
2731 /* Show the output dies here. This is necessary for SUBREGs
2732 of pseudos since we cannot track their lifetimes correctly;
2733 hard regs shouldn't appear here except as return values.
2734 We never want to emit such a clobber after reload. */
2735 if (x != y
2736 && ! (reload_in_progress || reload_completed)
2737 && (GET_CODE (realpart_x) == SUBREG
2738 || GET_CODE (imagpart_x) == SUBREG))
2739 {
2740 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2741 }
2742
2743 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2744 (realpart_x, realpart_y));
2745 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2746 (imagpart_x, imagpart_y));
2747 }
2748
2749 return get_last_insn ();
2750 }
2751
2752 /* This will handle any multi-word mode that lacks a move_insn pattern.
2753 However, you will get better code if you define such patterns,
2754 even if they must turn into multiple assembler instructions. */
2755 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2756 {
2757 rtx last_insn = 0;
2758 rtx seq, inner;
2759 int need_clobber;
2760
2761 #ifdef PUSH_ROUNDING
2762
2763 /* If X is a push on the stack, do the push now and replace
2764 X with a reference to the stack pointer. */
2765 if (push_operand (x, GET_MODE (x)))
2766 {
2767 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2768 x = change_address (x, VOIDmode, stack_pointer_rtx);
2769 }
2770 #endif
2771
2772 /* If we are in reload, see if either operand is a MEM whose address
2773 is scheduled for replacement. */
2774 if (reload_in_progress && GET_CODE (x) == MEM
2775 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2776 {
2777 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2778
2779 MEM_COPY_ATTRIBUTES (new, x);
2780 x = new;
2781 }
2782 if (reload_in_progress && GET_CODE (y) == MEM
2783 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2784 {
2785 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2786
2787 MEM_COPY_ATTRIBUTES (new, y);
2788 y = new;
2789 }
2790
2791 start_sequence ();
2792
2793 need_clobber = 0;
2794 for (i = 0;
2795 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2796 i++)
2797 {
2798 rtx xpart = operand_subword (x, i, 1, mode);
2799 rtx ypart = operand_subword (y, i, 1, mode);
2800
2801 /* If we can't get a part of Y, put Y into memory if it is a
2802 constant. Otherwise, force it into a register. If we still
2803 can't get a part of Y, abort. */
2804 if (ypart == 0 && CONSTANT_P (y))
2805 {
2806 y = force_const_mem (mode, y);
2807 ypart = operand_subword (y, i, 1, mode);
2808 }
2809 else if (ypart == 0)
2810 ypart = operand_subword_force (y, i, mode);
2811
2812 if (xpart == 0 || ypart == 0)
2813 abort ();
2814
2815 need_clobber |= (GET_CODE (xpart) == SUBREG);
2816
2817 last_insn = emit_move_insn (xpart, ypart);
2818 }
2819
2820 seq = gen_sequence ();
2821 end_sequence ();
2822
2823 /* Show the output dies here. This is necessary for SUBREGs
2824 of pseudos since we cannot track their lifetimes correctly;
2825 hard regs shouldn't appear here except as return values.
2826 We never want to emit such a clobber after reload. */
2827 if (x != y
2828 && ! (reload_in_progress || reload_completed)
2829 && need_clobber != 0)
2830 {
2831 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2832 }
2833
2834 emit_insn (seq);
2835
2836 return last_insn;
2837 }
2838 else
2839 abort ();
2840 }
2841 \f
2842 /* Pushing data onto the stack. */
2843
2844 /* Push a block of length SIZE (perhaps variable)
2845 and return an rtx to address the beginning of the block.
2846 Note that it is not possible for the value returned to be a QUEUED.
2847 The value may be virtual_outgoing_args_rtx.
2848
2849 EXTRA is the number of bytes of padding to push in addition to SIZE.
2850 BELOW nonzero means this padding comes at low addresses;
2851 otherwise, the padding comes at high addresses. */
2852
2853 rtx
2854 push_block (size, extra, below)
2855 rtx size;
2856 int extra, below;
2857 {
2858 register rtx temp;
2859
2860 size = convert_modes (Pmode, ptr_mode, size, 1);
2861 if (CONSTANT_P (size))
2862 anti_adjust_stack (plus_constant (size, extra));
2863 else if (GET_CODE (size) == REG && extra == 0)
2864 anti_adjust_stack (size);
2865 else
2866 {
2867 temp = copy_to_mode_reg (Pmode, size);
2868 if (extra != 0)
2869 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2870 temp, 0, OPTAB_LIB_WIDEN);
2871 anti_adjust_stack (temp);
2872 }
2873
2874 #ifndef STACK_GROWS_DOWNWARD
2875 #ifdef ARGS_GROW_DOWNWARD
2876 if (!ACCUMULATE_OUTGOING_ARGS)
2877 #else
2878 if (0)
2879 #endif
2880 #else
2881 if (1)
2882 #endif
2883 {
2884 /* Return the lowest stack address when STACK or ARGS grow downward and
2885 we are not accumulating outgoing arguments (the c4x port uses such
2886 conventions). */
2887 temp = virtual_outgoing_args_rtx;
2888 if (extra != 0 && below)
2889 temp = plus_constant (temp, extra);
2890 }
2891 else
2892 {
2893 if (GET_CODE (size) == CONST_INT)
2894 temp = plus_constant (virtual_outgoing_args_rtx,
2895 -INTVAL (size) - (below ? 0 : extra));
2896 else if (extra != 0 && !below)
2897 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2898 negate_rtx (Pmode, plus_constant (size, extra)));
2899 else
2900 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2901 negate_rtx (Pmode, size));
2902 }
2903
2904 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2905 }
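
/* Illustrative sketch only, not compiled: reserving 64 bytes of
   argument space with no extra padding and getting back an address
   for the start of the block.  */
#if 0
{
  rtx block = push_block (GEN_INT (64), 0, 0);

  /* BLOCK addresses the beginning of the newly allocated space.  */
}
#endif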
2906
2907 rtx
2908 gen_push_operand ()
2909 {
2910 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2911 }
2912
2913 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2914 block of SIZE bytes. */
2915
2916 static rtx
2917 get_push_address (size)
2918 int size;
2919 {
2920 register rtx temp;
2921
2922 if (STACK_PUSH_CODE == POST_DEC)
2923 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2924 else if (STACK_PUSH_CODE == POST_INC)
2925 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2926 else
2927 temp = stack_pointer_rtx;
2928
2929 return copy_to_reg (temp);
2930 }
2931
2932 /* Generate code to push X onto the stack, assuming it has mode MODE and
2933 type TYPE.
2934 MODE is redundant except when X is a CONST_INT (since they don't
2935 carry mode info).
2936 SIZE is an rtx for the size of data to be copied (in bytes),
2937 needed only if X is BLKmode.
2938
2939 ALIGN is the maximum alignment we can assume.
2940
2941 If PARTIAL and REG are both nonzero, then copy that many of the first
2942 words of X into registers starting with REG, and push the rest of X.
2943 The amount of space pushed is decreased by PARTIAL words,
2944 rounded *down* to a multiple of PARM_BOUNDARY.
2945 REG must be a hard register in this case.
2946 If REG is zero but PARTIAL is not, take all other actions for an
2947 argument partially in registers, but do not actually load any
2948 registers.
2949
2950 EXTRA is the amount in bytes of extra space to leave next to this arg.
2951 This is ignored if an argument block has already been allocated.
2952
2953 On a machine that lacks real push insns, ARGS_ADDR is the address of
2954 the bottom of the argument block for this call. We use indexing off there
2955 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2956 argument block has not been preallocated.
2957
2958 ARGS_SO_FAR is the size of args previously pushed for this call.
2959
2960 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2961 for arguments passed in registers. If nonzero, it will be the number
2962 of bytes required. */
2963
2964 void
2965 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2966 args_addr, args_so_far, reg_parm_stack_space,
2967 alignment_pad)
2968 register rtx x;
2969 enum machine_mode mode;
2970 tree type;
2971 rtx size;
2972 unsigned int align;
2973 int partial;
2974 rtx reg;
2975 int extra;
2976 rtx args_addr;
2977 rtx args_so_far;
2978 int reg_parm_stack_space;
2979 rtx alignment_pad;
2980 {
2981 rtx xinner;
2982 enum direction stack_direction
2983 #ifdef STACK_GROWS_DOWNWARD
2984 = downward;
2985 #else
2986 = upward;
2987 #endif
2988
2989 /* Decide where to pad the argument: `downward' for below,
2990 `upward' for above, or `none' for don't pad it.
2991 Default is below for small data on big-endian machines; else above. */
2992 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2993
2994 /* Invert direction if stack is post-update. */
2995 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2996 if (where_pad != none)
2997 where_pad = (where_pad == downward ? upward : downward);
2998
2999 xinner = x = protect_from_queue (x, 0);
3000
3001 if (mode == BLKmode)
3002 {
3003 /* Copy a block into the stack, entirely or partially. */
3004
3005 register rtx temp;
3006 int used = partial * UNITS_PER_WORD;
3007 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3008 int skip;
3009
3010 if (size == 0)
3011 abort ();
3012
3013 used -= offset;
3014
3015 /* USED is now the # of bytes we need not copy to the stack
3016 because registers will take care of them. */
3017
3018 if (partial != 0)
3019 xinner = change_address (xinner, BLKmode,
3020 plus_constant (XEXP (xinner, 0), used));
3021
3022 /* If the partial register-part of the arg counts in its stack size,
3023 skip the part of stack space corresponding to the registers.
3024 Otherwise, start copying to the beginning of the stack space,
3025 by setting SKIP to 0. */
3026 skip = (reg_parm_stack_space == 0) ? 0 : used;
3027
3028 #ifdef PUSH_ROUNDING
3029 /* Do it with several push insns if that doesn't take lots of insns
3030 and if there is no difficulty with push insns that skip bytes
3031 on the stack for alignment purposes. */
3032 if (args_addr == 0
3033 && PUSH_ARGS
3034 && GET_CODE (size) == CONST_INT
3035 && skip == 0
3036 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3037 /* Here we avoid the case of a structure whose weak alignment
3038 forces many pushes of a small amount of data,
3039 and such small pushes do rounding that causes trouble. */
3040 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3041 || align >= BIGGEST_ALIGNMENT
3042 || PUSH_ROUNDING (align) == align)
3043 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3044 {
3045 /* Push padding now if padding above and stack grows down,
3046 or if padding below and stack grows up.
3047 But if space already allocated, this has already been done. */
3048 if (extra && args_addr == 0
3049 && where_pad != none && where_pad != stack_direction)
3050 anti_adjust_stack (GEN_INT (extra));
3051
3052 stack_pointer_delta += INTVAL (size) - used;
3053 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3054 INTVAL (size) - used, align);
3055
3056 if (current_function_check_memory_usage && ! in_check_memory_usage)
3057 {
3058 rtx temp;
3059
3060 in_check_memory_usage = 1;
3061 temp = get_push_address (INTVAL (size) - used);
3062 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3063 emit_library_call (chkr_copy_bitmap_libfunc,
3064 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3065 Pmode, XEXP (xinner, 0), Pmode,
3066 GEN_INT (INTVAL (size) - used),
3067 TYPE_MODE (sizetype));
3068 else
3069 emit_library_call (chkr_set_right_libfunc,
3070 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3071 Pmode, GEN_INT (INTVAL (size) - used),
3072 TYPE_MODE (sizetype),
3073 GEN_INT (MEMORY_USE_RW),
3074 TYPE_MODE (integer_type_node));
3075 in_check_memory_usage = 0;
3076 }
3077 }
3078 else
3079 #endif /* PUSH_ROUNDING */
3080 {
3081 rtx target;
3082
3083 /* Otherwise make space on the stack and copy the data
3084 to the address of that space. */
3085
3086 /* Deduct words put into registers from the size we must copy. */
3087 if (partial != 0)
3088 {
3089 if (GET_CODE (size) == CONST_INT)
3090 size = GEN_INT (INTVAL (size) - used);
3091 else
3092 size = expand_binop (GET_MODE (size), sub_optab, size,
3093 GEN_INT (used), NULL_RTX, 0,
3094 OPTAB_LIB_WIDEN);
3095 }
3096
3097 /* Get the address of the stack space.
3098 In this case, we do not deal with EXTRA separately.
3099 A single stack adjust will do. */
3100 if (! args_addr)
3101 {
3102 temp = push_block (size, extra, where_pad == downward);
3103 extra = 0;
3104 }
3105 else if (GET_CODE (args_so_far) == CONST_INT)
3106 temp = memory_address (BLKmode,
3107 plus_constant (args_addr,
3108 skip + INTVAL (args_so_far)));
3109 else
3110 temp = memory_address (BLKmode,
3111 plus_constant (gen_rtx_PLUS (Pmode,
3112 args_addr,
3113 args_so_far),
3114 skip));
3115 if (current_function_check_memory_usage && ! in_check_memory_usage)
3116 {
3117 in_check_memory_usage = 1;
3118 target = copy_to_reg (temp);
3119 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3120 emit_library_call (chkr_copy_bitmap_libfunc,
3121 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3122 target, Pmode,
3123 XEXP (xinner, 0), Pmode,
3124 size, TYPE_MODE (sizetype));
3125 else
3126 emit_library_call (chkr_set_right_libfunc,
3127 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3128 target, Pmode,
3129 size, TYPE_MODE (sizetype),
3130 GEN_INT (MEMORY_USE_RW),
3131 TYPE_MODE (integer_type_node));
3132 in_check_memory_usage = 0;
3133 }
3134
3135 target = gen_rtx_MEM (BLKmode, temp);
3136
3137 if (type != 0)
3138 {
3139 set_mem_attributes (target, type, 1);
3140 /* Function incoming arguments may overlap with sibling call
3141 outgoing arguments and we cannot allow reordering of reads
3142 from function arguments with stores to outgoing arguments
3143 of sibling calls. */
3144 MEM_ALIAS_SET (target) = 0;
3145 }
3146
3147 /* TEMP is the address of the block. Copy the data there. */
3148 if (GET_CODE (size) == CONST_INT
3149 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3150 {
3151 move_by_pieces (target, xinner, INTVAL (size), align);
3152 goto ret;
3153 }
3154 else
3155 {
3156 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3157 enum machine_mode mode;
3158
3159 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3160 mode != VOIDmode;
3161 mode = GET_MODE_WIDER_MODE (mode))
3162 {
3163 enum insn_code code = movstr_optab[(int) mode];
3164 insn_operand_predicate_fn pred;
3165
3166 if (code != CODE_FOR_nothing
3167 && ((GET_CODE (size) == CONST_INT
3168 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3169 <= (GET_MODE_MASK (mode) >> 1)))
3170 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3171 && (!(pred = insn_data[(int) code].operand[0].predicate)
3172 || ((*pred) (target, BLKmode)))
3173 && (!(pred = insn_data[(int) code].operand[1].predicate)
3174 || ((*pred) (xinner, BLKmode)))
3175 && (!(pred = insn_data[(int) code].operand[3].predicate)
3176 || ((*pred) (opalign, VOIDmode))))
3177 {
3178 rtx op2 = convert_to_mode (mode, size, 1);
3179 rtx last = get_last_insn ();
3180 rtx pat;
3181
3182 pred = insn_data[(int) code].operand[2].predicate;
3183 if (pred != 0 && ! (*pred) (op2, mode))
3184 op2 = copy_to_mode_reg (mode, op2);
3185
3186 pat = GEN_FCN ((int) code) (target, xinner,
3187 op2, opalign);
3188 if (pat)
3189 {
3190 emit_insn (pat);
3191 goto ret;
3192 }
3193 else
3194 delete_insns_since (last);
3195 }
3196 }
3197 }
3198
3199 if (!ACCUMULATE_OUTGOING_ARGS)
3200 {
3201 /* If the source is referenced relative to the stack pointer,
3202 copy it to another register to stabilize it. We do not need
3203 to do this if we know that we won't be changing sp. */
3204
3205 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3206 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3207 temp = copy_to_reg (temp);
3208 }
3209
3210 /* Make inhibit_defer_pop nonzero around the library call
3211 to force it to pop the bcopy-arguments right away. */
3212 NO_DEFER_POP;
3213 #ifdef TARGET_MEM_FUNCTIONS
3214 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3215 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3216 convert_to_mode (TYPE_MODE (sizetype),
3217 size, TREE_UNSIGNED (sizetype)),
3218 TYPE_MODE (sizetype));
3219 #else
3220 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3221 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3222 convert_to_mode (TYPE_MODE (integer_type_node),
3223 size,
3224 TREE_UNSIGNED (integer_type_node)),
3225 TYPE_MODE (integer_type_node));
3226 #endif
3227 OK_DEFER_POP;
3228 }
3229 }
3230 else if (partial > 0)
3231 {
3232 /* Scalar partly in registers. */
3233
3234 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3235 int i;
3236 int not_stack;
3237 /* # words of start of argument
3238 that we must make space for but need not store. */
3239 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3240 int args_offset = INTVAL (args_so_far);
3241 int skip;
3242
3243 /* Push padding now if padding above and stack grows down,
3244 or if padding below and stack grows up.
3245 But if space already allocated, this has already been done. */
3246 if (extra && args_addr == 0
3247 && where_pad != none && where_pad != stack_direction)
3248 anti_adjust_stack (GEN_INT (extra));
3249
3250 /* If we make space by pushing it, we might as well push
3251 the real data. Otherwise, we can leave OFFSET nonzero
3252 and leave the space uninitialized. */
3253 if (args_addr == 0)
3254 offset = 0;
3255
3256 /* Now NOT_STACK gets the number of words that we don't need to
3257 allocate on the stack. */
3258 not_stack = partial - offset;
3259
3260 /* If the partial register-part of the arg counts in its stack size,
3261 skip the part of stack space corresponding to the registers.
3262 Otherwise, start copying to the beginning of the stack space,
3263 by setting SKIP to 0. */
3264 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3265
3266 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3267 x = validize_mem (force_const_mem (mode, x));
3268
3269 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3270 SUBREGs of such registers are not allowed. */
3271 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3272 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3273 x = copy_to_reg (x);
3274
3275 /* Loop over all the words allocated on the stack for this arg. */
3276 /* We can do it by words, because any scalar bigger than a word
3277 has a size a multiple of a word. */
3278 #ifndef PUSH_ARGS_REVERSED
3279 for (i = not_stack; i < size; i++)
3280 #else
3281 for (i = size - 1; i >= not_stack; i--)
3282 #endif
3283 if (i >= not_stack + offset)
3284 emit_push_insn (operand_subword_force (x, i, mode),
3285 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3286 0, args_addr,
3287 GEN_INT (args_offset + ((i - not_stack + skip)
3288 * UNITS_PER_WORD)),
3289 reg_parm_stack_space, alignment_pad);
3290 }
3291 else
3292 {
3293 rtx addr;
3294 rtx target = NULL_RTX;
3295 rtx dest;
3296
3297 /* Push padding now if padding above and stack grows down,
3298 or if padding below and stack grows up.
3299 But if space already allocated, this has already been done. */
3300 if (extra && args_addr == 0
3301 && where_pad != none && where_pad != stack_direction)
3302 anti_adjust_stack (GEN_INT (extra));
3303
3304 #ifdef PUSH_ROUNDING
3305 if (args_addr == 0 && PUSH_ARGS)
3306 {
3307 addr = gen_push_operand ();
3308 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3309 }
3310 else
3311 #endif
3312 {
3313 if (GET_CODE (args_so_far) == CONST_INT)
3314 addr
3315 = memory_address (mode,
3316 plus_constant (args_addr,
3317 INTVAL (args_so_far)));
3318 else
3319 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3320 args_so_far));
3321 target = addr;
3322 }
3323
3324 dest = gen_rtx_MEM (mode, addr);
3325 if (type != 0)
3326 {
3327 set_mem_attributes (dest, type, 1);
3328 /* Function incoming arguments may overlap with sibling call
3329 outgoing arguments and we cannot allow reordering of reads
3330 from function arguments with stores to outgoing arguments
3331 of sibling calls. */
3332 MEM_ALIAS_SET (dest) = 0;
3333 }
3334
3335 emit_move_insn (dest, x);
3336
3337 if (current_function_check_memory_usage && ! in_check_memory_usage)
3338 {
3339 in_check_memory_usage = 1;
3340 if (target == 0)
3341 target = get_push_address (GET_MODE_SIZE (mode));
3342
3343 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3344 emit_library_call (chkr_copy_bitmap_libfunc,
3345 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3346 Pmode, XEXP (x, 0), Pmode,
3347 GEN_INT (GET_MODE_SIZE (mode)),
3348 TYPE_MODE (sizetype));
3349 else
3350 emit_library_call (chkr_set_right_libfunc,
3351 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3352 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3353 TYPE_MODE (sizetype),
3354 GEN_INT (MEMORY_USE_RW),
3355 TYPE_MODE (integer_type_node));
3356 in_check_memory_usage = 0;
3357 }
3358 }
3359
3360 ret:
3361 /* If part should go in registers, copy that part
3362 into the appropriate registers. Do this now, at the end,
3363 since mem-to-mem copies above may do function calls. */
3364 if (partial > 0 && reg != 0)
3365 {
3366 /* Handle calls that pass values in multiple non-contiguous locations.
3367 The Irix 6 ABI has examples of this. */
3368 if (GET_CODE (reg) == PARALLEL)
3369 emit_group_load (reg, x, -1, align); /* ??? size? */
3370 else
3371 move_block_to_reg (REGNO (reg), x, partial, mode);
3372 }
3373
3374 if (extra && args_addr == 0 && where_pad == stack_direction)
3375 anti_adjust_stack (GEN_INT (extra));
3376
3377 if (alignment_pad && args_addr == 0)
3378 anti_adjust_stack (alignment_pad);
3379 }
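
/* Illustrative sketch only, not compiled: pushing a single SImode value
   as an outgoing argument with plain push insns (ARGS_ADDR of 0,
   nothing passed in registers, no extra padding).  The value and the
   alignment are assumptions made for the example.  */
#if 0
{
  rtx x = force_reg (SImode, GEN_INT (7));

  emit_push_insn (x, SImode, NULL_TREE, NULL_RTX,
		  GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX,
		  0, NULL_RTX, const0_rtx, 0, NULL_RTX);
}
#endif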
3380 \f
3381 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3382 operations. */
3383
3384 static rtx
3385 get_subtarget (x)
3386 rtx x;
3387 {
3388 return ((x == 0
3389 /* Only registers can be subtargets. */
3390 || GET_CODE (x) != REG
3391 /* If the register is readonly, it can't be set more than once. */
3392 || RTX_UNCHANGING_P (x)
3393 /* Don't use hard regs to avoid extending their life. */
3394 || REGNO (x) < FIRST_PSEUDO_REGISTER
3395 /* Avoid subtargets inside loops,
3396 since they hide some invariant expressions. */
3397 || preserve_subexpressions_p ())
3398 ? 0 : x);
3399 }
3400
3401 /* Expand an assignment that stores the value of FROM into TO.
3402 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3403 (This may contain a QUEUED rtx;
3404 if the value is constant, this rtx is a constant.)
3405 Otherwise, the returned value is NULL_RTX.
3406
3407 SUGGEST_REG is no longer actually used.
3408 It used to mean, copy the value through a register
3409 and return that register, if that is possible.
3410 We now use WANT_VALUE to decide whether to do this. */
3411
3412 rtx
3413 expand_assignment (to, from, want_value, suggest_reg)
3414 tree to, from;
3415 int want_value;
3416 int suggest_reg ATTRIBUTE_UNUSED;
3417 {
3418 register rtx to_rtx = 0;
3419 rtx result;
3420
3421 /* Don't crash if the lhs of the assignment was erroneous. */
3422
3423 if (TREE_CODE (to) == ERROR_MARK)
3424 {
3425 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3426 return want_value ? result : NULL_RTX;
3427 }
3428
3429 /* Assignment of a structure component needs special treatment
3430 if the structure component's rtx is not simply a MEM.
3431 Assignment of an array element at a constant index, and assignment of
3432 an array element in an unaligned packed structure field, has the same
3433 problem. */
3434
3435 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3436 || TREE_CODE (to) == ARRAY_REF)
3437 {
3438 enum machine_mode mode1;
3439 HOST_WIDE_INT bitsize, bitpos;
3440 tree offset;
3441 int unsignedp;
3442 int volatilep = 0;
3443 tree tem;
3444 unsigned int alignment;
3445
3446 push_temp_slots ();
3447 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3448 &unsignedp, &volatilep, &alignment);
3449
3450 /* If we are going to use store_bit_field and extract_bit_field,
3451 make sure to_rtx will be safe for multiple use. */
3452
3453 if (mode1 == VOIDmode && want_value)
3454 tem = stabilize_reference (tem);
3455
3456 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3457 if (offset != 0)
3458 {
3459 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3460
3461 if (GET_CODE (to_rtx) != MEM)
3462 abort ();
3463
3464 if (GET_MODE (offset_rtx) != ptr_mode)
3465 {
3466 #ifdef POINTERS_EXTEND_UNSIGNED
3467 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3468 #else
3469 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3470 #endif
3471 }
3472
3473 /* A constant address in TO_RTX can have VOIDmode, we must not try
3474 to call force_reg for that case. Avoid that case. */
3475 if (GET_CODE (to_rtx) == MEM
3476 && GET_MODE (to_rtx) == BLKmode
3477 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3478 && bitsize
3479 && (bitpos % bitsize) == 0
3480 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3481 && alignment == GET_MODE_ALIGNMENT (mode1))
3482 {
3483 rtx temp = change_address (to_rtx, mode1,
3484 plus_constant (XEXP (to_rtx, 0),
3485 (bitpos /
3486 BITS_PER_UNIT)));
3487 if (GET_CODE (XEXP (temp, 0)) == REG)
3488 to_rtx = temp;
3489 else
3490 to_rtx = change_address (to_rtx, mode1,
3491 force_reg (GET_MODE (XEXP (temp, 0)),
3492 XEXP (temp, 0)));
3493 bitpos = 0;
3494 }
3495
3496 to_rtx = change_address (to_rtx, VOIDmode,
3497 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3498 force_reg (ptr_mode,
3499 offset_rtx)));
3500 }
3501
3502 if (volatilep)
3503 {
3504 if (GET_CODE (to_rtx) == MEM)
3505 {
3506 /* When the offset is zero, to_rtx is the address of the
3507 structure we are storing into, and hence may be shared.
3508 We must make a new MEM before setting the volatile bit. */
3509 if (offset == 0)
3510 to_rtx = copy_rtx (to_rtx);
3511
3512 MEM_VOLATILE_P (to_rtx) = 1;
3513 }
3514 #if 0 /* This was turned off because, when a field is volatile
3515 in an object which is not volatile, the object may be in a register,
3516 and then we would abort over here. */
3517 else
3518 abort ();
3519 #endif
3520 }
3521
3522 if (TREE_CODE (to) == COMPONENT_REF
3523 && TREE_READONLY (TREE_OPERAND (to, 1)))
3524 {
3525 if (offset == 0)
3526 to_rtx = copy_rtx (to_rtx);
3527
3528 RTX_UNCHANGING_P (to_rtx) = 1;
3529 }
3530
3531 /* Check the access. */
3532 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3533 {
3534 rtx to_addr;
3535 int size;
3536 int best_mode_size;
3537 enum machine_mode best_mode;
3538
3539 best_mode = get_best_mode (bitsize, bitpos,
3540 TYPE_ALIGN (TREE_TYPE (tem)),
3541 mode1, volatilep);
3542 if (best_mode == VOIDmode)
3543 best_mode = QImode;
3544
3545 best_mode_size = GET_MODE_BITSIZE (best_mode);
3546 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3547 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3548 size *= GET_MODE_SIZE (best_mode);
3549
3550 /* Check the access right of the pointer. */
3551 in_check_memory_usage = 1;
3552 if (size)
3553 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3554 VOIDmode, 3, to_addr, Pmode,
3555 GEN_INT (size), TYPE_MODE (sizetype),
3556 GEN_INT (MEMORY_USE_WO),
3557 TYPE_MODE (integer_type_node));
3558 in_check_memory_usage = 0;
3559 }
3560
3561 /* If this is a varying-length object, we must get the address of
3562 the source and do an explicit block move. */
3563 if (bitsize < 0)
3564 {
3565 unsigned int from_align;
3566 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3567 rtx inner_to_rtx
3568 = change_address (to_rtx, VOIDmode,
3569 plus_constant (XEXP (to_rtx, 0),
3570 bitpos / BITS_PER_UNIT));
3571
3572 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3573 MIN (alignment, from_align));
3574 free_temp_slots ();
3575 pop_temp_slots ();
3576 return to_rtx;
3577 }
3578 else
3579 {
3580 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3581 (want_value
3582 /* Spurious cast for HPUX compiler. */
3583 ? ((enum machine_mode)
3584 TYPE_MODE (TREE_TYPE (to)))
3585 : VOIDmode),
3586 unsignedp,
3587 alignment,
3588 int_size_in_bytes (TREE_TYPE (tem)),
3589 get_alias_set (to));
3590
3591 preserve_temp_slots (result);
3592 free_temp_slots ();
3593 pop_temp_slots ();
3594
3595 /* If the value is meaningful, convert RESULT to the proper mode.
3596 Otherwise, return nothing. */
3597 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3598 TYPE_MODE (TREE_TYPE (from)),
3599 result,
3600 TREE_UNSIGNED (TREE_TYPE (to)))
3601 : NULL_RTX);
3602 }
3603 }
3604
3605 /* If the rhs is a function call and its value is not an aggregate,
3606 call the function before we start to compute the lhs.
3607 This is needed for correct code for cases such as
3608 val = setjmp (buf) on machines where reference to val
3609 requires loading up part of an address in a separate insn.
3610
3611 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3612 since it might be a promoted variable where the zero- or sign- extension
3613 needs to be done. Handling this in the normal way is safe because no
3614 computation is done before the call. */
3615 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3616 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3617 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3618 && GET_CODE (DECL_RTL (to)) == REG))
3619 {
3620 rtx value;
3621
3622 push_temp_slots ();
3623 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3624 if (to_rtx == 0)
3625 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3626
3627 /* Handle calls that return values in multiple non-contiguous locations.
3628 The Irix 6 ABI has examples of this. */
3629 if (GET_CODE (to_rtx) == PARALLEL)
3630 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3631 TYPE_ALIGN (TREE_TYPE (from)));
3632 else if (GET_MODE (to_rtx) == BLKmode)
3633 emit_block_move (to_rtx, value, expr_size (from),
3634 TYPE_ALIGN (TREE_TYPE (from)));
3635 else
3636 {
3637 #ifdef POINTERS_EXTEND_UNSIGNED
3638 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3639 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3640 value = convert_memory_address (GET_MODE (to_rtx), value);
3641 #endif
3642 emit_move_insn (to_rtx, value);
3643 }
3644 preserve_temp_slots (to_rtx);
3645 free_temp_slots ();
3646 pop_temp_slots ();
3647 return want_value ? to_rtx : NULL_RTX;
3648 }
3649
3650 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3651 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3652
3653 if (to_rtx == 0)
3654 {
3655 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3656 if (GET_CODE (to_rtx) == MEM)
3657 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3658 }
3659
3660 /* Don't move directly into a return register. */
3661 if (TREE_CODE (to) == RESULT_DECL
3662 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3663 {
3664 rtx temp;
3665
3666 push_temp_slots ();
3667 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3668
3669 if (GET_CODE (to_rtx) == PARALLEL)
3670 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3671 TYPE_ALIGN (TREE_TYPE (from)));
3672 else
3673 emit_move_insn (to_rtx, temp);
3674
3675 preserve_temp_slots (to_rtx);
3676 free_temp_slots ();
3677 pop_temp_slots ();
3678 return want_value ? to_rtx : NULL_RTX;
3679 }
3680
3681 /* In case we are returning the contents of an object which overlaps
3682 the place the value is being stored, use a safe function when copying
3683 a value through a pointer into a structure value return block. */
3684 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3685 && current_function_returns_struct
3686 && !current_function_returns_pcc_struct)
3687 {
3688 rtx from_rtx, size;
3689
3690 push_temp_slots ();
3691 size = expr_size (from);
3692 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3693 EXPAND_MEMORY_USE_DONT);
3694
3695 /* Copy the rights of the bitmap. */
3696 if (current_function_check_memory_usage)
3697 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3698 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3699 XEXP (from_rtx, 0), Pmode,
3700 convert_to_mode (TYPE_MODE (sizetype),
3701 size, TREE_UNSIGNED (sizetype)),
3702 TYPE_MODE (sizetype));
3703
3704 #ifdef TARGET_MEM_FUNCTIONS
3705 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3706 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3707 XEXP (from_rtx, 0), Pmode,
3708 convert_to_mode (TYPE_MODE (sizetype),
3709 size, TREE_UNSIGNED (sizetype)),
3710 TYPE_MODE (sizetype));
3711 #else
3712 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3713 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3714 XEXP (to_rtx, 0), Pmode,
3715 convert_to_mode (TYPE_MODE (integer_type_node),
3716 size, TREE_UNSIGNED (integer_type_node)),
3717 TYPE_MODE (integer_type_node));
3718 #endif
3719
3720 preserve_temp_slots (to_rtx);
3721 free_temp_slots ();
3722 pop_temp_slots ();
3723 return want_value ? to_rtx : NULL_RTX;
3724 }
3725
3726 /* Compute FROM and store the value in the rtx we got. */
3727
3728 push_temp_slots ();
3729 result = store_expr (from, to_rtx, want_value);
3730 preserve_temp_slots (result);
3731 free_temp_slots ();
3732 pop_temp_slots ();
3733 return want_value ? result : NULL_RTX;
3734 }
3735
3736 /* Generate code for computing expression EXP,
3737 and storing the value into TARGET.
3738 TARGET may contain a QUEUED rtx.
3739
3740 If WANT_VALUE is nonzero, return a copy of the value
3741 not in TARGET, so that we can be sure to use the proper
3742 value in a containing expression even if TARGET has something
3743 else stored in it. If possible, we copy the value through a pseudo
3744 and return that pseudo. Or, if the value is constant, we try to
3745 return the constant. In some cases, we return a pseudo
3746 copied *from* TARGET.
3747
3748 If the mode is BLKmode then we may return TARGET itself.
3749 It turns out that in BLKmode it doesn't cause a problem,
3750 because C has no operators that could combine two different
3751 assignments into the same BLKmode object with different values
3752 with no sequence point. Will other languages need this to
3753 be more thorough?
3754
3755 If WANT_VALUE is 0, we return NULL, to make sure
3756 to catch quickly any cases where the caller uses the value
3757 and fails to set WANT_VALUE. */
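/* For example, the ordinary case of expand_assignment above ends with
     result = store_expr (from, to_rtx, want_value);  */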
3758
3759 rtx
3760 store_expr (exp, target, want_value)
3761 register tree exp;
3762 register rtx target;
3763 int want_value;
3764 {
3765 register rtx temp;
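/* Nonzero if, when WANT_VALUE is set, we would rather return TEMP than
   TARGET; see the return logic at the end of this function.  */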
3766 int dont_return_target = 0;
3767
3768 if (TREE_CODE (exp) == COMPOUND_EXPR)
3769 {
3770 /* Perform first part of compound expression, then assign from second
3771 part. */
3772 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3773 emit_queue ();
3774 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3775 }
3776 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3777 {
3778 /* For conditional expression, get safe form of the target. Then
3779 test the condition, doing the appropriate assignment on either
3780 side. This avoids the creation of unnecessary temporaries.
3781 For non-BLKmode, it is more efficient not to do this. */
3782
3783 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3784
3785 emit_queue ();
3786 target = protect_from_queue (target, 1);
3787
3788 do_pending_stack_adjust ();
3789 NO_DEFER_POP;
3790 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3791 start_cleanup_deferral ();
3792 store_expr (TREE_OPERAND (exp, 1), target, 0);
3793 end_cleanup_deferral ();
3794 emit_queue ();
3795 emit_jump_insn (gen_jump (lab2));
3796 emit_barrier ();
3797 emit_label (lab1);
3798 start_cleanup_deferral ();
3799 store_expr (TREE_OPERAND (exp, 2), target, 0);
3800 end_cleanup_deferral ();
3801 emit_queue ();
3802 emit_label (lab2);
3803 OK_DEFER_POP;
3804
3805 return want_value ? target : NULL_RTX;
3806 }
3807 else if (queued_subexp_p (target))
3808 /* If target contains a postincrement, let's not risk
3809 using it as the place to generate the rhs. */
3810 {
3811 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3812 {
3813 /* Expand EXP into a new pseudo. */
3814 temp = gen_reg_rtx (GET_MODE (target));
3815 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3816 }
3817 else
3818 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3819
3820 /* If target is volatile, ANSI requires accessing the value
3821 *from* the target, if it is accessed. So make that happen.
3822 In no case return the target itself. */
3823 if (! MEM_VOLATILE_P (target) && want_value)
3824 dont_return_target = 1;
3825 }
3826 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3827 && GET_MODE (target) != BLKmode)
3828 /* If target is in memory and caller wants value in a register instead,
3829 arrange that. Pass TARGET as target for expand_expr so that,
3830 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3831 We know expand_expr will not use the target in that case.
3832 Don't do this if TARGET is volatile because we are supposed
3833 to write it and then read it. */
3834 {
3835 temp = expand_expr (exp, target, GET_MODE (target), 0);
3836 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3837 temp = copy_to_reg (temp);
3838 dont_return_target = 1;
3839 }
3840 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3841 /* If this is a scalar in a register that is stored in a wider mode
3842 than the declared mode, compute the result into its declared mode
3843 and then convert to the wider mode. Our value is the computed
3844 expression. */
3845 {
3846 /* If we don't want a value, we can do the conversion inside EXP,
3847 which will often result in some optimizations. Do the conversion
3848 in two steps: first change the signedness, if needed, then
3849 the extend. But don't do this if the type of EXP is a subtype
3850 of something else since then the conversion might involve
3851 more than just converting modes. */
3852 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3853 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3854 {
3855 if (TREE_UNSIGNED (TREE_TYPE (exp))
3856 != SUBREG_PROMOTED_UNSIGNED_P (target))
3857 exp
3858 = convert
3859 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3860 TREE_TYPE (exp)),
3861 exp);
3862
3863 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3864 SUBREG_PROMOTED_UNSIGNED_P (target)),
3865 exp);
3866 }
3867
3868 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3869
3870 /* If TEMP is a volatile MEM and we want a result value, make
3871 the access now so it gets done only once. Likewise if
3872 it contains TARGET. */
3873 if (GET_CODE (temp) == MEM && want_value
3874 && (MEM_VOLATILE_P (temp)
3875 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3876 temp = copy_to_reg (temp);
3877
3878 /* If TEMP is a VOIDmode constant, use convert_modes to make
3879 sure that we properly convert it. */
3880 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3881 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3882 TYPE_MODE (TREE_TYPE (exp)), temp,
3883 SUBREG_PROMOTED_UNSIGNED_P (target));
3884
3885 convert_move (SUBREG_REG (target), temp,
3886 SUBREG_PROMOTED_UNSIGNED_P (target));
3887
3888 /* If we promoted a constant, change the mode back down to match
3889 target. Otherwise, the caller might get confused by a result whose
3890 mode is larger than expected. */
3891
3892 if (want_value && GET_MODE (temp) != GET_MODE (target)
3893 && GET_MODE (temp) != VOIDmode)
3894 {
3895 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3896 SUBREG_PROMOTED_VAR_P (temp) = 1;
3897 SUBREG_PROMOTED_UNSIGNED_P (temp)
3898 = SUBREG_PROMOTED_UNSIGNED_P (target);
3899 }
3900
3901 return want_value ? temp : NULL_RTX;
3902 }
3903 else
3904 {
3905 temp = expand_expr (exp, target, GET_MODE (target), 0);
3906 /* Return TARGET if it's a specified hardware register.
3907 If TARGET is a volatile mem ref, either return TARGET
3908 or return a reg copied *from* TARGET; ANSI requires this.
3909
3910 Otherwise, if TEMP is not TARGET, return TEMP
3911 if it is constant (for efficiency),
3912 or if we really want the correct value. */
3913 if (!(target && GET_CODE (target) == REG
3914 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3915 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3916 && ! rtx_equal_p (temp, target)
3917 && (CONSTANT_P (temp) || want_value))
3918 dont_return_target = 1;
3919 }
3920
3921 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3922 the same as that of TARGET, adjust the constant. This is needed, for
3923 example, in case it is a CONST_DOUBLE and we want only a word-sized
3924 value. */
3925 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3926 && TREE_CODE (exp) != ERROR_MARK
3927 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3928 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3929 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3930
3931 if (current_function_check_memory_usage
3932 && GET_CODE (target) == MEM
3933 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3934 {
3935 in_check_memory_usage = 1;
3936 if (GET_CODE (temp) == MEM)
3937 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3938 VOIDmode, 3, XEXP (target, 0), Pmode,
3939 XEXP (temp, 0), Pmode,
3940 expr_size (exp), TYPE_MODE (sizetype));
3941 else
3942 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3943 VOIDmode, 3, XEXP (target, 0), Pmode,
3944 expr_size (exp), TYPE_MODE (sizetype),
3945 GEN_INT (MEMORY_USE_WO),
3946 TYPE_MODE (integer_type_node));
3947 in_check_memory_usage = 0;
3948 }
3949
3950 /* If value was not generated in the target, store it there.
3951 Convert the value to TARGET's type first if necessary. */
3952 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3953 one or both of them are volatile memory refs, we have to distinguish
3954 two cases:
3955 - expand_expr has used TARGET. In this case, we must not generate
3956 another copy. This can be detected by TARGET being equal according
3957 to == .
3958 - expand_expr has not used TARGET - that means that the source just
3959 happens to have the same RTX form. Since temp will have been created
3960 by expand_expr, it will compare unequal according to == .
3961 We must generate a copy in this case, to reach the correct number
3962 of volatile memory references. */
3963
3964 if ((! rtx_equal_p (temp, target)
3965 || (temp != target && (side_effects_p (temp)
3966 || side_effects_p (target))))
3967 && TREE_CODE (exp) != ERROR_MARK)
3968 {
3969 target = protect_from_queue (target, 1);
3970 if (GET_MODE (temp) != GET_MODE (target)
3971 && GET_MODE (temp) != VOIDmode)
3972 {
3973 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3974 if (dont_return_target)
3975 {
3976 /* In this case, we will return TEMP,
3977 so make sure it has the proper mode.
3978 But don't forget to store the value into TARGET. */
3979 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3980 emit_move_insn (target, temp);
3981 }
3982 else
3983 convert_move (target, temp, unsignedp);
3984 }
3985
3986 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3987 {
3988 /* Handle copying a string constant into an array.
3989 The string constant may be shorter than the array.
3990 So copy just the string's actual length, and clear the rest. */
3991 rtx size;
3992 rtx addr;
3993
3994 /* Get the size of the data type of the string,
3995 which is actually the size of the target. */
3996 size = expr_size (exp);
3997 if (GET_CODE (size) == CONST_INT
3998 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3999 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4000 else
4001 {
4002 /* Compute the size of the data to copy from the string. */
4003 tree copy_size
4004 = size_binop (MIN_EXPR,
4005 make_tree (sizetype, size),
4006 size_int (TREE_STRING_LENGTH (exp)));
4007 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4008 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4009 VOIDmode, 0);
4010 rtx label = 0;
4011
4012 /* Copy that much. */
4013 emit_block_move (target, temp, copy_size_rtx,
4014 TYPE_ALIGN (TREE_TYPE (exp)));
4015
4016 /* Figure out how much is left in TARGET that we have to clear.
4017 Do all calculations in ptr_mode. */
4018
4019 addr = XEXP (target, 0);
4020 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4021
4022 if (GET_CODE (copy_size_rtx) == CONST_INT)
4023 {
4024 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4025 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4026 align = MIN (align, (BITS_PER_UNIT
4027 * (INTVAL (copy_size_rtx)
4028 & - INTVAL (copy_size_rtx))));
4029 }
4030 else
4031 {
4032 addr = force_reg (ptr_mode, addr);
4033 addr = expand_binop (ptr_mode, add_optab, addr,
4034 copy_size_rtx, NULL_RTX, 0,
4035 OPTAB_LIB_WIDEN);
4036
4037 size = expand_binop (ptr_mode, sub_optab, size,
4038 copy_size_rtx, NULL_RTX, 0,
4039 OPTAB_LIB_WIDEN);
4040
4041 align = BITS_PER_UNIT;
4042 label = gen_label_rtx ();
4043 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4044 GET_MODE (size), 0, 0, label);
4045 }
4046 align = MIN (align, expr_align (copy_size));
4047
4048 if (size != const0_rtx)
4049 {
4050 rtx dest = gen_rtx_MEM (BLKmode, addr);
4051
4052 MEM_COPY_ATTRIBUTES (dest, target);
4053
4054 /* Be sure we can write on ADDR. */
4055 in_check_memory_usage = 1;
4056 if (current_function_check_memory_usage)
4057 emit_library_call (chkr_check_addr_libfunc,
4058 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4059 addr, Pmode,
4060 size, TYPE_MODE (sizetype),
4061 GEN_INT (MEMORY_USE_WO),
4062 TYPE_MODE (integer_type_node));
4063 in_check_memory_usage = 0;
4064 clear_storage (dest, size, align);
4065 }
4066
4067 if (label)
4068 emit_label (label);
4069 }
4070 }
4071 /* Handle calls that return values in multiple non-contiguous locations.
4072 The Irix 6 ABI has examples of this. */
4073 else if (GET_CODE (target) == PARALLEL)
4074 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4075 TYPE_ALIGN (TREE_TYPE (exp)));
4076 else if (GET_MODE (temp) == BLKmode)
4077 emit_block_move (target, temp, expr_size (exp),
4078 TYPE_ALIGN (TREE_TYPE (exp)));
4079 else
4080 emit_move_insn (target, temp);
4081 }
4082
4083 /* If we don't want a value, return NULL_RTX. */
4084 if (! want_value)
4085 return NULL_RTX;
4086
4087 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4088 ??? The latter test doesn't seem to make sense. */
4089 else if (dont_return_target && GET_CODE (temp) != MEM)
4090 return temp;
4091
4092 /* Return TARGET itself if it is a hard register. */
4093 else if (want_value && GET_MODE (target) != BLKmode
4094 && ! (GET_CODE (target) == REG
4095 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4096 return copy_to_reg (target);
4097
4098 else
4099 return target;
4100 }
4101 \f
4102 /* Return 1 if EXP just contains zeros. */
4103
4104 static int
4105 is_zeros_p (exp)
4106 tree exp;
4107 {
4108 tree elt;
4109
4110 switch (TREE_CODE (exp))
4111 {
4112 case CONVERT_EXPR:
4113 case NOP_EXPR:
4114 case NON_LVALUE_EXPR:
4115 return is_zeros_p (TREE_OPERAND (exp, 0));
4116
4117 case INTEGER_CST:
4118 return integer_zerop (exp);
4119
4120 case COMPLEX_CST:
4121 return
4122 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4123
4124 case REAL_CST:
4125 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4126
4127 case CONSTRUCTOR:
4128 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4129 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4130 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4131 if (! is_zeros_p (TREE_VALUE (elt)))
4132 return 0;
4133
4134 return 1;
4135
4136 default:
4137 return 0;
4138 }
4139 }
4140
4141 /* Return 1 if EXP contains mostly (3/4) zeros. */
4142
4143 static int
4144 mostly_zeros_p (exp)
4145 tree exp;
4146 {
4147 if (TREE_CODE (exp) == CONSTRUCTOR)
4148 {
4149 int elts = 0, zeros = 0;
4150 tree elt = CONSTRUCTOR_ELTS (exp);
4151 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4152 {
4153 /* If there are no ranges of true bits, it is all zero. */
4154 return elt == NULL_TREE;
4155 }
4156 for (; elt; elt = TREE_CHAIN (elt))
4157 {
4158 /* We do not handle the case where the index is a RANGE_EXPR,
4159 so the statistic will be somewhat inaccurate.
4160 We do make a more accurate count in store_constructor itself,
4161 and since this function is only used for nested array elements,
4162 this should be close enough. */
4163 if (mostly_zeros_p (TREE_VALUE (elt)))
4164 zeros++;
4165 elts++;
4166 }
4167
4168 return 4 * zeros >= 3 * elts;
4169 }
4170
4171 return is_zeros_p (exp);
4172 }
4173 \f
4174 /* Helper function for store_constructor.
4175 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4176 TYPE is the type of the CONSTRUCTOR, not the element type.
4177 ALIGN and CLEARED are as for store_constructor.
4178
4179 This provides a recursive shortcut back to store_constructor when it isn't
4180 necessary to go through store_field. This is so that we can pass through
4181 the cleared field to let store_constructor know that we may not have to
4182 clear a substructure if the outer structure has already been cleared. */
4183
4184 static void
4185 store_constructor_field (target, bitsize, bitpos,
4186 mode, exp, type, align, cleared)
4187 rtx target;
4188 unsigned HOST_WIDE_INT bitsize;
4189 HOST_WIDE_INT bitpos;
4190 enum machine_mode mode;
4191 tree exp, type;
4192 unsigned int align;
4193 int cleared;
4194 {
4195 if (TREE_CODE (exp) == CONSTRUCTOR
4196 && bitpos % BITS_PER_UNIT == 0
4197 /* If we have a non-zero bitpos for a register target, then we just
4198 let store_field do the bitfield handling. This is unlikely to
4199 generate unnecessary clear instructions anyway. */
4200 && (bitpos == 0 || GET_CODE (target) == MEM))
4201 {
4202 if (bitpos != 0)
4203 target
4204 = change_address (target,
4205 GET_MODE (target) == BLKmode
4206 || 0 != (bitpos
4207 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4208 ? BLKmode : VOIDmode,
4209 plus_constant (XEXP (target, 0),
4210 bitpos / BITS_PER_UNIT));
4211 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4212 }
4213 else
4214 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4215 int_size_in_bytes (type), 0);
4216 }
4217
4218 /* Store the value of constructor EXP into the rtx TARGET.
4219 TARGET is either a REG or a MEM.
4220 ALIGN is the maximum known alignment for TARGET.
4221 CLEARED is true if TARGET is known to have been zero'd.
4222 SIZE is the number of bytes of TARGET we are allowed to modify: this
4223 may not be the same as the size of EXP if we are assigning to a field
4224 which has been packed to exclude padding bits. */
4225
4226 static void
4227 store_constructor (exp, target, align, cleared, size)
4228 tree exp;
4229 rtx target;
4230 unsigned int align;
4231 int cleared;
4232 HOST_WIDE_INT size;
4233 {
4234 tree type = TREE_TYPE (exp);
4235 #ifdef WORD_REGISTER_OPERATIONS
4236 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4237 #endif
4238
4239 /* We know our target cannot conflict, since safe_from_p has been called. */
4240 #if 0
4241 /* Don't try copying piece by piece into a hard register
4242 since that is vulnerable to being clobbered by EXP.
4243 Instead, construct in a pseudo register and then copy it all. */
4244 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4245 {
4246 rtx temp = gen_reg_rtx (GET_MODE (target));
4247 store_constructor (exp, temp, align, cleared, size);
4248 emit_move_insn (target, temp);
4249 return;
4250 }
4251 #endif
4252
4253 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4254 || TREE_CODE (type) == QUAL_UNION_TYPE)
4255 {
4256 register tree elt;
4257
4258 /* Inform later passes that the whole union value is dead. */
4259 if ((TREE_CODE (type) == UNION_TYPE
4260 || TREE_CODE (type) == QUAL_UNION_TYPE)
4261 && ! cleared)
4262 {
4263 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4264
4265 /* If the constructor is empty, clear the union. */
4266 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4267 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4268 }
4269
4270 /* If we are building a static constructor into a register,
4271 set the initial value as zero so we can fold the value into
4272 a constant. But if more than one register is involved,
4273 this probably loses. */
4274 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4275 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4276 {
4277 if (! cleared)
4278 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4279
4280 cleared = 1;
4281 }
4282
4283 /* If the constructor has fewer fields than the structure
4284 or if we are initializing the structure to mostly zeros,
4285 clear the whole structure first. Don't do this if TARGET is a
4286 register whose mode size isn't equal to SIZE, since clear_storage
4287 can't handle this case. */
4288 else if (size > 0
4289 && ((list_length (CONSTRUCTOR_ELTS (exp))
4290 != fields_length (type))
4291 || mostly_zeros_p (exp))
4292 && (GET_CODE (target) != REG
4293 || GET_MODE_SIZE (GET_MODE (target)) == size))
4294 {
4295 if (! cleared)
4296 clear_storage (target, GEN_INT (size), align);
4297
4298 cleared = 1;
4299 }
4300 else if (! cleared)
4301 /* Inform later passes that the old value is dead. */
4302 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4303
4304 /* Store each element of the constructor into
4305 the corresponding field of TARGET. */
4306
4307 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4308 {
4309 register tree field = TREE_PURPOSE (elt);
4310 #ifdef WORD_REGISTER_OPERATIONS
4311 tree value = TREE_VALUE (elt);
4312 #endif
4313 register enum machine_mode mode;
4314 HOST_WIDE_INT bitsize;
4315 HOST_WIDE_INT bitpos = 0;
4316 int unsignedp;
4317 tree offset;
4318 rtx to_rtx = target;
4319
4320 /* Just ignore missing fields.
4321 We cleared the whole structure, above,
4322 if any fields are missing. */
4323 if (field == 0)
4324 continue;
4325
4326 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4327 continue;
4328
4329 if (host_integerp (DECL_SIZE (field), 1))
4330 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4331 else
4332 bitsize = -1;
4333
4334 unsignedp = TREE_UNSIGNED (field);
4335 mode = DECL_MODE (field);
4336 if (DECL_BIT_FIELD (field))
4337 mode = VOIDmode;
4338
4339 offset = DECL_FIELD_OFFSET (field);
4340 if (host_integerp (offset, 0)
4341 && host_integerp (bit_position (field), 0))
4342 {
4343 bitpos = int_bit_position (field);
4344 offset = 0;
4345 }
4346 else
4347 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4348
4349 if (offset)
4350 {
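/* The field lies at a variable offset: expand the offset, extend it
   to the pointer mode if needed, and add it to the base address of
   TARGET.  */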
4351 rtx offset_rtx;
4352
4353 if (contains_placeholder_p (offset))
4354 offset = build (WITH_RECORD_EXPR, sizetype,
4355 offset, make_tree (TREE_TYPE (exp), target));
4356
4357 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4358 if (GET_CODE (to_rtx) != MEM)
4359 abort ();
4360
4361 if (GET_MODE (offset_rtx) != ptr_mode)
4362 {
4363 #ifdef POINTERS_EXTEND_UNSIGNED
4364 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4365 #else
4366 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4367 #endif
4368 }
4369
4370 to_rtx
4371 = change_address (to_rtx, VOIDmode,
4372 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4373 force_reg (ptr_mode,
4374 offset_rtx)));
4375 align = DECL_OFFSET_ALIGN (field);
4376 }
4377
4378 if (TREE_READONLY (field))
4379 {
4380 if (GET_CODE (to_rtx) == MEM)
4381 to_rtx = copy_rtx (to_rtx);
4382
4383 RTX_UNCHANGING_P (to_rtx) = 1;
4384 }
4385
4386 #ifdef WORD_REGISTER_OPERATIONS
4387 /* If this initializes a field that is smaller than a word, at the
4388 start of a word, try to widen it to a full word.
4389 This special case allows us to output C++ member function
4390 initializations in a form that the optimizers can understand. */
4391 if (GET_CODE (target) == REG
4392 && bitsize < BITS_PER_WORD
4393 && bitpos % BITS_PER_WORD == 0
4394 && GET_MODE_CLASS (mode) == MODE_INT
4395 && TREE_CODE (value) == INTEGER_CST
4396 && exp_size >= 0
4397 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4398 {
4399 tree type = TREE_TYPE (value);
4400 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4401 {
4402 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4403 value = convert (type, value);
4404 }
4405 if (BYTES_BIG_ENDIAN)
4406 value
4407 = fold (build (LSHIFT_EXPR, type, value,
4408 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4409 bitsize = BITS_PER_WORD;
4410 mode = word_mode;
4411 }
4412 #endif
4413 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4414 TREE_VALUE (elt), type, align, cleared);
4415 }
4416 }
4417 else if (TREE_CODE (type) == ARRAY_TYPE)
4418 {
4419 register tree elt;
4420 register int i;
4421 int need_to_clear;
4422 tree domain = TYPE_DOMAIN (type);
4423 tree elttype = TREE_TYPE (type);
4424 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4425 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4426 HOST_WIDE_INT minelt;
4427 HOST_WIDE_INT maxelt;
4428
4429 /* If we have constant bounds for the range of the type, get them. */
4430 if (const_bounds_p)
4431 {
4432 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4433 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4434 }
4435
4436 /* If the constructor has fewer elements than the array,
4437 clear the whole array first. Similarly if this is
4438 a static constructor of a non-BLKmode object. */
4439 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4440 need_to_clear = 1;
4441 else
4442 {
4443 HOST_WIDE_INT count = 0, zero_count = 0;
4444 need_to_clear = ! const_bounds_p;
4445
4446 /* This loop is a more accurate version of the loop in
4447 mostly_zeros_p (it handles RANGE_EXPR in an index).
4448 It is also needed to check for missing elements. */
4449 for (elt = CONSTRUCTOR_ELTS (exp);
4450 elt != NULL_TREE && ! need_to_clear;
4451 elt = TREE_CHAIN (elt))
4452 {
4453 tree index = TREE_PURPOSE (elt);
4454 HOST_WIDE_INT this_node_count;
4455
4456 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4457 {
4458 tree lo_index = TREE_OPERAND (index, 0);
4459 tree hi_index = TREE_OPERAND (index, 1);
4460
4461 if (! host_integerp (lo_index, 1)
4462 || ! host_integerp (hi_index, 1))
4463 {
4464 need_to_clear = 1;
4465 break;
4466 }
4467
4468 this_node_count = (tree_low_cst (hi_index, 1)
4469 - tree_low_cst (lo_index, 1) + 1);
4470 }
4471 else
4472 this_node_count = 1;
4473
4474 count += this_node_count;
4475 if (mostly_zeros_p (TREE_VALUE (elt)))
4476 zero_count += this_node_count;
4477 }
4478
4479 /* Clear the entire array first if there are any missing elements,
4480 or if the incidence of zero elements is >= 75%. */
4481 if (! need_to_clear
4482 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4483 need_to_clear = 1;
4484 }
4485
4486 if (need_to_clear && size > 0)
4487 {
4488 if (! cleared)
4489 clear_storage (target, GEN_INT (size), align);
4490 cleared = 1;
4491 }
4492 else
4493 /* Inform later passes that the old value is dead. */
4494 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4495
4496 /* Store each element of the constructor into
4497 the corresponding element of TARGET, determined
4498 by counting the elements. */
4499 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4500 elt;
4501 elt = TREE_CHAIN (elt), i++)
4502 {
4503 register enum machine_mode mode;
4504 HOST_WIDE_INT bitsize;
4505 HOST_WIDE_INT bitpos;
4506 int unsignedp;
4507 tree value = TREE_VALUE (elt);
4508 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4509 tree index = TREE_PURPOSE (elt);
4510 rtx xtarget = target;
4511
4512 if (cleared && is_zeros_p (value))
4513 continue;
4514
4515 unsignedp = TREE_UNSIGNED (elttype);
4516 mode = TYPE_MODE (elttype);
4517 if (mode == BLKmode)
4518 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4519 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4520 : -1);
4521 else
4522 bitsize = GET_MODE_BITSIZE (mode);
4523
4524 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4525 {
4526 tree lo_index = TREE_OPERAND (index, 0);
4527 tree hi_index = TREE_OPERAND (index, 1);
4528 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4529 struct nesting *loop;
4530 HOST_WIDE_INT lo, hi, count;
4531 tree position;
4532
4533 /* If the range is constant and "small", unroll the loop. */
4534 if (const_bounds_p
4535 && host_integerp (lo_index, 0)
4536 && host_integerp (hi_index, 0)
4537 && (lo = tree_low_cst (lo_index, 0),
4538 hi = tree_low_cst (hi_index, 0),
4539 count = hi - lo + 1,
4540 (GET_CODE (target) != MEM
4541 || count <= 2
4542 || (host_integerp (TYPE_SIZE (elttype), 1)
4543 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4544 <= 40 * 8)))))
4545 {
4546 lo -= minelt; hi -= minelt;
4547 for (; lo <= hi; lo++)
4548 {
4549 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4550 store_constructor_field (target, bitsize, bitpos, mode,
4551 value, type, align, cleared);
4552 }
4553 }
4554 else
4555 {
4556 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4557 loop_top = gen_label_rtx ();
4558 loop_end = gen_label_rtx ();
4559
4560 unsignedp = TREE_UNSIGNED (domain);
4561
4562 index = build_decl (VAR_DECL, NULL_TREE, domain);
4563
4564 DECL_RTL (index) = index_r
4565 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4566 &unsignedp, 0));
4567
4568 if (TREE_CODE (value) == SAVE_EXPR
4569 && SAVE_EXPR_RTL (value) == 0)
4570 {
4571 /* Make sure value gets expanded once before the
4572 loop. */
4573 expand_expr (value, const0_rtx, VOIDmode, 0);
4574 emit_queue ();
4575 }
4576 store_expr (lo_index, index_r, 0);
4577 loop = expand_start_loop (0);
4578
4579 /* Assign value to element index. */
4580 position
4581 = convert (ssizetype,
4582 fold (build (MINUS_EXPR, TREE_TYPE (index),
4583 index, TYPE_MIN_VALUE (domain))));
4584 position = size_binop (MULT_EXPR, position,
4585 convert (ssizetype,
4586 TYPE_SIZE_UNIT (elttype)));
4587
4588 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4589 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4590 xtarget = change_address (target, mode, addr);
4591 if (TREE_CODE (value) == CONSTRUCTOR)
4592 store_constructor (value, xtarget, align, cleared,
4593 bitsize / BITS_PER_UNIT);
4594 else
4595 store_expr (value, xtarget, 0);
4596
4597 expand_exit_loop_if_false (loop,
4598 build (LT_EXPR, integer_type_node,
4599 index, hi_index));
4600
4601 expand_increment (build (PREINCREMENT_EXPR,
4602 TREE_TYPE (index),
4603 index, integer_one_node), 0, 0);
4604 expand_end_loop ();
4605 emit_label (loop_end);
4606 }
4607 }
4608 else if ((index != 0 && ! host_integerp (index, 0))
4609 || ! host_integerp (TYPE_SIZE (elttype), 1))
4610 {
4611 rtx pos_rtx, addr;
4612 tree position;
4613
4614 if (index == 0)
4615 index = ssize_int (1);
4616
4617 if (minelt)
4618 index = convert (ssizetype,
4619 fold (build (MINUS_EXPR, TREE_TYPE (index),
4620 index, TYPE_MIN_VALUE (domain))));
4621
4622 position = size_binop (MULT_EXPR, index,
4623 convert (ssizetype,
4624 TYPE_SIZE_UNIT (elttype)));
4625 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4626 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4627 xtarget = change_address (target, mode, addr);
4628 store_expr (value, xtarget, 0);
4629 }
4630 else
4631 {
4632 if (index != 0)
4633 bitpos = ((tree_low_cst (index, 0) - minelt)
4634 * tree_low_cst (TYPE_SIZE (elttype), 1));
4635 else
4636 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4637
4638 store_constructor_field (target, bitsize, bitpos, mode, value,
4639 type, align, cleared);
4640 }
4641 }
4642 }
4643
4644 /* Set constructor assignments. */
4645 else if (TREE_CODE (type) == SET_TYPE)
4646 {
4647 tree elt = CONSTRUCTOR_ELTS (exp);
4648 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4649 tree domain = TYPE_DOMAIN (type);
4650 tree domain_min, domain_max, bitlength;
4651
4652 /* The default implementation strategy is to extract the constant
4653 parts of the constructor, use that to initialize the target,
4654 and then "or" in whatever non-constant ranges we need in addition.
4655
4656 If a large set is all zero or all ones, it is
4657 probably better to set it using memset (if available) or bzero.
4658 Also, if a large set has just a single range, it may also be
4659 better to first clear the set (using bzero/memset), and then
4660 set the bits we want. */
4661
4662 /* Check for all zeros. */
4663 if (elt == NULL_TREE && size > 0)
4664 {
4665 if (!cleared)
4666 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4667 return;
4668 }
4669
4670 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4671 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4672 bitlength = size_binop (PLUS_EXPR,
4673 size_diffop (domain_max, domain_min),
4674 ssize_int (1));
4675
4676 nbits = tree_low_cst (bitlength, 1);
4677
4678 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4679 are "complicated" (more than one range), initialize (the
4680 constant parts) by copying from a constant. */
4681 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4682 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4683 {
4684 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4685 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4686 char *bit_buffer = (char *) alloca (nbits);
4687 HOST_WIDE_INT word = 0;
4688 unsigned int bit_pos = 0;
4689 unsigned int ibit = 0;
4690 unsigned int offset = 0; /* In bytes from beginning of set. */
4691
4692 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
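/* Walk the bits in BIT_BUFFER, packing them into WORD and storing
   each completed word of the set into TARGET.  */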
4693 for (;;)
4694 {
4695 if (bit_buffer[ibit])
4696 {
4697 if (BYTES_BIG_ENDIAN)
4698 word |= (1 << (set_word_size - 1 - bit_pos));
4699 else
4700 word |= 1 << bit_pos;
4701 }
4702
4703 bit_pos++; ibit++;
4704 if (bit_pos >= set_word_size || ibit == nbits)
4705 {
4706 if (word != 0 || ! cleared)
4707 {
4708 rtx datum = GEN_INT (word);
4709 rtx to_rtx;
4710
4711 /* The assumption here is that it is safe to use
4712 XEXP if the set is multi-word, but not if
4713 it's single-word. */
4714 if (GET_CODE (target) == MEM)
4715 {
4716 to_rtx = plus_constant (XEXP (target, 0), offset);
4717 to_rtx = change_address (target, mode, to_rtx);
4718 }
4719 else if (offset == 0)
4720 to_rtx = target;
4721 else
4722 abort ();
4723 emit_move_insn (to_rtx, datum);
4724 }
4725
4726 if (ibit == nbits)
4727 break;
4728 word = 0;
4729 bit_pos = 0;
4730 offset += set_word_size / BITS_PER_UNIT;
4731 }
4732 }
4733 }
4734 else if (!cleared)
4735 /* Don't bother clearing storage if the set is all ones. */
4736 if (TREE_CHAIN (elt) != NULL_TREE
4737 || (TREE_PURPOSE (elt) == NULL_TREE
4738 ? nbits != 1
4739 : ( ! host_integerp (TREE_VALUE (elt), 0)
4740 || ! host_integerp (TREE_PURPOSE (elt), 0)
4741 || (tree_low_cst (TREE_VALUE (elt), 0)
4742 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4743 != (HOST_WIDE_INT) nbits))))
4744 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4745
4746 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4747 {
4748 /* Start of range of element or NULL. */
4749 tree startbit = TREE_PURPOSE (elt);
4750 /* End of range of element, or element value. */
4751 tree endbit = TREE_VALUE (elt);
4752 #ifdef TARGET_MEM_FUNCTIONS
4753 HOST_WIDE_INT startb, endb;
4754 #endif
4755 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4756
4757 bitlength_rtx = expand_expr (bitlength,
4758 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4759
4760 /* Handle non-range tuple element like [ expr ]. */
4761 if (startbit == NULL_TREE)
4762 {
4763 startbit = save_expr (endbit);
4764 endbit = startbit;
4765 }
4766
4767 startbit = convert (sizetype, startbit);
4768 endbit = convert (sizetype, endbit);
4769 if (! integer_zerop (domain_min))
4770 {
4771 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4772 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4773 }
4774 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4775 EXPAND_CONST_ADDRESS);
4776 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4777 EXPAND_CONST_ADDRESS);
4778
4779 if (REG_P (target))
4780 {
4781 targetx = assign_stack_temp (GET_MODE (target),
4782 GET_MODE_SIZE (GET_MODE (target)),
4783 0);
4784 emit_move_insn (targetx, target);
4785 }
4786
4787 else if (GET_CODE (target) == MEM)
4788 targetx = target;
4789 else
4790 abort ();
4791
4792 #ifdef TARGET_MEM_FUNCTIONS
4793 /* Optimization: If startbit and endbit are
4794 constants divisible by BITS_PER_UNIT,
4795 call memset instead. */
4796 if (TREE_CODE (startbit) == INTEGER_CST
4797 && TREE_CODE (endbit) == INTEGER_CST
4798 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4799 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4800 {
4801 emit_library_call (memset_libfunc, LCT_NORMAL,
4802 VOIDmode, 3,
4803 plus_constant (XEXP (targetx, 0),
4804 startb / BITS_PER_UNIT),
4805 Pmode,
4806 constm1_rtx, TYPE_MODE (integer_type_node),
4807 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4808 TYPE_MODE (sizetype));
4809 }
4810 else
4811 #endif
4812 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4813 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4814 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4815 startbit_rtx, TYPE_MODE (sizetype),
4816 endbit_rtx, TYPE_MODE (sizetype));
4817
4818 if (REG_P (target))
4819 emit_move_insn (target, targetx);
4820 }
4821 }
4822
4823 else
4824 abort ();
4825 }
4826
4827 /* Store the value of EXP (an expression tree)
4828 into a subfield of TARGET which has mode MODE and occupies
4829 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4830 If MODE is VOIDmode, it means that we are storing into a bit-field.
4831
4832 If VALUE_MODE is VOIDmode, return nothing in particular.
4833 UNSIGNEDP is not used in this case.
4834
4835 Otherwise, return an rtx for the value stored. This rtx
4836 has mode VALUE_MODE if that is convenient to do.
4837 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4838
4839 ALIGN is the alignment that TARGET is known to have.
4840 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4841
4842 ALIAS_SET is the alias set for the destination. This value will
4843 (in general) be different from that for TARGET, since TARGET is a
4844 reference to the containing structure. */
4845
4846 static rtx
4847 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4848 unsignedp, align, total_size, alias_set)
4849 rtx target;
4850 HOST_WIDE_INT bitsize;
4851 HOST_WIDE_INT bitpos;
4852 enum machine_mode mode;
4853 tree exp;
4854 enum machine_mode value_mode;
4855 int unsignedp;
4856 unsigned int align;
4857 HOST_WIDE_INT total_size;
4858 int alias_set;
4859 {
4860 HOST_WIDE_INT width_mask = 0;
4861
4862 if (TREE_CODE (exp) == ERROR_MARK)
4863 return const0_rtx;
4864
4865 if (bitsize < HOST_BITS_PER_WIDE_INT)
4866 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4867
4868 /* If we are storing into an unaligned field of an aligned union that is
4869 in a register, we may have the mode of TARGET being an integer mode but
4870 MODE == BLKmode. In that case, get an aligned object whose size and
4871 alignment are the same as TARGET and store TARGET into it (we can avoid
4872 the store if the field being stored is the entire width of TARGET). Then
4873 call ourselves recursively to store the field into a BLKmode version of
4874 that object. Finally, load from the object into TARGET. This is not
4875 very efficient in general, but should only be slightly more expensive
4876 than the otherwise-required unaligned accesses. Perhaps this can be
4877 cleaned up later. */
4878
4879 if (mode == BLKmode
4880 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4881 {
4882 rtx object = assign_stack_temp (GET_MODE (target),
4883 GET_MODE_SIZE (GET_MODE (target)), 0);
4884 rtx blk_object = copy_rtx (object);
4885
4886 MEM_SET_IN_STRUCT_P (object, 1);
4887 MEM_SET_IN_STRUCT_P (blk_object, 1);
4888 PUT_MODE (blk_object, BLKmode);
4889
4890 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4891 emit_move_insn (object, target);
4892
4893 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4894 align, total_size, alias_set);
4895
4896 /* Even though we aren't returning target, we need to
4897 give it the updated value. */
4898 emit_move_insn (target, object);
4899
4900 return blk_object;
4901 }
4902
4903 if (GET_CODE (target) == CONCAT)
4904 {
4905 /* We're storing into a struct containing a single __complex. */
4906
4907 if (bitpos != 0)
4908 abort ();
4909 return store_expr (exp, target, 0);
4910 }
4911
4912 /* If the structure is in a register or if the component
4913 is a bit field, we cannot use addressing to access it.
4914 Use bit-field techniques or SUBREG to store in it. */
4915
4916 if (mode == VOIDmode
4917 || (mode != BLKmode && ! direct_store[(int) mode]
4918 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4919 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4920 || GET_CODE (target) == REG
4921 || GET_CODE (target) == SUBREG
4922 /* If the field isn't aligned enough to store as an ordinary memref,
4923 store it as a bit field. */
4924 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4925 && (align < GET_MODE_ALIGNMENT (mode)
4926 || bitpos % GET_MODE_ALIGNMENT (mode)))
4927 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4928 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4929 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4930 /* If the RHS and field are a constant size and the size of the
4931 RHS isn't the same size as the bitfield, we must use bitfield
4932 operations. */
4933 || (bitsize >= 0
4934 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4935 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4936 {
4937 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4938
4939 /* If BITSIZE is narrower than the size of the type of EXP
4940 we will be narrowing TEMP. Normally, what's wanted are the
4941 low-order bits. However, if EXP's type is a record and this is
4942 big-endian machine, we want the upper BITSIZE bits. */
4943 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4944 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4945 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4946 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4947 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4948 - bitsize),
4949 temp, 1);
4950
4951 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4952 MODE. */
4953 if (mode != VOIDmode && mode != BLKmode
4954 && mode != TYPE_MODE (TREE_TYPE (exp)))
4955 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4956
4957 /* If the modes of TARGET and TEMP are both BLKmode, both
4958 must be in memory and BITPOS must be aligned on a byte
4959 boundary. If so, we simply do a block copy. */
4960 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4961 {
4962 unsigned int exp_align = expr_align (exp);
4963
4964 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4965 || bitpos % BITS_PER_UNIT != 0)
4966 abort ();
4967
4968 target = change_address (target, VOIDmode,
4969 plus_constant (XEXP (target, 0),
4970 bitpos / BITS_PER_UNIT));
4971
4972 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4973 align = MIN (exp_align, align);
4974
4975 /* Find an alignment that is consistent with the bit position. */
4976 while ((bitpos % align) != 0)
4977 align >>= 1;
4978
4979 emit_block_move (target, temp,
4980 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4981 / BITS_PER_UNIT),
4982 align);
4983
4984 return value_mode == VOIDmode ? const0_rtx : target;
4985 }
4986
4987 /* Store the value in the bitfield. */
4988 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4989 if (value_mode != VOIDmode)
4990 {
4991 /* The caller wants an rtx for the value. */
4992 /* If possible, avoid refetching from the bitfield itself. */
4993 if (width_mask != 0
4994 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4995 {
4996 tree count;
4997 enum machine_mode tmode;
4998
4999 if (unsignedp)
5000 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5001 tmode = GET_MODE (temp);
5002 if (tmode == VOIDmode)
5003 tmode = value_mode;
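/* Sign-extend the field: shift it up to the top of TMODE and
   arithmetically shift it back down.  */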
5004 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5005 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5006 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5007 }
5008 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5009 NULL_RTX, value_mode, 0, align,
5010 total_size);
5011 }
5012 return const0_rtx;
5013 }
5014 else
5015 {
5016 rtx addr = XEXP (target, 0);
5017 rtx to_rtx;
5018
5019 /* If a value is wanted, it must be the lhs;
5020 so make the address stable for multiple use. */
5021
5022 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5023 && ! CONSTANT_ADDRESS_P (addr)
5024 /* A frame-pointer reference is already stable. */
5025 && ! (GET_CODE (addr) == PLUS
5026 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5027 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5028 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5029 addr = copy_to_reg (addr);
5030
5031 /* Now build a reference to just the desired component. */
5032
5033 to_rtx = copy_rtx (change_address (target, mode,
5034 plus_constant (addr,
5035 (bitpos
5036 / BITS_PER_UNIT))));
5037 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5038 MEM_ALIAS_SET (to_rtx) = alias_set;
5039
5040 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5041 }
5042 }
5043 \f
5044 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5045 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5046 ARRAY_REFs and find the ultimate containing object, which we return.
5047
5048 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5049 bit position, and *PUNSIGNEDP to the signedness of the field.
5050 If the position of the field is variable, we store a tree
5051 giving the variable offset (in units) in *POFFSET.
5052 This offset is in addition to the bit position.
5053 If the position is not variable, we store 0 in *POFFSET.
5054 We set *PALIGNMENT to the alignment of the address that will be
5055 computed. This is the alignment of the thing we return if *POFFSET
5056 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5057
5058 If any of the extraction expressions is volatile,
5059 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5060
5061 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5062 is a mode that can be used to access the field. In that case, *PBITSIZE
5063 is redundant.
5064
5065 If the field describes a variable-sized object, *PMODE is set to
5066 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5067 this case, but the address of the object can be found. */
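/* For example, for a reference like a.b[2].c this returns the containing
   object A; *PBITPOS (plus any variable *POFFSET) gives where C starts
   within it and *PBITSIZE gives its size.  */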
5068
5069 tree
5070 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5071 punsignedp, pvolatilep, palignment)
5072 tree exp;
5073 HOST_WIDE_INT *pbitsize;
5074 HOST_WIDE_INT *pbitpos;
5075 tree *poffset;
5076 enum machine_mode *pmode;
5077 int *punsignedp;
5078 int *pvolatilep;
5079 unsigned int *palignment;
5080 {
5081 tree size_tree = 0;
5082 enum machine_mode mode = VOIDmode;
5083 tree offset = size_zero_node;
5084 tree bit_offset = bitsize_zero_node;
5085 unsigned int alignment = BIGGEST_ALIGNMENT;
5086 tree tem;
5087
5088 /* First get the mode, signedness, and size. We do this from just the
5089 outermost expression. */
5090 if (TREE_CODE (exp) == COMPONENT_REF)
5091 {
5092 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5093 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5094 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5095
5096 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5097 }
5098 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5099 {
5100 size_tree = TREE_OPERAND (exp, 1);
5101 *punsignedp = TREE_UNSIGNED (exp);
5102 }
5103 else
5104 {
5105 mode = TYPE_MODE (TREE_TYPE (exp));
5106 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5107
5108 if (mode == BLKmode)
5109 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5110 else
5111 *pbitsize = GET_MODE_BITSIZE (mode);
5112 }
5113
5114 if (size_tree != 0)
5115 {
5116 if (! host_integerp (size_tree, 1))
5117 mode = BLKmode, *pbitsize = -1;
5118 else
5119 *pbitsize = tree_low_cst (size_tree, 1);
5120 }
5121
5122 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5123 and find the ultimate containing object. */
5124 while (1)
5125 {
5126 if (TREE_CODE (exp) == BIT_FIELD_REF)
5127 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5128 else if (TREE_CODE (exp) == COMPONENT_REF)
5129 {
5130 tree field = TREE_OPERAND (exp, 1);
5131 tree this_offset = DECL_FIELD_OFFSET (field);
5132
5133 /* If this field hasn't been filled in yet, don't go
5134 past it. This should only happen when folding expressions
5135 made during type construction. */
5136 if (this_offset == 0)
5137 break;
5138 else if (! TREE_CONSTANT (this_offset)
5139 && contains_placeholder_p (this_offset))
5140 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5141
5142 offset = size_binop (PLUS_EXPR, offset, this_offset);
5143 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5144 DECL_FIELD_BIT_OFFSET (field));
5145
5146 if (! host_integerp (offset, 0))
5147 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5148 }
5149
5150 else if (TREE_CODE (exp) == ARRAY_REF)
5151 {
5152 tree index = TREE_OPERAND (exp, 1);
5153 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5154 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5155 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5156
5157 /* We assume all arrays have sizes that are a multiple of a byte.
5158 First subtract the lower bound, if any, in the type of the
5159 index, then convert to sizetype and multiply by the size of the
5160 array element. */
5161 if (low_bound != 0 && ! integer_zerop (low_bound))
5162 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5163 index, low_bound));
5164
5165 /* If the index has a self-referential type, pass it to a
5166 WITH_RECORD_EXPR; if the component size does, pass our
5167 component to one. */
5168 if (! TREE_CONSTANT (index)
5169 && contains_placeholder_p (index))
5170 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5171 if (! TREE_CONSTANT (unit_size)
5172 && contains_placeholder_p (unit_size))
5173 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5174 TREE_OPERAND (exp, 0));
5175
5176 offset = size_binop (PLUS_EXPR, offset,
5177 size_binop (MULT_EXPR,
5178 convert (sizetype, index),
5179 unit_size));
5180 }
5181
5182 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5183 && ! ((TREE_CODE (exp) == NOP_EXPR
5184 || TREE_CODE (exp) == CONVERT_EXPR)
5185 && (TYPE_MODE (TREE_TYPE (exp))
5186 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5187 break;
5188
5189 /* If any reference in the chain is volatile, the effect is volatile. */
5190 if (TREE_THIS_VOLATILE (exp))
5191 *pvolatilep = 1;
5192
5193 /* If the offset is non-constant already, then we can't assume any
5194 alignment more than the alignment here. */
5195 if (! TREE_CONSTANT (offset))
5196 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5197
5198 exp = TREE_OPERAND (exp, 0);
5199 }
5200
5201 if (DECL_P (exp))
5202 alignment = MIN (alignment, DECL_ALIGN (exp));
5203 else if (TREE_TYPE (exp) != 0)
5204 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5205
5206 /* If OFFSET is constant, see if we can return the whole thing as a
5207 constant bit position. Otherwise, split it up. */
5208 if (host_integerp (offset, 0)
5209 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5210 bitsize_unit_node))
5211 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5212 && host_integerp (tem, 0))
5213 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5214 else
5215 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5216
5217 *pmode = mode;
5218 *palignment = alignment;
5219 return exp;
5220 }
5221
5222 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5223
5224 static enum memory_use_mode
5225 get_memory_usage_from_modifier (modifier)
5226 enum expand_modifier modifier;
5227 {
5228 switch (modifier)
5229 {
5230 case EXPAND_NORMAL:
5231 case EXPAND_SUM:
5232 return MEMORY_USE_RO;
5233 break;
5234 case EXPAND_MEMORY_USE_WO:
5235 return MEMORY_USE_WO;
5236 break;
5237 case EXPAND_MEMORY_USE_RW:
5238 return MEMORY_USE_RW;
5239 break;
5240 case EXPAND_MEMORY_USE_DONT:
5241 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5242 MEMORY_USE_DONT, because they are modifiers to a call of
5243 expand_expr in the ADDR_EXPR case of expand_expr. */
5244 case EXPAND_CONST_ADDRESS:
5245 case EXPAND_INITIALIZER:
5246 return MEMORY_USE_DONT;
5247 case EXPAND_MEMORY_USE_BAD:
5248 default:
5249 abort ();
5250 }
5251 }
5252 \f
5253 /* Given an rtx VALUE that may contain additions and multiplications,
5254 return an equivalent value that just refers to a register or memory.
5255 This is done by generating instructions to perform the arithmetic
5256 and returning a pseudo-register containing the value.
5257
5258 The returned value may be a REG, SUBREG, MEM or constant. */
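/* For example, given (plus (reg) (const_int 4)) this emits the addition
   and returns an rtx (usually a pseudo register) holding the sum; a lone
   register or constant is returned unchanged.  */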
5259
5260 rtx
5261 force_operand (value, target)
5262 rtx value, target;
5263 {
5264 register optab binoptab = 0;
5265 /* Use a temporary to force order of execution of calls to
5266 `force_operand'. */
5267 rtx tmp;
5268 register rtx op2;
5269 /* Use subtarget as the target for operand 0 of a binary operation. */
5270 register rtx subtarget = get_subtarget (target);
5271
5272 /* Check for a PIC address load. */
5273 if (flag_pic
5274 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5275 && XEXP (value, 0) == pic_offset_table_rtx
5276 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5277 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5278 || GET_CODE (XEXP (value, 1)) == CONST))
5279 {
5280 if (!subtarget)
5281 subtarget = gen_reg_rtx (GET_MODE (value));
5282 emit_move_insn (subtarget, value);
5283 return subtarget;
5284 }
5285
5286 if (GET_CODE (value) == PLUS)
5287 binoptab = add_optab;
5288 else if (GET_CODE (value) == MINUS)
5289 binoptab = sub_optab;
5290 else if (GET_CODE (value) == MULT)
5291 {
5292 op2 = XEXP (value, 1);
5293 if (!CONSTANT_P (op2)
5294 && !(GET_CODE (op2) == REG && op2 != subtarget))
5295 subtarget = 0;
5296 tmp = force_operand (XEXP (value, 0), subtarget);
5297 return expand_mult (GET_MODE (value), tmp,
5298 force_operand (op2, NULL_RTX),
5299 target, 0);
5300 }
5301
5302 if (binoptab)
5303 {
5304 op2 = XEXP (value, 1);
5305 if (!CONSTANT_P (op2)
5306 && !(GET_CODE (op2) == REG && op2 != subtarget))
5307 subtarget = 0;
5308 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5309 {
5310 binoptab = add_optab;
5311 op2 = negate_rtx (GET_MODE (value), op2);
5312 }
5313
5314 /* Check for an addition with OP2 a constant integer and our first
5315 operand a PLUS of a virtual register and something else. In that
5316 case, we want to emit the sum of the virtual register and the
5317 constant first and then add the other value. This allows virtual
5318 register instantiation to simply modify the constant rather than
5319 creating another one around this addition. */
5320 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5321 && GET_CODE (XEXP (value, 0)) == PLUS
5322 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5323 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5324 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5325 {
5326 rtx temp = expand_binop (GET_MODE (value), binoptab,
5327 XEXP (XEXP (value, 0), 0), op2,
5328 subtarget, 0, OPTAB_LIB_WIDEN);
5329 return expand_binop (GET_MODE (value), binoptab, temp,
5330 force_operand (XEXP (XEXP (value, 0), 1), 0),
5331 target, 0, OPTAB_LIB_WIDEN);
5332 }
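	  /* Illustrative sketch (not part of the original code, pseudo
	     register number 100 is hypothetical): given
	     VALUE = (plus (plus (reg virtual-stack-vars) (reg 100)) (const_int 4)),
	     the branch above first computes
	         temp = virtual-stack-vars + 4
	     and then returns temp + (reg 100), so that virtual register
	     instantiation can later fold the 4 into the frame offset
	     instead of keeping a separate addition.  */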
5333
5334 tmp = force_operand (XEXP (value, 0), subtarget);
5335 return expand_binop (GET_MODE (value), binoptab, tmp,
5336 force_operand (op2, NULL_RTX),
5337 target, 0, OPTAB_LIB_WIDEN);
5338 /* We give UNSIGNEDP = 0 to expand_binop
5339 because the only operations we are expanding here are signed ones. */
5340 }
5341 return value;
5342 }
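/* Example of use (illustrative only; the pseudo register number is
   hypothetical): a caller that has built
       value = (plus (reg 65) (const_int 12))
   but needs a plain operand can write

	rtx op = force_operand (value, NULL_RTX);

   and gets back either TARGET or a fresh pseudo holding the sum,
   suitable for use directly as an instruction operand.  */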
5343 \f
5344 /* Subroutine of expand_expr:
5345 save the non-copied parts (LIST) of an expr (LHS), and return a list
5346 which can restore these values to their previous values,
5347 should something modify their storage. */
5348
5349 static tree
5350 save_noncopied_parts (lhs, list)
5351 tree lhs;
5352 tree list;
5353 {
5354 tree tail;
5355 tree parts = 0;
5356
5357 for (tail = list; tail; tail = TREE_CHAIN (tail))
5358 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5359 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5360 else
5361 {
5362 tree part = TREE_VALUE (tail);
5363 tree part_type = TREE_TYPE (part);
5364 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5365 rtx target = assign_temp (part_type, 0, 1, 1);
5366 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5367 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5368 parts = tree_cons (to_be_saved,
5369 build (RTL_EXPR, part_type, NULL_TREE,
5370 (tree) target),
5371 parts);
5372 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5373 }
5374 return parts;
5375 }
5376
5377 /* Subroutine of expand_expr:
5378 record the non-copied parts (LIST) of an expr (LHS), and return a list
5379 which specifies the initial values of these parts. */
5380
5381 static tree
5382 init_noncopied_parts (lhs, list)
5383 tree lhs;
5384 tree list;
5385 {
5386 tree tail;
5387 tree parts = 0;
5388
5389 for (tail = list; tail; tail = TREE_CHAIN (tail))
5390 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5391 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5392 else if (TREE_PURPOSE (tail))
5393 {
5394 tree part = TREE_VALUE (tail);
5395 tree part_type = TREE_TYPE (part);
5396 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5397 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5398 }
5399 return parts;
5400 }
5401
5402 /* Subroutine of expand_expr: return nonzero iff there is no way that
5403 EXP can reference X, which is being modified. TOP_P is nonzero if this
5404 call is going to be used to determine whether we need a temporary
5405 for EXP, as opposed to a recursive call to this function.
5406
5407 It is always safe for this routine to return zero since it merely
5408 searches for optimization opportunities. */
5409
5410 int
5411 safe_from_p (x, exp, top_p)
5412 rtx x;
5413 tree exp;
5414 int top_p;
5415 {
5416 rtx exp_rtl = 0;
5417 int i, nops;
5418 static int save_expr_count;
5419 static int save_expr_size = 0;
5420 static tree *save_expr_rewritten;
5421 static tree save_expr_trees[256];
5422
5423 if (x == 0
5424 /* If EXP has varying size, we MUST use a target since we currently
5425 have no way of allocating temporaries of variable size
5426 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5427 So we assume here that something at a higher level has prevented a
5428 clash. This is somewhat bogus, but the best we can do. Only
5429 do this when X is BLKmode and when we are at the top level. */
5430 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5431 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5432 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5433 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5434 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5435 != INTEGER_CST)
5436 && GET_MODE (x) == BLKmode))
5437 return 1;
5438
5439 if (top_p && save_expr_size == 0)
5440 {
5441 int rtn;
5442
5443 save_expr_count = 0;
5444 save_expr_size = ARRAY_SIZE (save_expr_trees);
5445 save_expr_rewritten = &save_expr_trees[0];
5446
5447 rtn = safe_from_p (x, exp, 1);
5448
5449 for (i = 0; i < save_expr_count; ++i)
5450 {
5451 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5452 abort ();
5453 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5454 }
5455
5456 save_expr_size = 0;
5457
5458 return rtn;
5459 }
5460
5461 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5462 find the underlying pseudo. */
5463 if (GET_CODE (x) == SUBREG)
5464 {
5465 x = SUBREG_REG (x);
5466 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5467 return 0;
5468 }
5469
5470 /* If X is a location in the outgoing argument area, it is always safe. */
5471 if (GET_CODE (x) == MEM
5472 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5473 || (GET_CODE (XEXP (x, 0)) == PLUS
5474 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5475 return 1;
5476
5477 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5478 {
5479 case 'd':
5480 exp_rtl = DECL_RTL (exp);
5481 break;
5482
5483 case 'c':
5484 return 1;
5485
5486 case 'x':
5487 if (TREE_CODE (exp) == TREE_LIST)
5488 return ((TREE_VALUE (exp) == 0
5489 || safe_from_p (x, TREE_VALUE (exp), 0))
5490 && (TREE_CHAIN (exp) == 0
5491 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5492 else if (TREE_CODE (exp) == ERROR_MARK)
5493 return 1; /* An already-visited SAVE_EXPR? */
5494 else
5495 return 0;
5496
5497 case '1':
5498 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5499
5500 case '2':
5501 case '<':
5502 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5503 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5504
5505 case 'e':
5506 case 'r':
5507 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5508 the expression. If it is set, we conflict iff we are that rtx or
5509 both are in memory. Otherwise, we check all operands of the
5510 expression recursively. */
5511
5512 switch (TREE_CODE (exp))
5513 {
5514 case ADDR_EXPR:
5515 return (staticp (TREE_OPERAND (exp, 0))
5516 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5517 || TREE_STATIC (exp));
5518
5519 case INDIRECT_REF:
5520 if (GET_CODE (x) == MEM)
5521 return 0;
5522 break;
5523
5524 case CALL_EXPR:
5525 exp_rtl = CALL_EXPR_RTL (exp);
5526 if (exp_rtl == 0)
5527 {
5528 /* Assume that the call will clobber all hard registers and
5529 all of memory. */
5530 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5531 || GET_CODE (x) == MEM)
5532 return 0;
5533 }
5534
5535 break;
5536
5537 case RTL_EXPR:
5538 /* If a sequence exists, we would have to scan every instruction
5539 in the sequence to see if it was safe. This is probably not
5540 worthwhile. */
5541 if (RTL_EXPR_SEQUENCE (exp))
5542 return 0;
5543
5544 exp_rtl = RTL_EXPR_RTL (exp);
5545 break;
5546
5547 case WITH_CLEANUP_EXPR:
5548 exp_rtl = RTL_EXPR_RTL (exp);
5549 break;
5550
5551 case CLEANUP_POINT_EXPR:
5552 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5553
5554 case SAVE_EXPR:
5555 exp_rtl = SAVE_EXPR_RTL (exp);
5556 if (exp_rtl)
5557 break;
5558
5559 /* This SAVE_EXPR might appear many times in the top-level
5560 safe_from_p() expression, and if it has a complex
5561 subexpression, examining it multiple times could result
5562 in a combinatorial explosion. E.g. on an Alpha
5563 running at least 200MHz, a Fortran test case compiled with
5564 optimization took about 28 minutes to compile -- even though
5565 it was only a few lines long, and the complicated line causing
5566 so much time to be spent in the earlier version of safe_from_p()
5567 had only 293 or so unique nodes.
5568
5569 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5570 where it is so we can turn it back in the top-level safe_from_p()
5571 when we're done. */
5572
5573 /* For now, don't bother re-sizing the array. */
5574 if (save_expr_count >= save_expr_size)
5575 return 0;
5576 save_expr_rewritten[save_expr_count++] = exp;
5577
5578 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5579 for (i = 0; i < nops; i++)
5580 {
5581 tree operand = TREE_OPERAND (exp, i);
5582 if (operand == NULL_TREE)
5583 continue;
5584 TREE_SET_CODE (exp, ERROR_MARK);
5585 if (!safe_from_p (x, operand, 0))
5586 return 0;
5587 TREE_SET_CODE (exp, SAVE_EXPR);
5588 }
5589 TREE_SET_CODE (exp, ERROR_MARK);
5590 return 1;
5591
5592 case BIND_EXPR:
5593 /* The only operand we look at is operand 1. The rest aren't
5594 part of the expression. */
5595 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5596
5597 case METHOD_CALL_EXPR:
5598 /* This takes an rtx argument, but shouldn't appear here.  */
5599 abort ();
5600
5601 default:
5602 break;
5603 }
5604
5605 /* If we have an rtx, we do not need to scan our operands. */
5606 if (exp_rtl)
5607 break;
5608
5609 nops = first_rtl_op (TREE_CODE (exp));
5610 for (i = 0; i < nops; i++)
5611 if (TREE_OPERAND (exp, i) != 0
5612 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5613 return 0;
5614
5615 /* If this is a language-specific tree code, it may require
5616 special handling. */
5617 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5618 && lang_safe_from_p
5619 && !(*lang_safe_from_p) (x, exp))
5620 return 0;
5621 }
5622
5623 /* If we have an rtl, find any enclosed object. Then see if we conflict
5624 with it. */
5625 if (exp_rtl)
5626 {
5627 if (GET_CODE (exp_rtl) == SUBREG)
5628 {
5629 exp_rtl = SUBREG_REG (exp_rtl);
5630 if (GET_CODE (exp_rtl) == REG
5631 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5632 return 0;
5633 }
5634
5635 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5636 are memory and EXP is not readonly. */
5637 return ! (rtx_equal_p (x, exp_rtl)
5638 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5639 && ! TREE_READONLY (exp)));
5640 }
5641
5642 /* If we reach here, it is safe. */
5643 return 1;
5644 }
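/* Typical use (a sketch mirroring calls made elsewhere in this file):
   before reusing a caller-supplied TARGET while expanding EXP, check

	if (target != 0 && safe_from_p (target, exp, 1))
	  /* ... it is safe to compute EXP directly into TARGET ...  */ ;
	else
	  /* ... allocate a fresh temporary instead ...  */ ;

   Returning zero is always conservative; it only costs a temporary.  */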
5645
5646 /* Subroutine of expand_expr: return nonzero iff EXP is an
5647 expression whose type is statically determinable. */
5648
5649 static int
5650 fixed_type_p (exp)
5651 tree exp;
5652 {
5653 if (TREE_CODE (exp) == PARM_DECL
5654 || TREE_CODE (exp) == VAR_DECL
5655 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5656 || TREE_CODE (exp) == COMPONENT_REF
5657 || TREE_CODE (exp) == ARRAY_REF)
5658 return 1;
5659 return 0;
5660 }
5661
5662 /* Subroutine of expand_expr: return rtx if EXP is a
5663 variable or parameter; else return 0. */
5664
5665 static rtx
5666 var_rtx (exp)
5667 tree exp;
5668 {
5669 STRIP_NOPS (exp);
5670 switch (TREE_CODE (exp))
5671 {
5672 case PARM_DECL:
5673 case VAR_DECL:
5674 return DECL_RTL (exp);
5675 default:
5676 return 0;
5677 }
5678 }
5679
5680 #ifdef MAX_INTEGER_COMPUTATION_MODE
5681 void
5682 check_max_integer_computation_mode (exp)
5683 tree exp;
5684 {
5685 enum tree_code code;
5686 enum machine_mode mode;
5687
5688 /* Strip any NOPs that don't change the mode. */
5689 STRIP_NOPS (exp);
5690 code = TREE_CODE (exp);
5691
5692 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5693 if (code == NOP_EXPR
5694 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5695 return;
5696
5697 /* First check the type of the overall operation. We need only look at
5698 unary, binary and relational operations. */
5699 if (TREE_CODE_CLASS (code) == '1'
5700 || TREE_CODE_CLASS (code) == '2'
5701 || TREE_CODE_CLASS (code) == '<')
5702 {
5703 mode = TYPE_MODE (TREE_TYPE (exp));
5704 if (GET_MODE_CLASS (mode) == MODE_INT
5705 && mode > MAX_INTEGER_COMPUTATION_MODE)
5706 fatal ("unsupported wide integer operation");
5707 }
5708
5709 /* Check operand of a unary op. */
5710 if (TREE_CODE_CLASS (code) == '1')
5711 {
5712 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5713 if (GET_MODE_CLASS (mode) == MODE_INT
5714 && mode > MAX_INTEGER_COMPUTATION_MODE)
5715 fatal ("unsupported wide integer operation");
5716 }
5717
5718 /* Check operands of a binary/comparison op. */
5719 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5720 {
5721 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5722 if (GET_MODE_CLASS (mode) == MODE_INT
5723 && mode > MAX_INTEGER_COMPUTATION_MODE)
5724 fatal ("unsupported wide integer operation");
5725
5726 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5727 if (GET_MODE_CLASS (mode) == MODE_INT
5728 && mode > MAX_INTEGER_COMPUTATION_MODE)
5729 fatal ("unsupported wide integer operation");
5730 }
5731 }
5732 #endif
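/* For illustration only (a hypothetical target definition, not part of
   this file): a machine that cannot do arithmetic wider than SImode
   might define, in its target macros,

	#define MAX_INTEGER_COMPUTATION_MODE SImode

   in which case the checks above report, say, a DImode addition as an
   "unsupported wide integer operation" instead of expanding it.  */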
5733 \f
5734 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5735 has any readonly fields. If any of the fields have types that
5736 contain readonly fields, return true as well. */
5737
5738 static int
5739 readonly_fields_p (type)
5740 tree type;
5741 {
5742 tree field;
5743
5744 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5745 if (TREE_CODE (field) == FIELD_DECL
5746 && (TREE_READONLY (field)
5747 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5748 && readonly_fields_p (TREE_TYPE (field)))))
5749 return 1;
5750
5751 return 0;
5752 }
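/* For example (illustrative), given

	struct inner { const int x; };
	struct outer { struct inner i; int y; };

   readonly_fields_p returns 1 both for `struct inner' (a directly
   readonly field) and for `struct outer' (a field whose record type
   itself contains a readonly field).  */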
5753 \f
5754 /* expand_expr: generate code for computing expression EXP.
5755 An rtx for the computed value is returned. The value is never null.
5756 In the case of a void EXP, const0_rtx is returned.
5757
5758 The value may be stored in TARGET if TARGET is nonzero.
5759 TARGET is just a suggestion; callers must assume that
5760 the rtx returned may not be the same as TARGET.
5761
5762 If TARGET is CONST0_RTX, it means that the value will be ignored.
5763
5764 If TMODE is not VOIDmode, it suggests generating the
5765 result in mode TMODE. But this is done only when convenient.
5766 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5767 TMODE is just a suggestion; callers must assume that
5768 the rtx returned may not have mode TMODE.
5769
5770 Note that TARGET may have neither TMODE nor MODE. In that case, it
5771 probably will not be used.
5772
5773 If MODIFIER is EXPAND_SUM then when EXP is an addition
5774 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5775 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5776 products as above, or REG or MEM, or constant.
5777 Ordinarily in such cases we would output mul or add instructions
5778 and then return a pseudo reg containing the sum.
5779
5780 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5781 it also marks a label as absolutely required (it can't be dead).
5782 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5783 This is used for outputting expressions used in initializers.
5784
5785 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5786 with a constant address even if that address is not normally legitimate.
5787 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
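/* A minimal caller sketch (illustrative, not part of the original text):

	rtx r = expand_expr (exp, NULL_RTX, TYPE_MODE (TREE_TYPE (exp)),
			     EXPAND_NORMAL);

   Both the target and the mode are only hints, so the caller must not
   assume that R equals the target it passed or that R actually has the
   requested mode.  */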
5788
5789 rtx
5790 expand_expr (exp, target, tmode, modifier)
5791 register tree exp;
5792 rtx target;
5793 enum machine_mode tmode;
5794 enum expand_modifier modifier;
5795 {
5796 register rtx op0, op1, temp;
5797 tree type = TREE_TYPE (exp);
5798 int unsignedp = TREE_UNSIGNED (type);
5799 register enum machine_mode mode;
5800 register enum tree_code code = TREE_CODE (exp);
5801 optab this_optab;
5802 rtx subtarget, original_target;
5803 int ignore;
5804 tree context;
5805 /* Used by check-memory-usage to make modifier read only. */
5806 enum expand_modifier ro_modifier;
5807
5808 /* Handle ERROR_MARK before anybody tries to access its type. */
5809 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5810 {
5811 op0 = CONST0_RTX (tmode);
5812 if (op0 != 0)
5813 return op0;
5814 return const0_rtx;
5815 }
5816
5817 mode = TYPE_MODE (type);
5818 /* Use subtarget as the target for operand 0 of a binary operation. */
5819 subtarget = get_subtarget (target);
5820 original_target = target;
5821 ignore = (target == const0_rtx
5822 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5823 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5824 || code == COND_EXPR)
5825 && TREE_CODE (type) == VOID_TYPE));
5826
5827 /* Make a read-only version of the modifier. */
5828 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5829 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5830 ro_modifier = modifier;
5831 else
5832 ro_modifier = EXPAND_NORMAL;
5833
5834 /* If we are going to ignore this result, we need only do something
5835 if there is a side-effect somewhere in the expression. If there
5836 is, short-circuit the most common cases here. Note that we must
5837 not call expand_expr with anything but const0_rtx in case this
5838 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5839
5840 if (ignore)
5841 {
5842 if (! TREE_SIDE_EFFECTS (exp))
5843 return const0_rtx;
5844
5845 /* Ensure we reference a volatile object even if value is ignored, but
5846 don't do this if all we are doing is taking its address. */
5847 if (TREE_THIS_VOLATILE (exp)
5848 && TREE_CODE (exp) != FUNCTION_DECL
5849 && mode != VOIDmode && mode != BLKmode
5850 && modifier != EXPAND_CONST_ADDRESS)
5851 {
5852 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5853 if (GET_CODE (temp) == MEM)
5854 temp = copy_to_reg (temp);
5855 return const0_rtx;
5856 }
5857
5858 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5859 || code == INDIRECT_REF || code == BUFFER_REF)
5860 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5861 VOIDmode, ro_modifier);
5862 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5863 || code == ARRAY_REF)
5864 {
5865 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5866 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5867 return const0_rtx;
5868 }
5869 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5870 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5871 /* If the second operand has no side effects, just evaluate
5872 the first. */
5873 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5874 VOIDmode, ro_modifier);
5875 else if (code == BIT_FIELD_REF)
5876 {
5877 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5878 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5879 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5880 return const0_rtx;
5881 }
5882 ;
5883 target = 0;
5884 }
5885
5886 #ifdef MAX_INTEGER_COMPUTATION_MODE
5887 /* Only check stuff here if the mode we want is different from the mode
5888 of the expression; if it's the same, check_max_integer_computation_mode
5889 will handle it. Do we really need to check this stuff at all? */
5890
5891 if (target
5892 && GET_MODE (target) != mode
5893 && TREE_CODE (exp) != INTEGER_CST
5894 && TREE_CODE (exp) != PARM_DECL
5895 && TREE_CODE (exp) != ARRAY_REF
5896 && TREE_CODE (exp) != COMPONENT_REF
5897 && TREE_CODE (exp) != BIT_FIELD_REF
5898 && TREE_CODE (exp) != INDIRECT_REF
5899 && TREE_CODE (exp) != CALL_EXPR
5900 && TREE_CODE (exp) != VAR_DECL
5901 && TREE_CODE (exp) != RTL_EXPR)
5902 {
5903 enum machine_mode mode = GET_MODE (target);
5904
5905 if (GET_MODE_CLASS (mode) == MODE_INT
5906 && mode > MAX_INTEGER_COMPUTATION_MODE)
5907 fatal ("unsupported wide integer operation");
5908 }
5909
5910 if (tmode != mode
5911 && TREE_CODE (exp) != INTEGER_CST
5912 && TREE_CODE (exp) != PARM_DECL
5913 && TREE_CODE (exp) != ARRAY_REF
5914 && TREE_CODE (exp) != COMPONENT_REF
5915 && TREE_CODE (exp) != BIT_FIELD_REF
5916 && TREE_CODE (exp) != INDIRECT_REF
5917 && TREE_CODE (exp) != VAR_DECL
5918 && TREE_CODE (exp) != CALL_EXPR
5919 && TREE_CODE (exp) != RTL_EXPR
5920 && GET_MODE_CLASS (tmode) == MODE_INT
5921 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5922 fatal ("unsupported wide integer operation");
5923
5924 check_max_integer_computation_mode (exp);
5925 #endif
5926
5927 /* If will do cse, generate all results into pseudo registers
5928 since 1) that allows cse to find more things
5929 and 2) otherwise cse could produce an insn the machine
5930 cannot support. */
5931
5932 if (! cse_not_expected && mode != BLKmode && target
5933 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5934 target = subtarget;
5935
5936 switch (code)
5937 {
5938 case LABEL_DECL:
5939 {
5940 tree function = decl_function_context (exp);
5941 /* Handle using a label in a containing function. */
5942 if (function != current_function_decl
5943 && function != inline_function_decl && function != 0)
5944 {
5945 struct function *p = find_function_data (function);
5946 /* Allocate in the memory associated with the function
5947 that the label is in. */
5948 push_obstacks (p->function_obstack,
5949 p->function_maybepermanent_obstack);
5950
5951 p->expr->x_forced_labels
5952 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5953 p->expr->x_forced_labels);
5954 pop_obstacks ();
5955 }
5956 else
5957 {
5958 if (modifier == EXPAND_INITIALIZER)
5959 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5960 label_rtx (exp),
5961 forced_labels);
5962 }
5963
5964 temp = gen_rtx_MEM (FUNCTION_MODE,
5965 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5966 if (function != current_function_decl
5967 && function != inline_function_decl && function != 0)
5968 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5969 return temp;
5970 }
5971
5972 case PARM_DECL:
5973 if (DECL_RTL (exp) == 0)
5974 {
5975 error_with_decl (exp, "prior parameter's size depends on `%s'");
5976 return CONST0_RTX (mode);
5977 }
5978
5979 /* ... fall through ... */
5980
5981 case VAR_DECL:
5982 /* If a static var's type was incomplete when the decl was written,
5983 but the type is complete now, lay out the decl now. */
5984 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5985 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5986 {
5987 push_obstacks_nochange ();
5988 end_temporary_allocation ();
5989 layout_decl (exp, 0);
5990 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5991 pop_obstacks ();
5992 }
5993
5994 /* Although static-storage variables start off initialized, according to
5995 ANSI C, a memcpy could overwrite them with uninitialized values. So
5996 we check them too. This also lets us check for read-only variables
5997 accessed via a non-const declaration, in case it won't be detected
5998 any other way (e.g., in an embedded system or OS kernel without
5999 memory protection).
6000
6001 Aggregates are not checked here; they're handled elsewhere. */
6002 if (cfun && current_function_check_memory_usage
6003 && code == VAR_DECL
6004 && GET_CODE (DECL_RTL (exp)) == MEM
6005 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6006 {
6007 enum memory_use_mode memory_usage;
6008 memory_usage = get_memory_usage_from_modifier (modifier);
6009
6010 in_check_memory_usage = 1;
6011 if (memory_usage != MEMORY_USE_DONT)
6012 emit_library_call (chkr_check_addr_libfunc,
6013 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6014 XEXP (DECL_RTL (exp), 0), Pmode,
6015 GEN_INT (int_size_in_bytes (type)),
6016 TYPE_MODE (sizetype),
6017 GEN_INT (memory_usage),
6018 TYPE_MODE (integer_type_node));
6019 in_check_memory_usage = 0;
6020 }
6021
6022 /* ... fall through ... */
6023
6024 case FUNCTION_DECL:
6025 case RESULT_DECL:
6026 if (DECL_RTL (exp) == 0)
6027 abort ();
6028
6029 /* Ensure the variable is marked as used even if it doesn't go through
6030 a parser.  If it hasn't been used yet, write out an external
6031 definition. */
6032 if (! TREE_USED (exp))
6033 {
6034 assemble_external (exp);
6035 TREE_USED (exp) = 1;
6036 }
6037
6038 /* Show we haven't gotten RTL for this yet. */
6039 temp = 0;
6040
6041 /* Handle variables inherited from containing functions. */
6042 context = decl_function_context (exp);
6043
6044 /* We treat inline_function_decl as an alias for the current function
6045 because that is the inline function whose vars, types, etc.
6046 are being merged into the current function.
6047 See expand_inline_function. */
6048
6049 if (context != 0 && context != current_function_decl
6050 && context != inline_function_decl
6051 /* If var is static, we don't need a static chain to access it. */
6052 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6053 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6054 {
6055 rtx addr;
6056
6057 /* Mark as non-local and addressable. */
6058 DECL_NONLOCAL (exp) = 1;
6059 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6060 abort ();
6061 mark_addressable (exp);
6062 if (GET_CODE (DECL_RTL (exp)) != MEM)
6063 abort ();
6064 addr = XEXP (DECL_RTL (exp), 0);
6065 if (GET_CODE (addr) == MEM)
6066 addr = change_address (addr, Pmode,
6067 fix_lexical_addr (XEXP (addr, 0), exp));
6068 else
6069 addr = fix_lexical_addr (addr, exp);
6070
6071 temp = change_address (DECL_RTL (exp), mode, addr);
6072 }
6073
6074 /* This is the case of an array whose size is to be determined
6075 from its initializer, while the initializer is still being parsed.
6076 See expand_decl. */
6077
6078 else if (GET_CODE (DECL_RTL (exp)) == MEM
6079 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6080 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6081 XEXP (DECL_RTL (exp), 0));
6082
6083 /* If DECL_RTL is memory, we are in the normal case and either
6084 the address is not valid or it is not a register and -fforce-addr
6085 is specified, get the address into a register. */
6086
6087 else if (GET_CODE (DECL_RTL (exp)) == MEM
6088 && modifier != EXPAND_CONST_ADDRESS
6089 && modifier != EXPAND_SUM
6090 && modifier != EXPAND_INITIALIZER
6091 && (! memory_address_p (DECL_MODE (exp),
6092 XEXP (DECL_RTL (exp), 0))
6093 || (flag_force_addr
6094 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6095 temp = change_address (DECL_RTL (exp), VOIDmode,
6096 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6097
6098 /* If we got something, return it. But first, set the alignment
6099 if the address is a register.  */
6100 if (temp != 0)
6101 {
6102 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6103 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6104
6105 return temp;
6106 }
6107
6108 /* If the mode of DECL_RTL does not match that of the decl, it
6109 must be a promoted value. We return a SUBREG of the wanted mode,
6110 but mark it so that we know that it was already extended. */
6111
6112 if (GET_CODE (DECL_RTL (exp)) == REG
6113 && GET_MODE (DECL_RTL (exp)) != mode)
6114 {
6115 /* Get the signedness used for this variable. Ensure we get the
6116 same mode we got when the variable was declared. */
6117 if (GET_MODE (DECL_RTL (exp))
6118 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6119 abort ();
6120
6121 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6122 SUBREG_PROMOTED_VAR_P (temp) = 1;
6123 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6124 return temp;
6125 }
6126
6127 return DECL_RTL (exp);
6128
6129 case INTEGER_CST:
6130 return immed_double_const (TREE_INT_CST_LOW (exp),
6131 TREE_INT_CST_HIGH (exp), mode);
6132
6133 case CONST_DECL:
6134 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6135 EXPAND_MEMORY_USE_BAD);
6136
6137 case REAL_CST:
6138 /* If optimized, generate immediate CONST_DOUBLE
6139 which will be turned into memory by reload if necessary.
6140
6141 We used to force a register so that loop.c could see it. But
6142 this does not allow gen_* patterns to perform optimizations with
6143 the constants. It also produces two insns in cases like "x = 1.0;".
6144 On most machines, floating-point constants are not permitted in
6145 many insns, so we'd end up copying it to a register in any case.
6146
6147 Now, we do the copying in expand_binop, if appropriate. */
6148 return immed_real_const (exp);
6149
6150 case COMPLEX_CST:
6151 case STRING_CST:
6152 if (! TREE_CST_RTL (exp))
6153 output_constant_def (exp);
6154
6155 /* TREE_CST_RTL probably contains a constant address.
6156 On RISC machines where a constant address isn't valid,
6157 make some insns to get that address into a register. */
6158 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6159 && modifier != EXPAND_CONST_ADDRESS
6160 && modifier != EXPAND_INITIALIZER
6161 && modifier != EXPAND_SUM
6162 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6163 || (flag_force_addr
6164 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6165 return change_address (TREE_CST_RTL (exp), VOIDmode,
6166 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6167 return TREE_CST_RTL (exp);
6168
6169 case EXPR_WITH_FILE_LOCATION:
6170 {
6171 rtx to_return;
6172 const char *saved_input_filename = input_filename;
6173 int saved_lineno = lineno;
6174 input_filename = EXPR_WFL_FILENAME (exp);
6175 lineno = EXPR_WFL_LINENO (exp);
6176 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6177 emit_line_note (input_filename, lineno);
6178 /* Possibly avoid switching back and forth here.  */
6179 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6180 input_filename = saved_input_filename;
6181 lineno = saved_lineno;
6182 return to_return;
6183 }
6184
6185 case SAVE_EXPR:
6186 context = decl_function_context (exp);
6187
6188 /* If this SAVE_EXPR was at global context, assume we are an
6189 initialization function and move it into our context. */
6190 if (context == 0)
6191 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6192
6193 /* We treat inline_function_decl as an alias for the current function
6194 because that is the inline function whose vars, types, etc.
6195 are being merged into the current function.
6196 See expand_inline_function. */
6197 if (context == current_function_decl || context == inline_function_decl)
6198 context = 0;
6199
6200 /* If this is non-local, handle it. */
6201 if (context)
6202 {
6203 /* The following call just exists to abort if the context is
6204 not of a containing function. */
6205 find_function_data (context);
6206
6207 temp = SAVE_EXPR_RTL (exp);
6208 if (temp && GET_CODE (temp) == REG)
6209 {
6210 put_var_into_stack (exp);
6211 temp = SAVE_EXPR_RTL (exp);
6212 }
6213 if (temp == 0 || GET_CODE (temp) != MEM)
6214 abort ();
6215 return change_address (temp, mode,
6216 fix_lexical_addr (XEXP (temp, 0), exp));
6217 }
6218 if (SAVE_EXPR_RTL (exp) == 0)
6219 {
6220 if (mode == VOIDmode)
6221 temp = const0_rtx;
6222 else
6223 temp = assign_temp (type, 3, 0, 0);
6224
6225 SAVE_EXPR_RTL (exp) = temp;
6226 if (!optimize && GET_CODE (temp) == REG)
6227 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6228 save_expr_regs);
6229
6230 /* If the mode of TEMP does not match that of the expression, it
6231 must be a promoted value. We pass store_expr a SUBREG of the
6232 wanted mode but mark it so that we know that it was already
6233 extended. Note that `unsignedp' was modified above in
6234 this case. */
6235
6236 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6237 {
6238 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6239 SUBREG_PROMOTED_VAR_P (temp) = 1;
6240 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6241 }
6242
6243 if (temp == const0_rtx)
6244 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6245 EXPAND_MEMORY_USE_BAD);
6246 else
6247 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6248
6249 TREE_USED (exp) = 1;
6250 }
6251
6252 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6253 must be a promoted value. We return a SUBREG of the wanted mode,
6254 but mark it so that we know that it was already extended. */
6255
6256 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6257 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6258 {
6259 /* Compute the signedness and make the proper SUBREG. */
6260 promote_mode (type, mode, &unsignedp, 0);
6261 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6262 SUBREG_PROMOTED_VAR_P (temp) = 1;
6263 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6264 return temp;
6265 }
6266
6267 return SAVE_EXPR_RTL (exp);
6268
6269 case UNSAVE_EXPR:
6270 {
6271 rtx temp;
6272 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6273 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6274 return temp;
6275 }
6276
6277 case PLACEHOLDER_EXPR:
6278 {
6279 tree placeholder_expr;
6280
6281 /* If there is an object on the head of the placeholder list,
6282 see if some object in it is of type TYPE or a pointer to it.  For
6283 further information, see tree.def. */
6284 for (placeholder_expr = placeholder_list;
6285 placeholder_expr != 0;
6286 placeholder_expr = TREE_CHAIN (placeholder_expr))
6287 {
6288 tree need_type = TYPE_MAIN_VARIANT (type);
6289 tree object = 0;
6290 tree old_list = placeholder_list;
6291 tree elt;
6292
6293 /* Find the outermost reference that is of the type we want.
6294 If none, see if any object has a type that is a pointer to
6295 the type we want. */
6296 for (elt = TREE_PURPOSE (placeholder_expr);
6297 elt != 0 && object == 0;
6298 elt
6299 = ((TREE_CODE (elt) == COMPOUND_EXPR
6300 || TREE_CODE (elt) == COND_EXPR)
6301 ? TREE_OPERAND (elt, 1)
6302 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6303 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6304 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6305 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6306 ? TREE_OPERAND (elt, 0) : 0))
6307 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6308 object = elt;
6309
6310 for (elt = TREE_PURPOSE (placeholder_expr);
6311 elt != 0 && object == 0;
6312 elt
6313 = ((TREE_CODE (elt) == COMPOUND_EXPR
6314 || TREE_CODE (elt) == COND_EXPR)
6315 ? TREE_OPERAND (elt, 1)
6316 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6317 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6318 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6319 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6320 ? TREE_OPERAND (elt, 0) : 0))
6321 if (POINTER_TYPE_P (TREE_TYPE (elt))
6322 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6323 == need_type))
6324 object = build1 (INDIRECT_REF, need_type, elt);
6325
6326 if (object != 0)
6327 {
6328 /* Expand this object skipping the list entries before
6329 it was found in case it is also a PLACEHOLDER_EXPR.
6330 In that case, we want to translate it using subsequent
6331 entries. */
6332 placeholder_list = TREE_CHAIN (placeholder_expr);
6333 temp = expand_expr (object, original_target, tmode,
6334 ro_modifier);
6335 placeholder_list = old_list;
6336 return temp;
6337 }
6338 }
6339 }
6340
6341 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6342 abort ();
6343
6344 case WITH_RECORD_EXPR:
6345 /* Put the object on the placeholder list, expand our first operand,
6346 and pop the list. */
6347 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6348 placeholder_list);
6349 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6350 tmode, ro_modifier);
6351 placeholder_list = TREE_CHAIN (placeholder_list);
6352 return target;
6353
6354 case GOTO_EXPR:
6355 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6356 expand_goto (TREE_OPERAND (exp, 0));
6357 else
6358 expand_computed_goto (TREE_OPERAND (exp, 0));
6359 return const0_rtx;
6360
6361 case EXIT_EXPR:
6362 expand_exit_loop_if_false (NULL_PTR,
6363 invert_truthvalue (TREE_OPERAND (exp, 0)));
6364 return const0_rtx;
6365
6366 case LABELED_BLOCK_EXPR:
6367 if (LABELED_BLOCK_BODY (exp))
6368 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6369 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6370 return const0_rtx;
6371
6372 case EXIT_BLOCK_EXPR:
6373 if (EXIT_BLOCK_RETURN (exp))
6374 sorry ("returned value in block_exit_expr");
6375 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6376 return const0_rtx;
6377
6378 case LOOP_EXPR:
6379 push_temp_slots ();
6380 expand_start_loop (1);
6381 expand_expr_stmt (TREE_OPERAND (exp, 0));
6382 expand_end_loop ();
6383 pop_temp_slots ();
6384
6385 return const0_rtx;
6386
6387 case BIND_EXPR:
6388 {
6389 tree vars = TREE_OPERAND (exp, 0);
6390 int vars_need_expansion = 0;
6391
6392 /* Need to open a binding contour here because
6393 if there are any cleanups they must be contained here. */
6394 expand_start_bindings (2);
6395
6396 /* Mark the corresponding BLOCK for output in its proper place. */
6397 if (TREE_OPERAND (exp, 2) != 0
6398 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6399 insert_block (TREE_OPERAND (exp, 2));
6400
6401 /* If VARS have not yet been expanded, expand them now. */
6402 while (vars)
6403 {
6404 if (DECL_RTL (vars) == 0)
6405 {
6406 vars_need_expansion = 1;
6407 expand_decl (vars);
6408 }
6409 expand_decl_init (vars);
6410 vars = TREE_CHAIN (vars);
6411 }
6412
6413 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6414
6415 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6416
6417 return temp;
6418 }
6419
6420 case RTL_EXPR:
6421 if (RTL_EXPR_SEQUENCE (exp))
6422 {
6423 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6424 abort ();
6425 emit_insns (RTL_EXPR_SEQUENCE (exp));
6426 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6427 }
6428 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6429 free_temps_for_rtl_expr (exp);
6430 return RTL_EXPR_RTL (exp);
6431
6432 case CONSTRUCTOR:
6433 /* If we don't need the result, just ensure we evaluate any
6434 subexpressions. */
6435 if (ignore)
6436 {
6437 tree elt;
6438 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6439 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6440 EXPAND_MEMORY_USE_BAD);
6441 return const0_rtx;
6442 }
6443
6444 /* All elts simple constants => refer to a constant in memory. But
6445 if this is a non-BLKmode mode, let it store a field at a time
6446 since that should make a CONST_INT or CONST_DOUBLE when we
6447 fold. Likewise, if we have a target we can use, it is best to
6448 store directly into the target unless the type is large enough
6449 that memcpy will be used. If we are making an initializer and
6450 all operands are constant, put it in memory as well. */
6451 else if ((TREE_STATIC (exp)
6452 && ((mode == BLKmode
6453 && ! (target != 0 && safe_from_p (target, exp, 1)))
6454 || TREE_ADDRESSABLE (exp)
6455 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6456 && (! MOVE_BY_PIECES_P
6457 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6458 TYPE_ALIGN (type)))
6459 && ! mostly_zeros_p (exp))))
6460 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6461 {
6462 rtx constructor = output_constant_def (exp);
6463
6464 if (modifier != EXPAND_CONST_ADDRESS
6465 && modifier != EXPAND_INITIALIZER
6466 && modifier != EXPAND_SUM
6467 && (! memory_address_p (GET_MODE (constructor),
6468 XEXP (constructor, 0))
6469 || (flag_force_addr
6470 && GET_CODE (XEXP (constructor, 0)) != REG)))
6471 constructor = change_address (constructor, VOIDmode,
6472 XEXP (constructor, 0));
6473 return constructor;
6474 }
6475
6476 else
6477 {
6478 /* Handle calls that pass values in multiple non-contiguous
6479 locations. The Irix 6 ABI has examples of this. */
6480 if (target == 0 || ! safe_from_p (target, exp, 1)
6481 || GET_CODE (target) == PARALLEL)
6482 {
6483 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6484 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6485 else
6486 target = assign_temp (type, 0, 1, 1);
6487 }
6488
6489 if (TREE_READONLY (exp))
6490 {
6491 if (GET_CODE (target) == MEM)
6492 target = copy_rtx (target);
6493
6494 RTX_UNCHANGING_P (target) = 1;
6495 }
6496
6497 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6498 int_size_in_bytes (TREE_TYPE (exp)));
6499 return target;
6500 }
6501
6502 case INDIRECT_REF:
6503 {
6504 tree exp1 = TREE_OPERAND (exp, 0);
6505 tree index;
6506 tree string = string_constant (exp1, &index);
6507
6508 /* Try to optimize reads from const strings. */
6509 if (string
6510 && TREE_CODE (string) == STRING_CST
6511 && TREE_CODE (index) == INTEGER_CST
6512 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6513 && GET_MODE_CLASS (mode) == MODE_INT
6514 && GET_MODE_SIZE (mode) == 1
6515 && modifier != EXPAND_MEMORY_USE_WO)
6516 return
6517 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6518
6519 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6520 op0 = memory_address (mode, op0);
6521
6522 if (cfun && current_function_check_memory_usage
6523 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6524 {
6525 enum memory_use_mode memory_usage;
6526 memory_usage = get_memory_usage_from_modifier (modifier);
6527
6528 if (memory_usage != MEMORY_USE_DONT)
6529 {
6530 in_check_memory_usage = 1;
6531 emit_library_call (chkr_check_addr_libfunc,
6532 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6533 Pmode, GEN_INT (int_size_in_bytes (type)),
6534 TYPE_MODE (sizetype),
6535 GEN_INT (memory_usage),
6536 TYPE_MODE (integer_type_node));
6537 in_check_memory_usage = 0;
6538 }
6539 }
6540
6541 temp = gen_rtx_MEM (mode, op0);
6542 set_mem_attributes (temp, exp, 0);
6543
6544 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6545 here, because, in C and C++, the fact that a location is accessed
6546 through a pointer to const does not mean that the value there can
6547 never change. Languages where it can never change should
6548 also set TREE_STATIC. */
6549 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6550
6551 /* If we are writing to this object and its type is a record with
6552 readonly fields, we must mark it as readonly so it will
6553 conflict with readonly references to those fields. */
6554 if (modifier == EXPAND_MEMORY_USE_WO
6555 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6556 RTX_UNCHANGING_P (temp) = 1;
6557
6558 return temp;
6559 }
6560
6561 case ARRAY_REF:
6562 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6563 abort ();
6564
6565 {
6566 tree array = TREE_OPERAND (exp, 0);
6567 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6568 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6569 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6570 HOST_WIDE_INT i;
6571
6572 /* Optimize the special-case of a zero lower bound.
6573
6574 We convert the low_bound to sizetype to avoid some problems
6575 with constant folding. (E.g. suppose the lower bound is 1,
6576 and its mode is QI. Without the conversion, (ARRAY
6577 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6578 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6579
6580 if (! integer_zerop (low_bound))
6581 index = size_diffop (index, convert (sizetype, low_bound));
6582
6583 /* Fold an expression like: "foo"[2].
6584 This is not done in fold so it won't happen inside &.
6585 Don't fold if this is for wide characters since it's too
6586 difficult to do correctly and this is a very rare case. */
6587
6588 if (TREE_CODE (array) == STRING_CST
6589 && TREE_CODE (index) == INTEGER_CST
6590 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6591 && GET_MODE_CLASS (mode) == MODE_INT
6592 && GET_MODE_SIZE (mode) == 1)
6593 return
6594 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6595
6596 /* If this is a constant index into a constant array,
6597 just get the value from the array. Handle both the cases when
6598 we have an explicit constructor and when our operand is a variable
6599 that was declared const. */
6600
6601 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6602 && TREE_CODE (index) == INTEGER_CST
6603 && 0 > compare_tree_int (index,
6604 list_length (CONSTRUCTOR_ELTS
6605 (TREE_OPERAND (exp, 0)))))
6606 {
6607 tree elem;
6608
6609 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6610 i = TREE_INT_CST_LOW (index);
6611 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6612 ;
6613
6614 if (elem)
6615 return expand_expr (fold (TREE_VALUE (elem)), target,
6616 tmode, ro_modifier);
6617 }
6618
6619 else if (optimize >= 1
6620 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6621 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6622 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6623 {
6624 if (TREE_CODE (index) == INTEGER_CST)
6625 {
6626 tree init = DECL_INITIAL (array);
6627
6628 if (TREE_CODE (init) == CONSTRUCTOR)
6629 {
6630 tree elem;
6631
6632 for (elem = CONSTRUCTOR_ELTS (init);
6633 (elem
6634 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6635 elem = TREE_CHAIN (elem))
6636 ;
6637
6638 if (elem)
6639 return expand_expr (fold (TREE_VALUE (elem)), target,
6640 tmode, ro_modifier);
6641 }
6642 else if (TREE_CODE (init) == STRING_CST
6643 && 0 > compare_tree_int (index,
6644 TREE_STRING_LENGTH (init)))
6645 {
6646 tree type = TREE_TYPE (TREE_TYPE (init));
6647 enum machine_mode mode = TYPE_MODE (type);
6648
6649 if (GET_MODE_CLASS (mode) == MODE_INT
6650 && GET_MODE_SIZE (mode) == 1)
6651 return (GEN_INT
6652 (TREE_STRING_POINTER
6653 (init)[TREE_INT_CST_LOW (index)]));
6654 }
6655 }
6656 }
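	/* Illustrative example: with `static const char msg[] = "hi";'
	   and optimization enabled, a read of msg[1] is folded here to
	   the constant 'i' straight out of DECL_INITIAL instead of
	   emitting a memory load.  */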
6657 }
6658 /* Fall through. */
6659
6660 case COMPONENT_REF:
6661 case BIT_FIELD_REF:
6662 /* If the operand is a CONSTRUCTOR, we can just extract the
6663 appropriate field if it is present. Don't do this if we have
6664 already written the data since we want to refer to that copy
6665 and varasm.c assumes that's what we'll do. */
6666 if (code != ARRAY_REF
6667 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6668 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6669 {
6670 tree elt;
6671
6672 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6673 elt = TREE_CHAIN (elt))
6674 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6675 /* We can normally use the value of the field in the
6676 CONSTRUCTOR. However, if this is a bitfield in
6677 an integral mode that we can fit in a HOST_WIDE_INT,
6678 we must mask only the number of bits in the bitfield,
6679 since this is done implicitly by the constructor. If
6680 the bitfield does not meet either of those conditions,
6681 we can't do this optimization. */
6682 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6683 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6684 == MODE_INT)
6685 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6686 <= HOST_BITS_PER_WIDE_INT))))
6687 {
6688 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6689 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6690 {
6691 HOST_WIDE_INT bitsize
6692 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6693
6694 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6695 {
6696 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6697 op0 = expand_and (op0, op1, target);
6698 }
6699 else
6700 {
6701 enum machine_mode imode
6702 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6703 tree count
6704 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6705 0);
6706
6707 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6708 target, 0);
6709 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6710 target, 0);
6711 }
6712 }
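	      /* Worked example (illustrative) of the adjustment above:
		 for a signed bit-field declared `int f : 3' whose
		 CONSTRUCTOR value is 5 (binary 101), the signed branch
		 shifts left and then arithmetically right by
		 GET_MODE_BITSIZE (imode) - 3, sign-extending bit 2 so
		 the result is -3; the unsigned branch would instead
		 mask with 7 and keep 5.  */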
6713
6714 return op0;
6715 }
6716 }
6717
6718 {
6719 enum machine_mode mode1;
6720 HOST_WIDE_INT bitsize, bitpos;
6721 tree offset;
6722 int volatilep = 0;
6723 unsigned int alignment;
6724 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6725 &mode1, &unsignedp, &volatilep,
6726 &alignment);
6727
6728 /* If we got back the original object, something is wrong. Perhaps
6729 we are evaluating an expression too early. In any event, don't
6730 infinitely recurse. */
6731 if (tem == exp)
6732 abort ();
6733
6734 /* If TEM's type is a union of variable size, pass TARGET to the inner
6735 computation, since it will need a temporary and TARGET is known
6736 to be usable as one.  This occurs in unchecked conversion in Ada.  */
6737
6738 op0 = expand_expr (tem,
6739 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6740 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6741 != INTEGER_CST)
6742 ? target : NULL_RTX),
6743 VOIDmode,
6744 (modifier == EXPAND_INITIALIZER
6745 || modifier == EXPAND_CONST_ADDRESS)
6746 ? modifier : EXPAND_NORMAL);
6747
6748 /* If this is a constant, put it into a register if it is a
6749 legitimate constant and OFFSET is 0, and into memory if it isn't.  */
6750 if (CONSTANT_P (op0))
6751 {
6752 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6753 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6754 && offset == 0)
6755 op0 = force_reg (mode, op0);
6756 else
6757 op0 = validize_mem (force_const_mem (mode, op0));
6758 }
6759
6760 if (offset != 0)
6761 {
6762 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6763
6764 /* If this object is in memory, put it into a register.
6765 This case can't occur in C, but can in Ada if we have
6766 unchecked conversion of an expression from a scalar type to
6767 an array or record type. */
6768 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6769 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6770 {
6771 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6772
6773 mark_temp_addr_taken (memloc);
6774 emit_move_insn (memloc, op0);
6775 op0 = memloc;
6776 }
6777
6778 if (GET_CODE (op0) != MEM)
6779 abort ();
6780
6781 if (GET_MODE (offset_rtx) != ptr_mode)
6782 {
6783 #ifdef POINTERS_EXTEND_UNSIGNED
6784 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6785 #else
6786 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6787 #endif
6788 }
6789
6790 /* A constant address in OP0 can have VOIDmode, we must not try
6791 to call force_reg for that case. Avoid that case. */
6792 if (GET_CODE (op0) == MEM
6793 && GET_MODE (op0) == BLKmode
6794 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6795 && bitsize != 0
6796 && (bitpos % bitsize) == 0
6797 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6798 && alignment == GET_MODE_ALIGNMENT (mode1))
6799 {
6800 rtx temp = change_address (op0, mode1,
6801 plus_constant (XEXP (op0, 0),
6802 (bitpos /
6803 BITS_PER_UNIT)));
6804 if (GET_CODE (XEXP (temp, 0)) == REG)
6805 op0 = temp;
6806 else
6807 op0 = change_address (op0, mode1,
6808 force_reg (GET_MODE (XEXP (temp, 0)),
6809 XEXP (temp, 0)));
6810 bitpos = 0;
6811 }
6812
6813 op0 = change_address (op0, VOIDmode,
6814 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6815 force_reg (ptr_mode,
6816 offset_rtx)));
6817 }
6818
6819 /* Don't forget about volatility even if this is a bitfield. */
6820 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6821 {
6822 op0 = copy_rtx (op0);
6823 MEM_VOLATILE_P (op0) = 1;
6824 }
6825
6826 /* Check the access. */
6827 if (cfun != 0 && current_function_check_memory_usage
6828 && GET_CODE (op0) == MEM)
6829 {
6830 enum memory_use_mode memory_usage;
6831 memory_usage = get_memory_usage_from_modifier (modifier);
6832
6833 if (memory_usage != MEMORY_USE_DONT)
6834 {
6835 rtx to;
6836 int size;
6837
6838 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6839 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6840
6841 /* Check the access right of the pointer. */
6842 in_check_memory_usage = 1;
6843 if (size > BITS_PER_UNIT)
6844 emit_library_call (chkr_check_addr_libfunc,
6845 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6846 Pmode, GEN_INT (size / BITS_PER_UNIT),
6847 TYPE_MODE (sizetype),
6848 GEN_INT (memory_usage),
6849 TYPE_MODE (integer_type_node));
6850 in_check_memory_usage = 0;
6851 }
6852 }
6853
6854 /* In cases where an aligned union has an unaligned object
6855 as a field, we might be extracting a BLKmode value from
6856 an integer-mode (e.g., SImode) object. Handle this case
6857 by doing the extract into an object as wide as the field
6858 (which we know to be the width of a basic mode), then
6859 storing into memory, and changing the mode to BLKmode.
6860 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6861 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6862 if (mode1 == VOIDmode
6863 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6864 || (modifier != EXPAND_CONST_ADDRESS
6865 && modifier != EXPAND_INITIALIZER
6866 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6867 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6868 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6869 /* If the field isn't aligned enough to fetch as a memref,
6870 fetch it as a bit field. */
6871 || (mode1 != BLKmode
6872 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6873 && ((TYPE_ALIGN (TREE_TYPE (tem))
6874 < GET_MODE_ALIGNMENT (mode))
6875 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6876 /* If the type and the field are a constant size and the
6877 size of the type isn't the same size as the bitfield,
6878 we must use bitfield operations. */
6879 || ((bitsize >= 0
6880 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6881 == INTEGER_CST)
6882 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6883 bitsize)))))
6884 || (modifier != EXPAND_CONST_ADDRESS
6885 && modifier != EXPAND_INITIALIZER
6886 && mode == BLKmode
6887 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6888 && (TYPE_ALIGN (type) > alignment
6889 || bitpos % TYPE_ALIGN (type) != 0)))
6890 {
6891 enum machine_mode ext_mode = mode;
6892
6893 if (ext_mode == BLKmode
6894 && ! (target != 0 && GET_CODE (op0) == MEM
6895 && GET_CODE (target) == MEM
6896 && bitpos % BITS_PER_UNIT == 0))
6897 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6898
6899 if (ext_mode == BLKmode)
6900 {
6901 /* In this case, BITPOS must start at a byte boundary and
6902 TARGET, if specified, must be a MEM. */
6903 if (GET_CODE (op0) != MEM
6904 || (target != 0 && GET_CODE (target) != MEM)
6905 || bitpos % BITS_PER_UNIT != 0)
6906 abort ();
6907
6908 op0 = change_address (op0, VOIDmode,
6909 plus_constant (XEXP (op0, 0),
6910 bitpos / BITS_PER_UNIT));
6911 if (target == 0)
6912 target = assign_temp (type, 0, 1, 1);
6913
6914 emit_block_move (target, op0,
6915 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6916 / BITS_PER_UNIT),
6917 BITS_PER_UNIT);
6918
6919 return target;
6920 }
6921
6922 op0 = validize_mem (op0);
6923
6924 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6925 mark_reg_pointer (XEXP (op0, 0), alignment);
6926
6927 op0 = extract_bit_field (op0, bitsize, bitpos,
6928 unsignedp, target, ext_mode, ext_mode,
6929 alignment,
6930 int_size_in_bytes (TREE_TYPE (tem)));
6931
6932 /* If the result is a record type and BITSIZE is narrower than
6933 the mode of OP0, an integral mode, and this is a big endian
6934 machine, we must put the field into the high-order bits. */
6935 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6936 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6937 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6938 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6939 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6940 - bitsize),
6941 op0, 1);
6942
6943 if (mode == BLKmode)
6944 {
6945 rtx new = assign_stack_temp (ext_mode,
6946 bitsize / BITS_PER_UNIT, 0);
6947
6948 emit_move_insn (new, op0);
6949 op0 = copy_rtx (new);
6950 PUT_MODE (op0, BLKmode);
6951 MEM_SET_IN_STRUCT_P (op0, 1);
6952 }
6953
6954 return op0;
6955 }
6956
6957 /* If the result is BLKmode, use that to access the object
6958 now as well. */
6959 if (mode == BLKmode)
6960 mode1 = BLKmode;
6961
6962 /* Get a reference to just this component. */
6963 if (modifier == EXPAND_CONST_ADDRESS
6964 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6965 {
6966 rtx new = gen_rtx_MEM (mode1,
6967 plus_constant (XEXP (op0, 0),
6968 (bitpos / BITS_PER_UNIT)));
6969
6970 MEM_COPY_ATTRIBUTES (new, op0);
6971 op0 = new;
6972 }
6973 else
6974 op0 = change_address (op0, mode1,
6975 plus_constant (XEXP (op0, 0),
6976 (bitpos / BITS_PER_UNIT)));
6977
6978 set_mem_attributes (op0, exp, 0);
6979 if (GET_CODE (XEXP (op0, 0)) == REG)
6980 mark_reg_pointer (XEXP (op0, 0), alignment);
6981
6982 MEM_VOLATILE_P (op0) |= volatilep;
6983 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6984 || modifier == EXPAND_CONST_ADDRESS
6985 || modifier == EXPAND_INITIALIZER)
6986 return op0;
6987 else if (target == 0)
6988 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6989
6990 convert_move (target, op0, unsignedp);
6991 return target;
6992 }
6993
6994 /* Intended for a reference to a buffer of a file-object in Pascal.
6995 But it's not certain that a special tree code will really be
6996 necessary for these. INDIRECT_REF might work for them. */
6997 case BUFFER_REF:
6998 abort ();
6999
7000 case IN_EXPR:
7001 {
7002 /* Pascal set IN expression.
7003
7004 Algorithm:
7005 rlo = set_low - (set_low%bits_per_word);
7006 the_word = set [ (index - rlo)/bits_per_word ];
7007 bit_index = index % bits_per_word;
7008 bitmask = 1 << bit_index;
7009 return !!(the_word & bitmask); */
7010
7011 tree set = TREE_OPERAND (exp, 0);
7012 tree index = TREE_OPERAND (exp, 1);
7013 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7014 tree set_type = TREE_TYPE (set);
7015 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7016 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7017 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7018 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7019 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7020 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7021 rtx setaddr = XEXP (setval, 0);
7022 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7023 rtx rlow;
7024 rtx diff, quo, rem, addr, bit, result;
7025
7026 preexpand_calls (exp);
7027
7028 /* If domain is empty, answer is no. Likewise if index is constant
7029 and out of bounds. */
7030 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7031 && TREE_CODE (set_low_bound) == INTEGER_CST
7032 && tree_int_cst_lt (set_high_bound, set_low_bound))
7033 || (TREE_CODE (index) == INTEGER_CST
7034 && TREE_CODE (set_low_bound) == INTEGER_CST
7035 && tree_int_cst_lt (index, set_low_bound))
7036 || (TREE_CODE (set_high_bound) == INTEGER_CST
7037 && TREE_CODE (index) == INTEGER_CST
7038 && tree_int_cst_lt (set_high_bound, index))))
7039 return const0_rtx;
7040
7041 if (target == 0)
7042 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7043
7044 /* If we get here, we have to generate the code for both cases
7045 (in range and out of range). */
7046
7047 op0 = gen_label_rtx ();
7048 op1 = gen_label_rtx ();
7049
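/* Emit the runtime range checks, branching to OP1 (the out-of-range
label) when INDEX is below the low bound or above the high bound.
Either check is omitted when both values involved are compile-time
constants, since that case was already resolved above.  */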
7050 if (! (GET_CODE (index_val) == CONST_INT
7051 && GET_CODE (lo_r) == CONST_INT))
7052 {
7053 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7054 GET_MODE (index_val), iunsignedp, 0, op1);
7055 }
7056
7057 if (! (GET_CODE (index_val) == CONST_INT
7058 && GET_CODE (hi_r) == CONST_INT))
7059 {
7060 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7061 GET_MODE (index_val), iunsignedp, 0, op1);
7062 }
7063
7064 /* Calculate the element number of bit zero in the first word
7065 of the set. */
7066 if (GET_CODE (lo_r) == CONST_INT)
7067 rlow = GEN_INT (INTVAL (lo_r)
7068 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7069 else
7070 rlow = expand_binop (index_mode, and_optab, lo_r,
7071 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7072 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7073
7074 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7075 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7076
7077 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7078 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7079 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7080 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7081
7082 addr = memory_address (byte_mode,
7083 expand_binop (index_mode, add_optab, diff,
7084 setaddr, NULL_RTX, iunsignedp,
7085 OPTAB_LIB_WIDEN));
7086
7087 /* Extract the bit we want to examine. */
7088 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7089 gen_rtx_MEM (byte_mode, addr),
7090 make_tree (TREE_TYPE (index), rem),
7091 NULL_RTX, 1);
7092 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7093 GET_MODE (target) == byte_mode ? target : 0,
7094 1, OPTAB_LIB_WIDEN);
7095
7096 if (result != target)
7097 convert_move (target, result, 1);
7098
7099 /* Output the code to handle the out-of-range case. */
7100 emit_jump (op0);
7101 emit_label (op1);
7102 emit_move_insn (target, const0_rtx);
7103 emit_label (op0);
7104 return target;
7105 }
7106
7107 case WITH_CLEANUP_EXPR:
7108 if (RTL_EXPR_RTL (exp) == 0)
7109 {
7110 RTL_EXPR_RTL (exp)
7111 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7112 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7113
7114 /* That's it for this cleanup. */
7115 TREE_OPERAND (exp, 2) = 0;
7116 }
7117 return RTL_EXPR_RTL (exp);
7118
7119 case CLEANUP_POINT_EXPR:
7120 {
7121 /* Start a new binding layer that will keep track of all cleanup
7122 actions to be performed. */
7123 expand_start_bindings (2);
7124
7125 target_temp_slot_level = temp_slot_level;
7126
7127 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7128 /* If we're going to use this value, load it up now. */
7129 if (! ignore)
7130 op0 = force_not_mem (op0);
7131 preserve_temp_slots (op0);
7132 expand_end_bindings (NULL_TREE, 0, 0);
7133 }
7134 return op0;
7135
7136 case CALL_EXPR:
7137 /* Check for a built-in function. */
7138 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7139 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7140 == FUNCTION_DECL)
7141 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7142 return expand_builtin (exp, target, subtarget, tmode, ignore);
7143
7144 /* If this call was expanded already by preexpand_calls,
7145 just return the result we got. */
7146 if (CALL_EXPR_RTL (exp) != 0)
7147 return CALL_EXPR_RTL (exp);
7148
7149 return expand_call (exp, target, ignore);
7150
7151 case NON_LVALUE_EXPR:
7152 case NOP_EXPR:
7153 case CONVERT_EXPR:
7154 case REFERENCE_EXPR:
7155 if (TREE_OPERAND (exp, 0) == error_mark_node)
7156 return const0_rtx;
7157
7158 if (TREE_CODE (type) == UNION_TYPE)
7159 {
7160 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7161
7162 /* If both input and output are BLKmode, this conversion
7163 isn't actually doing anything unless we need to make the
7164 alignment stricter. */
7165 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7166 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7167 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7168 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7169 modifier);
7170
7171 if (target == 0)
7172 {
7173 if (mode != BLKmode)
7174 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7175 else
7176 target = assign_temp (type, 0, 1, 1);
7177 }
7178
7179 if (GET_CODE (target) == MEM)
7180 /* Store data into beginning of memory target. */
7181 store_expr (TREE_OPERAND (exp, 0),
7182 change_address (target, TYPE_MODE (valtype), 0), 0);
7183
7184 else if (GET_CODE (target) == REG)
7185 /* Store this field into a union of the proper type. */
7186 store_field (target,
7187 MIN ((int_size_in_bytes (TREE_TYPE
7188 (TREE_OPERAND (exp, 0)))
7189 * BITS_PER_UNIT),
7190 GET_MODE_BITSIZE (mode)),
7191 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7192 VOIDmode, 0, BITS_PER_UNIT,
7193 int_size_in_bytes (type), 0);
7194 else
7195 abort ();
7196
7197 /* Return the entire union. */
7198 return target;
7199 }
7200
7201 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7202 {
7203 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7204 ro_modifier);
7205
7206 /* If the signedness of the conversion differs and OP0 is
7207 a promoted SUBREG, clear that indication since we now
7208 have to do the proper extension. */
7209 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7210 && GET_CODE (op0) == SUBREG)
7211 SUBREG_PROMOTED_VAR_P (op0) = 0;
7212
7213 return op0;
7214 }
7215
7216 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7217 if (GET_MODE (op0) == mode)
7218 return op0;
7219
7220 /* If OP0 is a constant, just convert it into the proper mode. */
7221 if (CONSTANT_P (op0))
7222 return
7223 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7224 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7225
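/* For a static initializer we cannot emit conversion insns, so
represent the extension symbolically in the RTL.  */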
7226 if (modifier == EXPAND_INITIALIZER)
7227 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7228
7229 if (target == 0)
7230 return
7231 convert_to_mode (mode, op0,
7232 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7233 else
7234 convert_move (target, op0,
7235 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7236 return target;
7237
7238 case PLUS_EXPR:
7239 /* We come here from MINUS_EXPR when the second operand is a
7240 constant. */
7241 plus_expr:
7242 this_optab = add_optab;
7243
7244 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7245 something else, make sure we add the register to the constant and
7246 then to the other thing. This case can occur during strength
7247 reduction and doing it this way will produce better code if the
7248 frame pointer or argument pointer is eliminated.
7249
7250 fold-const.c will ensure that the constant is always in the inner
7251 PLUS_EXPR, so the only case we need to do anything about is if
7252 sp, ap, or fp is our second argument, in which case we must swap
7253 the innermost first argument and our second argument. */
7254
7255 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7256 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7257 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7258 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7259 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7260 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7261 {
7262 tree t = TREE_OPERAND (exp, 1);
7263
7264 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7265 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7266 }
7267
7268 /* If the result is to be ptr_mode and we are adding an integer to
7269 something, we might be forming a constant. So try to use
7270 plus_constant. If it produces a sum and we can't accept it,
7271 use force_operand. This allows P = &ARR[const] to generate
7272 efficient code on machines where a SYMBOL_REF is not a valid
7273 address.
7274
7275 If this is an EXPAND_SUM call, always return the sum. */
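/* For instance, expanding &arr[const] for an initializer can yield the
symbolic sum of `arr' and the constant byte offset; force_operand is
only used below when such a sum is not acceptable to the caller.  */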
7276 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7277 || mode == ptr_mode)
7278 {
7279 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7280 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7281 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7282 {
7283 rtx constant_part;
7284
7285 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7286 EXPAND_SUM);
7287 /* Use immed_double_const to ensure that the constant is
7288 truncated according to the mode of OP1, then sign extended
7289 to a HOST_WIDE_INT. Using the constant directly can result
7290 in non-canonical RTL in a 64x32 cross compile. */
7291 constant_part
7292 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7293 (HOST_WIDE_INT) 0,
7294 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7295 op1 = plus_constant (op1, INTVAL (constant_part));
7296 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7297 op1 = force_operand (op1, target);
7298 return op1;
7299 }
7300
7301 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7302 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7303 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7304 {
7305 rtx constant_part;
7306
7307 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7308 EXPAND_SUM);
7309 if (! CONSTANT_P (op0))
7310 {
7311 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7312 VOIDmode, modifier);
7313 /* Don't go to both_summands if modifier
7314 says it's not right to return a PLUS. */
7315 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7316 goto binop2;
7317 goto both_summands;
7318 }
7319 /* Use immed_double_const to ensure that the constant is
7320 truncated according to the mode of OP0, then sign extended
7321 to a HOST_WIDE_INT. Using the constant directly can result
7322 in non-canonical RTL in a 64x32 cross compile. */
7323 constant_part
7324 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7325 (HOST_WIDE_INT) 0,
7326 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7327 op0 = plus_constant (op0, INTVAL (constant_part));
7328 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7329 op0 = force_operand (op0, target);
7330 return op0;
7331 }
7332 }
7333
7334 /* No sense saving up arithmetic to be done
7335 if it's all in the wrong mode to form part of an address.
7336 And force_operand won't know whether to sign-extend or
7337 zero-extend. */
7338 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7339 || mode != ptr_mode)
7340 goto binop;
7341
7342 preexpand_calls (exp);
7343 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7344 subtarget = 0;
7345
7346 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7347 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7348
7349 both_summands:
7350 /* Make sure any term that's a sum with a constant comes last. */
7351 if (GET_CODE (op0) == PLUS
7352 && CONSTANT_P (XEXP (op0, 1)))
7353 {
7354 temp = op0;
7355 op0 = op1;
7356 op1 = temp;
7357 }
7358 /* If adding to a sum including a constant,
7359 associate it to put the constant outside. */
7360 if (GET_CODE (op1) == PLUS
7361 && CONSTANT_P (XEXP (op1, 1)))
7362 {
7363 rtx constant_term = const0_rtx;
7364
7365 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7366 if (temp != 0)
7367 op0 = temp;
7368 /* Ensure that MULT comes first if there is one. */
7369 else if (GET_CODE (op0) == MULT)
7370 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7371 else
7372 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7373
7374 /* Let's also eliminate constants from op0 if possible. */
7375 op0 = eliminate_constant_term (op0, &constant_term);
7376
7377 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7378 their sum should be a constant. Form it into OP1, since the
7379 result we want will then be OP0 + OP1. */
7380
7381 temp = simplify_binary_operation (PLUS, mode, constant_term,
7382 XEXP (op1, 1));
7383 if (temp != 0)
7384 op1 = temp;
7385 else
7386 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7387 }
7388
7389 /* Put a constant term last and put a multiplication first. */
7390 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7391 temp = op1, op1 = op0, op0 = temp;
7392
7393 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7394 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7395
7396 case MINUS_EXPR:
7397 /* For initializers, we are allowed to return a MINUS of two
7398 symbolic constants. Here we handle all cases when both operands
7399 are constant. */
7400 /* Handle difference of two symbolic constants,
7401 for the sake of an initializer. */
7402 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7403 && really_constant_p (TREE_OPERAND (exp, 0))
7404 && really_constant_p (TREE_OPERAND (exp, 1)))
7405 {
7406 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7407 VOIDmode, ro_modifier);
7408 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7409 VOIDmode, ro_modifier);
7410
7411 /* If the last operand is a CONST_INT, use plus_constant of
7412 the negated constant. Else make the MINUS. */
7413 if (GET_CODE (op1) == CONST_INT)
7414 return plus_constant (op0, - INTVAL (op1));
7415 else
7416 return gen_rtx_MINUS (mode, op0, op1);
7417 }
7418 /* Convert A - const to A + (-const). */
7419 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7420 {
7421 tree negated = fold (build1 (NEGATE_EXPR, type,
7422 TREE_OPERAND (exp, 1)));
7423
7424 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7425 /* If we can't negate the constant in TYPE, leave it alone and
7426 expand_binop will negate it for us. We used to try to do it
7427 here in the signed version of TYPE, but that doesn't work
7428 on POINTER_TYPEs. */;
7429 else
7430 {
7431 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7432 goto plus_expr;
7433 }
7434 }
7435 this_optab = sub_optab;
7436 goto binop;
7437
7438 case MULT_EXPR:
7439 preexpand_calls (exp);
7440 /* If first operand is constant, swap them.
7441 Thus the following special case checks need only
7442 check the second operand. */
7443 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7444 {
7445 register tree t1 = TREE_OPERAND (exp, 0);
7446 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7447 TREE_OPERAND (exp, 1) = t1;
7448 }
7449
7450 /* Attempt to return something suitable for generating an
7451 indexed address, for machines that support that. */
7452
7453 if (modifier == EXPAND_SUM && mode == ptr_mode
7454 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7455 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7456 {
7457 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7458 EXPAND_SUM);
7459
7460 /* Apply distributive law if OP0 is x+c. */
7461 if (GET_CODE (op0) == PLUS
7462 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7463 return
7464 gen_rtx_PLUS
7465 (mode,
7466 gen_rtx_MULT
7467 (mode, XEXP (op0, 0),
7468 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7469 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7470 * INTVAL (XEXP (op0, 1))));
7471
7472 if (GET_CODE (op0) != REG)
7473 op0 = force_operand (op0, NULL_RTX);
7474 if (GET_CODE (op0) != REG)
7475 op0 = copy_to_mode_reg (mode, op0);
7476
7477 return
7478 gen_rtx_MULT (mode, op0,
7479 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7480 }
7481
7482 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7483 subtarget = 0;
7484
7485 /* Check for multiplying things that have been extended
7486 from a narrower type. If this machine supports multiplying
7487 in that narrower type with a result in the desired type,
7488 do it that way, and avoid the explicit type-conversion. */
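/* For example, (int) (short) a * (int) (short) b can be done with a
HImode-to-SImode widening multiply pattern, if the target provides
one, instead of extending both operands to SImode first.  */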
7489 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7490 && TREE_CODE (type) == INTEGER_TYPE
7491 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7492 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7493 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7494 && int_fits_type_p (TREE_OPERAND (exp, 1),
7495 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7496 /* Don't use a widening multiply if a shift will do. */
7497 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7498 > HOST_BITS_PER_WIDE_INT)
7499 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7500 ||
7501 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7502 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7503 ==
7504 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7505 /* If both operands are extended, they must either both
7506 be zero-extended or both be sign-extended. */
7507 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7508 ==
7509 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7510 {
7511 enum machine_mode innermode
7512 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7513 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7514 ? smul_widen_optab : umul_widen_optab);
7515 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7516 ? umul_widen_optab : smul_widen_optab);
7517 if (mode == GET_MODE_WIDER_MODE (innermode))
7518 {
7519 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7520 {
7521 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7522 NULL_RTX, VOIDmode, 0);
7523 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7524 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7525 VOIDmode, 0);
7526 else
7527 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7528 NULL_RTX, VOIDmode, 0);
7529 goto binop2;
7530 }
7531 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7532 && innermode == word_mode)
7533 {
7534 rtx htem;
7535 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7536 NULL_RTX, VOIDmode, 0);
7537 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7538 op1 = convert_modes (innermode, mode,
7539 expand_expr (TREE_OPERAND (exp, 1),
7540 NULL_RTX, VOIDmode, 0),
7541 unsignedp);
7542 else
7543 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7544 NULL_RTX, VOIDmode, 0);
7545 temp = expand_binop (mode, other_optab, op0, op1, target,
7546 unsignedp, OPTAB_LIB_WIDEN);
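/* OTHER_OPTAB has the opposite signedness from the multiplication we
actually want, so adjust the high half of the double-width result to
obtain the correct product.  */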
7547 htem = expand_mult_highpart_adjust (innermode,
7548 gen_highpart (innermode, temp),
7549 op0, op1,
7550 gen_highpart (innermode, temp),
7551 unsignedp);
7552 emit_move_insn (gen_highpart (innermode, temp), htem);
7553 return temp;
7554 }
7555 }
7556 }
7557 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7558 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7559 return expand_mult (mode, op0, op1, target, unsignedp);
7560
7561 case TRUNC_DIV_EXPR:
7562 case FLOOR_DIV_EXPR:
7563 case CEIL_DIV_EXPR:
7564 case ROUND_DIV_EXPR:
7565 case EXACT_DIV_EXPR:
7566 preexpand_calls (exp);
7567 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7568 subtarget = 0;
7569 /* Possible optimization: compute the dividend with EXPAND_SUM;
7570 then, if the divisor is constant, we can optimize the case
7571 where some terms of the dividend have coefficients divisible by it. */
7572 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7573 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7574 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7575
7576 case RDIV_EXPR:
7577 this_optab = flodiv_optab;
7578 goto binop;
7579
7580 case TRUNC_MOD_EXPR:
7581 case FLOOR_MOD_EXPR:
7582 case CEIL_MOD_EXPR:
7583 case ROUND_MOD_EXPR:
7584 preexpand_calls (exp);
7585 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7586 subtarget = 0;
7587 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7588 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7589 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7590
7591 case FIX_ROUND_EXPR:
7592 case FIX_FLOOR_EXPR:
7593 case FIX_CEIL_EXPR:
7594 abort (); /* Not used for C. */
7595
7596 case FIX_TRUNC_EXPR:
7597 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7598 if (target == 0)
7599 target = gen_reg_rtx (mode);
7600 expand_fix (target, op0, unsignedp);
7601 return target;
7602
7603 case FLOAT_EXPR:
7604 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7605 if (target == 0)
7606 target = gen_reg_rtx (mode);
7607 /* expand_float can't figure out what to do if FROM has VOIDmode.
7608 So give it the correct mode. With -O, cse will optimize this. */
7609 if (GET_MODE (op0) == VOIDmode)
7610 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7611 op0);
7612 expand_float (target, op0,
7613 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7614 return target;
7615
7616 case NEGATE_EXPR:
7617 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7618 temp = expand_unop (mode, neg_optab, op0, target, 0);
7619 if (temp == 0)
7620 abort ();
7621 return temp;
7622
7623 case ABS_EXPR:
7624 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7625
7626 /* Handle complex values specially. */
7627 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7628 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7629 return expand_complex_abs (mode, op0, target, unsignedp);
7630
7631 /* Unsigned abs is simply the operand. Testing here means we don't
7632 risk generating incorrect code below. */
7633 if (TREE_UNSIGNED (type))
7634 return op0;
7635
7636 return expand_abs (mode, op0, target,
7637 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7638
7639 case MAX_EXPR:
7640 case MIN_EXPR:
7641 target = original_target;
7642 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7643 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7644 || GET_MODE (target) != mode
7645 || (GET_CODE (target) == REG
7646 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7647 target = gen_reg_rtx (mode);
7648 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7649 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7650
7651 /* First try to do it with a special MIN or MAX instruction.
7652 If that does not win, use a conditional jump to select the proper
7653 value. */
7654 this_optab = (TREE_UNSIGNED (type)
7655 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7656 : (code == MIN_EXPR ? smin_optab : smax_optab));
7657
7658 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7659 OPTAB_WIDEN);
7660 if (temp != 0)
7661 return temp;
7662
7663 /* At this point, a MEM target is no longer useful; we will get better
7664 code without it. */
7665
7666 if (GET_CODE (target) == MEM)
7667 target = gen_reg_rtx (mode);
7668
7669 if (target != op0)
7670 emit_move_insn (target, op0);
7671
7672 op0 = gen_label_rtx ();
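/* TARGET now holds the first operand; if the comparison below shows it
already holds the desired MIN/MAX value, branch over the move of OP1,
otherwise fall through and store OP1 into TARGET instead.  */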
7673
7674 /* If this mode is an integer too wide to compare properly,
7675 compare word by word. Rely on cse to optimize constant cases. */
7676 if (GET_MODE_CLASS (mode) == MODE_INT
7677 && ! can_compare_p (GE, mode, ccp_jump))
7678 {
7679 if (code == MAX_EXPR)
7680 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7681 target, op1, NULL_RTX, op0);
7682 else
7683 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7684 op1, target, NULL_RTX, op0);
7685 }
7686 else
7687 {
7688 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7689 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7690 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7691 op0);
7692 }
7693 emit_move_insn (target, op1);
7694 emit_label (op0);
7695 return target;
7696
7697 case BIT_NOT_EXPR:
7698 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7699 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7700 if (temp == 0)
7701 abort ();
7702 return temp;
7703
7704 case FFS_EXPR:
7705 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7706 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7707 if (temp == 0)
7708 abort ();
7709 return temp;
7710
7711 /* ??? Can optimize bitwise operations with one arg constant.
7712 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7713 and (a bitwise1 b) bitwise2 b (etc)
7714 but that is probably not worthwhile. */
7715
7716 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7717 boolean values when we want in all cases to compute both of them. In
7718 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7719 as actual zero-or-1 values and then bitwise anding. In cases where
7720 there cannot be any side effects, better code would be made by
7721 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7722 how to recognize those cases. */
7723
7724 case TRUTH_AND_EXPR:
7725 case BIT_AND_EXPR:
7726 this_optab = and_optab;
7727 goto binop;
7728
7729 case TRUTH_OR_EXPR:
7730 case BIT_IOR_EXPR:
7731 this_optab = ior_optab;
7732 goto binop;
7733
7734 case TRUTH_XOR_EXPR:
7735 case BIT_XOR_EXPR:
7736 this_optab = xor_optab;
7737 goto binop;
7738
7739 case LSHIFT_EXPR:
7740 case RSHIFT_EXPR:
7741 case LROTATE_EXPR:
7742 case RROTATE_EXPR:
7743 preexpand_calls (exp);
7744 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7745 subtarget = 0;
7746 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7747 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7748 unsignedp);
7749
7750 /* Could determine the answer when only additive constants differ. Also,
7751 the addition of one can be handled by changing the condition. */
7752 case LT_EXPR:
7753 case LE_EXPR:
7754 case GT_EXPR:
7755 case GE_EXPR:
7756 case EQ_EXPR:
7757 case NE_EXPR:
7758 case UNORDERED_EXPR:
7759 case ORDERED_EXPR:
7760 case UNLT_EXPR:
7761 case UNLE_EXPR:
7762 case UNGT_EXPR:
7763 case UNGE_EXPR:
7764 case UNEQ_EXPR:
7765 preexpand_calls (exp);
7766 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7767 if (temp != 0)
7768 return temp;
7769
7770 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7771 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7772 && original_target
7773 && GET_CODE (original_target) == REG
7774 && (GET_MODE (original_target)
7775 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7776 {
7777 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7778 VOIDmode, 0);
7779
7780 if (temp != original_target)
7781 temp = copy_to_reg (temp);
7782
7783 op1 = gen_label_rtx ();
7784 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7785 GET_MODE (temp), unsignedp, 0, op1);
7786 emit_move_insn (temp, const1_rtx);
7787 emit_label (op1);
7788 return temp;
7789 }
7790
7791 /* If no set-flag instruction, must generate a conditional
7792 store into a temporary variable. Drop through
7793 and handle this like && and ||. */
7794
7795 case TRUTH_ANDIF_EXPR:
7796 case TRUTH_ORIF_EXPR:
7797 if (! ignore
7798 && (target == 0 || ! safe_from_p (target, exp, 1)
7799 /* Make sure we don't have a hard reg (such as function's return
7800 value) live across basic blocks, if not optimizing. */
7801 || (!optimize && GET_CODE (target) == REG
7802 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7803 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7804
7805 if (target)
7806 emit_clr_insn (target);
7807
7808 op1 = gen_label_rtx ();
7809 jumpifnot (exp, op1);
7810
7811 if (target)
7812 emit_0_to_1_insn (target);
7813
7814 emit_label (op1);
7815 return ignore ? const0_rtx : target;
7816
7817 case TRUTH_NOT_EXPR:
7818 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7819 /* The parser is careful to generate TRUTH_NOT_EXPR
7820 only with operands that are always zero or one. */
7821 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7822 target, 1, OPTAB_LIB_WIDEN);
7823 if (temp == 0)
7824 abort ();
7825 return temp;
7826
7827 case COMPOUND_EXPR:
7828 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7829 emit_queue ();
7830 return expand_expr (TREE_OPERAND (exp, 1),
7831 (ignore ? const0_rtx : target),
7832 VOIDmode, 0);
7833
7834 case COND_EXPR:
7835 /* If we would have a "singleton" (see below) were it not for a
7836 conversion in each arm, bring that conversion back out. */
7837 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7838 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7839 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7840 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7841 {
7842 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7843 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7844
7845 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7846 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7847 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7848 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7849 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7850 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7851 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7852 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7853 return expand_expr (build1 (NOP_EXPR, type,
7854 build (COND_EXPR, TREE_TYPE (true),
7855 TREE_OPERAND (exp, 0),
7856 true, false)),
7857 target, tmode, modifier);
7858 }
7859
7860 {
7861 /* Note that COND_EXPRs whose type is a structure or union
7862 are required to be constructed to contain assignments of
7863 a temporary variable, so that we can evaluate them here
7864 for side effect only. If type is void, we must do likewise. */
7865
7866 /* If an arm of the branch requires a cleanup,
7867 only that cleanup is performed. */
7868
7869 tree singleton = 0;
7870 tree binary_op = 0, unary_op = 0;
7871
7872 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7873 convert it to our mode, if necessary. */
7874 if (integer_onep (TREE_OPERAND (exp, 1))
7875 && integer_zerop (TREE_OPERAND (exp, 2))
7876 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7877 {
7878 if (ignore)
7879 {
7880 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7881 ro_modifier);
7882 return const0_rtx;
7883 }
7884
7885 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7886 if (GET_MODE (op0) == mode)
7887 return op0;
7888
7889 if (target == 0)
7890 target = gen_reg_rtx (mode);
7891 convert_move (target, op0, unsignedp);
7892 return target;
7893 }
7894
7895 /* Check for X ? A + B : A. If we have this, we can copy A to the
7896 output and conditionally add B. Similarly for unary operations.
7897 Don't do this if X has side-effects because those side effects
7898 might affect A or B and the "?" operation is a sequence point in
7899 ANSI. (operand_equal_p tests for side effects.) */
7900
7901 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7902 && operand_equal_p (TREE_OPERAND (exp, 2),
7903 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7904 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7905 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7906 && operand_equal_p (TREE_OPERAND (exp, 1),
7907 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7908 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7909 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7910 && operand_equal_p (TREE_OPERAND (exp, 2),
7911 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7912 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7913 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7914 && operand_equal_p (TREE_OPERAND (exp, 1),
7915 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7916 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7917
7918 /* If we are not to produce a result, we have no target. Otherwise,
7919 if a target was specified use it; it will not be used as an
7920 intermediate target unless it is safe. If no target, use a
7921 temporary. */
7922
7923 if (ignore)
7924 temp = 0;
7925 else if (original_target
7926 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7927 || (singleton && GET_CODE (original_target) == REG
7928 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7929 && original_target == var_rtx (singleton)))
7930 && GET_MODE (original_target) == mode
7931 #ifdef HAVE_conditional_move
7932 && (! can_conditionally_move_p (mode)
7933 || GET_CODE (original_target) == REG
7934 || TREE_ADDRESSABLE (type))
7935 #endif
7936 && ! (GET_CODE (original_target) == MEM
7937 && MEM_VOLATILE_P (original_target)))
7938 temp = original_target;
7939 else if (TREE_ADDRESSABLE (type))
7940 abort ();
7941 else
7942 temp = assign_temp (type, 0, 0, 1);
7943
7944 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7945 do the test of X as a store-flag operation, do this as
7946 A + ((X != 0) << log C). Similarly for other simple binary
7947 operators. Only do for C == 1 if BRANCH_COST is low. */
7948 if (temp && singleton && binary_op
7949 && (TREE_CODE (binary_op) == PLUS_EXPR
7950 || TREE_CODE (binary_op) == MINUS_EXPR
7951 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7952 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7953 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7954 : integer_onep (TREE_OPERAND (binary_op, 1)))
7955 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7956 {
7957 rtx result;
7958 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7959 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7960 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7961 : xor_optab);
7962
7963 /* If we had X ? A : A + 1, do this as A + (X == 0).
7964
7965 We have to invert the truth value here and then put it
7966 back later if do_store_flag fails. We cannot simply copy
7967 TREE_OPERAND (exp, 0) to another variable and modify that
7968 because invert_truthvalue can modify the tree pointed to
7969 by its argument. */
7970 if (singleton == TREE_OPERAND (exp, 1))
7971 TREE_OPERAND (exp, 0)
7972 = invert_truthvalue (TREE_OPERAND (exp, 0));
7973
7974 result = do_store_flag (TREE_OPERAND (exp, 0),
7975 (safe_from_p (temp, singleton, 1)
7976 ? temp : NULL_RTX),
7977 mode, BRANCH_COST <= 1);
7978
7979 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7980 result = expand_shift (LSHIFT_EXPR, mode, result,
7981 build_int_2 (tree_log2
7982 (TREE_OPERAND
7983 (binary_op, 1)),
7984 0),
7985 (safe_from_p (temp, singleton, 1)
7986 ? temp : NULL_RTX), 0);
7987
7988 if (result)
7989 {
7990 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7991 return expand_binop (mode, boptab, op1, result, temp,
7992 unsignedp, OPTAB_LIB_WIDEN);
7993 }
7994 else if (singleton == TREE_OPERAND (exp, 1))
7995 TREE_OPERAND (exp, 0)
7996 = invert_truthvalue (TREE_OPERAND (exp, 0));
7997 }
7998
7999 do_pending_stack_adjust ();
8000 NO_DEFER_POP;
8001 op0 = gen_label_rtx ();
8002
8003 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8004 {
8005 if (temp != 0)
8006 {
8007 /* If the target conflicts with the other operand of the
8008 binary op, we can't use it. Also, we can't use the target
8009 if it is a hard register, because evaluating the condition
8010 might clobber it. */
8011 if ((binary_op
8012 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8013 || (GET_CODE (temp) == REG
8014 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8015 temp = gen_reg_rtx (mode);
8016 store_expr (singleton, temp, 0);
8017 }
8018 else
8019 expand_expr (singleton,
8020 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8021 if (singleton == TREE_OPERAND (exp, 1))
8022 jumpif (TREE_OPERAND (exp, 0), op0);
8023 else
8024 jumpifnot (TREE_OPERAND (exp, 0), op0);
8025
8026 start_cleanup_deferral ();
8027 if (binary_op && temp == 0)
8028 /* Just touch the other operand. */
8029 expand_expr (TREE_OPERAND (binary_op, 1),
8030 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8031 else if (binary_op)
8032 store_expr (build (TREE_CODE (binary_op), type,
8033 make_tree (type, temp),
8034 TREE_OPERAND (binary_op, 1)),
8035 temp, 0);
8036 else
8037 store_expr (build1 (TREE_CODE (unary_op), type,
8038 make_tree (type, temp)),
8039 temp, 0);
8040 op1 = op0;
8041 }
8042 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8043 comparison operator. If we have one of these cases, set the
8044 output to A, branch on A (cse will merge these two references),
8045 then set the output to FOO. */
8046 else if (temp
8047 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8048 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8049 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8050 TREE_OPERAND (exp, 1), 0)
8051 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8052 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8053 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8054 {
8055 if (GET_CODE (temp) == REG
8056 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8057 temp = gen_reg_rtx (mode);
8058 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8059 jumpif (TREE_OPERAND (exp, 0), op0);
8060
8061 start_cleanup_deferral ();
8062 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8063 op1 = op0;
8064 }
8065 else if (temp
8066 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8067 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8068 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8069 TREE_OPERAND (exp, 2), 0)
8070 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8071 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8072 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8073 {
8074 if (GET_CODE (temp) == REG
8075 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8076 temp = gen_reg_rtx (mode);
8077 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8078 jumpifnot (TREE_OPERAND (exp, 0), op0);
8079
8080 start_cleanup_deferral ();
8081 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8082 op1 = op0;
8083 }
8084 else
8085 {
8086 op1 = gen_label_rtx ();
8087 jumpifnot (TREE_OPERAND (exp, 0), op0);
8088
8089 start_cleanup_deferral ();
8090
8091 /* One branch of the cond can be void if it never returns;
8092 for example, A ? throw : E.  */
8093 if (temp != 0
8094 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8095 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8096 else
8097 expand_expr (TREE_OPERAND (exp, 1),
8098 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8099 end_cleanup_deferral ();
8100 emit_queue ();
8101 emit_jump_insn (gen_jump (op1));
8102 emit_barrier ();
8103 emit_label (op0);
8104 start_cleanup_deferral ();
8105 if (temp != 0
8106 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8107 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8108 else
8109 expand_expr (TREE_OPERAND (exp, 2),
8110 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8111 }
8112
8113 end_cleanup_deferral ();
8114
8115 emit_queue ();
8116 emit_label (op1);
8117 OK_DEFER_POP;
8118
8119 return temp;
8120 }
8121
8122 case TARGET_EXPR:
8123 {
8124 /* Something needs to be initialized, but we didn't know
8125 where that thing was when building the tree. For example,
8126 it could be the return value of a function, or a parameter
8127 to a function which is laid out on the stack, or a temporary
8128 variable which must be passed by reference.
8129
8130 We guarantee that the expression will either be constructed
8131 or copied into our original target. */
8132
8133 tree slot = TREE_OPERAND (exp, 0);
8134 tree cleanups = NULL_TREE;
8135 tree exp1;
8136
8137 if (TREE_CODE (slot) != VAR_DECL)
8138 abort ();
8139
8140 if (! ignore)
8141 target = original_target;
8142
8143 /* Set this here so that if we get a target that refers to a
8144 register variable that's already been used, put_reg_into_stack
8145 knows that it should fix up those uses. */
8146 TREE_USED (slot) = 1;
8147
8148 if (target == 0)
8149 {
8150 if (DECL_RTL (slot) != 0)
8151 {
8152 target = DECL_RTL (slot);
8153 /* If we have already expanded the slot, don't do
8154 it again. (mrs) */
8155 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8156 return target;
8157 }
8158 else
8159 {
8160 target = assign_temp (type, 2, 0, 1);
8161 /* All temp slots at this level must not conflict. */
8162 preserve_temp_slots (target);
8163 DECL_RTL (slot) = target;
8164 if (TREE_ADDRESSABLE (slot))
8165 put_var_into_stack (slot);
8166
8167 /* Since SLOT is not known to the called function
8168 to belong to its stack frame, we must build an explicit
8169 cleanup. This case occurs when we must build up a reference
8170 to pass the reference as an argument. In this case,
8171 it is very likely that such a reference need not be
8172 built here. */
8173
8174 if (TREE_OPERAND (exp, 2) == 0)
8175 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8176 cleanups = TREE_OPERAND (exp, 2);
8177 }
8178 }
8179 else
8180 {
8181 /* This case does occur when expanding a parameter which
8182 needs to be constructed on the stack. The target
8183 is the actual stack address that we want to initialize.
8184 The function we call will perform the cleanup in this case. */
8185
8186 /* If we have already assigned it space, use that space,
8187 not the target that we were passed in, as our target
8188 parameter is only a hint. */
8189 if (DECL_RTL (slot) != 0)
8190 {
8191 target = DECL_RTL (slot);
8192 /* If we have already expanded the slot, don't do
8193 it again. (mrs) */
8194 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8195 return target;
8196 }
8197 else
8198 {
8199 DECL_RTL (slot) = target;
8200 /* If we must have an addressable slot, then make sure that
8201 the RTL that we just stored in slot is OK. */
8202 if (TREE_ADDRESSABLE (slot))
8203 put_var_into_stack (slot);
8204 }
8205 }
8206
8207 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8208 /* Mark it as expanded. */
8209 TREE_OPERAND (exp, 1) = NULL_TREE;
8210
8211 store_expr (exp1, target, 0);
8212
8213 expand_decl_cleanup (NULL_TREE, cleanups);
8214
8215 return target;
8216 }
8217
8218 case INIT_EXPR:
8219 {
8220 tree lhs = TREE_OPERAND (exp, 0);
8221 tree rhs = TREE_OPERAND (exp, 1);
8222 tree noncopied_parts = 0;
8223 tree lhs_type = TREE_TYPE (lhs);
8224
8225 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8226 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8227 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8228 TYPE_NONCOPIED_PARTS (lhs_type));
8229 while (noncopied_parts != 0)
8230 {
8231 expand_assignment (TREE_VALUE (noncopied_parts),
8232 TREE_PURPOSE (noncopied_parts), 0, 0);
8233 noncopied_parts = TREE_CHAIN (noncopied_parts);
8234 }
8235 return temp;
8236 }
8237
8238 case MODIFY_EXPR:
8239 {
8240 /* If lhs is complex, expand calls in rhs before computing it.
8241 That's so we don't compute a pointer and save it over a call.
8242 If lhs is simple, compute it first so we can give it as a
8243 target if the rhs is just a call. This avoids an extra temp and copy
8244 and that prevents a partial-subsumption which makes bad code.
8245 Actually we could treat component_ref's of vars like vars. */
8246
8247 tree lhs = TREE_OPERAND (exp, 0);
8248 tree rhs = TREE_OPERAND (exp, 1);
8249 tree noncopied_parts = 0;
8250 tree lhs_type = TREE_TYPE (lhs);
8251
8252 temp = 0;
8253
8254 if (TREE_CODE (lhs) != VAR_DECL
8255 && TREE_CODE (lhs) != RESULT_DECL
8256 && TREE_CODE (lhs) != PARM_DECL
8257 && ! (TREE_CODE (lhs) == INDIRECT_REF
8258 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8259 preexpand_calls (exp);
8260
8261 /* Check for |= or &= of a bitfield of size one into another bitfield
8262 of size 1. In this case, (unless we need the result of the
8263 assignment) we can do this more efficiently with a
8264 test followed by an assignment, if necessary.
8265
8266 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8267 things change so we do, this code should be enhanced to
8268 support it. */
8269 if (ignore
8270 && TREE_CODE (lhs) == COMPONENT_REF
8271 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8272 || TREE_CODE (rhs) == BIT_AND_EXPR)
8273 && TREE_OPERAND (rhs, 0) == lhs
8274 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8275 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8276 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8277 {
8278 rtx label = gen_label_rtx ();
8279
8280 do_jump (TREE_OPERAND (rhs, 1),
8281 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8282 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8283 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8284 (TREE_CODE (rhs) == BIT_IOR_EXPR
8285 ? integer_one_node
8286 : integer_zero_node)),
8287 0, 0);
8288 do_pending_stack_adjust ();
8289 emit_label (label);
8290 return const0_rtx;
8291 }
8292
8293 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8294 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8295 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8296 TYPE_NONCOPIED_PARTS (lhs_type));
8297
8298 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8299 while (noncopied_parts != 0)
8300 {
8301 expand_assignment (TREE_PURPOSE (noncopied_parts),
8302 TREE_VALUE (noncopied_parts), 0, 0);
8303 noncopied_parts = TREE_CHAIN (noncopied_parts);
8304 }
8305 return temp;
8306 }
8307
8308 case RETURN_EXPR:
8309 if (!TREE_OPERAND (exp, 0))
8310 expand_null_return ();
8311 else
8312 expand_return (TREE_OPERAND (exp, 0));
8313 return const0_rtx;
8314
8315 case PREINCREMENT_EXPR:
8316 case PREDECREMENT_EXPR:
8317 return expand_increment (exp, 0, ignore);
8318
8319 case POSTINCREMENT_EXPR:
8320 case POSTDECREMENT_EXPR:
8321 /* Faster to treat as pre-increment if result is not used. */
8322 return expand_increment (exp, ! ignore, ignore);
8323
8324 case ADDR_EXPR:
8325 /* If nonzero, TEMP will be set to the address of something that might
8326 be a MEM corresponding to a stack slot. */
8327 temp = 0;
8328
8329 /* Are we taking the address of a nested function? */
8330 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8331 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8332 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8333 && ! TREE_STATIC (exp))
8334 {
8335 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8336 op0 = force_operand (op0, target);
8337 }
8338 /* If we are taking the address of something erroneous, just
8339 return a zero. */
8340 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8341 return const0_rtx;
8342 else
8343 {
8344 /* We make sure to pass const0_rtx down if we came in with
8345 ignore set, to avoid doing the cleanups twice for something. */
8346 op0 = expand_expr (TREE_OPERAND (exp, 0),
8347 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8348 (modifier == EXPAND_INITIALIZER
8349 ? modifier : EXPAND_CONST_ADDRESS));
8350
8351 /* If we are going to ignore the result, OP0 will have been set
8352 to const0_rtx, so just return it. Don't get confused and
8353 think we are taking the address of the constant. */
8354 if (ignore)
8355 return op0;
8356
8357 op0 = protect_from_queue (op0, 0);
8358
8359 /* We would like the object in memory. If it is a constant, we can
8360 have it be statically allocated into memory. For a non-constant,
8361 we need to allocate some memory and store the value into it. */
8362
8363 if (CONSTANT_P (op0))
8364 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8365 op0);
8366 else if (GET_CODE (op0) == MEM)
8367 {
8368 mark_temp_addr_taken (op0);
8369 temp = XEXP (op0, 0);
8370 }
8371
8372 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8373 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8374 {
8375 /* If this object is in a register, it must not
8376 be BLKmode. */
8377 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8378 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8379
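/* Copy the value into a stack temporary so that there is an object in
memory whose address we can take.  */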
8380 mark_temp_addr_taken (memloc);
8381 emit_move_insn (memloc, op0);
8382 op0 = memloc;
8383 }
8384
8385 if (GET_CODE (op0) != MEM)
8386 abort ();
8387
8388 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8389 {
8390 temp = XEXP (op0, 0);
8391 #ifdef POINTERS_EXTEND_UNSIGNED
8392 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8393 && mode == ptr_mode)
8394 temp = convert_memory_address (ptr_mode, temp);
8395 #endif
8396 return temp;
8397 }
8398
8399 op0 = force_operand (XEXP (op0, 0), target);
8400 }
8401
8402 if (flag_force_addr && GET_CODE (op0) != REG)
8403 op0 = force_reg (Pmode, op0);
8404
8405 if (GET_CODE (op0) == REG
8406 && ! REG_USERVAR_P (op0))
8407 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8408
8409 /* If we might have had a temp slot, add an equivalent address
8410 for it. */
8411 if (temp != 0)
8412 update_temp_slot_address (temp, op0);
8413
8414 #ifdef POINTERS_EXTEND_UNSIGNED
8415 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8416 && mode == ptr_mode)
8417 op0 = convert_memory_address (ptr_mode, op0);
8418 #endif
8419
8420 return op0;
8421
8422 case ENTRY_VALUE_EXPR:
8423 abort ();
8424
8425 /* COMPLEX type for Extended Pascal & Fortran */
8426 case COMPLEX_EXPR:
8427 {
8428 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8429 rtx insns;
8430
8431 /* Get the rtx code of the operands. */
8432 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8433 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8434
8435 if (! target)
8436 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8437
8438 start_sequence ();
8439
8440 /* Move the real (op0) and imaginary (op1) parts to their location. */
8441 emit_move_insn (gen_realpart (mode, target), op0);
8442 emit_move_insn (gen_imagpart (mode, target), op1);
8443
8444 insns = get_insns ();
8445 end_sequence ();
8446
8447 /* Complex construction should appear as a single unit. */
8448 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8449 each with a separate pseudo as destination.
8450 It's not correct for flow to treat them as a unit. */
8451 if (GET_CODE (target) != CONCAT)
8452 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8453 else
8454 emit_insns (insns);
8455
8456 return target;
8457 }
8458
8459 case REALPART_EXPR:
8460 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8461 return gen_realpart (mode, op0);
8462
8463 case IMAGPART_EXPR:
8464 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8465 return gen_imagpart (mode, op0);
8466
8467 case CONJ_EXPR:
8468 {
8469 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8470 rtx imag_t;
8471 rtx insns;
8472
8473 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8474
8475 if (! target)
8476 target = gen_reg_rtx (mode);
8477
8478 start_sequence ();
8479
8480 /* Store the realpart and the negated imagpart to target. */
8481 emit_move_insn (gen_realpart (partmode, target),
8482 gen_realpart (partmode, op0));
8483
8484 imag_t = gen_imagpart (partmode, target);
8485 temp = expand_unop (partmode, neg_optab,
8486 gen_imagpart (partmode, op0), imag_t, 0);
8487 if (temp != imag_t)
8488 emit_move_insn (imag_t, temp);
8489
8490 insns = get_insns ();
8491 end_sequence ();
8492
8493 /* Conjugate should appear as a single unit.
8494 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8495 each with a separate pseudo as destination.
8496 It's not correct for flow to treat them as a unit. */
8497 if (GET_CODE (target) != CONCAT)
8498 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8499 else
8500 emit_insns (insns);
8501
8502 return target;
8503 }
8504
8505 case TRY_CATCH_EXPR:
8506 {
8507 tree handler = TREE_OPERAND (exp, 1);
8508
8509 expand_eh_region_start ();
8510
8511 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8512
8513 expand_eh_region_end (handler);
8514
8515 return op0;
8516 }
8517
8518 case TRY_FINALLY_EXPR:
8519 {
8520 tree try_block = TREE_OPERAND (exp, 0);
8521 tree finally_block = TREE_OPERAND (exp, 1);
8522 rtx finally_label = gen_label_rtx ();
8523 rtx done_label = gen_label_rtx ();
8524 rtx return_link = gen_reg_rtx (Pmode);
8525 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8526 (tree) finally_label, (tree) return_link);
8527 TREE_SIDE_EFFECTS (cleanup) = 1;
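/* The finally block is emitted only once, as a subroutine at
FINALLY_LABEL that returns through RETURN_LINK; the cleanup
registered below makes every exit from the try block pass through
it, and the jump to DONE_LABEL steps over its body on the normal
path.  */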
8528
8529 /* Start a new binding layer that will keep track of all cleanup
8530 actions to be performed. */
8531 expand_start_bindings (2);
8532
8533 target_temp_slot_level = temp_slot_level;
8534
8535 expand_decl_cleanup (NULL_TREE, cleanup);
8536 op0 = expand_expr (try_block, target, tmode, modifier);
8537
8538 preserve_temp_slots (op0);
8539 expand_end_bindings (NULL_TREE, 0, 0);
8540 emit_jump (done_label);
8541 emit_label (finally_label);
8542 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8543 emit_indirect_jump (return_link);
8544 emit_label (done_label);
8545 return op0;
8546 }
8547
8548 case GOTO_SUBROUTINE_EXPR:
8549 {
8550 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8551 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8552 rtx return_address = gen_label_rtx ();
8553 emit_move_insn (return_link,
8554 gen_rtx_LABEL_REF (Pmode, return_address));
8555 emit_jump (subr);
8556 emit_label (return_address);
8557 return const0_rtx;
8558 }
8559
8560 case POPDCC_EXPR:
8561 {
8562 rtx dcc = get_dynamic_cleanup_chain ();
8563 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8564 return const0_rtx;
8565 }
8566
8567 case POPDHC_EXPR:
8568 {
8569 rtx dhc = get_dynamic_handler_chain ();
8570 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8571 return const0_rtx;
8572 }
8573
8574 case VA_ARG_EXPR:
8575 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8576
8577 default:
8578 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8579 }
8580
8581 /* Here to do an ordinary binary operator, generating an instruction
8582 from the optab already placed in `this_optab'. */
8583 binop:
8584 preexpand_calls (exp);
8585 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8586 subtarget = 0;
8587 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8588 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8589 binop2:
8590 temp = expand_binop (mode, this_optab, op0, op1, target,
8591 unsignedp, OPTAB_LIB_WIDEN);
8592 if (temp == 0)
8593 abort ();
8594 return temp;
8595 }
8596 \f
8597 /* Similar to expand_expr, except that we don't specify a target, target
8598 mode, or modifier and we return the alignment of the inner type. This is
8599 used in cases where it is not necessary to align the result to the
8600 alignment of its type as long as we know the alignment of the result, for
8601 example for comparisons of BLKmode values. */
8602
8603 static rtx
8604 expand_expr_unaligned (exp, palign)
8605 register tree exp;
8606 unsigned int *palign;
8607 {
8608 register rtx op0;
8609 tree type = TREE_TYPE (exp);
8610 register enum machine_mode mode = TYPE_MODE (type);
8611
8612 /* Default the alignment we return to that of the type. */
8613 *palign = TYPE_ALIGN (type);
8614
8615 /* The only case in which we do anything special is if the resulting mode
8616 is BLKmode. */
8617 if (mode != BLKmode)
8618 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8619
8620 switch (TREE_CODE (exp))
8621 {
8622 case CONVERT_EXPR:
8623 case NOP_EXPR:
8624 case NON_LVALUE_EXPR:
8625 /* Conversions between BLKmode values don't change the underlying
8626 alignment or value. */
8627 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8628 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8629 break;
8630
8631 case ARRAY_REF:
8632 /* Much of the code for this case is copied directly from expand_expr.
8633 We need to duplicate it here because we will do something different
8634 in the fall-through case, so we need to handle the same exceptions
8635 it does. */
8636 {
8637 tree array = TREE_OPERAND (exp, 0);
8638 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8639 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8640 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8641 HOST_WIDE_INT i;
8642
8643 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8644 abort ();
8645
8646 /* Optimize the special-case of a zero lower bound.
8647
8648 We convert the low_bound to sizetype to avoid some problems
8649 with constant folding. (E.g. suppose the lower bound is 1,
8650 and its mode is QI. Without the conversion, (ARRAY
8651 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8652 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8653
8654 if (! integer_zerop (low_bound))
8655 index = size_diffop (index, convert (sizetype, low_bound));
8656
8657 /* If this is a constant index into a constant array,
8658 just get the value from the array. Handle both cases: when
8659 we have an explicit constructor and when our operand is a variable
8660 that was declared const. */
8661
8662 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8663 && 0 > compare_tree_int (index,
8664 list_length (CONSTRUCTOR_ELTS
8665 (TREE_OPERAND (exp, 0)))))
8666 {
8667 tree elem;
8668
8669 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8670 i = TREE_INT_CST_LOW (index);
8671 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8672 ;
8673
8674 if (elem)
8675 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8676 }
8677
8678 else if (optimize >= 1
8679 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8680 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8681 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8682 {
8683 if (TREE_CODE (index) == INTEGER_CST)
8684 {
8685 tree init = DECL_INITIAL (array);
8686
8687 if (TREE_CODE (init) == CONSTRUCTOR)
8688 {
8689 tree elem;
8690
8691 for (elem = CONSTRUCTOR_ELTS (init);
8692 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8693 elem = TREE_CHAIN (elem))
8694 ;
8695
8696 if (elem)
8697 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8698 palign);
8699 }
8700 }
8701 }
8702 }
8703 /* Fall through. */
8704
8705 case COMPONENT_REF:
8706 case BIT_FIELD_REF:
8707 /* If the operand is a CONSTRUCTOR, we can just extract the
8708 appropriate field if it is present. Don't do this if we have
8709 already written the data since we want to refer to that copy
8710 and varasm.c assumes that's what we'll do. */
8711 if (TREE_CODE (exp) != ARRAY_REF
8712 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8713 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8714 {
8715 tree elt;
8716
8717 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8718 elt = TREE_CHAIN (elt))
8719 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8720 /* Note that unlike the case in expand_expr, we know this is
8721 BLKmode and hence not an integer. */
8722 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8723 }
8724
8725 {
8726 enum machine_mode mode1;
8727 HOST_WIDE_INT bitsize, bitpos;
8728 tree offset;
8729 int volatilep = 0;
8730 unsigned int alignment;
8731 int unsignedp;
8732 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8733 &mode1, &unsignedp, &volatilep,
8734 &alignment);
8735
8736 /* If we got back the original object, something is wrong. Perhaps
8737 we are evaluating an expression too early. In any event, don't
8738 infinitely recurse. */
8739 if (tem == exp)
8740 abort ();
8741
8742 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8743
8744 /* If this is a constant, put it into a register if it is a
8745 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8746 if (CONSTANT_P (op0))
8747 {
8748 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8749
8750 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8751 && offset == 0)
8752 op0 = force_reg (inner_mode, op0);
8753 else
8754 op0 = validize_mem (force_const_mem (inner_mode, op0));
8755 }
8756
8757 if (offset != 0)
8758 {
8759 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8760
8761 /* If this object is in a register, put it into memory.
8762 This case can't occur in C, but can in Ada if we have
8763 unchecked conversion of an expression from a scalar type to
8764 an array or record type. */
8765 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8766 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8767 {
8768 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8769
8770 mark_temp_addr_taken (memloc);
8771 emit_move_insn (memloc, op0);
8772 op0 = memloc;
8773 }
8774
8775 if (GET_CODE (op0) != MEM)
8776 abort ();
8777
8778 if (GET_MODE (offset_rtx) != ptr_mode)
8779 {
8780 #ifdef POINTERS_EXTEND_UNSIGNED
8781 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8782 #else
8783 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8784 #endif
8785 }
8786
8787 op0 = change_address (op0, VOIDmode,
8788 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8789 force_reg (ptr_mode,
8790 offset_rtx)));
8791 }
8792
8793 /* Don't forget about volatility even if this is a bitfield. */
8794 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8795 {
8796 op0 = copy_rtx (op0);
8797 MEM_VOLATILE_P (op0) = 1;
8798 }
8799
8800 /* Check the access. */
8801 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8802 {
8803 rtx to;
8804 int size;
8805
8806 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8807 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8808
8809 /* Check the access right of the pointer. */
8810 in_check_memory_usage = 1;
8811 if (size > BITS_PER_UNIT)
8812 emit_library_call (chkr_check_addr_libfunc,
8813 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8814 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8815 TYPE_MODE (sizetype),
8816 GEN_INT (MEMORY_USE_RO),
8817 TYPE_MODE (integer_type_node));
8818 in_check_memory_usage = 0;
8819 }
8820
8821 /* In cases where an aligned union has an unaligned object
8822 as a field, we might be extracting a BLKmode value from
8823 an integer-mode (e.g., SImode) object. Handle this case
8824 by doing the extract into an object as wide as the field
8825 (which we know to be the width of a basic mode), then
8826 storing into memory, and changing the mode to BLKmode.
8827 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8828 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8829 if (mode1 == VOIDmode
8830 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8831 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8832 && (TYPE_ALIGN (type) > alignment
8833 || bitpos % TYPE_ALIGN (type) != 0)))
8834 {
8835 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8836
8837 if (ext_mode == BLKmode)
8838 {
8839 /* In this case, BITPOS must start at a byte boundary. */
8840 if (GET_CODE (op0) != MEM
8841 || bitpos % BITS_PER_UNIT != 0)
8842 abort ();
8843
8844 op0 = change_address (op0, VOIDmode,
8845 plus_constant (XEXP (op0, 0),
8846 bitpos / BITS_PER_UNIT));
8847 }
8848 else
8849 {
8850 rtx new = assign_stack_temp (ext_mode,
8851 bitsize / BITS_PER_UNIT, 0);
8852
8853 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8854 unsignedp, NULL_RTX, ext_mode,
8855 ext_mode, alignment,
8856 int_size_in_bytes (TREE_TYPE (tem)));
8857
8858 /* If the result is a record type and BITSIZE is narrower than
8859 the mode of OP0, an integral mode, and this is a big endian
8860 machine, we must put the field into the high-order bits. */
8861 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8862 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8863 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8864 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8865 size_int (GET_MODE_BITSIZE
8866 (GET_MODE (op0))
8867 - bitsize),
8868 op0, 1);
8869
8870 emit_move_insn (new, op0);
8871 op0 = copy_rtx (new);
8872 PUT_MODE (op0, BLKmode);
8873 }
8874 }
8875 else
8876 /* Get a reference to just this component. */
8877 op0 = change_address (op0, mode1,
8878 plus_constant (XEXP (op0, 0),
8879 (bitpos / BITS_PER_UNIT)));
8880
8881 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8882
8883 /* Adjust the alignment in case the bit position is not
8884 a multiple of the alignment of the inner object. */
8885 while (bitpos % alignment != 0)
8886 alignment >>= 1;
8887
8888 if (GET_CODE (XEXP (op0, 0)) == REG)
8889 mark_reg_pointer (XEXP (op0, 0), alignment);
8890
8891 MEM_IN_STRUCT_P (op0) = 1;
8892 MEM_VOLATILE_P (op0) |= volatilep;
8893
8894 *palign = alignment;
8895 return op0;
8896 }
8897
8898 default:
8899 break;
8900
8901 }
8902
8903 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8904 }
8905 \f
8906 /* Return the tree node if ARG corresponds to a string constant, or zero
8907 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8908 in bytes within the string that ARG is accessing. The type of the
8909 offset will be `sizetype'. */
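/* For illustration (an addition, not part of the original comment): given
   the tree for "hello" + 2 -- typically a PLUS_EXPR of an ADDR_EXPR of a
   STRING_CST and an integer constant -- this returns the STRING_CST
   "hello" and sets *PTR_OFFSET to a sizetype constant 2.  The exact tree
   shape depends on the front end, so treat this only as a sketch.  */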
8910
8911 tree
8912 string_constant (arg, ptr_offset)
8913 tree arg;
8914 tree *ptr_offset;
8915 {
8916 STRIP_NOPS (arg);
8917
8918 if (TREE_CODE (arg) == ADDR_EXPR
8919 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8920 {
8921 *ptr_offset = size_zero_node;
8922 return TREE_OPERAND (arg, 0);
8923 }
8924 else if (TREE_CODE (arg) == PLUS_EXPR)
8925 {
8926 tree arg0 = TREE_OPERAND (arg, 0);
8927 tree arg1 = TREE_OPERAND (arg, 1);
8928
8929 STRIP_NOPS (arg0);
8930 STRIP_NOPS (arg1);
8931
8932 if (TREE_CODE (arg0) == ADDR_EXPR
8933 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8934 {
8935 *ptr_offset = convert (sizetype, arg1);
8936 return TREE_OPERAND (arg0, 0);
8937 }
8938 else if (TREE_CODE (arg1) == ADDR_EXPR
8939 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8940 {
8941 *ptr_offset = convert (sizetype, arg0);
8942 return TREE_OPERAND (arg1, 0);
8943 }
8944 }
8945
8946 return 0;
8947 }
8948 \f
8949 /* Expand code for a post- or pre- increment or decrement
8950 and return the RTX for the result.
8951 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
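/* Illustrative note (an addition, not from the original comment): for a C
   expression such as `i++' used for its value, POST is 1 and the rtx
   returned holds the old value of `i'; for `++i' POST is 0 and the rtx
   returned holds the incremented value.  IGNORE is nonzero when the caller
   does not need the value at all.  */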
8952
8953 static rtx
8954 expand_increment (exp, post, ignore)
8955 register tree exp;
8956 int post, ignore;
8957 {
8958 register rtx op0, op1;
8959 register rtx temp, value;
8960 register tree incremented = TREE_OPERAND (exp, 0);
8961 optab this_optab = add_optab;
8962 int icode;
8963 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8964 int op0_is_copy = 0;
8965 int single_insn = 0;
8966 /* 1 means we can't store into OP0 directly,
8967 because it is a subreg narrower than a word,
8968 and we don't dare clobber the rest of the word. */
8969 int bad_subreg = 0;
8970
8971 /* Stabilize any component ref that might need to be
8972 evaluated more than once below. */
8973 if (!post
8974 || TREE_CODE (incremented) == BIT_FIELD_REF
8975 || (TREE_CODE (incremented) == COMPONENT_REF
8976 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8977 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8978 incremented = stabilize_reference (incremented);
8979 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8980 ones into save exprs so that they don't accidentally get evaluated
8981 more than once by the code below. */
8982 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8983 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8984 incremented = save_expr (incremented);
8985
8986 /* Compute the operands as RTX.
8987 Note whether OP0 is the actual lvalue or a copy of it:
8988 I believe it is a copy iff it is a register or subreg
8989 and insns were generated in computing it. */
8990
8991 temp = get_last_insn ();
8992 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8993
8994 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8995 in place but instead must do sign- or zero-extension during assignment,
8996 so we copy it into a new register and let the code below use it as
8997 a copy.
8998
8999 Note that we can safely modify this SUBREG since it is known not to be
9000 shared (it was made by the expand_expr call above). */
9001
9002 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9003 {
9004 if (post)
9005 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9006 else
9007 bad_subreg = 1;
9008 }
9009 else if (GET_CODE (op0) == SUBREG
9010 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9011 {
9012 /* We cannot increment this SUBREG in place. If we are
9013 post-incrementing, get a copy of the old value. Otherwise,
9014 just mark that we cannot increment in place. */
9015 if (post)
9016 op0 = copy_to_reg (op0);
9017 else
9018 bad_subreg = 1;
9019 }
9020
9021 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9022 && temp != get_last_insn ());
9023 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9024 EXPAND_MEMORY_USE_BAD);
9025
9026 /* Decide whether incrementing or decrementing. */
9027 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9028 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9029 this_optab = sub_optab;
9030
9031 /* Convert decrement by a constant into a negative increment. */
9032 if (this_optab == sub_optab
9033 && GET_CODE (op1) == CONST_INT)
9034 {
9035 op1 = GEN_INT (-INTVAL (op1));
9036 this_optab = add_optab;
9037 }
9038
9039 /* For a preincrement, see if we can do this with a single instruction. */
9040 if (!post)
9041 {
9042 icode = (int) this_optab->handlers[(int) mode].insn_code;
9043 if (icode != (int) CODE_FOR_nothing
9044 /* Make sure that OP0 is valid for operands 0 and 1
9045 of the insn we want to queue. */
9046 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9047 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9048 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9049 single_insn = 1;
9050 }
9051
9052 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9053 then we cannot just increment OP0. We must therefore contrive to
9054 increment the original value. Then, for postincrement, we can return
9055 OP0 since it is a copy of the old value. For preincrement, expand here
9056 unless we can do it with a single insn.
9057
9058 Likewise if storing directly into OP0 would clobber high bits
9059 we need to preserve (bad_subreg). */
9060 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9061 {
9062 /* This is the easiest way to increment the value wherever it is.
9063 Problems with multiple evaluation of INCREMENTED are prevented
9064 because either (1) it is a component_ref or preincrement,
9065 in which case it was stabilized above, or (2) it is an array_ref
9066 with constant index in an array in a register, which is
9067 safe to reevaluate. */
9068 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9069 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9070 ? MINUS_EXPR : PLUS_EXPR),
9071 TREE_TYPE (exp),
9072 incremented,
9073 TREE_OPERAND (exp, 1));
9074
9075 while (TREE_CODE (incremented) == NOP_EXPR
9076 || TREE_CODE (incremented) == CONVERT_EXPR)
9077 {
9078 newexp = convert (TREE_TYPE (incremented), newexp);
9079 incremented = TREE_OPERAND (incremented, 0);
9080 }
9081
9082 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9083 return post ? op0 : temp;
9084 }
9085
9086 if (post)
9087 {
9088 /* We have a true reference to the value in OP0.
9089 If there is an insn to add or subtract in this mode, queue it.
9090 Queueing the increment insn avoids the register shuffling
9091 that often results if we must increment now and first save
9092 the old value for subsequent use. */
9093
9094 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9095 op0 = stabilize (op0);
9096 #endif
9097
9098 icode = (int) this_optab->handlers[(int) mode].insn_code;
9099 if (icode != (int) CODE_FOR_nothing
9100 /* Make sure that OP0 is valid for operands 0 and 1
9101 of the insn we want to queue. */
9102 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9103 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9104 {
9105 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9106 op1 = force_reg (mode, op1);
9107
9108 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9109 }
9110 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9111 {
9112 rtx addr = (general_operand (XEXP (op0, 0), mode)
9113 ? force_reg (Pmode, XEXP (op0, 0))
9114 : copy_to_reg (XEXP (op0, 0)));
9115 rtx temp, result;
9116
9117 op0 = change_address (op0, VOIDmode, addr);
9118 temp = force_reg (GET_MODE (op0), op0);
9119 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9120 op1 = force_reg (mode, op1);
9121
9122 /* The increment queue is LIFO, thus we have to `queue'
9123 the instructions in reverse order. */
9124 enqueue_insn (op0, gen_move_insn (op0, temp));
9125 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9126 return result;
9127 }
9128 }
9129
9130 /* Preincrement, or we can't increment with one simple insn. */
9131 if (post)
9132 /* Save a copy of the value before inc or dec, to return it later. */
9133 temp = value = copy_to_reg (op0);
9134 else
9135 /* Arrange to return the incremented value. */
9136 /* Copy the rtx because expand_binop will protect from the queue,
9137 and the results of that would be invalid for us to return
9138 if our caller does emit_queue before using our result. */
9139 temp = copy_rtx (value = op0);
9140
9141 /* Increment however we can. */
9142 op1 = expand_binop (mode, this_optab, value, op1,
9143 current_function_check_memory_usage ? NULL_RTX : op0,
9144 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9145 /* Make sure the value is stored into OP0. */
9146 if (op1 != op0)
9147 emit_move_insn (op0, op1);
9148
9149 return temp;
9150 }
9151 \f
9152 /* Expand all function calls contained within EXP, innermost ones first.
9153 But don't look within expressions that have sequence points.
9154 For each CALL_EXPR, record the rtx for its value
9155 in the CALL_EXPR_RTL field. */
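/* For example (illustrative only): when expanding `a + f (b)', the call
   `f (b)' is expanded here first and its result rtx recorded in
   CALL_EXPR_RTL, so the later expansion of the PLUS_EXPR simply reuses
   that rtx instead of expanding the call in mid-expression.  */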
9156
9157 static void
9158 preexpand_calls (exp)
9159 tree exp;
9160 {
9161 register int nops, i;
9162 int class = TREE_CODE_CLASS (TREE_CODE (exp));
9163
9164 if (! do_preexpand_calls)
9165 return;
9166
9167 /* Only expressions and references can contain calls. */
9168
9169 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9170 return;
9171
9172 switch (TREE_CODE (exp))
9173 {
9174 case CALL_EXPR:
9175 /* Do nothing if already expanded. */
9176 if (CALL_EXPR_RTL (exp) != 0
9177 /* Do nothing if the call returns a variable-sized object. */
9178 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9179 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9180 /* Do nothing to built-in functions. */
9181 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9182 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9183 == FUNCTION_DECL)
9184 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9185 return;
9186
9187 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9188 return;
9189
9190 case COMPOUND_EXPR:
9191 case COND_EXPR:
9192 case TRUTH_ANDIF_EXPR:
9193 case TRUTH_ORIF_EXPR:
9194 /* If we find one of these, then we can be sure
9195 the adjust will be done for it (since it makes jumps).
9196 Do it now, so that if this is inside an argument
9197 of a function, we don't get the stack adjustment
9198 after some other args have already been pushed. */
9199 do_pending_stack_adjust ();
9200 return;
9201
9202 case BLOCK:
9203 case RTL_EXPR:
9204 case WITH_CLEANUP_EXPR:
9205 case CLEANUP_POINT_EXPR:
9206 case TRY_CATCH_EXPR:
9207 return;
9208
9209 case SAVE_EXPR:
9210 if (SAVE_EXPR_RTL (exp) != 0)
9211 return;
9212
9213 default:
9214 break;
9215 }
9216
9217 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
9218 for (i = 0; i < nops; i++)
9219 if (TREE_OPERAND (exp, i) != 0)
9220 {
9221 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9222 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9223 It doesn't happen before the call is made. */
9224 ;
9225 else
9226 {
9227 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9228 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9229 preexpand_calls (TREE_OPERAND (exp, i));
9230 }
9231 }
9232 }
9233 \f
9234 /* At the start of a function, record that we have no previously-pushed
9235 arguments waiting to be popped. */
9236
9237 void
9238 init_pending_stack_adjust ()
9239 {
9240 pending_stack_adjust = 0;
9241 }
9242
9243 /* When exiting from function, if safe, clear out any pending stack adjust
9244 so the adjustment won't get done.
9245
9246 Note, if the current function calls alloca, then it must have a
9247 frame pointer regardless of the value of flag_omit_frame_pointer. */
9248
9249 void
9250 clear_pending_stack_adjust ()
9251 {
9252 #ifdef EXIT_IGNORE_STACK
9253 if (optimize > 0
9254 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9255 && EXIT_IGNORE_STACK
9256 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9257 && ! flag_inline_functions)
9258 {
9259 stack_pointer_delta -= pending_stack_adjust;
9260 pending_stack_adjust = 0;
9261 }
9262 #endif
9263 }
9264
9265 /* Pop any previously-pushed arguments that have not been popped yet. */
9266
9267 void
9268 do_pending_stack_adjust ()
9269 {
9270 if (inhibit_defer_pop == 0)
9271 {
9272 if (pending_stack_adjust != 0)
9273 adjust_stack (GEN_INT (pending_stack_adjust));
9274 pending_stack_adjust = 0;
9275 }
9276 }
9277 \f
9278 /* Expand conditional expressions. */
9279
9280 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9281 LABEL is an rtx of code CODE_LABEL, in this function and all the
9282 functions here. */
9283
9284 void
9285 jumpifnot (exp, label)
9286 tree exp;
9287 rtx label;
9288 {
9289 do_jump (exp, label, NULL_RTX);
9290 }
9291
9292 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9293
9294 void
9295 jumpif (exp, label)
9296 tree exp;
9297 rtx label;
9298 {
9299 do_jump (exp, NULL_RTX, label);
9300 }
9301
9302 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9303 the result is zero, or IF_TRUE_LABEL if the result is one.
9304 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9305 meaning fall through in that case.
9306
9307 do_jump always does any pending stack adjust except when it does not
9308 actually perform a jump. An example where there is no jump
9309 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9310
9311 This function is responsible for optimizing cases such as
9312 &&, || and comparison operators in EXP. */
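/* A small example of the short-circuit handling below (illustrative only):
   for `if (a && b)', the TRUTH_ANDIF_EXPR case jumps to IF_FALSE_LABEL as
   soon as `a' evaluates to zero, so `b' is never evaluated; only when `a'
   is nonzero does control fall through to the test of `b'.  */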
9313
9314 void
9315 do_jump (exp, if_false_label, if_true_label)
9316 tree exp;
9317 rtx if_false_label, if_true_label;
9318 {
9319 register enum tree_code code = TREE_CODE (exp);
9320 /* Some cases need to create a label to jump to
9321 in order to properly fall through.
9322 These cases set DROP_THROUGH_LABEL nonzero. */
9323 rtx drop_through_label = 0;
9324 rtx temp;
9325 int i;
9326 tree type;
9327 enum machine_mode mode;
9328
9329 #ifdef MAX_INTEGER_COMPUTATION_MODE
9330 check_max_integer_computation_mode (exp);
9331 #endif
9332
9333 emit_queue ();
9334
9335 switch (code)
9336 {
9337 case ERROR_MARK:
9338 break;
9339
9340 case INTEGER_CST:
9341 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9342 if (temp)
9343 emit_jump (temp);
9344 break;
9345
9346 #if 0
9347 /* This is not true with #pragma weak */
9348 case ADDR_EXPR:
9349 /* The address of something can never be zero. */
9350 if (if_true_label)
9351 emit_jump (if_true_label);
9352 break;
9353 #endif
9354
9355 case NOP_EXPR:
9356 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9357 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9358 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9359 goto normal;
9360 case CONVERT_EXPR:
9361 /* If we are narrowing the operand, we have to do the compare in the
9362 narrower mode. */
9363 if ((TYPE_PRECISION (TREE_TYPE (exp))
9364 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9365 goto normal;
9366 case NON_LVALUE_EXPR:
9367 case REFERENCE_EXPR:
9368 case ABS_EXPR:
9369 case NEGATE_EXPR:
9370 case LROTATE_EXPR:
9371 case RROTATE_EXPR:
9372 /* These cannot change zero->non-zero or vice versa. */
9373 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9374 break;
9375
9376 case WITH_RECORD_EXPR:
9377 /* Put the object on the placeholder list, recurse through our first
9378 operand, and pop the list. */
9379 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9380 placeholder_list);
9381 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9382 placeholder_list = TREE_CHAIN (placeholder_list);
9383 break;
9384
9385 #if 0
9386 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9387 a test and can be longer if the test is eliminated. */
9388 case PLUS_EXPR:
9389 /* Reduce to minus. */
9390 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9391 TREE_OPERAND (exp, 0),
9392 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9393 TREE_OPERAND (exp, 1))));
9394 /* Process as MINUS. */
9395 #endif
9396
9397 case MINUS_EXPR:
9398 /* Non-zero iff operands of minus differ. */
9399 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9400 TREE_OPERAND (exp, 0),
9401 TREE_OPERAND (exp, 1)),
9402 NE, NE, if_false_label, if_true_label);
9403 break;
9404
9405 case BIT_AND_EXPR:
9406 /* If we are AND'ing with a small constant, do this comparison in the
9407 smallest type that fits. If the machine doesn't have comparisons
9408 that small, it will be converted back to the wider comparison.
9409 This helps if we are testing the sign bit of a narrower object.
9410 combine can't do this for us because it can't know whether a
9411 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
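/* E.g. (illustrative only): for `if (x & 0x80)' with `x' an int, the mask
   fits in 8 bits, so the test can be done as a QImode comparison on
   machines that have one, rather than comparing the full SImode value.  */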
9412
9413 if (! SLOW_BYTE_ACCESS
9414 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9415 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9416 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9417 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9418 && (type = type_for_mode (mode, 1)) != 0
9419 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9420 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9421 != CODE_FOR_nothing))
9422 {
9423 do_jump (convert (type, exp), if_false_label, if_true_label);
9424 break;
9425 }
9426 goto normal;
9427
9428 case TRUTH_NOT_EXPR:
9429 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9430 break;
9431
9432 case TRUTH_ANDIF_EXPR:
9433 if (if_false_label == 0)
9434 if_false_label = drop_through_label = gen_label_rtx ();
9435 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9436 start_cleanup_deferral ();
9437 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9438 end_cleanup_deferral ();
9439 break;
9440
9441 case TRUTH_ORIF_EXPR:
9442 if (if_true_label == 0)
9443 if_true_label = drop_through_label = gen_label_rtx ();
9444 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9445 start_cleanup_deferral ();
9446 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9447 end_cleanup_deferral ();
9448 break;
9449
9450 case COMPOUND_EXPR:
9451 push_temp_slots ();
9452 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9453 preserve_temp_slots (NULL_RTX);
9454 free_temp_slots ();
9455 pop_temp_slots ();
9456 emit_queue ();
9457 do_pending_stack_adjust ();
9458 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9459 break;
9460
9461 case COMPONENT_REF:
9462 case BIT_FIELD_REF:
9463 case ARRAY_REF:
9464 {
9465 HOST_WIDE_INT bitsize, bitpos;
9466 int unsignedp;
9467 enum machine_mode mode;
9468 tree type;
9469 tree offset;
9470 int volatilep = 0;
9471 unsigned int alignment;
9472
9473 /* Get description of this reference. We don't actually care
9474 about the underlying object here. */
9475 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9476 &unsignedp, &volatilep, &alignment);
9477
9478 type = type_for_size (bitsize, unsignedp);
9479 if (! SLOW_BYTE_ACCESS
9480 && type != 0 && bitsize >= 0
9481 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9482 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9483 != CODE_FOR_nothing))
9484 {
9485 do_jump (convert (type, exp), if_false_label, if_true_label);
9486 break;
9487 }
9488 goto normal;
9489 }
9490
9491 case COND_EXPR:
9492 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9493 if (integer_onep (TREE_OPERAND (exp, 1))
9494 && integer_zerop (TREE_OPERAND (exp, 2)))
9495 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9496
9497 else if (integer_zerop (TREE_OPERAND (exp, 1))
9498 && integer_onep (TREE_OPERAND (exp, 2)))
9499 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9500
9501 else
9502 {
9503 register rtx label1 = gen_label_rtx ();
9504 drop_through_label = gen_label_rtx ();
9505
9506 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9507
9508 start_cleanup_deferral ();
9509 /* Now the THEN-expression. */
9510 do_jump (TREE_OPERAND (exp, 1),
9511 if_false_label ? if_false_label : drop_through_label,
9512 if_true_label ? if_true_label : drop_through_label);
9513 /* In case the do_jump just above never jumps. */
9514 do_pending_stack_adjust ();
9515 emit_label (label1);
9516
9517 /* Now the ELSE-expression. */
9518 do_jump (TREE_OPERAND (exp, 2),
9519 if_false_label ? if_false_label : drop_through_label,
9520 if_true_label ? if_true_label : drop_through_label);
9521 end_cleanup_deferral ();
9522 }
9523 break;
9524
9525 case EQ_EXPR:
9526 {
9527 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9528
9529 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9530 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9531 {
9532 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9533 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9534 do_jump
9535 (fold
9536 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9537 fold (build (EQ_EXPR, TREE_TYPE (exp),
9538 fold (build1 (REALPART_EXPR,
9539 TREE_TYPE (inner_type),
9540 exp0)),
9541 fold (build1 (REALPART_EXPR,
9542 TREE_TYPE (inner_type),
9543 exp1)))),
9544 fold (build (EQ_EXPR, TREE_TYPE (exp),
9545 fold (build1 (IMAGPART_EXPR,
9546 TREE_TYPE (inner_type),
9547 exp0)),
9548 fold (build1 (IMAGPART_EXPR,
9549 TREE_TYPE (inner_type),
9550 exp1)))))),
9551 if_false_label, if_true_label);
9552 }
9553
9554 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9555 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9556
9557 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9558 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9559 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9560 else
9561 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9562 break;
9563 }
9564
9565 case NE_EXPR:
9566 {
9567 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9568
9569 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9570 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9571 {
9572 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9573 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9574 do_jump
9575 (fold
9576 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9577 fold (build (NE_EXPR, TREE_TYPE (exp),
9578 fold (build1 (REALPART_EXPR,
9579 TREE_TYPE (inner_type),
9580 exp0)),
9581 fold (build1 (REALPART_EXPR,
9582 TREE_TYPE (inner_type),
9583 exp1)))),
9584 fold (build (NE_EXPR, TREE_TYPE (exp),
9585 fold (build1 (IMAGPART_EXPR,
9586 TREE_TYPE (inner_type),
9587 exp0)),
9588 fold (build1 (IMAGPART_EXPR,
9589 TREE_TYPE (inner_type),
9590 exp1)))))),
9591 if_false_label, if_true_label);
9592 }
9593
9594 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9595 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9596
9597 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9598 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9599 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9600 else
9601 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9602 break;
9603 }
9604
9605 case LT_EXPR:
9606 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9607 if (GET_MODE_CLASS (mode) == MODE_INT
9608 && ! can_compare_p (LT, mode, ccp_jump))
9609 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9610 else
9611 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9612 break;
9613
9614 case LE_EXPR:
9615 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9616 if (GET_MODE_CLASS (mode) == MODE_INT
9617 && ! can_compare_p (LE, mode, ccp_jump))
9618 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9619 else
9620 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9621 break;
9622
9623 case GT_EXPR:
9624 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9625 if (GET_MODE_CLASS (mode) == MODE_INT
9626 && ! can_compare_p (GT, mode, ccp_jump))
9627 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9628 else
9629 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9630 break;
9631
9632 case GE_EXPR:
9633 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9634 if (GET_MODE_CLASS (mode) == MODE_INT
9635 && ! can_compare_p (GE, mode, ccp_jump))
9636 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9637 else
9638 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9639 break;
9640
9641 case UNORDERED_EXPR:
9642 case ORDERED_EXPR:
9643 {
9644 enum rtx_code cmp, rcmp;
9645 int do_rev;
9646
9647 if (code == UNORDERED_EXPR)
9648 cmp = UNORDERED, rcmp = ORDERED;
9649 else
9650 cmp = ORDERED, rcmp = UNORDERED;
9651 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9652
9653 do_rev = 0;
9654 if (! can_compare_p (cmp, mode, ccp_jump)
9655 && (can_compare_p (rcmp, mode, ccp_jump)
9656 /* If the target doesn't provide either UNORDERED or ORDERED
9657 comparisons, canonicalize on UNORDERED for the library. */
9658 || rcmp == UNORDERED))
9659 do_rev = 1;
9660
9661 if (! do_rev)
9662 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9663 else
9664 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9665 }
9666 break;
9667
9668 {
9669 enum rtx_code rcode1;
9670 enum tree_code tcode2;
9671
9672 case UNLT_EXPR:
9673 rcode1 = UNLT;
9674 tcode2 = LT_EXPR;
9675 goto unordered_bcc;
9676 case UNLE_EXPR:
9677 rcode1 = UNLE;
9678 tcode2 = LE_EXPR;
9679 goto unordered_bcc;
9680 case UNGT_EXPR:
9681 rcode1 = UNGT;
9682 tcode2 = GT_EXPR;
9683 goto unordered_bcc;
9684 case UNGE_EXPR:
9685 rcode1 = UNGE;
9686 tcode2 = GE_EXPR;
9687 goto unordered_bcc;
9688 case UNEQ_EXPR:
9689 rcode1 = UNEQ;
9690 tcode2 = EQ_EXPR;
9691 goto unordered_bcc;
9692
9693 unordered_bcc:
9694 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9695 if (can_compare_p (rcode1, mode, ccp_jump))
9696 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9697 if_true_label);
9698 else
9699 {
9700 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9701 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9702 tree cmp0, cmp1;
9703
9704 /* If the target doesn't support combined unordered
9705 compares, decompose into UNORDERED + comparison. */
9706 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9707 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9708 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9709 do_jump (exp, if_false_label, if_true_label);
9710 }
9711 }
9712 break;
9713
9714 default:
9715 normal:
9716 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9717 #if 0
9718 /* This is not needed any more and causes poor code since it causes
9719 comparisons and tests from non-SI objects to have different code
9720 sequences. */
9721 /* Copy to register to avoid generating bad insns by cse
9722 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9723 if (!cse_not_expected && GET_CODE (temp) == MEM)
9724 temp = copy_to_reg (temp);
9725 #endif
9726 do_pending_stack_adjust ();
9727 /* Do any postincrements in the expression that was tested. */
9728 emit_queue ();
9729
9730 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9731 {
9732 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9733 if (target)
9734 emit_jump (target);
9735 }
9736 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9737 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9738 /* Note swapping the labels gives us not-equal. */
9739 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9740 else if (GET_MODE (temp) != VOIDmode)
9741 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9742 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9743 GET_MODE (temp), NULL_RTX, 0,
9744 if_false_label, if_true_label);
9745 else
9746 abort ();
9747 }
9748
9749 if (drop_through_label)
9750 {
9751 /* If do_jump produces code that might be jumped around,
9752 do any stack adjusts from that code, before the place
9753 where control merges in. */
9754 do_pending_stack_adjust ();
9755 emit_label (drop_through_label);
9756 }
9757 }
9758 \f
9759 /* Given a comparison expression EXP for values too wide to be compared
9760 with one insn, test the comparison and jump to the appropriate label.
9761 The code of EXP is ignored; we always test GT if SWAP is 0,
9762 and LT if SWAP is 1. */
9763
9764 static void
9765 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9766 tree exp;
9767 int swap;
9768 rtx if_false_label, if_true_label;
9769 {
9770 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9771 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9772 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9773 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9774
9775 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9776 }
9777
9778 /* Compare OP0 with OP1, word at a time, in mode MODE.
9779 UNSIGNEDP says to do unsigned comparison.
9780 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
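/* For illustration: comparing two DImode values on a 32-bit target, the
   loop below first compares the high-order words; if they differ, the
   result is decided there, and only if they are equal does it go on to
   compare the low-order words (always as unsigned, whatever UNSIGNEDP
   says, since only the high-order word carries the sign).  */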
9781
9782 void
9783 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9784 enum machine_mode mode;
9785 int unsignedp;
9786 rtx op0, op1;
9787 rtx if_false_label, if_true_label;
9788 {
9789 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9790 rtx drop_through_label = 0;
9791 int i;
9792
9793 if (! if_true_label || ! if_false_label)
9794 drop_through_label = gen_label_rtx ();
9795 if (! if_true_label)
9796 if_true_label = drop_through_label;
9797 if (! if_false_label)
9798 if_false_label = drop_through_label;
9799
9800 /* Compare a word at a time, high order first. */
9801 for (i = 0; i < nwords; i++)
9802 {
9803 rtx op0_word, op1_word;
9804
9805 if (WORDS_BIG_ENDIAN)
9806 {
9807 op0_word = operand_subword_force (op0, i, mode);
9808 op1_word = operand_subword_force (op1, i, mode);
9809 }
9810 else
9811 {
9812 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9813 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9814 }
9815
9816 /* All but the high-order word must be compared as unsigned. */
9817 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9818 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9819 NULL_RTX, if_true_label);
9820
9821 /* Consider lower words only if these are equal. */
9822 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9823 NULL_RTX, 0, NULL_RTX, if_false_label);
9824 }
9825
9826 if (if_false_label)
9827 emit_jump (if_false_label);
9828 if (drop_through_label)
9829 emit_label (drop_through_label);
9830 }
9831
9832 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9833 with one insn, test the comparison and jump to the appropriate label. */
9834
9835 static void
9836 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9837 tree exp;
9838 rtx if_false_label, if_true_label;
9839 {
9840 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9841 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9842 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9843 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9844 int i;
9845 rtx drop_through_label = 0;
9846
9847 if (! if_false_label)
9848 drop_through_label = if_false_label = gen_label_rtx ();
9849
9850 for (i = 0; i < nwords; i++)
9851 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9852 operand_subword_force (op1, i, mode),
9853 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9854 word_mode, NULL_RTX, 0, if_false_label,
9855 NULL_RTX);
9856
9857 if (if_true_label)
9858 emit_jump (if_true_label);
9859 if (drop_through_label)
9860 emit_label (drop_through_label);
9861 }
9862 \f
9863 /* Jump according to whether OP0 is 0.
9864 We assume that OP0 has an integer mode that is too wide
9865 for the available compare insns. */
9866
9867 void
9868 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9869 rtx op0;
9870 rtx if_false_label, if_true_label;
9871 {
9872 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9873 rtx part;
9874 int i;
9875 rtx drop_through_label = 0;
9876
9877 /* The fastest way of doing this comparison on almost any machine is to
9878 "or" all the words and compare the result. If all have to be loaded
9879 from memory and this is a very wide item, it's possible this may
9880 be slower, but that's highly unlikely. */
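/* E.g. (illustrative only): for a DImode OP0 on a 32-bit machine, the code
   below computes `low_word | high_word' into a scratch register and
   compares that single word against zero, instead of testing each word
   with a separate compare and branch.  */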
9881
9882 part = gen_reg_rtx (word_mode);
9883 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9884 for (i = 1; i < nwords && part != 0; i++)
9885 part = expand_binop (word_mode, ior_optab, part,
9886 operand_subword_force (op0, i, GET_MODE (op0)),
9887 part, 1, OPTAB_WIDEN);
9888
9889 if (part != 0)
9890 {
9891 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9892 NULL_RTX, 0, if_false_label, if_true_label);
9893
9894 return;
9895 }
9896
9897 /* If we couldn't do the "or" simply, do this with a series of compares. */
9898 if (! if_false_label)
9899 drop_through_label = if_false_label = gen_label_rtx ();
9900
9901 for (i = 0; i < nwords; i++)
9902 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9903 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9904 if_false_label, NULL_RTX);
9905
9906 if (if_true_label)
9907 emit_jump (if_true_label);
9908
9909 if (drop_through_label)
9910 emit_label (drop_through_label);
9911 }
9912 \f
9913 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9914 (including code to compute the values to be compared)
9915 and set (CC0) according to the result.
9916 The decision as to signed or unsigned comparison must be made by the caller.
9917
9918 We force a stack adjustment unless there are currently
9919 things pushed on the stack that aren't yet used.
9920
9921 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9922 compared.
9923
9924 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9925 size of MODE should be used. */
9926
9927 rtx
9928 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9929 register rtx op0, op1;
9930 enum rtx_code code;
9931 int unsignedp;
9932 enum machine_mode mode;
9933 rtx size;
9934 unsigned int align;
9935 {
9936 rtx tem;
9937
9938 /* If one operand is constant, make it the second one. Only do this
9939 if the other operand is not constant as well. */
9940
9941 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9942 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9943 {
9944 tem = op0;
9945 op0 = op1;
9946 op1 = tem;
9947 code = swap_condition (code);
9948 }
9949
9950 if (flag_force_mem)
9951 {
9952 op0 = force_not_mem (op0);
9953 op1 = force_not_mem (op1);
9954 }
9955
9956 do_pending_stack_adjust ();
9957
9958 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9959 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9960 return tem;
9961
9962 #if 0
9963 /* There's no need to do this now that combine.c can eliminate lots of
9964 sign extensions. This can be less efficient in certain cases on other
9965 machines. */
9966
9967 /* If this is a signed equality comparison, we can do it as an
9968 unsigned comparison since zero-extension is cheaper than sign
9969 extension and comparisons with zero are done as unsigned. This is
9970 the case even on machines that can do fast sign extension, since
9971 zero-extension is easier to combine with other operations than
9972 sign-extension is. If we are comparing against a constant, we must
9973 convert it to what it would look like unsigned. */
9974 if ((code == EQ || code == NE) && ! unsignedp
9975 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9976 {
9977 if (GET_CODE (op1) == CONST_INT
9978 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9979 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9980 unsignedp = 1;
9981 }
9982 #endif
9983
9984 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9985
9986 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9987 }
9988
9989 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9990 The decision as to signed or unsigned comparison must be made by the caller.
9991
9992 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9993 compared.
9994
9995 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9996 size of MODE should be used. */
9997
9998 void
9999 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10000 if_false_label, if_true_label)
10001 register rtx op0, op1;
10002 enum rtx_code code;
10003 int unsignedp;
10004 enum machine_mode mode;
10005 rtx size;
10006 unsigned int align;
10007 rtx if_false_label, if_true_label;
10008 {
10009 rtx tem;
10010 int dummy_true_label = 0;
10011
10012 /* Reverse the comparison if that is safe and we want to jump if it is
10013 false. */
10014 if (! if_true_label && ! FLOAT_MODE_P (mode))
10015 {
10016 if_true_label = if_false_label;
10017 if_false_label = 0;
10018 code = reverse_condition (code);
10019 }
10020
10021 /* If one operand is constant, make it the second one. Only do this
10022 if the other operand is not constant as well. */
10023
10024 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10025 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10026 {
10027 tem = op0;
10028 op0 = op1;
10029 op1 = tem;
10030 code = swap_condition (code);
10031 }
10032
10033 if (flag_force_mem)
10034 {
10035 op0 = force_not_mem (op0);
10036 op1 = force_not_mem (op1);
10037 }
10038
10039 do_pending_stack_adjust ();
10040
10041 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10042 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10043 {
10044 if (tem == const_true_rtx)
10045 {
10046 if (if_true_label)
10047 emit_jump (if_true_label);
10048 }
10049 else
10050 {
10051 if (if_false_label)
10052 emit_jump (if_false_label);
10053 }
10054 return;
10055 }
10056
10057 #if 0
10058 /* There's no need to do this now that combine.c can eliminate lots of
10059 sign extensions. This can be less efficient in certain cases on other
10060 machines. */
10061
10062 /* If this is a signed equality comparison, we can do it as an
10063 unsigned comparison since zero-extension is cheaper than sign
10064 extension and comparisons with zero are done as unsigned. This is
10065 the case even on machines that can do fast sign extension, since
10066 zero-extension is easier to combine with other operations than
10067 sign-extension is. If we are comparing against a constant, we must
10068 convert it to what it would look like unsigned. */
10069 if ((code == EQ || code == NE) && ! unsignedp
10070 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10071 {
10072 if (GET_CODE (op1) == CONST_INT
10073 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10074 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10075 unsignedp = 1;
10076 }
10077 #endif
10078
10079 if (! if_true_label)
10080 {
10081 dummy_true_label = 1;
10082 if_true_label = gen_label_rtx ();
10083 }
10084
10085 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10086 if_true_label);
10087
10088 if (if_false_label)
10089 emit_jump (if_false_label);
10090 if (dummy_true_label)
10091 emit_label (if_true_label);
10092 }
10093
10094 /* Generate code for a comparison expression EXP (including code to compute
10095 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10096 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10097 generated code will drop through.
10098 SIGNED_CODE should be the rtx operation for this comparison for
10099 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10100
10101 We force a stack adjustment unless there are currently
10102 things pushed on the stack that aren't yet used. */
10103
10104 static void
10105 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10106 if_true_label)
10107 register tree exp;
10108 enum rtx_code signed_code, unsigned_code;
10109 rtx if_false_label, if_true_label;
10110 {
10111 unsigned int align0, align1;
10112 register rtx op0, op1;
10113 register tree type;
10114 register enum machine_mode mode;
10115 int unsignedp;
10116 enum rtx_code code;
10117
10118 /* Don't crash if the comparison was erroneous. */
10119 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10120 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10121 return;
10122
10123 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10124 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10125 mode = TYPE_MODE (type);
10126 unsignedp = TREE_UNSIGNED (type);
10127 code = unsignedp ? unsigned_code : signed_code;
10128
10129 #ifdef HAVE_canonicalize_funcptr_for_compare
10130 /* If function pointers need to be "canonicalized" before they can
10131 be reliably compared, then canonicalize them. */
10132 if (HAVE_canonicalize_funcptr_for_compare
10133 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10134 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10135 == FUNCTION_TYPE))
10136 {
10137 rtx new_op0 = gen_reg_rtx (mode);
10138
10139 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10140 op0 = new_op0;
10141 }
10142
10143 if (HAVE_canonicalize_funcptr_for_compare
10144 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10145 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10146 == FUNCTION_TYPE))
10147 {
10148 rtx new_op1 = gen_reg_rtx (mode);
10149
10150 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10151 op1 = new_op1;
10152 }
10153 #endif
10154
10155 /* Do any postincrements in the expression that was tested. */
10156 emit_queue ();
10157
10158 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10159 ((mode == BLKmode)
10160 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10161 MIN (align0, align1),
10162 if_false_label, if_true_label);
10163 }
10164 \f
10165 /* Generate code to calculate EXP using a store-flag instruction
10166 and return an rtx for the result. EXP is either a comparison
10167 or a TRUTH_NOT_EXPR whose operand is a comparison.
10168
10169 If TARGET is nonzero, store the result there if convenient.
10170
10171 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10172 cheap.
10173
10174 Return zero if there is no suitable set-flag instruction
10175 available on this machine.
10176
10177 Once expand_expr has been called on the arguments of the comparison,
10178 we are committed to doing the store flag, since it is not safe to
10179 re-evaluate the expression. We emit the store-flag insn by calling
10180 emit_store_flag, but only expand the arguments if we have a reason
10181 to believe that emit_store_flag will be successful. If we think that
10182 it will, but it isn't, we have to simulate the store-flag with a
10183 set/jump/set sequence. */
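/* Illustrative sketch (an addition, not from the original comment) of that
   fallback: to compute `target = (a < b)' without an scc insn we emit
   roughly

       target = 1;              (or 0 when the result must be inverted)
       if (a < b) goto label;
       target = 0;              (or 1 when inverted)
     label:

   i.e. the set/compare/jump/set sequence produced at the end of this
   function.  */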
10184
10185 static rtx
10186 do_store_flag (exp, target, mode, only_cheap)
10187 tree exp;
10188 rtx target;
10189 enum machine_mode mode;
10190 int only_cheap;
10191 {
10192 enum rtx_code code;
10193 tree arg0, arg1, type;
10194 tree tem;
10195 enum machine_mode operand_mode;
10196 int invert = 0;
10197 int unsignedp;
10198 rtx op0, op1;
10199 enum insn_code icode;
10200 rtx subtarget = target;
10201 rtx result, label;
10202
10203 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10204 result at the end. We can't simply invert the test since it would
10205 have already been inverted if it were valid. This case occurs for
10206 some floating-point comparisons. */
10207
10208 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10209 invert = 1, exp = TREE_OPERAND (exp, 0);
10210
10211 arg0 = TREE_OPERAND (exp, 0);
10212 arg1 = TREE_OPERAND (exp, 1);
10213 type = TREE_TYPE (arg0);
10214 operand_mode = TYPE_MODE (type);
10215 unsignedp = TREE_UNSIGNED (type);
10216
10217 /* We won't bother with BLKmode store-flag operations because it would mean
10218 passing a lot of information to emit_store_flag. */
10219 if (operand_mode == BLKmode)
10220 return 0;
10221
10222 /* We won't bother with store-flag operations involving function pointers
10223 when function pointers must be canonicalized before comparisons. */
10224 #ifdef HAVE_canonicalize_funcptr_for_compare
10225 if (HAVE_canonicalize_funcptr_for_compare
10226 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10227 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10228 == FUNCTION_TYPE))
10229 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10230 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10231 == FUNCTION_TYPE))))
10232 return 0;
10233 #endif
10234
10235 STRIP_NOPS (arg0);
10236 STRIP_NOPS (arg1);
10237
10238 /* Get the rtx comparison code to use. We know that EXP is a comparison
10239 operation of some type. Some comparisons against 1 and -1 can be
10240 converted to comparisons with zero. Do so here so that the tests
10241 below will be aware that we have a comparison with zero. These
10242 tests will not catch constants in the first operand, but constants
10243 are rarely passed as the first operand. */
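/* For example (illustrative only): a signed `a <= -1' is rewritten here as
   `a < 0' (code LT with ARG1 replaced by zero), and `a < 1' becomes LE (or
   LEU when unsigned) against zero, so the tests further down see a plain
   comparison with zero.  */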
10244
10245 switch (TREE_CODE (exp))
10246 {
10247 case EQ_EXPR:
10248 code = EQ;
10249 break;
10250 case NE_EXPR:
10251 code = NE;
10252 break;
10253 case LT_EXPR:
10254 if (integer_onep (arg1))
10255 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10256 else
10257 code = unsignedp ? LTU : LT;
10258 break;
10259 case LE_EXPR:
10260 if (! unsignedp && integer_all_onesp (arg1))
10261 arg1 = integer_zero_node, code = LT;
10262 else
10263 code = unsignedp ? LEU : LE;
10264 break;
10265 case GT_EXPR:
10266 if (! unsignedp && integer_all_onesp (arg1))
10267 arg1 = integer_zero_node, code = GE;
10268 else
10269 code = unsignedp ? GTU : GT;
10270 break;
10271 case GE_EXPR:
10272 if (integer_onep (arg1))
10273 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10274 else
10275 code = unsignedp ? GEU : GE;
10276 break;
10277
10278 case UNORDERED_EXPR:
10279 code = UNORDERED;
10280 break;
10281 case ORDERED_EXPR:
10282 code = ORDERED;
10283 break;
10284 case UNLT_EXPR:
10285 code = UNLT;
10286 break;
10287 case UNLE_EXPR:
10288 code = UNLE;
10289 break;
10290 case UNGT_EXPR:
10291 code = UNGT;
10292 break;
10293 case UNGE_EXPR:
10294 code = UNGE;
10295 break;
10296 case UNEQ_EXPR:
10297 code = UNEQ;
10298 break;
10299
10300 default:
10301 abort ();
10302 }
10303
10304 /* Put a constant second. */
10305 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10306 {
10307 tem = arg0; arg0 = arg1; arg1 = tem;
10308 code = swap_condition (code);
10309 }
10310
10311 /* If this is an equality or inequality test of a single bit, we can
10312 do this by shifting the bit being tested to the low-order bit and
10313 masking the result with the constant 1. If the condition was EQ,
10314 we xor it with 1. This does not require an scc insn and is faster
10315 than an scc insn even if we have it. */
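/* E.g. (illustrative only): `(x & 8) != 0' is computed as `(x >> 3) & 1',
   and the EQ form `(x & 8) == 0' xors that result with 1 as well; no scc
   instruction is needed in either case.  */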
10316
10317 if ((code == NE || code == EQ)
10318 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10319 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10320 {
10321 tree inner = TREE_OPERAND (arg0, 0);
10322 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10323 int ops_unsignedp;
10324
10325 /* If INNER is a right shift of a constant and it plus BITNUM does
10326 not overflow, adjust BITNUM and INNER. */
10327
10328 if (TREE_CODE (inner) == RSHIFT_EXPR
10329 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10330 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10331 && bitnum < TYPE_PRECISION (type)
10332 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10333 bitnum - TYPE_PRECISION (type)))
10334 {
10335 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10336 inner = TREE_OPERAND (inner, 0);
10337 }
10338
10339 /* If we are going to be able to omit the AND below, we must do our
10340 operations as unsigned. If we must use the AND, we have a choice.
10341 Normally unsigned is faster, but for some machines signed is. */
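      /* E.g. for a 32-bit type with BITNUM == 31, a logical (unsigned)
	 right shift by 31 already leaves exactly 0 or 1, so the AND can
	 be dropped; an arithmetic shift would leave 0 or -1 instead.  */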
10342 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10343 #ifdef LOAD_EXTEND_OP
10344 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10345 #else
10346 : 1
10347 #endif
10348 );
10349
10350 if (! get_subtarget (subtarget)
10351 || GET_MODE (subtarget) != operand_mode
10352 || ! safe_from_p (subtarget, inner, 1))
10353 subtarget = 0;
10354
10355 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10356
10357 if (bitnum != 0)
10358 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10359 size_int (bitnum), subtarget, ops_unsignedp);
10360
10361 if (GET_MODE (op0) != mode)
10362 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10363
10364 if ((code == EQ && ! invert) || (code == NE && invert))
10365 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10366 ops_unsignedp, OPTAB_LIB_WIDEN);
10367
10368 /* Put the AND last so it can combine with more things. */
10369 if (bitnum != TYPE_PRECISION (type) - 1)
10370 op0 = expand_and (op0, const1_rtx, subtarget);
10371
10372 return op0;
10373 }
10374
10375 /* Now see if we are likely to be able to do this. Return if not. */
10376 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10377 return 0;
10378
10379 icode = setcc_gen_code[(int) code];
10380 if (icode == CODE_FOR_nothing
10381 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10382 {
10383 /* We can only do this if it is one of the special cases that
10384 can be handled without an scc insn. */
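	  /* E.g. "x < 0" is just the sign bit of X, and "x != 0" can
	     typically be computed without branches when the machine has
	     an abs or ffs instruction, so emit_store_flag can still win
	     here even without a store-flag pattern for CODE.  */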
10385 if ((code == LT && integer_zerop (arg1))
10386 || (! only_cheap && code == GE && integer_zerop (arg1)))
10387 ;
10388 else if (BRANCH_COST >= 0
10389 && ! only_cheap && (code == NE || code == EQ)
10390 && TREE_CODE (type) != REAL_TYPE
10391 && ((abs_optab->handlers[(int) operand_mode].insn_code
10392 != CODE_FOR_nothing)
10393 || (ffs_optab->handlers[(int) operand_mode].insn_code
10394 != CODE_FOR_nothing)))
10395 ;
10396 else
10397 return 0;
10398 }
10399
10400 preexpand_calls (exp);
10401 if (! get_subtarget (target)
10402 || GET_MODE (subtarget) != operand_mode
10403 || ! safe_from_p (subtarget, arg1, 1))
10404 subtarget = 0;
10405
10406 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10407 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10408
10409 if (target == 0)
10410 target = gen_reg_rtx (mode);
10411
10412 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10413      because, if emit_store_flag does anything, it will succeed and
10414 OP0 and OP1 will not be used subsequently. */
10415
10416 result = emit_store_flag (target, code,
10417 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10418 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10419 operand_mode, unsignedp, 1);
10420
10421 if (result)
10422 {
10423 if (invert)
10424 result = expand_binop (mode, xor_optab, result, const1_rtx,
10425 result, 0, OPTAB_LIB_WIDEN);
10426 return result;
10427 }
10428
10429 /* If this failed, we have to do this with set/compare/jump/set code. */
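  /* That is, roughly:  TARGET = 1; if (OP0 <code> OP1) goto L; TARGET = 0; L:
     (with the two constants exchanged when INVERT is set).  */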
10430 if (GET_CODE (target) != REG
10431 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10432 target = gen_reg_rtx (GET_MODE (target));
10433
10434 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10435 result = compare_from_rtx (op0, op1, code, unsignedp,
10436 operand_mode, NULL_RTX, 0);
10437 if (GET_CODE (result) == CONST_INT)
10438 return (((result == const0_rtx && ! invert)
10439 || (result != const0_rtx && invert))
10440 ? const0_rtx : const1_rtx);
10441
10442 label = gen_label_rtx ();
10443 if (bcc_gen_fctn[(int) code] == 0)
10444 abort ();
10445
10446 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10447 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10448 emit_label (label);
10449
10450 return target;
10451 }
10452 \f
10453 /* Generate a tablejump instruction (used for switch statements). */
10454
10455 #ifdef HAVE_tablejump
10456
10457 /* INDEX is the value being switched on, with the lowest value
10458 in the table already subtracted.
10459 MODE is its expected mode (needed if INDEX is constant).
10460 RANGE is the length of the jump table.
10461 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10462
10463 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10464 index value is out of range. */
10465
10466 void
10467 do_tablejump (index, mode, range, table_label, default_label)
10468 rtx index, range, table_label, default_label;
10469 enum machine_mode mode;
10470 {
10471 register rtx temp, vector;
10472
10473 /* Do an unsigned comparison (in the proper mode) between the index
10474 expression and the value which represents the length of the range.
10475 Since we just finished subtracting the lower bound of the range
10476 from the index expression, this comparison allows us to simultaneously
10477 check that the original index expression value is both greater than
10478 or equal to the minimum value of the range and less than or equal to
10479 the maximum value of the range. */
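  /* For example, for "switch (i)" with case labels 5 through 10 the
     caller has already subtracted the lowest label, so INDEX is i - 5
     and RANGE is 5; the unsigned comparison "INDEX > 5" then rejects
     both i > 10 and i < 5, since in the latter case the subtraction
     wrapped around to a very large unsigned value.  */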
10480
10481 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10482 0, default_label);
10483
10484 /* If index is in range, it must fit in Pmode.
10485 Convert to Pmode so we can index with it. */
10486 if (mode != Pmode)
10487 index = convert_to_mode (Pmode, index, 1);
10488
10489   /* Don't let a MEM slip through, because then the INDEX that comes
10490 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10491 and break_out_memory_refs will go to work on it and mess it up. */
10492 #ifdef PIC_CASE_VECTOR_ADDRESS
10493 if (flag_pic && GET_CODE (index) != REG)
10494 index = copy_to_mode_reg (Pmode, index);
10495 #endif
10496
10497 /* If flag_force_addr were to affect this address
10498 it could interfere with the tricky assumptions made
10499 about addresses that contain label-refs,
10500 which may be valid only very near the tablejump itself. */
10501 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10502 GET_MODE_SIZE, because this indicates how large insns are. The other
10503 uses should all be Pmode, because they are addresses. This code
10504 could fail if addresses and insns are not the same size. */
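  /* The address formed below is
       TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE),
     i.e. the address of the INDEX'th entry of the dispatch table.  */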
10505 index = gen_rtx_PLUS (Pmode,
10506 gen_rtx_MULT (Pmode, index,
10507 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10508 gen_rtx_LABEL_REF (Pmode, table_label));
10509 #ifdef PIC_CASE_VECTOR_ADDRESS
10510 if (flag_pic)
10511 index = PIC_CASE_VECTOR_ADDRESS (index);
10512 else
10513 #endif
10514 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10515 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10516 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10517 RTX_UNCHANGING_P (vector) = 1;
10518 convert_move (temp, vector, 0);
10519
10520 emit_jump_insn (gen_tablejump (temp, table_label));
10521
10522 /* If we are generating PIC code or if the table is PC-relative, the
10523 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10524 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10525 emit_barrier ();
10526 }
10527
10528 #endif /* HAVE_tablejump */