1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-99, 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "tm_p.h"
45
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
48
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
51
52 #ifdef PUSH_ROUNDING
53
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
56 #endif
57
58 #endif
59
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
63 #else
64 #define STACK_PUSH_CODE PRE_INC
65 #endif
66 #endif
67
68 /* Assume that case vectors are not pc-relative. */
69 #ifndef CASE_VECTOR_PC_RELATIVE
70 #define CASE_VECTOR_PC_RELATIVE 0
71 #endif
72
73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
79 int cse_not_expected;
80
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
85
86 /* Don't check memory usage, since code is being emitted to check memory
87 usage. Used when current_function_check_memory_usage is true, to avoid
88 infinite recursion. */
89 static int in_check_memory_usage;
90
91 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
92 static tree placeholder_list = 0;
93
94 /* This structure is used by move_by_pieces to describe the move to
95 be performed. */
96 struct move_by_pieces
97 {
98 rtx to;
99 rtx to_addr;
100 int autinc_to;
101 int explicit_inc_to;
102 int to_struct;
103 int to_readonly;
104 rtx from;
105 rtx from_addr;
106 int autinc_from;
107 int explicit_inc_from;
108 int from_struct;
109 int from_readonly;
110 int len;
111 int offset;
112 int reverse;
113 };
114
115 /* This structure is used by clear_by_pieces to describe the clear to
116 be performed. */
117
118 struct clear_by_pieces
119 {
120 rtx to;
121 rtx to_addr;
122 int autinc_to;
123 int explicit_inc_to;
124 int to_struct;
125 int len;
126 int offset;
127 int reverse;
128 };
129
130 extern struct obstack permanent_obstack;
131
132 static rtx get_push_address PARAMS ((int));
133
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
136 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
137 struct move_by_pieces *));
138 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
139 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
140 enum machine_mode,
141 struct clear_by_pieces *));
142 static int is_zeros_p PARAMS ((tree));
143 static int mostly_zeros_p PARAMS ((tree));
144 static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode,
145 tree, tree, unsigned int, int));
146 static void store_constructor PARAMS ((tree, rtx, unsigned int, int, int));
147 static rtx store_field PARAMS ((rtx, int, int, enum machine_mode,
148 tree, enum machine_mode, int,
149 unsigned int, int, int));
150 static enum memory_use_mode
151 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
152 static tree save_noncopied_parts PARAMS ((tree, tree));
153 static tree init_noncopied_parts PARAMS ((tree, tree));
154 static int safe_from_p PARAMS ((rtx, tree, int));
155 static int fixed_type_p PARAMS ((tree));
156 static rtx var_rtx PARAMS ((tree));
157 static int readonly_fields_p PARAMS ((tree));
158 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
159 static rtx expand_increment PARAMS ((tree, int, int));
160 static void preexpand_calls PARAMS ((tree));
161 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
162 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
163 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, rtx, rtx));
164 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
165
166 /* Record for each mode whether we can move a register directly to or
167 from an object of that mode in memory. If we can't, we won't try
168 to use that mode directly when accessing a field of that mode. */
169
170 static char direct_load[NUM_MACHINE_MODES];
171 static char direct_store[NUM_MACHINE_MODES];
172
173 /* If a memory-to-memory move would take MOVE_RATIO or more simple
174 move-instruction sequences, we will do a movstr or libcall instead. */
175
176 #ifndef MOVE_RATIO
177 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
178 #define MOVE_RATIO 2
179 #else
180 /* If we are optimizing for space (-Os), cut down the default move ratio */
181 #define MOVE_RATIO (optimize_size ? 3 : 15)
182 #endif
183 #endif
184
185 /* This macro is used to determine whether move_by_pieces should be called
186 to perform a structure copy. */
187 #ifndef MOVE_BY_PIECES_P
188 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
189 (SIZE, ALIGN) < MOVE_RATIO)
190 #endif
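/* For illustration, assuming a hypothetical 32-bit target with 4-byte
   words, MOVE_MAX == 4 and the default MOVE_RATIO of 15 when optimizing
   for speed: a 16-byte copy with 4-byte alignment decomposes into four
   SImode moves, so

       MOVE_BY_PIECES_P (16, 4)
         == (move_by_pieces_ninsns (16, 4) < MOVE_RATIO)
         == (4 < 15)

   is nonzero and the block is copied inline rather than via a movstr
   pattern or a library call.  */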
191
192 /* This array records the insn_code of insns to perform block moves. */
193 enum insn_code movstr_optab[NUM_MACHINE_MODES];
194
195 /* This array records the insn_code of insns to perform block clears. */
196 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
197
198 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
199
200 #ifndef SLOW_UNALIGNED_ACCESS
201 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
202 #endif
203 \f
204 /* This is run once per compilation to set up which modes can be used
205 directly in memory and to initialize the block move optab. */
206
207 void
208 init_expr_once ()
209 {
210 rtx insn, pat;
211 enum machine_mode mode;
212 int num_clobbers;
213 rtx mem, mem1;
214 char *free_point;
215
216 start_sequence ();
217
218 /* Since we are on the permanent obstack, we must be sure we save this
219 spot AFTER we call start_sequence, since it will reuse the rtl it
220 makes. */
221 free_point = (char *) oballoc (0);
222
223 /* Try indexing by frame ptr and try by stack ptr.
224 It is known that on the Convex the stack ptr isn't a valid index.
225 With luck, one or the other is valid on any machine. */
226 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
227 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
228
229 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
230 pat = PATTERN (insn);
231
232 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
233 mode = (enum machine_mode) ((int) mode + 1))
234 {
235 int regno;
236 rtx reg;
237
238 direct_load[(int) mode] = direct_store[(int) mode] = 0;
239 PUT_MODE (mem, mode);
240 PUT_MODE (mem1, mode);
241
242 /* See if there is some register that can be used in this mode and
243 directly loaded or stored from memory. */
244
245 if (mode != VOIDmode && mode != BLKmode)
246 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
247 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
248 regno++)
249 {
250 if (! HARD_REGNO_MODE_OK (regno, mode))
251 continue;
252
253 reg = gen_rtx_REG (mode, regno);
254
255 SET_SRC (pat) = mem;
256 SET_DEST (pat) = reg;
257 if (recog (pat, insn, &num_clobbers) >= 0)
258 direct_load[(int) mode] = 1;
259
260 SET_SRC (pat) = mem1;
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
264
265 SET_SRC (pat) = reg;
266 SET_DEST (pat) = mem;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_store[(int) mode] = 1;
269
270 SET_SRC (pat) = reg;
271 SET_DEST (pat) = mem1;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
274 }
275 }
276
277 end_sequence ();
278 obfree (free_point);
279 }
280
281 /* This is run at the start of compiling a function. */
282
283 void
284 init_expr ()
285 {
286 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
287
288 pending_chain = 0;
289 pending_stack_adjust = 0;
290 inhibit_defer_pop = 0;
291 saveregs_value = 0;
292 apply_args_value = 0;
293 forced_labels = 0;
294 }
295
296 void
297 mark_expr_status (p)
298 struct expr_status *p;
299 {
300 if (p == NULL)
301 return;
302
303 ggc_mark_rtx (p->x_saveregs_value);
304 ggc_mark_rtx (p->x_apply_args_value);
305 ggc_mark_rtx (p->x_forced_labels);
306 }
307
308 void
309 free_expr_status (f)
310 struct function *f;
311 {
312 free (f->expr);
313 f->expr = NULL;
314 }
315
316 /* Small sanity check that the queue is empty at the end of a function. */
317 void
318 finish_expr_for_function ()
319 {
320 if (pending_chain)
321 abort ();
322 }
323 \f
324 /* Manage the queue of increment instructions to be output
325 for POSTINCREMENT_EXPR expressions, etc. */
326
327 /* Queue up to increment (or change) VAR later. BODY says how:
328 BODY should be the same thing you would pass to emit_insn
329 to increment right away. It will go to emit_insn later on.
330
331 The value is a QUEUED expression to be used in place of VAR
332 where you want to guarantee the pre-incrementation value of VAR. */
333
334 static rtx
335 enqueue_insn (var, body)
336 rtx var, body;
337 {
338 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
339 body, pending_chain);
340 return pending_chain;
341 }
342
343 /* Use protect_from_queue to convert a QUEUED expression
344 into something that you can put immediately into an instruction.
345 If the queued incrementation has not happened yet,
346 protect_from_queue returns the variable itself.
347 If the incrementation has happened, protect_from_queue returns a temp
348 that contains a copy of the old value of the variable.
349
350 Any time an rtx which might possibly be a QUEUED is to be put
351 into an instruction, it must be passed through protect_from_queue first.
352 QUEUED expressions are not meaningful in instructions.
353
354 Do not pass a value through protect_from_queue and then hold
355 on to it for a while before putting it in an instruction!
356 If the queue is flushed in between, incorrect code will result. */
357
358 rtx
359 protect_from_queue (x, modify)
360 register rtx x;
361 int modify;
362 {
363 register RTX_CODE code = GET_CODE (x);
364
365 #if 0 /* A QUEUED can hang around after the queue is forced out. */
366 /* Shortcut for most common case. */
367 if (pending_chain == 0)
368 return x;
369 #endif
370
371 if (code != QUEUED)
372 {
373 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
374 use of autoincrement. Make a copy of the contents of the memory
375 location rather than a copy of the address, but not if the value is
376 of mode BLKmode. Don't modify X in place since it might be
377 shared. */
378 if (code == MEM && GET_MODE (x) != BLKmode
379 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
380 {
381 register rtx y = XEXP (x, 0);
382 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
383
384 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
385 MEM_COPY_ATTRIBUTES (new, x);
386 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
387
388 if (QUEUED_INSN (y))
389 {
390 register rtx temp = gen_reg_rtx (GET_MODE (new));
391 emit_insn_before (gen_move_insn (temp, new),
392 QUEUED_INSN (y));
393 return temp;
394 }
395 return new;
396 }
397 /* Otherwise, recursively protect the subexpressions of all
398 the kinds of rtx's that can contain a QUEUED. */
399 if (code == MEM)
400 {
401 rtx tem = protect_from_queue (XEXP (x, 0), 0);
402 if (tem != XEXP (x, 0))
403 {
404 x = copy_rtx (x);
405 XEXP (x, 0) = tem;
406 }
407 }
408 else if (code == PLUS || code == MULT)
409 {
410 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
411 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
412 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
413 {
414 x = copy_rtx (x);
415 XEXP (x, 0) = new0;
416 XEXP (x, 1) = new1;
417 }
418 }
419 return x;
420 }
421 /* If the increment has not happened, use the variable itself. */
422 if (QUEUED_INSN (x) == 0)
423 return QUEUED_VAR (x);
424 /* If the increment has happened and a pre-increment copy exists,
425 use that copy. */
426 if (QUEUED_COPY (x) != 0)
427 return QUEUED_COPY (x);
428 /* The increment has happened but we haven't set up a pre-increment copy.
429 Set one up now, and use it. */
430 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
431 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
432 QUEUED_INSN (x));
433 return QUEUED_COPY (x);
434 }
435
436 /* Return nonzero if X contains a QUEUED expression:
437 if it contains anything that will be altered by a queued increment.
438 We handle only combinations of MEM, PLUS, MINUS and MULT operators
439 since memory addresses generally contain only those. */
440
441 int
442 queued_subexp_p (x)
443 rtx x;
444 {
445 register enum rtx_code code = GET_CODE (x);
446 switch (code)
447 {
448 case QUEUED:
449 return 1;
450 case MEM:
451 return queued_subexp_p (XEXP (x, 0));
452 case MULT:
453 case PLUS:
454 case MINUS:
455 return (queued_subexp_p (XEXP (x, 0))
456 || queued_subexp_p (XEXP (x, 1)));
457 default:
458 return 0;
459 }
460 }
461
462 /* Perform all the pending incrementations. */
463
464 void
465 emit_queue ()
466 {
467 register rtx p;
468 while ((p = pending_chain))
469 {
470 rtx body = QUEUED_BODY (p);
471
472 if (GET_CODE (body) == SEQUENCE)
473 {
474 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
475 emit_insn (QUEUED_BODY (p));
476 }
477 else
478 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
479 pending_chain = QUEUED_NEXT (p);
480 }
481 }
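/* For illustration, the queue above is used roughly like this when
   expanding a post-increment such as "a[i++]" (I_REG and USE are
   hypothetical rtx's):

       enqueue_insn (i_reg, gen_add2_insn (i_reg, const1_rtx));
       ...
       use = protect_from_queue (use, 0);
       ...
       emit_queue ();

   The increment is deferred, anything that might mention the queued
   variable goes through protect_from_queue before landing in an insn,
   and the queue is flushed at the next sequence point.  */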
482 \f
483 /* Copy data from FROM to TO, where the machine modes are not the same.
484 Both modes may be integer, or both may be floating.
485 UNSIGNEDP should be nonzero if FROM is an unsigned type.
486 This causes zero-extension instead of sign-extension. */
487
488 void
489 convert_move (to, from, unsignedp)
490 register rtx to, from;
491 int unsignedp;
492 {
493 enum machine_mode to_mode = GET_MODE (to);
494 enum machine_mode from_mode = GET_MODE (from);
495 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
496 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
497 enum insn_code code;
498 rtx libcall;
499
500 /* rtx code for making an equivalent value. */
501 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
502
503 to = protect_from_queue (to, 1);
504 from = protect_from_queue (from, 0);
505
506 if (to_real != from_real)
507 abort ();
508
509 /* If FROM is a SUBREG that indicates that we have already done at least
510 the required extension, strip it. We don't handle such SUBREGs as
511 TO here. */
512
513 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
514 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
515 >= GET_MODE_SIZE (to_mode))
516 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
517 from = gen_lowpart (to_mode, from), from_mode = to_mode;
518
519 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
520 abort ();
521
522 if (to_mode == from_mode
523 || (from_mode == VOIDmode && CONSTANT_P (from)))
524 {
525 emit_move_insn (to, from);
526 return;
527 }
528
529 if (to_real)
530 {
531 rtx value;
532
533 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
534 {
535 /* Try converting directly if the insn is supported. */
536 if ((code = can_extend_p (to_mode, from_mode, 0))
537 != CODE_FOR_nothing)
538 {
539 emit_unop_insn (code, to, from, UNKNOWN);
540 return;
541 }
542 }
543
544 #ifdef HAVE_trunchfqf2
545 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
546 {
547 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
548 return;
549 }
550 #endif
551 #ifdef HAVE_trunctqfqf2
552 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
553 {
554 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
555 return;
556 }
557 #endif
558 #ifdef HAVE_truncsfqf2
559 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
560 {
561 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
562 return;
563 }
564 #endif
565 #ifdef HAVE_truncdfqf2
566 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
567 {
568 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
569 return;
570 }
571 #endif
572 #ifdef HAVE_truncxfqf2
573 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
574 {
575 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
576 return;
577 }
578 #endif
579 #ifdef HAVE_trunctfqf2
580 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586
587 #ifdef HAVE_trunctqfhf2
588 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
589 {
590 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
591 return;
592 }
593 #endif
594 #ifdef HAVE_truncsfhf2
595 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
596 {
597 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
598 return;
599 }
600 #endif
601 #ifdef HAVE_truncdfhf2
602 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
603 {
604 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
605 return;
606 }
607 #endif
608 #ifdef HAVE_truncxfhf2
609 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
610 {
611 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615 #ifdef HAVE_trunctfhf2
616 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
617 {
618 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622
623 #ifdef HAVE_truncsftqf2
624 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
625 {
626 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
627 return;
628 }
629 #endif
630 #ifdef HAVE_truncdftqf2
631 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
632 {
633 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
634 return;
635 }
636 #endif
637 #ifdef HAVE_truncxftqf2
638 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
639 {
640 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
641 return;
642 }
643 #endif
644 #ifdef HAVE_trunctftqf2
645 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
646 {
647 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651
652 #ifdef HAVE_truncdfsf2
653 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
654 {
655 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
656 return;
657 }
658 #endif
659 #ifdef HAVE_truncxfsf2
660 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
661 {
662 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
663 return;
664 }
665 #endif
666 #ifdef HAVE_trunctfsf2
667 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
668 {
669 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
670 return;
671 }
672 #endif
673 #ifdef HAVE_truncxfdf2
674 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
675 {
676 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_trunctfdf2
681 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
682 {
683 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687
688 libcall = (rtx) 0;
689 switch (from_mode)
690 {
691 case SFmode:
692 switch (to_mode)
693 {
694 case DFmode:
695 libcall = extendsfdf2_libfunc;
696 break;
697
698 case XFmode:
699 libcall = extendsfxf2_libfunc;
700 break;
701
702 case TFmode:
703 libcall = extendsftf2_libfunc;
704 break;
705
706 default:
707 break;
708 }
709 break;
710
711 case DFmode:
712 switch (to_mode)
713 {
714 case SFmode:
715 libcall = truncdfsf2_libfunc;
716 break;
717
718 case XFmode:
719 libcall = extenddfxf2_libfunc;
720 break;
721
722 case TFmode:
723 libcall = extenddftf2_libfunc;
724 break;
725
726 default:
727 break;
728 }
729 break;
730
731 case XFmode:
732 switch (to_mode)
733 {
734 case SFmode:
735 libcall = truncxfsf2_libfunc;
736 break;
737
738 case DFmode:
739 libcall = truncxfdf2_libfunc;
740 break;
741
742 default:
743 break;
744 }
745 break;
746
747 case TFmode:
748 switch (to_mode)
749 {
750 case SFmode:
751 libcall = trunctfsf2_libfunc;
752 break;
753
754 case DFmode:
755 libcall = trunctfdf2_libfunc;
756 break;
757
758 default:
759 break;
760 }
761 break;
762
763 default:
764 break;
765 }
766
767 if (libcall == (rtx) 0)
768 /* This conversion is not implemented yet. */
769 abort ();
770
771 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
772 1, from, from_mode);
773 emit_move_insn (to, value);
774 return;
775 }
776
777 /* Now both modes are integers. */
778
779 /* Handle expanding beyond a word. */
780 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
781 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
782 {
783 rtx insns;
784 rtx lowpart;
785 rtx fill_value;
786 rtx lowfrom;
787 int i;
788 enum machine_mode lowpart_mode;
789 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
790
791 /* Try converting directly if the insn is supported. */
792 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
793 != CODE_FOR_nothing)
794 {
795 /* If FROM is a SUBREG, put it into a register. Do this
796 so that we always generate the same set of insns for
797 better cse'ing; if an intermediate assignment occurred,
798 we won't be doing the operation directly on the SUBREG. */
799 if (optimize > 0 && GET_CODE (from) == SUBREG)
800 from = force_reg (from_mode, from);
801 emit_unop_insn (code, to, from, equiv_code);
802 return;
803 }
804 /* Next, try converting via full word. */
805 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
806 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
807 != CODE_FOR_nothing))
808 {
809 if (GET_CODE (to) == REG)
810 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
811 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
812 emit_unop_insn (code, to,
813 gen_lowpart (word_mode, to), equiv_code);
814 return;
815 }
816
817 /* No special multiword conversion insn; do it by hand. */
818 start_sequence ();
819
820 /* Since we will turn this into a no conflict block, we must ensure
821 that the source does not overlap the target. */
822
823 if (reg_overlap_mentioned_p (to, from))
824 from = force_reg (from_mode, from);
825
826 /* Get a copy of FROM widened to a word, if necessary. */
827 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
828 lowpart_mode = word_mode;
829 else
830 lowpart_mode = from_mode;
831
832 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
833
834 lowpart = gen_lowpart (lowpart_mode, to);
835 emit_move_insn (lowpart, lowfrom);
836
837 /* Compute the value to put in each remaining word. */
838 if (unsignedp)
839 fill_value = const0_rtx;
840 else
841 {
842 #ifdef HAVE_slt
843 if (HAVE_slt
844 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
845 && STORE_FLAG_VALUE == -1)
846 {
847 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
848 lowpart_mode, 0, 0);
849 fill_value = gen_reg_rtx (word_mode);
850 emit_insn (gen_slt (fill_value));
851 }
852 else
853 #endif
854 {
855 fill_value
856 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
857 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
858 NULL_RTX, 0);
859 fill_value = convert_to_mode (word_mode, fill_value, 1);
860 }
861 }
862
863 /* Fill the remaining words. */
864 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
865 {
866 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
867 rtx subword = operand_subword (to, index, 1, to_mode);
868
869 if (subword == 0)
870 abort ();
871
872 if (fill_value != subword)
873 emit_move_insn (subword, fill_value);
874 }
875
876 insns = get_insns ();
877 end_sequence ();
878
879 emit_no_conflict_block (insns, to, from, NULL_RTX,
880 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
881 return;
882 }
883
884 /* Truncating multi-word to a word or less. */
885 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
886 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
887 {
888 if (!((GET_CODE (from) == MEM
889 && ! MEM_VOLATILE_P (from)
890 && direct_load[(int) to_mode]
891 && ! mode_dependent_address_p (XEXP (from, 0)))
892 || GET_CODE (from) == REG
893 || GET_CODE (from) == SUBREG))
894 from = force_reg (from_mode, from);
895 convert_move (to, gen_lowpart (word_mode, from), 0);
896 return;
897 }
898
899 /* Handle pointer conversion */ /* SPEE 900220 */
900 if (to_mode == PQImode)
901 {
902 if (from_mode != QImode)
903 from = convert_to_mode (QImode, from, unsignedp);
904
905 #ifdef HAVE_truncqipqi2
906 if (HAVE_truncqipqi2)
907 {
908 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
909 return;
910 }
911 #endif /* HAVE_truncqipqi2 */
912 abort ();
913 }
914
915 if (from_mode == PQImode)
916 {
917 if (to_mode != QImode)
918 {
919 from = convert_to_mode (QImode, from, unsignedp);
920 from_mode = QImode;
921 }
922 else
923 {
924 #ifdef HAVE_extendpqiqi2
925 if (HAVE_extendpqiqi2)
926 {
927 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
928 return;
929 }
930 #endif /* HAVE_extendpqiqi2 */
931 abort ();
932 }
933 }
934
935 if (to_mode == PSImode)
936 {
937 if (from_mode != SImode)
938 from = convert_to_mode (SImode, from, unsignedp);
939
940 #ifdef HAVE_truncsipsi2
941 if (HAVE_truncsipsi2)
942 {
943 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
944 return;
945 }
946 #endif /* HAVE_truncsipsi2 */
947 abort ();
948 }
949
950 if (from_mode == PSImode)
951 {
952 if (to_mode != SImode)
953 {
954 from = convert_to_mode (SImode, from, unsignedp);
955 from_mode = SImode;
956 }
957 else
958 {
959 #ifdef HAVE_extendpsisi2
960 if (HAVE_extendpsisi2)
961 {
962 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
963 return;
964 }
965 #endif /* HAVE_extendpsisi2 */
966 abort ();
967 }
968 }
969
970 if (to_mode == PDImode)
971 {
972 if (from_mode != DImode)
973 from = convert_to_mode (DImode, from, unsignedp);
974
975 #ifdef HAVE_truncdipdi2
976 if (HAVE_truncdipdi2)
977 {
978 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
979 return;
980 }
981 #endif /* HAVE_truncdipdi2 */
982 abort ();
983 }
984
985 if (from_mode == PDImode)
986 {
987 if (to_mode != DImode)
988 {
989 from = convert_to_mode (DImode, from, unsignedp);
990 from_mode = DImode;
991 }
992 else
993 {
994 #ifdef HAVE_extendpdidi2
995 if (HAVE_extendpdidi2)
996 {
997 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
998 return;
999 }
1000 #endif /* HAVE_extendpdidi2 */
1001 abort ();
1002 }
1003 }
1004
1005 /* Now follow all the conversions between integers
1006 no more than a word long. */
1007
1008 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1009 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1010 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1011 GET_MODE_BITSIZE (from_mode)))
1012 {
1013 if (!((GET_CODE (from) == MEM
1014 && ! MEM_VOLATILE_P (from)
1015 && direct_load[(int) to_mode]
1016 && ! mode_dependent_address_p (XEXP (from, 0)))
1017 || GET_CODE (from) == REG
1018 || GET_CODE (from) == SUBREG))
1019 from = force_reg (from_mode, from);
1020 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1021 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1022 from = copy_to_reg (from);
1023 emit_move_insn (to, gen_lowpart (to_mode, from));
1024 return;
1025 }
1026
1027 /* Handle extension. */
1028 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1029 {
1030 /* Convert directly if that works. */
1031 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1032 != CODE_FOR_nothing)
1033 {
1034 emit_unop_insn (code, to, from, equiv_code);
1035 return;
1036 }
1037 else
1038 {
1039 enum machine_mode intermediate;
1040 rtx tmp;
1041 tree shift_amount;
1042
1043 /* Search for a mode to convert via. */
1044 for (intermediate = from_mode; intermediate != VOIDmode;
1045 intermediate = GET_MODE_WIDER_MODE (intermediate))
1046 if (((can_extend_p (to_mode, intermediate, unsignedp)
1047 != CODE_FOR_nothing)
1048 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1049 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1050 GET_MODE_BITSIZE (intermediate))))
1051 && (can_extend_p (intermediate, from_mode, unsignedp)
1052 != CODE_FOR_nothing))
1053 {
1054 convert_move (to, convert_to_mode (intermediate, from,
1055 unsignedp), unsignedp);
1056 return;
1057 }
1058
1059 /* No suitable intermediate mode.
1060 Generate what we need with shifts. */
1061 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1062 - GET_MODE_BITSIZE (from_mode), 0);
1063 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1064 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1065 to, unsignedp);
1066 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1067 to, unsignedp);
1068 if (tmp != to)
1069 emit_move_insn (to, tmp);
1070 return;
1071 }
1072 }
1073
1074 /* Support special truncate insns for certain modes. */
1075
1076 if (from_mode == DImode && to_mode == SImode)
1077 {
1078 #ifdef HAVE_truncdisi2
1079 if (HAVE_truncdisi2)
1080 {
1081 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1082 return;
1083 }
1084 #endif
1085 convert_move (to, force_reg (from_mode, from), unsignedp);
1086 return;
1087 }
1088
1089 if (from_mode == DImode && to_mode == HImode)
1090 {
1091 #ifdef HAVE_truncdihi2
1092 if (HAVE_truncdihi2)
1093 {
1094 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1095 return;
1096 }
1097 #endif
1098 convert_move (to, force_reg (from_mode, from), unsignedp);
1099 return;
1100 }
1101
1102 if (from_mode == DImode && to_mode == QImode)
1103 {
1104 #ifdef HAVE_truncdiqi2
1105 if (HAVE_truncdiqi2)
1106 {
1107 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1108 return;
1109 }
1110 #endif
1111 convert_move (to, force_reg (from_mode, from), unsignedp);
1112 return;
1113 }
1114
1115 if (from_mode == SImode && to_mode == HImode)
1116 {
1117 #ifdef HAVE_truncsihi2
1118 if (HAVE_truncsihi2)
1119 {
1120 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1121 return;
1122 }
1123 #endif
1124 convert_move (to, force_reg (from_mode, from), unsignedp);
1125 return;
1126 }
1127
1128 if (from_mode == SImode && to_mode == QImode)
1129 {
1130 #ifdef HAVE_truncsiqi2
1131 if (HAVE_truncsiqi2)
1132 {
1133 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1134 return;
1135 }
1136 #endif
1137 convert_move (to, force_reg (from_mode, from), unsignedp);
1138 return;
1139 }
1140
1141 if (from_mode == HImode && to_mode == QImode)
1142 {
1143 #ifdef HAVE_trunchiqi2
1144 if (HAVE_trunchiqi2)
1145 {
1146 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1147 return;
1148 }
1149 #endif
1150 convert_move (to, force_reg (from_mode, from), unsignedp);
1151 return;
1152 }
1153
1154 if (from_mode == TImode && to_mode == DImode)
1155 {
1156 #ifdef HAVE_trunctidi2
1157 if (HAVE_trunctidi2)
1158 {
1159 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1160 return;
1161 }
1162 #endif
1163 convert_move (to, force_reg (from_mode, from), unsignedp);
1164 return;
1165 }
1166
1167 if (from_mode == TImode && to_mode == SImode)
1168 {
1169 #ifdef HAVE_trunctisi2
1170 if (HAVE_trunctisi2)
1171 {
1172 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1173 return;
1174 }
1175 #endif
1176 convert_move (to, force_reg (from_mode, from), unsignedp);
1177 return;
1178 }
1179
1180 if (from_mode == TImode && to_mode == HImode)
1181 {
1182 #ifdef HAVE_trunctihi2
1183 if (HAVE_trunctihi2)
1184 {
1185 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1186 return;
1187 }
1188 #endif
1189 convert_move (to, force_reg (from_mode, from), unsignedp);
1190 return;
1191 }
1192
1193 if (from_mode == TImode && to_mode == QImode)
1194 {
1195 #ifdef HAVE_trunctiqi2
1196 if (HAVE_trunctiqi2)
1197 {
1198 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1199 return;
1200 }
1201 #endif
1202 convert_move (to, force_reg (from_mode, from), unsignedp);
1203 return;
1204 }
1205
1206 /* Handle truncation of volatile memrefs, and so on;
1207 the things that couldn't be truncated directly,
1208 and for which there was no special instruction. */
1209 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1210 {
1211 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1212 emit_move_insn (to, temp);
1213 return;
1214 }
1215
1216 /* Mode combination is not recognized. */
1217 abort ();
1218 }
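/* For illustration, a typical call (QI_REG and SI_REG are hypothetical
   pseudos):

       convert_move (si_reg, qi_reg, 0);

   sign extends the QImode value into the SImode register, using an
   extendqisi2 pattern when the target provides one and the intermediate
   mode or shift fallbacks above otherwise.  */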
1219
1220 /* Return an rtx for a value that would result
1221 from converting X to mode MODE.
1222 Both X and MODE may be floating, or both integer.
1223 UNSIGNEDP is nonzero if X is an unsigned value.
1224 This can be done by referring to a part of X in place
1225 or by copying to a new temporary with conversion.
1226
1227 This function *must not* call protect_from_queue
1228 except when putting X into an insn (in which case convert_move does it). */
1229
1230 rtx
1231 convert_to_mode (mode, x, unsignedp)
1232 enum machine_mode mode;
1233 rtx x;
1234 int unsignedp;
1235 {
1236 return convert_modes (mode, VOIDmode, x, unsignedp);
1237 }
1238
1239 /* Return an rtx for a value that would result
1240 from converting X from mode OLDMODE to mode MODE.
1241 Both modes may be floating, or both integer.
1242 UNSIGNEDP is nonzero if X is an unsigned value.
1243
1244 This can be done by referring to a part of X in place
1245 or by copying to a new temporary with conversion.
1246
1247 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1248
1249 This function *must not* call protect_from_queue
1250 except when putting X into an insn (in which case convert_move does it). */
1251
1252 rtx
1253 convert_modes (mode, oldmode, x, unsignedp)
1254 enum machine_mode mode, oldmode;
1255 rtx x;
1256 int unsignedp;
1257 {
1258 register rtx temp;
1259
1260 /* If FROM is a SUBREG that indicates that we have already done at least
1261 the required extension, strip it. */
1262
1263 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1264 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1265 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1266 x = gen_lowpart (mode, x);
1267
1268 if (GET_MODE (x) != VOIDmode)
1269 oldmode = GET_MODE (x);
1270
1271 if (mode == oldmode)
1272 return x;
1273
1274 /* There is one case that we must handle specially: If we are converting
1275 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1276 we are to interpret the constant as unsigned, gen_lowpart will do
1277 the wrong thing if the constant appears negative. What we want to do is
1278 make the high-order word of the constant zero, not all ones. */
1279
1280 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1281 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1282 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1283 {
1284 HOST_WIDE_INT val = INTVAL (x);
1285
1286 if (oldmode != VOIDmode
1287 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1288 {
1289 int width = GET_MODE_BITSIZE (oldmode);
1290
1291 /* We need to zero extend VAL. */
1292 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1293 }
1294
1295 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1296 }
1297
1298 /* We can do this with a gen_lowpart if both desired and current modes
1299 are integer, and this is either a constant integer, a register, or a
1300 non-volatile MEM. Except for the constant case where MODE is no
1301 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1302
1303 if ((GET_CODE (x) == CONST_INT
1304 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1305 || (GET_MODE_CLASS (mode) == MODE_INT
1306 && GET_MODE_CLASS (oldmode) == MODE_INT
1307 && (GET_CODE (x) == CONST_DOUBLE
1308 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1309 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1310 && direct_load[(int) mode])
1311 || (GET_CODE (x) == REG
1312 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1313 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1314 {
1315 /* ?? If we don't know OLDMODE, we have to assume here that
1316 X does not need sign- or zero-extension. This may not be
1317 the case, but it's the best we can do. */
1318 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1319 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1320 {
1321 HOST_WIDE_INT val = INTVAL (x);
1322 int width = GET_MODE_BITSIZE (oldmode);
1323
1324 /* We must sign or zero-extend in this case. Start by
1325 zero-extending, then sign extend if we need to. */
1326 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1327 if (! unsignedp
1328 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1329 val |= (HOST_WIDE_INT) (-1) << width;
1330
1331 return GEN_INT (val);
1332 }
1333
1334 return gen_lowpart (mode, x);
1335 }
1336
1337 temp = gen_reg_rtx (mode);
1338 convert_move (temp, x, unsignedp);
1339 return temp;
1340 }
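/* For illustration: unlike convert_move, these two functions return a
   value and may simply reuse a lowpart of X.  Given a hypothetical
   SImode pseudo SI_REG,

       rtx wide   = convert_to_mode (DImode, si_reg, 1);
       rtx narrow = convert_modes (HImode, SImode, si_reg, 0);

   yield a zero-extended DImode value and, typically, an HImode lowpart
   of SI_REG.  */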
1341 \f
1342
1343 /* This macro determines the largest unit size that
1344 move_by_pieces can use. */
1345
1346 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1347 move efficiently, as opposed to MOVE_MAX which is the maximum
1348 number of bytes we can move with a single instruction. */
1349
1350 #ifndef MOVE_MAX_PIECES
1351 #define MOVE_MAX_PIECES MOVE_MAX
1352 #endif
1353
1354 /* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is the maximum alignment we can assume. */
1359
1360 void
1361 move_by_pieces (to, from, len, align)
1362 rtx to, from;
1363 int len;
1364 unsigned int align;
1365 {
1366 struct move_by_pieces data;
1367 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1368 int max_size = MOVE_MAX_PIECES + 1;
1369 enum machine_mode mode = VOIDmode, tmode;
1370 enum insn_code icode;
1371
1372 data.offset = 0;
1373 data.to_addr = to_addr;
1374 data.from_addr = from_addr;
1375 data.to = to;
1376 data.from = from;
1377 data.autinc_to
1378 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1379 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1380 data.autinc_from
1381 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1382 || GET_CODE (from_addr) == POST_INC
1383 || GET_CODE (from_addr) == POST_DEC);
1384
1385 data.explicit_inc_from = 0;
1386 data.explicit_inc_to = 0;
1387 data.reverse
1388 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1389 if (data.reverse) data.offset = len;
1390 data.len = len;
1391
1392 data.to_struct = MEM_IN_STRUCT_P (to);
1393 data.from_struct = MEM_IN_STRUCT_P (from);
1394 data.to_readonly = RTX_UNCHANGING_P (to);
1395 data.from_readonly = RTX_UNCHANGING_P (from);
1396
1397 /* If copying requires more than two move insns,
1398 copy addresses to registers (to make displacements shorter)
1399 and use post-increment if available. */
1400 if (!(data.autinc_from && data.autinc_to)
1401 && move_by_pieces_ninsns (len, align) > 2)
1402 {
1403 /* Find the mode of the largest move... */
1404 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1405 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1406 if (GET_MODE_SIZE (tmode) < max_size)
1407 mode = tmode;
1408
1409 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1410 {
1411 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1412 data.autinc_from = 1;
1413 data.explicit_inc_from = -1;
1414 }
1415 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1416 {
1417 data.from_addr = copy_addr_to_reg (from_addr);
1418 data.autinc_from = 1;
1419 data.explicit_inc_from = 1;
1420 }
1421 if (!data.autinc_from && CONSTANT_P (from_addr))
1422 data.from_addr = copy_addr_to_reg (from_addr);
1423 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1424 {
1425 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1426 data.autinc_to = 1;
1427 data.explicit_inc_to = -1;
1428 }
1429 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1430 {
1431 data.to_addr = copy_addr_to_reg (to_addr);
1432 data.autinc_to = 1;
1433 data.explicit_inc_to = 1;
1434 }
1435 if (!data.autinc_to && CONSTANT_P (to_addr))
1436 data.to_addr = copy_addr_to_reg (to_addr);
1437 }
1438
1439 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1440 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1441 align = MOVE_MAX;
1442
1443 /* First move what we can in the largest integer mode, then go to
1444 successively smaller modes. */
1445
1446 while (max_size > 1)
1447 {
1448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1450 if (GET_MODE_SIZE (tmode) < max_size)
1451 mode = tmode;
1452
1453 if (mode == VOIDmode)
1454 break;
1455
1456 icode = mov_optab->handlers[(int) mode].insn_code;
1457 if (icode != CODE_FOR_nothing
1458 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1459 (unsigned int) GET_MODE_SIZE (mode)))
1460 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1461
1462 max_size = GET_MODE_SIZE (mode);
1463 }
1464
1465 /* The code above should have handled everything. */
1466 if (data.len > 0)
1467 abort ();
1468 }
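/* Worked example, assuming a hypothetical 32-bit target where
   MOVE_MAX_PIECES == 4: a call such as

       move_by_pieces (to_mem, from_mem, 11, 4);

   moves the widest pieces first, typically emitting two SImode moves,
   one HImode move and one QImode move (8 + 2 + 1 = 11 bytes).  TO_MEM
   and FROM_MEM are hypothetical BLKmode MEMs already passed through
   protect_from_queue, as required above.  */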
1469
1470 /* Return the number of insns required to move L bytes by pieces.
1471 ALIGN (in bytes) is the maximum alignment we can assume. */
1472
1473 static int
1474 move_by_pieces_ninsns (l, align)
1475 unsigned int l;
1476 unsigned int align;
1477 {
1478 register int n_insns = 0;
1479 int max_size = MOVE_MAX + 1;
1480
1481 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1482 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1483 align = MOVE_MAX;
1484
1485 while (max_size > 1)
1486 {
1487 enum machine_mode mode = VOIDmode, tmode;
1488 enum insn_code icode;
1489
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1493 mode = tmode;
1494
1495 if (mode == VOIDmode)
1496 break;
1497
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing
1500 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1501 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1502
1503 max_size = GET_MODE_SIZE (mode);
1504 }
1505
1506 return n_insns;
1507 }
1508
1509 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1510 with move instructions for mode MODE. GENFUN is the gen_... function
1511 to make a move insn for that mode. DATA has all the other info. */
1512
1513 static void
1514 move_by_pieces_1 (genfun, mode, data)
1515 rtx (*genfun) PARAMS ((rtx, ...));
1516 enum machine_mode mode;
1517 struct move_by_pieces *data;
1518 {
1519 register int size = GET_MODE_SIZE (mode);
1520 register rtx to1, from1;
1521
1522 while (data->len >= size)
1523 {
1524 if (data->reverse) data->offset -= size;
1525
1526 to1 = (data->autinc_to
1527 ? gen_rtx_MEM (mode, data->to_addr)
1528 : copy_rtx (change_address (data->to, mode,
1529 plus_constant (data->to_addr,
1530 data->offset))));
1531 MEM_IN_STRUCT_P (to1) = data->to_struct;
1532 RTX_UNCHANGING_P (to1) = data->to_readonly;
1533
1534 from1
1535 = (data->autinc_from
1536 ? gen_rtx_MEM (mode, data->from_addr)
1537 : copy_rtx (change_address (data->from, mode,
1538 plus_constant (data->from_addr,
1539 data->offset))));
1540 MEM_IN_STRUCT_P (from1) = data->from_struct;
1541 RTX_UNCHANGING_P (from1) = data->from_readonly;
1542
1543 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1544 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1545 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1546 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1547
1548 emit_insn ((*genfun) (to1, from1));
1549 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1550 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1551 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1552 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1553
1554 if (! data->reverse) data->offset += size;
1555
1556 data->len -= size;
1557 }
1558 }
1559 \f
1560 /* Emit code to move a block Y to a block X.
1561 This may be done with string-move instructions,
1562 with multiple scalar move instructions, or with a library call.
1563
1564 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1565 with mode BLKmode.
1566 SIZE is an rtx that says how long they are.
1567 ALIGN is the maximum alignment we can assume they have,
1568 measured in bytes.
1569
1570 Return the address of the new block, if memcpy is called and returns it,
1571 0 otherwise. */
1572
1573 rtx
1574 emit_block_move (x, y, size, align)
1575 rtx x, y;
1576 rtx size;
1577 unsigned int align;
1578 {
1579 rtx retval = 0;
1580 #ifdef TARGET_MEM_FUNCTIONS
1581 static tree fn;
1582 tree call_expr, arg_list;
1583 #endif
1584
1585 if (GET_MODE (x) != BLKmode)
1586 abort ();
1587
1588 if (GET_MODE (y) != BLKmode)
1589 abort ();
1590
1591 x = protect_from_queue (x, 1);
1592 y = protect_from_queue (y, 0);
1593 size = protect_from_queue (size, 0);
1594
1595 if (GET_CODE (x) != MEM)
1596 abort ();
1597 if (GET_CODE (y) != MEM)
1598 abort ();
1599 if (size == 0)
1600 abort ();
1601
1602 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1603 move_by_pieces (x, y, INTVAL (size), align);
1604 else
1605 {
1606 /* Try the most limited insn first, because there's no point
1607 including more than one in the machine description unless
1608 the more limited one has some advantage. */
1609
1610 rtx opalign = GEN_INT (align);
1611 enum machine_mode mode;
1612
1613 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1614 mode = GET_MODE_WIDER_MODE (mode))
1615 {
1616 enum insn_code code = movstr_optab[(int) mode];
1617 insn_operand_predicate_fn pred;
1618
1619 if (code != CODE_FOR_nothing
1620 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1621 here because if SIZE is less than the mode mask, as it is
1622 returned by the macro, it will definitely be less than the
1623 actual mode mask. */
1624 && ((GET_CODE (size) == CONST_INT
1625 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1626 <= (GET_MODE_MASK (mode) >> 1)))
1627 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1628 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1629 || (*pred) (x, BLKmode))
1630 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1631 || (*pred) (y, BLKmode))
1632 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1633 || (*pred) (opalign, VOIDmode)))
1634 {
1635 rtx op2;
1636 rtx last = get_last_insn ();
1637 rtx pat;
1638
1639 op2 = convert_to_mode (mode, size, 1);
1640 pred = insn_data[(int) code].operand[2].predicate;
1641 if (pred != 0 && ! (*pred) (op2, mode))
1642 op2 = copy_to_mode_reg (mode, op2);
1643
1644 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1645 if (pat)
1646 {
1647 emit_insn (pat);
1648 return 0;
1649 }
1650 else
1651 delete_insns_since (last);
1652 }
1653 }
1654
1655 /* X, Y, or SIZE may have been passed through protect_from_queue.
1656
1657 It is unsafe to save the value generated by protect_from_queue
1658 and reuse it later. Consider what happens if emit_queue is
1659 called before the return value from protect_from_queue is used.
1660
1661 Expansion of the CALL_EXPR below will call emit_queue before
1662 we are finished emitting RTL for argument setup. So if we are
1663 not careful we could get the wrong value for an argument.
1664
1665 To avoid this problem we go ahead and emit code to copy X, Y &
1666 SIZE into new pseudos. We can then place those new pseudos
1667 into an RTL_EXPR and use them later, even after a call to
1668 emit_queue.
1669
1670 Note this is not strictly needed for library calls since they
1671 do not call emit_queue before loading their arguments. However,
1672 we may need to have library calls call emit_queue in the future
1673 since failing to do so could cause problems for targets which
1674 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1675 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1676 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1677
1678 #ifdef TARGET_MEM_FUNCTIONS
1679 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1680 #else
1681 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1682 TREE_UNSIGNED (integer_type_node));
1683 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1684 #endif
1685
1686 #ifdef TARGET_MEM_FUNCTIONS
1687 /* It is incorrect to use the libcall calling conventions to call
1688 memcpy in this context.
1689
1690 This could be a user call to memcpy and the user may wish to
1691 examine the return value from memcpy.
1692
1693 For targets where libcalls and normal calls have different conventions
1694 for returning pointers, we could end up generating incorrect code.
1695
1696 So instead of using a libcall sequence we build up a suitable
1697 CALL_EXPR and expand the call in the normal fashion. */
1698 if (fn == NULL_TREE)
1699 {
1700 tree fntype;
1701
1702 /* This was copied from except.c, I don't know if all this is
1703 necessary in this context or not. */
1704 fn = get_identifier ("memcpy");
1705 push_obstacks_nochange ();
1706 end_temporary_allocation ();
1707 fntype = build_pointer_type (void_type_node);
1708 fntype = build_function_type (fntype, NULL_TREE);
1709 fn = build_decl (FUNCTION_DECL, fn, fntype);
1710 ggc_add_tree_root (&fn, 1);
1711 DECL_EXTERNAL (fn) = 1;
1712 TREE_PUBLIC (fn) = 1;
1713 DECL_ARTIFICIAL (fn) = 1;
1714 make_decl_rtl (fn, NULL_PTR, 1);
1715 assemble_external (fn);
1716 pop_obstacks ();
1717 }
1718
1719 /* We need to make an argument list for the function call.
1720
1721 memcpy has three arguments: the first two are void * addresses and
1722 the last is a size_t byte count for the copy. */
1723 arg_list
1724 = build_tree_list (NULL_TREE,
1725 make_tree (build_pointer_type (void_type_node), x));
1726 TREE_CHAIN (arg_list)
1727 = build_tree_list (NULL_TREE,
1728 make_tree (build_pointer_type (void_type_node), y));
1729 TREE_CHAIN (TREE_CHAIN (arg_list))
1730 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1731 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1732
1733 /* Now we have to build up the CALL_EXPR itself. */
1734 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1735 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1736 call_expr, arg_list, NULL_TREE);
1737 TREE_SIDE_EFFECTS (call_expr) = 1;
1738
1739 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1740 #else
1741 emit_library_call (bcopy_libfunc, 0,
1742 VOIDmode, 3, y, Pmode, x, Pmode,
1743 convert_to_mode (TYPE_MODE (integer_type_node), size,
1744 TREE_UNSIGNED (integer_type_node)),
1745 TYPE_MODE (integer_type_node));
1746 #endif
1747 }
1748
1749 return retval;
1750 }
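/* For illustration, a hypothetical call copying an 11-byte BLKmode
   object with known 4-byte alignment (DST_MEM and SRC_MEM are BLKmode
   MEMs):

       emit_block_move (dst_mem, src_mem, GEN_INT (11), 4);

   Since SIZE is a CONST_INT small enough for MOVE_BY_PIECES_P, the copy
   is normally done inline by move_by_pieces; otherwise a movstr pattern
   or a call to memcpy/bcopy is emitted as above.  */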
1751 \f
1752 /* Copy all or part of a value X into registers starting at REGNO.
1753 The number of registers to be filled is NREGS. */
1754
1755 void
1756 move_block_to_reg (regno, x, nregs, mode)
1757 int regno;
1758 rtx x;
1759 int nregs;
1760 enum machine_mode mode;
1761 {
1762 int i;
1763 #ifdef HAVE_load_multiple
1764 rtx pat;
1765 rtx last;
1766 #endif
1767
1768 if (nregs == 0)
1769 return;
1770
1771 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1772 x = validize_mem (force_const_mem (mode, x));
1773
1774 /* See if the machine can do this with a load multiple insn. */
1775 #ifdef HAVE_load_multiple
1776 if (HAVE_load_multiple)
1777 {
1778 last = get_last_insn ();
1779 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1780 GEN_INT (nregs));
1781 if (pat)
1782 {
1783 emit_insn (pat);
1784 return;
1785 }
1786 else
1787 delete_insns_since (last);
1788 }
1789 #endif
1790
1791 for (i = 0; i < nregs; i++)
1792 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1793 operand_subword_force (x, i, mode));
1794 }
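/* For illustration, a hypothetical call on a 64-bit-word target loading
   a TImode value X into the two consecutive hard registers starting at
   register number 3:

       move_block_to_reg (3, x, 2, TImode);  */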
1795
1796 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1797 The number of registers to be filled is NREGS. SIZE indicates the number
1798 of bytes in the object X. */
1799
1800
1801 void
1802 move_block_from_reg (regno, x, nregs, size)
1803 int regno;
1804 rtx x;
1805 int nregs;
1806 int size;
1807 {
1808 int i;
1809 #ifdef HAVE_store_multiple
1810 rtx pat;
1811 rtx last;
1812 #endif
1813 enum machine_mode mode;
1814
1815 /* If SIZE is that of a mode no bigger than a word, just use that
1816 mode's store operation. */
1817 if (size <= UNITS_PER_WORD
1818 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1819 {
1820 emit_move_insn (change_address (x, mode, NULL),
1821 gen_rtx_REG (mode, regno));
1822 return;
1823 }
1824
1825 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1826 to the left before storing to memory. Note that the previous test
1827 doesn't handle all cases (e.g. SIZE == 3). */
1828 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1829 {
1830 rtx tem = operand_subword (x, 0, 1, BLKmode);
1831 rtx shift;
1832
1833 if (tem == 0)
1834 abort ();
1835
1836 shift = expand_shift (LSHIFT_EXPR, word_mode,
1837 gen_rtx_REG (word_mode, regno),
1838 build_int_2 ((UNITS_PER_WORD - size)
1839 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1840 emit_move_insn (tem, shift);
1841 return;
1842 }
1843
1844 /* See if the machine can do this with a store multiple insn. */
1845 #ifdef HAVE_store_multiple
1846 if (HAVE_store_multiple)
1847 {
1848 last = get_last_insn ();
1849 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1850 GEN_INT (nregs));
1851 if (pat)
1852 {
1853 emit_insn (pat);
1854 return;
1855 }
1856 else
1857 delete_insns_since (last);
1858 }
1859 #endif
1860
1861 for (i = 0; i < nregs; i++)
1862 {
1863 rtx tem = operand_subword (x, i, 1, BLKmode);
1864
1865 if (tem == 0)
1866 abort ();
1867
1868 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1869 }
1870 }
1871
1872 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1873 registers represented by a PARALLEL. SSIZE represents the total size of
1874 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1875 SRC in bytes. */
1876 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1877 the balance will be in what would be the low-order memory addresses, i.e.
1878 left justified for big endian, right justified for little endian. This
1879 happens to be true for the targets currently using this support. If this
1880 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1881 would be needed. */
1882
1883 void
1884 emit_group_load (dst, orig_src, ssize, align)
1885 rtx dst, orig_src;
1886 unsigned int align;
1887 int ssize;
1888 {
1889 rtx *tmps, src;
1890 int start, i;
1891
1892 if (GET_CODE (dst) != PARALLEL)
1893 abort ();
1894
1895 /* Check for a NULL entry, used to indicate that the parameter goes
1896 both on the stack and in registers. */
1897 if (XEXP (XVECEXP (dst, 0, 0), 0))
1898 start = 0;
1899 else
1900 start = 1;
1901
1902 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1903
1904 /* If we won't be loading directly from memory, protect the real source
1905 from strange tricks we might play. */
1906 src = orig_src;
1907 if (GET_CODE (src) != MEM)
1908 {
1909       if (GET_MODE (src) == VOIDmode)
1910 src = gen_reg_rtx (GET_MODE (dst));
1911 else
1912 src = gen_reg_rtx (GET_MODE (orig_src));
1913 emit_move_insn (src, orig_src);
1914 }
1915
1916 /* Process the pieces. */
1917 for (i = start; i < XVECLEN (dst, 0); i++)
1918 {
1919 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1920 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1921 int bytelen = GET_MODE_SIZE (mode);
1922 int shift = 0;
1923
1924 /* Handle trailing fragments that run over the size of the struct. */
1925 if (ssize >= 0 && bytepos + bytelen > ssize)
1926 {
1927 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1928 bytelen = ssize - bytepos;
1929 if (bytelen <= 0)
1930 abort ();
1931 }
1932
1933 /* Optimize the access just a bit. */
1934 if (GET_CODE (src) == MEM
1935 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1936 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1937 && bytelen == GET_MODE_SIZE (mode))
1938 {
1939 tmps[i] = gen_reg_rtx (mode);
1940 emit_move_insn (tmps[i],
1941 change_address (src, mode,
1942 plus_constant (XEXP (src, 0),
1943 bytepos)));
1944 }
1945 else if (GET_CODE (src) == CONCAT)
1946 {
1947 if (bytepos == 0
1948 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1949 tmps[i] = XEXP (src, 0);
1950 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1951 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1952 tmps[i] = XEXP (src, 1);
1953 else
1954 abort ();
1955 }
1956 else
1957 {
1958 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1959 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1960 mode, mode, align, ssize);
1961 }
1962
1963 if (BYTES_BIG_ENDIAN && shift)
1964 {
1965 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1966 tmps[i], 0, OPTAB_WIDEN);
1967 }
1968 }
1969 emit_queue();
1970
1971 /* Copy the extracted pieces into the proper (probable) hard regs. */
1972 for (i = start; i < XVECLEN (dst, 0); i++)
1973 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1974 }
1975
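/* An illustrative DST for emit_group_load above, assuming a hypothetical
   64-bit target that returns a 12 byte structure in two registers:

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   With SSIZE == 12 the second piece is a trailing fragment: BYTELEN is
   reduced to 4, and on a big-endian target the extracted value is
   shifted left by 32 bits so that the balance ends up in what would be
   the low-order memory addresses, as the ??? comment above assumes.  */
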
1976 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1977 registers represented by a PARALLEL. SSIZE represents the total size of
1978    block DST, or -1 if not known.  ALIGN is the known alignment of DST in bytes.  */
1979
1980 void
1981 emit_group_store (orig_dst, src, ssize, align)
1982 rtx orig_dst, src;
1983 int ssize;
1984 unsigned int align;
1985 {
1986 rtx *tmps, dst;
1987 int start, i;
1988
1989 if (GET_CODE (src) != PARALLEL)
1990 abort ();
1991
1992 /* Check for a NULL entry, used to indicate that the parameter goes
1993 both on the stack and in registers. */
1994 if (XEXP (XVECEXP (src, 0, 0), 0))
1995 start = 0;
1996 else
1997 start = 1;
1998
1999 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2000
2001 /* Copy the (probable) hard regs into pseudos. */
2002 for (i = start; i < XVECLEN (src, 0); i++)
2003 {
2004 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2005 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2006 emit_move_insn (tmps[i], reg);
2007 }
2008 emit_queue();
2009
2010 /* If we won't be storing directly into memory, protect the real destination
2011 from strange tricks we might play. */
2012 dst = orig_dst;
2013 if (GET_CODE (dst) == PARALLEL)
2014 {
2015 rtx temp;
2016
2017 /* We can get a PARALLEL dst if there is a conditional expression in
2018 a return statement. In that case, the dst and src are the same,
2019 so no action is necessary. */
2020 if (rtx_equal_p (dst, src))
2021 return;
2022
2023 /* It is unclear if we can ever reach here, but we may as well handle
2024 it. Allocate a temporary, and split this into a store/load to/from
2025 the temporary. */
2026
2027 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2028 emit_group_store (temp, src, ssize, align);
2029 emit_group_load (dst, temp, ssize, align);
2030 return;
2031 }
2032 else if (GET_CODE (dst) != MEM)
2033 {
2034 dst = gen_reg_rtx (GET_MODE (orig_dst));
2035 /* Make life a bit easier for combine. */
2036 emit_move_insn (dst, const0_rtx);
2037 }
2038 else if (! MEM_IN_STRUCT_P (dst))
2039 {
2040 /* store_bit_field requires that memory operations have
2041 mem_in_struct_p set; we might not. */
2042
2043 dst = copy_rtx (orig_dst);
2044 MEM_SET_IN_STRUCT_P (dst, 1);
2045 }
2046
2047 /* Process the pieces. */
2048 for (i = start; i < XVECLEN (src, 0); i++)
2049 {
2050 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2051 enum machine_mode mode = GET_MODE (tmps[i]);
2052 int bytelen = GET_MODE_SIZE (mode);
2053
2054 /* Handle trailing fragments that run over the size of the struct. */
2055 if (ssize >= 0 && bytepos + bytelen > ssize)
2056 {
2057 if (BYTES_BIG_ENDIAN)
2058 {
2059 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2060 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2061 tmps[i], 0, OPTAB_WIDEN);
2062 }
2063 bytelen = ssize - bytepos;
2064 }
2065
2066 /* Optimize the access just a bit. */
2067 if (GET_CODE (dst) == MEM
2068 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2069 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2070 && bytelen == GET_MODE_SIZE (mode))
2071 emit_move_insn (change_address (dst, mode,
2072 plus_constant (XEXP (dst, 0),
2073 bytepos)),
2074 tmps[i]);
2075 else
2076 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2077 mode, tmps[i], align, ssize);
2078 }
2079
2080 emit_queue();
2081
2082 /* Copy from the pseudo into the (probable) hard reg. */
2083 if (GET_CODE (dst) == REG)
2084 emit_move_insn (orig_dst, dst);
2085 }
2086
2087 /* Generate code to copy a BLKmode object of TYPE out of a
2088 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2089 is null, a stack temporary is created. TGTBLK is returned.
2090
2091 The primary purpose of this routine is to handle functions
2092 that return BLKmode structures in registers. Some machines
2093 (the PA for example) want to return all small structures
2094 in registers regardless of the structure's alignment. */
2095
2096 rtx
2097 copy_blkmode_from_reg (tgtblk,srcreg,type)
2098 rtx tgtblk;
2099 rtx srcreg;
2100 tree type;
2101 {
2102 int bytes = int_size_in_bytes (type);
2103 rtx src = NULL, dst = NULL;
2104 int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2105 int bitpos, xbitpos, big_endian_correction = 0;
2106
2107 if (tgtblk == 0)
2108 {
2109 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2110 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2111 preserve_temp_slots (tgtblk);
2112 }
2113
2114 /* This code assumes srcreg is at least a full word. If it isn't,
2115 copy it into a new pseudo which is a full word. */
2116 if (GET_MODE (srcreg) != BLKmode
2117 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2118 srcreg = convert_to_mode (word_mode, srcreg,
2119 TREE_UNSIGNED (type));
2120
2121 /* Structures whose size is not a multiple of a word are aligned
2122 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2123 machine, this means we must skip the empty high order bytes when
2124 calculating the bit offset. */
2125 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2126 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2127 * BITS_PER_UNIT));
2128
2129   /* Copy the structure BITSIZE bits at a time.
2130
2131 We could probably emit more efficient code for machines
2132 which do not use strict alignment, but it doesn't seem
2133 worth the effort at the current time. */
2134 for (bitpos = 0, xbitpos = big_endian_correction;
2135 bitpos < bytes * BITS_PER_UNIT;
2136 bitpos += bitsize, xbitpos += bitsize)
2137 {
2138
2139 /* We need a new source operand each time xbitpos is on a
2140 word boundary and when xbitpos == big_endian_correction
2141 (the first time through). */
2142 if (xbitpos % BITS_PER_WORD == 0
2143 || xbitpos == big_endian_correction)
2144 src = operand_subword_force (srcreg,
2145 xbitpos / BITS_PER_WORD,
2146 BLKmode);
2147
2148 /* We need a new destination operand each time bitpos is on
2149 a word boundary. */
2150 if (bitpos % BITS_PER_WORD == 0)
2151 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2152
2153 /* Use xbitpos for the source extraction (right justified) and
2154          bitpos for the destination store (left justified).  */
2155 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2156 extract_bit_field (src, bitsize,
2157 xbitpos % BITS_PER_WORD, 1,
2158 NULL_RTX, word_mode,
2159 word_mode,
2160 bitsize / BITS_PER_UNIT,
2161 BITS_PER_WORD),
2162 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2163 }
2164 return tgtblk;
2165 }
2166
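/* A worked example of the correction in copy_blkmode_from_reg above,
   assuming a hypothetical big-endian target with BITS_PER_WORD == 32
   and a 6 byte structure:

     bytes % UNITS_PER_WORD == 2
     big_endian_correction  == 32 - 2 * BITS_PER_UNIT == 16

   so XBITPOS starts at 16, skipping the 16 unused high-order bits of
   the register contents, while BITPOS starts at 0 and the extracted
   pieces are stored left justified at the start of TGTBLK.  */
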
2167
2168 /* Add a USE expression for REG to the (possibly empty) list pointed
2169 to by CALL_FUSAGE. REG must denote a hard register. */
2170
2171 void
2172 use_reg (call_fusage, reg)
2173 rtx *call_fusage, reg;
2174 {
2175 if (GET_CODE (reg) != REG
2176 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2177 abort();
2178
2179 *call_fusage
2180 = gen_rtx_EXPR_LIST (VOIDmode,
2181 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2182 }
2183
2184 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2185 starting at REGNO. All of these registers must be hard registers. */
2186
2187 void
2188 use_regs (call_fusage, regno, nregs)
2189 rtx *call_fusage;
2190 int regno;
2191 int nregs;
2192 {
2193 int i;
2194
2195 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2196 abort ();
2197
2198 for (i = 0; i < nregs; i++)
2199 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2200 }
2201
2202 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2203 PARALLEL REGS. This is for calls that pass values in multiple
2204 non-contiguous locations. The Irix 6 ABI has examples of this. */
2205
2206 void
2207 use_group_regs (call_fusage, regs)
2208 rtx *call_fusage;
2209 rtx regs;
2210 {
2211 int i;
2212
2213 for (i = 0; i < XVECLEN (regs, 0); i++)
2214 {
2215 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2216
2217 /* A NULL entry means the parameter goes both on the stack and in
2218 registers. This can also be a MEM for targets that pass values
2219 partially on the stack and partially in registers. */
2220 if (reg != 0 && GET_CODE (reg) == REG)
2221 use_reg (call_fusage, reg);
2222 }
2223 }
2224 \f
2225 /* Generate several move instructions to clear LEN bytes of block TO.
2226 (A MEM rtx with BLKmode). The caller must pass TO through
2227 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2228 we can assume. */
2229
2230 static void
2231 clear_by_pieces (to, len, align)
2232 rtx to;
2233 int len;
2234 unsigned int align;
2235 {
2236 struct clear_by_pieces data;
2237 rtx to_addr = XEXP (to, 0);
2238 int max_size = MOVE_MAX_PIECES + 1;
2239 enum machine_mode mode = VOIDmode, tmode;
2240 enum insn_code icode;
2241
2242 data.offset = 0;
2243 data.to_addr = to_addr;
2244 data.to = to;
2245 data.autinc_to
2246 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2247 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2248
2249 data.explicit_inc_to = 0;
2250 data.reverse
2251 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2252 if (data.reverse) data.offset = len;
2253 data.len = len;
2254
2255 data.to_struct = MEM_IN_STRUCT_P (to);
2256
2257 /* If copying requires more than two move insns,
2258 copy addresses to registers (to make displacements shorter)
2259 and use post-increment if available. */
2260 if (!data.autinc_to
2261 && move_by_pieces_ninsns (len, align) > 2)
2262 {
2263 /* Determine the main mode we'll be using */
2264 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2265 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2266 if (GET_MODE_SIZE (tmode) < max_size)
2267 mode = tmode;
2268
2269 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2270 {
2271 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2272 data.autinc_to = 1;
2273 data.explicit_inc_to = -1;
2274 }
2275 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2276 {
2277 data.to_addr = copy_addr_to_reg (to_addr);
2278 data.autinc_to = 1;
2279 data.explicit_inc_to = 1;
2280 }
2281 if (!data.autinc_to && CONSTANT_P (to_addr))
2282 data.to_addr = copy_addr_to_reg (to_addr);
2283 }
2284
2285 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2286 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2287 align = MOVE_MAX;
2288
2289 /* First move what we can in the largest integer mode, then go to
2290 successively smaller modes. */
2291
2292 while (max_size > 1)
2293 {
2294 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2295 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2296 if (GET_MODE_SIZE (tmode) < max_size)
2297 mode = tmode;
2298
2299 if (mode == VOIDmode)
2300 break;
2301
2302 icode = mov_optab->handlers[(int) mode].insn_code;
2303 if (icode != CODE_FOR_nothing
2304 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2305 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2306
2307 max_size = GET_MODE_SIZE (mode);
2308 }
2309
2310 /* The code above should have handled everything. */
2311 if (data.len != 0)
2312 abort ();
2313 }
2314
2315 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2316 with move instructions for mode MODE. GENFUN is the gen_... function
2317 to make a move insn for that mode. DATA has all the other info. */
2318
2319 static void
2320 clear_by_pieces_1 (genfun, mode, data)
2321 rtx (*genfun) PARAMS ((rtx, ...));
2322 enum machine_mode mode;
2323 struct clear_by_pieces *data;
2324 {
2325 register int size = GET_MODE_SIZE (mode);
2326 register rtx to1;
2327
2328 while (data->len >= size)
2329 {
2330 if (data->reverse) data->offset -= size;
2331
2332 to1 = (data->autinc_to
2333 ? gen_rtx_MEM (mode, data->to_addr)
2334 : copy_rtx (change_address (data->to, mode,
2335 plus_constant (data->to_addr,
2336 data->offset))));
2337 MEM_IN_STRUCT_P (to1) = data->to_struct;
2338
2339 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2340 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2341
2342 emit_insn ((*genfun) (to1, const0_rtx));
2343 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2344 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2345
2346 if (! data->reverse) data->offset += size;
2347
2348 data->len -= size;
2349 }
2350 }
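
/* An illustrative walk through clear_by_pieces and clear_by_pieces_1
   above, assuming a hypothetical target where MOVE_MAX_PIECES == 8, the
   alignment is sufficient, and QI, HI, SI and DI integer modes all have
   move patterns:

     LEN == 11  =>  one DImode store of zero (8 bytes),
                    then one HImode store (2 bytes),
                    then one QImode store (1 byte).

   Each pass of the outer while loop picks the widest integer mode
   narrower than MAX_SIZE, and clear_by_pieces_1 emits as many stores of
   const0_rtx in that mode as still fit in DATA->len.  */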
2351 \f
2352 /* Write zeros through the storage of OBJECT.
2353 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2354    the maximum alignment we can assume, measured in bytes.
2355
2356 If we call a function that returns the length of the block, return it. */
2357
2358 rtx
2359 clear_storage (object, size, align)
2360 rtx object;
2361 rtx size;
2362 unsigned int align;
2363 {
2364 #ifdef TARGET_MEM_FUNCTIONS
2365 static tree fn;
2366 tree call_expr, arg_list;
2367 #endif
2368 rtx retval = 0;
2369
2370 if (GET_MODE (object) == BLKmode)
2371 {
2372 object = protect_from_queue (object, 1);
2373 size = protect_from_queue (size, 0);
2374
2375 if (GET_CODE (size) == CONST_INT
2376 && MOVE_BY_PIECES_P (INTVAL (size), align))
2377 clear_by_pieces (object, INTVAL (size), align);
2378
2379 else
2380 {
2381 /* Try the most limited insn first, because there's no point
2382 including more than one in the machine description unless
2383 the more limited one has some advantage. */
2384
2385 rtx opalign = GEN_INT (align);
2386 enum machine_mode mode;
2387
2388 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2389 mode = GET_MODE_WIDER_MODE (mode))
2390 {
2391 enum insn_code code = clrstr_optab[(int) mode];
2392 insn_operand_predicate_fn pred;
2393
2394 if (code != CODE_FOR_nothing
2395 /* We don't need MODE to be narrower than
2396 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2397 the mode mask, as it is returned by the macro, it will
2398 definitely be less than the actual mode mask. */
2399 && ((GET_CODE (size) == CONST_INT
2400 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2401 <= (GET_MODE_MASK (mode) >> 1)))
2402 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2403 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2404 || (*pred) (object, BLKmode))
2405 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2406 || (*pred) (opalign, VOIDmode)))
2407 {
2408 rtx op1;
2409 rtx last = get_last_insn ();
2410 rtx pat;
2411
2412 op1 = convert_to_mode (mode, size, 1);
2413 pred = insn_data[(int) code].operand[1].predicate;
2414 if (pred != 0 && ! (*pred) (op1, mode))
2415 op1 = copy_to_mode_reg (mode, op1);
2416
2417 pat = GEN_FCN ((int) code) (object, op1, opalign);
2418 if (pat)
2419 {
2420 emit_insn (pat);
2421 return 0;
2422 }
2423 else
2424 delete_insns_since (last);
2425 }
2426 }
2427
2428 /* OBJECT or SIZE may have been passed through protect_from_queue.
2429
2430 It is unsafe to save the value generated by protect_from_queue
2431 and reuse it later. Consider what happens if emit_queue is
2432 called before the return value from protect_from_queue is used.
2433
2434 Expansion of the CALL_EXPR below will call emit_queue before
2435 we are finished emitting RTL for argument setup. So if we are
2436 not careful we could get the wrong value for an argument.
2437
2438 To avoid this problem we go ahead and emit code to copy OBJECT
2439 and SIZE into new pseudos. We can then place those new pseudos
2440 into an RTL_EXPR and use them later, even after a call to
2441 emit_queue.
2442
2443 Note this is not strictly needed for library calls since they
2444 do not call emit_queue before loading their arguments. However,
2445 we may need to have library calls call emit_queue in the future
2446 since failing to do so could cause problems for targets which
2447 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2448 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2449
2450 #ifdef TARGET_MEM_FUNCTIONS
2451 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2452 #else
2453 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2454 TREE_UNSIGNED (integer_type_node));
2455 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2456 #endif
2457
2458
2459 #ifdef TARGET_MEM_FUNCTIONS
2460 /* It is incorrect to use the libcall calling conventions to call
2461 memset in this context.
2462
2463 This could be a user call to memset and the user may wish to
2464 examine the return value from memset.
2465
2466 For targets where libcalls and normal calls have different
2467 conventions for returning pointers, we could end up generating
2468 incorrect code.
2469
2470 So instead of using a libcall sequence we build up a suitable
2471 CALL_EXPR and expand the call in the normal fashion. */
2472 if (fn == NULL_TREE)
2473 {
2474 tree fntype;
2475
2476 /* This was copied from except.c, I don't know if all this is
2477 necessary in this context or not. */
2478 fn = get_identifier ("memset");
2479 push_obstacks_nochange ();
2480 end_temporary_allocation ();
2481 fntype = build_pointer_type (void_type_node);
2482 fntype = build_function_type (fntype, NULL_TREE);
2483 fn = build_decl (FUNCTION_DECL, fn, fntype);
2484 ggc_add_tree_root (&fn, 1);
2485 DECL_EXTERNAL (fn) = 1;
2486 TREE_PUBLIC (fn) = 1;
2487 DECL_ARTIFICIAL (fn) = 1;
2488 make_decl_rtl (fn, NULL_PTR, 1);
2489 assemble_external (fn);
2490 pop_obstacks ();
2491 }
2492
2493 /* We need to make an argument list for the function call.
2494
2495      memset has three arguments, the first is a void * address, the
2496      second an integer with the initialization value, the last is a
2497      size_t byte count for the copy.  */
2498 arg_list
2499 = build_tree_list (NULL_TREE,
2500 make_tree (build_pointer_type (void_type_node),
2501 object));
2502 TREE_CHAIN (arg_list)
2503 = build_tree_list (NULL_TREE,
2504 make_tree (integer_type_node, const0_rtx));
2505 TREE_CHAIN (TREE_CHAIN (arg_list))
2506 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2507 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2508
2509 /* Now we have to build up the CALL_EXPR itself. */
2510 call_expr = build1 (ADDR_EXPR,
2511 build_pointer_type (TREE_TYPE (fn)), fn);
2512 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2513 call_expr, arg_list, NULL_TREE);
2514 TREE_SIDE_EFFECTS (call_expr) = 1;
2515
2516 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2517 #else
2518 emit_library_call (bzero_libfunc, 0,
2519 VOIDmode, 2, object, Pmode, size,
2520 TYPE_MODE (integer_type_node));
2521 #endif
2522 }
2523 }
2524 else
2525 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2526
2527 return retval;
2528 }
2529
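/* A minimal usage sketch for clear_storage above, assuming OBJECT is a
   BLKmode MEM whose length (64 bytes here) and byte alignment are known
   to the caller.  A constant length accepted by MOVE_BY_PIECES_P is
   cleared inline by clear_by_pieces; otherwise a clrstr pattern is
   tried and, failing that, a call to memset (or bzero) is expanded.  */
#if 0
  clear_storage (object, GEN_INT (64), UNITS_PER_WORD);
#endif
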
2530 /* Generate code to copy Y into X.
2531 Both Y and X must have the same mode, except that
2532 Y can be a constant with VOIDmode.
2533 This mode cannot be BLKmode; use emit_block_move for that.
2534
2535 Return the last instruction emitted. */
2536
2537 rtx
2538 emit_move_insn (x, y)
2539 rtx x, y;
2540 {
2541 enum machine_mode mode = GET_MODE (x);
2542
2543 x = protect_from_queue (x, 1);
2544 y = protect_from_queue (y, 0);
2545
2546 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2547 abort ();
2548
2549 /* Never force constant_p_rtx to memory. */
2550 if (GET_CODE (y) == CONSTANT_P_RTX)
2551 ;
2552 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2553 y = force_const_mem (mode, y);
2554
2555 /* If X or Y are memory references, verify that their addresses are valid
2556 for the machine. */
2557 if (GET_CODE (x) == MEM
2558 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2559 && ! push_operand (x, GET_MODE (x)))
2560 || (flag_force_addr
2561 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2562 x = change_address (x, VOIDmode, XEXP (x, 0));
2563
2564 if (GET_CODE (y) == MEM
2565 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2566 || (flag_force_addr
2567 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2568 y = change_address (y, VOIDmode, XEXP (y, 0));
2569
2570 if (mode == BLKmode)
2571 abort ();
2572
2573 return emit_move_insn_1 (x, y);
2574 }
2575
2576 /* Low level part of emit_move_insn.
2577 Called just like emit_move_insn, but assumes X and Y
2578 are basically valid. */
2579
2580 rtx
2581 emit_move_insn_1 (x, y)
2582 rtx x, y;
2583 {
2584 enum machine_mode mode = GET_MODE (x);
2585 enum machine_mode submode;
2586 enum mode_class class = GET_MODE_CLASS (mode);
2587 int i;
2588
2589 if (mode >= MAX_MACHINE_MODE)
2590 abort ();
2591
2592 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2593 return
2594 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2595
2596 /* Expand complex moves by moving real part and imag part, if possible. */
2597 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2598 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2599 * BITS_PER_UNIT),
2600 (class == MODE_COMPLEX_INT
2601 ? MODE_INT : MODE_FLOAT),
2602 0))
2603 && (mov_optab->handlers[(int) submode].insn_code
2604 != CODE_FOR_nothing))
2605 {
2606 /* Don't split destination if it is a stack push. */
2607 int stack = push_operand (x, GET_MODE (x));
2608
2609       /* If this is a stack push, push the highpart first, so it
2610 will be in the argument order.
2611
2612 In that case, change_address is used only to convert
2613 the mode, not to change the address. */
2614 if (stack)
2615 {
2616 /* Note that the real part always precedes the imag part in memory
2617 regardless of machine's endianness. */
2618 #ifdef STACK_GROWS_DOWNWARD
2619 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2620 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2621 gen_imagpart (submode, y)));
2622 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2623 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2624 gen_realpart (submode, y)));
2625 #else
2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2628 gen_realpart (submode, y)));
2629 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2630 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2631 gen_imagpart (submode, y)));
2632 #endif
2633 }
2634 else
2635 {
2636 rtx realpart_x, realpart_y;
2637 rtx imagpart_x, imagpart_y;
2638
2639 /* If this is a complex value with each part being smaller than a
2640 word, the usual calling sequence will likely pack the pieces into
2641 a single register. Unfortunately, SUBREG of hard registers only
2642 deals in terms of words, so we have a problem converting input
2643 arguments to the CONCAT of two registers that is used elsewhere
2644 for complex values. If this is before reload, we can copy it into
2645 memory and reload. FIXME, we should see about using extract and
2646 insert on integer registers, but complex short and complex char
2647 variables should be rarely used. */
2648 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2649 && (reload_in_progress | reload_completed) == 0)
2650 {
2651 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2652 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2653
2654 if (packed_dest_p || packed_src_p)
2655 {
2656 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2657 ? MODE_FLOAT : MODE_INT);
2658
2659 enum machine_mode reg_mode =
2660 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2661
2662 if (reg_mode != BLKmode)
2663 {
2664 rtx mem = assign_stack_temp (reg_mode,
2665 GET_MODE_SIZE (mode), 0);
2666
2667 rtx cmem = change_address (mem, mode, NULL_RTX);
2668
2669 cfun->cannot_inline = "function uses short complex types";
2670
2671 if (packed_dest_p)
2672 {
2673 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2674 emit_move_insn_1 (cmem, y);
2675 return emit_move_insn_1 (sreg, mem);
2676 }
2677 else
2678 {
2679 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2680 emit_move_insn_1 (mem, sreg);
2681 return emit_move_insn_1 (x, cmem);
2682 }
2683 }
2684 }
2685 }
2686
2687 realpart_x = gen_realpart (submode, x);
2688 realpart_y = gen_realpart (submode, y);
2689 imagpart_x = gen_imagpart (submode, x);
2690 imagpart_y = gen_imagpart (submode, y);
2691
2692 /* Show the output dies here. This is necessary for SUBREGs
2693 of pseudos since we cannot track their lifetimes correctly;
2694 hard regs shouldn't appear here except as return values.
2695 We never want to emit such a clobber after reload. */
2696 if (x != y
2697 && ! (reload_in_progress || reload_completed)
2698 && (GET_CODE (realpart_x) == SUBREG
2699 || GET_CODE (imagpart_x) == SUBREG))
2700 {
2701 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2702 }
2703
2704 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2705 (realpart_x, realpart_y));
2706 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2707 (imagpart_x, imagpart_y));
2708 }
2709
2710 return get_last_insn ();
2711 }
2712
2713 /* This will handle any multi-word mode that lacks a move_insn pattern.
2714 However, you will get better code if you define such patterns,
2715 even if they must turn into multiple assembler instructions. */
2716 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2717 {
2718 rtx last_insn = 0;
2719 rtx seq;
2720 int need_clobber;
2721
2722 #ifdef PUSH_ROUNDING
2723
2724 /* If X is a push on the stack, do the push now and replace
2725 X with a reference to the stack pointer. */
2726 if (push_operand (x, GET_MODE (x)))
2727 {
2728 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2729 x = change_address (x, VOIDmode, stack_pointer_rtx);
2730 }
2731 #endif
2732
2733 start_sequence ();
2734
2735 need_clobber = 0;
2736 for (i = 0;
2737 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2738 i++)
2739 {
2740 rtx xpart = operand_subword (x, i, 1, mode);
2741 rtx ypart = operand_subword (y, i, 1, mode);
2742
2743 /* If we can't get a part of Y, put Y into memory if it is a
2744 constant. Otherwise, force it into a register. If we still
2745 can't get a part of Y, abort. */
2746 if (ypart == 0 && CONSTANT_P (y))
2747 {
2748 y = force_const_mem (mode, y);
2749 ypart = operand_subword (y, i, 1, mode);
2750 }
2751 else if (ypart == 0)
2752 ypart = operand_subword_force (y, i, mode);
2753
2754 if (xpart == 0 || ypart == 0)
2755 abort ();
2756
2757 need_clobber |= (GET_CODE (xpart) == SUBREG);
2758
2759 last_insn = emit_move_insn (xpart, ypart);
2760 }
2761
2762 seq = gen_sequence ();
2763 end_sequence ();
2764
2765 /* Show the output dies here. This is necessary for SUBREGs
2766 of pseudos since we cannot track their lifetimes correctly;
2767 hard regs shouldn't appear here except as return values.
2768 We never want to emit such a clobber after reload. */
2769 if (x != y
2770 && ! (reload_in_progress || reload_completed)
2771 && need_clobber != 0)
2772 {
2773 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2774 }
2775
2776 emit_insn (seq);
2777
2778 return last_insn;
2779 }
2780 else
2781 abort ();
2782 }
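
/* Illustrative notes on the two fallback paths of emit_move_insn_1
   above, assuming a hypothetical 32-bit target:

     - an SCmode move with no movsc pattern becomes two SFmode moves,
       one each for the gen_realpart and gen_imagpart halves;

     - a DImode move with no movdi pattern becomes two word_mode moves
       of the pieces obtained with operand_subword, preceded by a
       CLOBBER of the whole destination when a piece is a SUBREG so
       that the lifetime of the full pseudo stays visible to flow.  */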
2783 \f
2784 /* Pushing data onto the stack. */
2785
2786 /* Push a block of length SIZE (perhaps variable)
2787 and return an rtx to address the beginning of the block.
2788 Note that it is not possible for the value returned to be a QUEUED.
2789 The value may be virtual_outgoing_args_rtx.
2790
2791 EXTRA is the number of bytes of padding to push in addition to SIZE.
2792 BELOW nonzero means this padding comes at low addresses;
2793 otherwise, the padding comes at high addresses. */
2794
2795 rtx
2796 push_block (size, extra, below)
2797 rtx size;
2798 int extra, below;
2799 {
2800 register rtx temp;
2801
2802 size = convert_modes (Pmode, ptr_mode, size, 1);
2803 if (CONSTANT_P (size))
2804 anti_adjust_stack (plus_constant (size, extra));
2805 else if (GET_CODE (size) == REG && extra == 0)
2806 anti_adjust_stack (size);
2807 else
2808 {
2809 rtx temp = copy_to_mode_reg (Pmode, size);
2810 if (extra != 0)
2811 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2812 temp, 0, OPTAB_LIB_WIDEN);
2813 anti_adjust_stack (temp);
2814 }
2815
2816 #if defined (STACK_GROWS_DOWNWARD) \
2817 || (defined (ARGS_GROW_DOWNWARD) \
2818 && !defined (ACCUMULATE_OUTGOING_ARGS))
2819
2820 /* Return the lowest stack address when STACK or ARGS grow downward and
2821      we are not accumulating outgoing arguments (the c4x port uses such
2822 conventions). */
2823 temp = virtual_outgoing_args_rtx;
2824 if (extra != 0 && below)
2825 temp = plus_constant (temp, extra);
2826 #else
2827 if (GET_CODE (size) == CONST_INT)
2828 temp = plus_constant (virtual_outgoing_args_rtx,
2829 - INTVAL (size) - (below ? 0 : extra));
2830 else if (extra != 0 && !below)
2831 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2832 negate_rtx (Pmode, plus_constant (size, extra)));
2833 else
2834 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2835 negate_rtx (Pmode, size));
2836 #endif
2837
2838 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2839 }
2840
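/* A worked example for push_block above, assuming a hypothetical target
   whose stack grows downward, with SIZE == (const_int 24) and EXTRA == 8:
   anti_adjust_stack lowers the stack pointer by 32 bytes, and the value
   returned is based on virtual_outgoing_args_rtx, offset by EXTRA only
   when BELOW is nonzero so that the padding lands at the low addresses.  */
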
2841 rtx
2842 gen_push_operand ()
2843 {
2844 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2845 }
2846
2847 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2848 block of SIZE bytes. */
2849
2850 static rtx
2851 get_push_address (size)
2852 int size;
2853 {
2854 register rtx temp;
2855
2856 if (STACK_PUSH_CODE == POST_DEC)
2857 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2858 else if (STACK_PUSH_CODE == POST_INC)
2859 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2860 else
2861 temp = stack_pointer_rtx;
2862
2863 return copy_to_reg (temp);
2864 }
2865
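/* Note on the arithmetic in get_push_address above: with a post-modify
   push the stack pointer has already moved past the new block, so for
   POST_DEC the block begins at sp + SIZE and for POST_INC at sp - SIZE,
   while with a pre-modify push the stack pointer itself addresses the
   block.  The result is copied to a register so that later pushes do
   not invalidate it.  */
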
2866 /* Generate code to push X onto the stack, assuming it has mode MODE and
2867 type TYPE.
2868 MODE is redundant except when X is a CONST_INT (since they don't
2869 carry mode info).
2870 SIZE is an rtx for the size of data to be copied (in bytes),
2871 needed only if X is BLKmode.
2872
2873 ALIGN (in bytes) is maximum alignment we can assume.
2874
2875 If PARTIAL and REG are both nonzero, then copy that many of the first
2876 words of X into registers starting with REG, and push the rest of X.
2877 The amount of space pushed is decreased by PARTIAL words,
2878 rounded *down* to a multiple of PARM_BOUNDARY.
2879 REG must be a hard register in this case.
2880    If REG is zero but PARTIAL is not, take all other actions for an
2881 argument partially in registers, but do not actually load any
2882 registers.
2883
2884 EXTRA is the amount in bytes of extra space to leave next to this arg.
2885 This is ignored if an argument block has already been allocated.
2886
2887 On a machine that lacks real push insns, ARGS_ADDR is the address of
2888 the bottom of the argument block for this call. We use indexing off there
2889    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2890 argument block has not been preallocated.
2891
2892 ARGS_SO_FAR is the size of args previously pushed for this call.
2893
2894 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2895 for arguments passed in registers. If nonzero, it will be the number
2896 of bytes required. */
2897
2898 void
2899 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2900 args_addr, args_so_far, reg_parm_stack_space,
2901 alignment_pad)
2902 register rtx x;
2903 enum machine_mode mode;
2904 tree type;
2905 rtx size;
2906 unsigned int align;
2907 int partial;
2908 rtx reg;
2909 int extra;
2910 rtx args_addr;
2911 rtx args_so_far;
2912 int reg_parm_stack_space;
2913 rtx alignment_pad;
2914 {
2915 rtx xinner;
2916 enum direction stack_direction
2917 #ifdef STACK_GROWS_DOWNWARD
2918 = downward;
2919 #else
2920 = upward;
2921 #endif
2922
2923 /* Decide where to pad the argument: `downward' for below,
2924 `upward' for above, or `none' for don't pad it.
2925 Default is below for small data on big-endian machines; else above. */
2926 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2927
2928 /* Invert direction if stack is post-update. */
2929 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2930 if (where_pad != none)
2931 where_pad = (where_pad == downward ? upward : downward);
2932
2933 xinner = x = protect_from_queue (x, 0);
2934
2935 if (mode == BLKmode)
2936 {
2937 /* Copy a block into the stack, entirely or partially. */
2938
2939 register rtx temp;
2940 int used = partial * UNITS_PER_WORD;
2941 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2942 int skip;
2943
2944 if (size == 0)
2945 abort ();
2946
2947 used -= offset;
2948
2949 /* USED is now the # of bytes we need not copy to the stack
2950 because registers will take care of them. */
2951
2952 if (partial != 0)
2953 xinner = change_address (xinner, BLKmode,
2954 plus_constant (XEXP (xinner, 0), used));
2955
2956 /* If the partial register-part of the arg counts in its stack size,
2957 skip the part of stack space corresponding to the registers.
2958 Otherwise, start copying to the beginning of the stack space,
2959 by setting SKIP to 0. */
2960 skip = (reg_parm_stack_space == 0) ? 0 : used;
2961
2962 #ifdef PUSH_ROUNDING
2963 /* Do it with several push insns if that doesn't take lots of insns
2964 and if there is no difficulty with push insns that skip bytes
2965 on the stack for alignment purposes. */
2966 if (args_addr == 0
2967 && GET_CODE (size) == CONST_INT
2968 && skip == 0
2969 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2970 /* Here we avoid the case of a structure whose weak alignment
2971 forces many pushes of a small amount of data,
2972 and such small pushes do rounding that causes trouble. */
2973 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
2974 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2975 || PUSH_ROUNDING (align) == align)
2976 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2977 {
2978 /* Push padding now if padding above and stack grows down,
2979 or if padding below and stack grows up.
2980 But if space already allocated, this has already been done. */
2981 if (extra && args_addr == 0
2982 && where_pad != none && where_pad != stack_direction)
2983 anti_adjust_stack (GEN_INT (extra));
2984
2985 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2986 INTVAL (size) - used, align);
2987
2988 if (current_function_check_memory_usage && ! in_check_memory_usage)
2989 {
2990 rtx temp;
2991
2992 in_check_memory_usage = 1;
2993 temp = get_push_address (INTVAL(size) - used);
2994 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2995 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2996 temp, Pmode,
2997 XEXP (xinner, 0), Pmode,
2998 GEN_INT (INTVAL(size) - used),
2999 TYPE_MODE (sizetype));
3000 else
3001 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3002 temp, Pmode,
3003 GEN_INT (INTVAL(size) - used),
3004 TYPE_MODE (sizetype),
3005 GEN_INT (MEMORY_USE_RW),
3006 TYPE_MODE (integer_type_node));
3007 in_check_memory_usage = 0;
3008 }
3009 }
3010 else
3011 #endif /* PUSH_ROUNDING */
3012 {
3013 /* Otherwise make space on the stack and copy the data
3014 to the address of that space. */
3015
3016 /* Deduct words put into registers from the size we must copy. */
3017 if (partial != 0)
3018 {
3019 if (GET_CODE (size) == CONST_INT)
3020 size = GEN_INT (INTVAL (size) - used);
3021 else
3022 size = expand_binop (GET_MODE (size), sub_optab, size,
3023 GEN_INT (used), NULL_RTX, 0,
3024 OPTAB_LIB_WIDEN);
3025 }
3026
3027 /* Get the address of the stack space.
3028 In this case, we do not deal with EXTRA separately.
3029 A single stack adjust will do. */
3030 if (! args_addr)
3031 {
3032 temp = push_block (size, extra, where_pad == downward);
3033 extra = 0;
3034 }
3035 else if (GET_CODE (args_so_far) == CONST_INT)
3036 temp = memory_address (BLKmode,
3037 plus_constant (args_addr,
3038 skip + INTVAL (args_so_far)));
3039 else
3040 temp = memory_address (BLKmode,
3041 plus_constant (gen_rtx_PLUS (Pmode,
3042 args_addr,
3043 args_so_far),
3044 skip));
3045 if (current_function_check_memory_usage && ! in_check_memory_usage)
3046 {
3047 rtx target;
3048
3049 in_check_memory_usage = 1;
3050 target = copy_to_reg (temp);
3051 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3052 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3053 target, Pmode,
3054 XEXP (xinner, 0), Pmode,
3055 size, TYPE_MODE (sizetype));
3056 else
3057 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3058 target, Pmode,
3059 size, TYPE_MODE (sizetype),
3060 GEN_INT (MEMORY_USE_RW),
3061 TYPE_MODE (integer_type_node));
3062 in_check_memory_usage = 0;
3063 }
3064
3065 /* TEMP is the address of the block. Copy the data there. */
3066 if (GET_CODE (size) == CONST_INT
3067 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3068 {
3069 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3070 INTVAL (size), align);
3071 goto ret;
3072 }
3073 else
3074 {
3075 rtx opalign = GEN_INT (align);
3076 enum machine_mode mode;
3077 rtx target = gen_rtx_MEM (BLKmode, temp);
3078
3079 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3080 mode != VOIDmode;
3081 mode = GET_MODE_WIDER_MODE (mode))
3082 {
3083 enum insn_code code = movstr_optab[(int) mode];
3084 insn_operand_predicate_fn pred;
3085
3086 if (code != CODE_FOR_nothing
3087 && ((GET_CODE (size) == CONST_INT
3088 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3089 <= (GET_MODE_MASK (mode) >> 1)))
3090 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3091 && (!(pred = insn_data[(int) code].operand[0].predicate)
3092 || ((*pred) (target, BLKmode)))
3093 && (!(pred = insn_data[(int) code].operand[1].predicate)
3094 || ((*pred) (xinner, BLKmode)))
3095 && (!(pred = insn_data[(int) code].operand[3].predicate)
3096 || ((*pred) (opalign, VOIDmode))))
3097 {
3098 rtx op2 = convert_to_mode (mode, size, 1);
3099 rtx last = get_last_insn ();
3100 rtx pat;
3101
3102 pred = insn_data[(int) code].operand[2].predicate;
3103 if (pred != 0 && ! (*pred) (op2, mode))
3104 op2 = copy_to_mode_reg (mode, op2);
3105
3106 pat = GEN_FCN ((int) code) (target, xinner,
3107 op2, opalign);
3108 if (pat)
3109 {
3110 emit_insn (pat);
3111 goto ret;
3112 }
3113 else
3114 delete_insns_since (last);
3115 }
3116 }
3117 }
3118
3119 #ifndef ACCUMULATE_OUTGOING_ARGS
3120 /* If the source is referenced relative to the stack pointer,
3121 copy it to another register to stabilize it. We do not need
3122 to do this if we know that we won't be changing sp. */
3123
3124 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3125 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3126 temp = copy_to_reg (temp);
3127 #endif
3128
3129 /* Make inhibit_defer_pop nonzero around the library call
3130 to force it to pop the bcopy-arguments right away. */
3131 NO_DEFER_POP;
3132 #ifdef TARGET_MEM_FUNCTIONS
3133 emit_library_call (memcpy_libfunc, 0,
3134 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3135 convert_to_mode (TYPE_MODE (sizetype),
3136 size, TREE_UNSIGNED (sizetype)),
3137 TYPE_MODE (sizetype));
3138 #else
3139 emit_library_call (bcopy_libfunc, 0,
3140 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3141 convert_to_mode (TYPE_MODE (integer_type_node),
3142 size,
3143 TREE_UNSIGNED (integer_type_node)),
3144 TYPE_MODE (integer_type_node));
3145 #endif
3146 OK_DEFER_POP;
3147 }
3148 }
3149 else if (partial > 0)
3150 {
3151 /* Scalar partly in registers. */
3152
3153 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3154 int i;
3155 int not_stack;
3156 /* # words of start of argument
3157 that we must make space for but need not store. */
3158 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3159 int args_offset = INTVAL (args_so_far);
3160 int skip;
3161
3162 /* Push padding now if padding above and stack grows down,
3163 or if padding below and stack grows up.
3164 But if space already allocated, this has already been done. */
3165 if (extra && args_addr == 0
3166 && where_pad != none && where_pad != stack_direction)
3167 anti_adjust_stack (GEN_INT (extra));
3168
3169 /* If we make space by pushing it, we might as well push
3170 the real data. Otherwise, we can leave OFFSET nonzero
3171 and leave the space uninitialized. */
3172 if (args_addr == 0)
3173 offset = 0;
3174
3175 /* Now NOT_STACK gets the number of words that we don't need to
3176 allocate on the stack. */
3177 not_stack = partial - offset;
3178
3179 /* If the partial register-part of the arg counts in its stack size,
3180 skip the part of stack space corresponding to the registers.
3181 Otherwise, start copying to the beginning of the stack space,
3182 by setting SKIP to 0. */
3183 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3184
3185 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3186 x = validize_mem (force_const_mem (mode, x));
3187
3188 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3189 SUBREGs of such registers are not allowed. */
3190 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3191 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3192 x = copy_to_reg (x);
3193
3194 /* Loop over all the words allocated on the stack for this arg. */
3195 /* We can do it by words, because any scalar bigger than a word
3196 has a size a multiple of a word. */
3197 #ifndef PUSH_ARGS_REVERSED
3198 for (i = not_stack; i < size; i++)
3199 #else
3200 for (i = size - 1; i >= not_stack; i--)
3201 #endif
3202 if (i >= not_stack + offset)
3203 emit_push_insn (operand_subword_force (x, i, mode),
3204 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3205 0, args_addr,
3206 GEN_INT (args_offset + ((i - not_stack + skip)
3207 * UNITS_PER_WORD)),
3208 reg_parm_stack_space, alignment_pad);
3209 }
3210 else
3211 {
3212 rtx addr;
3213 rtx target = NULL_RTX;
3214
3215 /* Push padding now if padding above and stack grows down,
3216 or if padding below and stack grows up.
3217 But if space already allocated, this has already been done. */
3218 if (extra && args_addr == 0
3219 && where_pad != none && where_pad != stack_direction)
3220 anti_adjust_stack (GEN_INT (extra));
3221
3222 #ifdef PUSH_ROUNDING
3223 if (args_addr == 0)
3224 addr = gen_push_operand ();
3225 else
3226 #endif
3227 {
3228 if (GET_CODE (args_so_far) == CONST_INT)
3229 addr
3230 = memory_address (mode,
3231 plus_constant (args_addr,
3232 INTVAL (args_so_far)));
3233 else
3234 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3235 args_so_far));
3236 target = addr;
3237 }
3238
3239 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3240
3241 if (current_function_check_memory_usage && ! in_check_memory_usage)
3242 {
3243 in_check_memory_usage = 1;
3244 if (target == 0)
3245 target = get_push_address (GET_MODE_SIZE (mode));
3246
3247 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3248 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3249 target, Pmode,
3250 XEXP (x, 0), Pmode,
3251 GEN_INT (GET_MODE_SIZE (mode)),
3252 TYPE_MODE (sizetype));
3253 else
3254 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3255 target, Pmode,
3256 GEN_INT (GET_MODE_SIZE (mode)),
3257 TYPE_MODE (sizetype),
3258 GEN_INT (MEMORY_USE_RW),
3259 TYPE_MODE (integer_type_node));
3260 in_check_memory_usage = 0;
3261 }
3262 }
3263
3264 ret:
3265 /* If part should go in registers, copy that part
3266 into the appropriate registers. Do this now, at the end,
3267 since mem-to-mem copies above may do function calls. */
3268 if (partial > 0 && reg != 0)
3269 {
3270 /* Handle calls that pass values in multiple non-contiguous locations.
3271 The Irix 6 ABI has examples of this. */
3272 if (GET_CODE (reg) == PARALLEL)
3273 emit_group_load (reg, x, -1, align); /* ??? size? */
3274 else
3275 move_block_to_reg (REGNO (reg), x, partial, mode);
3276 }
3277
3278 if (extra && args_addr == 0 && where_pad == stack_direction)
3279 anti_adjust_stack (GEN_INT (extra));
3280
3281 if (alignment_pad)
3282 anti_adjust_stack (alignment_pad);
3283 }
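
/* A hedged usage sketch for emit_push_insn above: pushing a word-sized
   scalar X with no partial-register part, no preallocated argument
   block and no extra padding, on a target with push instructions, might
   look like the call below (the real argument bookkeeping lives in the
   callers, e.g. calls.c):  */
#if 0
  emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX,
		  PARM_BOUNDARY / BITS_PER_UNIT, 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx, 0, NULL_RTX);
#endif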
3284 \f
3285 /* Expand an assignment that stores the value of FROM into TO.
3286 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3287 (This may contain a QUEUED rtx;
3288 if the value is constant, this rtx is a constant.)
3289 Otherwise, the returned value is NULL_RTX.
3290
3291 SUGGEST_REG is no longer actually used.
3292 It used to mean, copy the value through a register
3293 and return that register, if that is possible.
3294 We now use WANT_VALUE to decide whether to do this. */
3295
3296 rtx
3297 expand_assignment (to, from, want_value, suggest_reg)
3298 tree to, from;
3299 int want_value;
3300 int suggest_reg ATTRIBUTE_UNUSED;
3301 {
3302 register rtx to_rtx = 0;
3303 rtx result;
3304
3305 /* Don't crash if the lhs of the assignment was erroneous. */
3306
3307 if (TREE_CODE (to) == ERROR_MARK)
3308 {
3309 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3310 return want_value ? result : NULL_RTX;
3311 }
3312
3313 /* Assignment of a structure component needs special treatment
3314 if the structure component's rtx is not simply a MEM.
3315 Assignment of an array element at a constant index, and assignment of
3316 an array element in an unaligned packed structure field, has the same
3317 problem. */
3318
3319 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3320 || TREE_CODE (to) == ARRAY_REF)
3321 {
3322 enum machine_mode mode1;
3323 int bitsize;
3324 int bitpos;
3325 tree offset;
3326 int unsignedp;
3327 int volatilep = 0;
3328 tree tem;
3329 unsigned int alignment;
3330
3331 push_temp_slots ();
3332 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3333 &unsignedp, &volatilep, &alignment);
3334
3335 /* If we are going to use store_bit_field and extract_bit_field,
3336 make sure to_rtx will be safe for multiple use. */
3337
3338 if (mode1 == VOIDmode && want_value)
3339 tem = stabilize_reference (tem);
3340
3341 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3342 if (offset != 0)
3343 {
3344 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3345
3346 if (GET_CODE (to_rtx) != MEM)
3347 abort ();
3348
3349 if (GET_MODE (offset_rtx) != ptr_mode)
3350 {
3351 #ifdef POINTERS_EXTEND_UNSIGNED
3352 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3353 #else
3354 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3355 #endif
3356 }
3357
3358 /* A constant address in TO_RTX can have VOIDmode, we must not try
3359 to call force_reg for that case. Avoid that case. */
3360 if (GET_CODE (to_rtx) == MEM
3361 && GET_MODE (to_rtx) == BLKmode
3362 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3363 && bitsize
3364 && (bitpos % bitsize) == 0
3365 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3366 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3367 {
3368 rtx temp = change_address (to_rtx, mode1,
3369 plus_constant (XEXP (to_rtx, 0),
3370 (bitpos /
3371 BITS_PER_UNIT)));
3372 if (GET_CODE (XEXP (temp, 0)) == REG)
3373 to_rtx = temp;
3374 else
3375 to_rtx = change_address (to_rtx, mode1,
3376 force_reg (GET_MODE (XEXP (temp, 0)),
3377 XEXP (temp, 0)));
3378 bitpos = 0;
3379 }
3380
3381 to_rtx = change_address (to_rtx, VOIDmode,
3382 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3383 force_reg (ptr_mode,
3384 offset_rtx)));
3385 }
3386
3387 if (volatilep)
3388 {
3389 if (GET_CODE (to_rtx) == MEM)
3390 {
3391 /* When the offset is zero, to_rtx is the address of the
3392 structure we are storing into, and hence may be shared.
3393 We must make a new MEM before setting the volatile bit. */
3394 if (offset == 0)
3395 to_rtx = copy_rtx (to_rtx);
3396
3397 MEM_VOLATILE_P (to_rtx) = 1;
3398 }
3399 #if 0 /* This was turned off because, when a field is volatile
3400 in an object which is not volatile, the object may be in a register,
3401 and then we would abort over here. */
3402 else
3403 abort ();
3404 #endif
3405 }
3406
3407 if (TREE_CODE (to) == COMPONENT_REF
3408 && TREE_READONLY (TREE_OPERAND (to, 1)))
3409 {
3410 if (offset == 0)
3411 to_rtx = copy_rtx (to_rtx);
3412
3413 RTX_UNCHANGING_P (to_rtx) = 1;
3414 }
3415
3416 /* Check the access. */
3417 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3418 {
3419 rtx to_addr;
3420 int size;
3421 int best_mode_size;
3422 enum machine_mode best_mode;
3423
3424 best_mode = get_best_mode (bitsize, bitpos,
3425 TYPE_ALIGN (TREE_TYPE (tem)),
3426 mode1, volatilep);
3427 if (best_mode == VOIDmode)
3428 best_mode = QImode;
3429
3430 best_mode_size = GET_MODE_BITSIZE (best_mode);
3431 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3432 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3433 size *= GET_MODE_SIZE (best_mode);
3434
3435 /* Check the access right of the pointer. */
3436 if (size)
3437 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3438 to_addr, Pmode,
3439 GEN_INT (size), TYPE_MODE (sizetype),
3440 GEN_INT (MEMORY_USE_WO),
3441 TYPE_MODE (integer_type_node));
3442 }
3443
3444 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3445 (want_value
3446 /* Spurious cast makes HPUX compiler happy. */
3447 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3448 : VOIDmode),
3449 unsignedp,
3450 /* Required alignment of containing datum. */
3451 alignment,
3452 int_size_in_bytes (TREE_TYPE (tem)),
3453 get_alias_set (to));
3454 preserve_temp_slots (result);
3455 free_temp_slots ();
3456 pop_temp_slots ();
3457
3458 /* If the value is meaningful, convert RESULT to the proper mode.
3459 Otherwise, return nothing. */
3460 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3461 TYPE_MODE (TREE_TYPE (from)),
3462 result,
3463 TREE_UNSIGNED (TREE_TYPE (to)))
3464 : NULL_RTX);
3465 }
3466
3467 /* If the rhs is a function call and its value is not an aggregate,
3468 call the function before we start to compute the lhs.
3469 This is needed for correct code for cases such as
3470 val = setjmp (buf) on machines where reference to val
3471 requires loading up part of an address in a separate insn.
3472
3473 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3474 a promoted variable where the zero- or sign- extension needs to be done.
3475 Handling this in the normal way is safe because no computation is done
3476 before the call. */
3477 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3478 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3479 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3480 {
3481 rtx value;
3482
3483 push_temp_slots ();
3484 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3485 if (to_rtx == 0)
3486 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3487
3488 /* Handle calls that return values in multiple non-contiguous locations.
3489 The Irix 6 ABI has examples of this. */
3490 if (GET_CODE (to_rtx) == PARALLEL)
3491 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3492 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3493 else if (GET_MODE (to_rtx) == BLKmode)
3494 emit_block_move (to_rtx, value, expr_size (from),
3495 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3496 else
3497 {
3498 #ifdef POINTERS_EXTEND_UNSIGNED
3499 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3500 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3501 value = convert_memory_address (GET_MODE (to_rtx), value);
3502 #endif
3503 emit_move_insn (to_rtx, value);
3504 }
3505 preserve_temp_slots (to_rtx);
3506 free_temp_slots ();
3507 pop_temp_slots ();
3508 return want_value ? to_rtx : NULL_RTX;
3509 }
3510
3511 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3512 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3513
3514 if (to_rtx == 0)
3515 {
3516 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3517 if (GET_CODE (to_rtx) == MEM)
3518 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3519 }
3520
3521 /* Don't move directly into a return register. */
3522 if (TREE_CODE (to) == RESULT_DECL
3523 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3524 {
3525 rtx temp;
3526
3527 push_temp_slots ();
3528 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3529
3530 if (GET_CODE (to_rtx) == PARALLEL)
3531 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3532 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3533 else
3534 emit_move_insn (to_rtx, temp);
3535
3536 preserve_temp_slots (to_rtx);
3537 free_temp_slots ();
3538 pop_temp_slots ();
3539 return want_value ? to_rtx : NULL_RTX;
3540 }
3541
3542 /* In case we are returning the contents of an object which overlaps
3543 the place the value is being stored, use a safe function when copying
3544 a value through a pointer into a structure value return block. */
3545 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3546 && current_function_returns_struct
3547 && !current_function_returns_pcc_struct)
3548 {
3549 rtx from_rtx, size;
3550
3551 push_temp_slots ();
3552 size = expr_size (from);
3553 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3554 EXPAND_MEMORY_USE_DONT);
3555
3556 /* Copy the rights of the bitmap. */
3557 if (current_function_check_memory_usage)
3558 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3559 XEXP (to_rtx, 0), Pmode,
3560 XEXP (from_rtx, 0), Pmode,
3561 convert_to_mode (TYPE_MODE (sizetype),
3562 size, TREE_UNSIGNED (sizetype)),
3563 TYPE_MODE (sizetype));
3564
3565 #ifdef TARGET_MEM_FUNCTIONS
3566 emit_library_call (memcpy_libfunc, 0,
3567 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3568 XEXP (from_rtx, 0), Pmode,
3569 convert_to_mode (TYPE_MODE (sizetype),
3570 size, TREE_UNSIGNED (sizetype)),
3571 TYPE_MODE (sizetype));
3572 #else
3573 emit_library_call (bcopy_libfunc, 0,
3574 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3575 XEXP (to_rtx, 0), Pmode,
3576 convert_to_mode (TYPE_MODE (integer_type_node),
3577 size, TREE_UNSIGNED (integer_type_node)),
3578 TYPE_MODE (integer_type_node));
3579 #endif
3580
3581 preserve_temp_slots (to_rtx);
3582 free_temp_slots ();
3583 pop_temp_slots ();
3584 return want_value ? to_rtx : NULL_RTX;
3585 }
3586
3587 /* Compute FROM and store the value in the rtx we got. */
3588
3589 push_temp_slots ();
3590 result = store_expr (from, to_rtx, want_value);
3591 preserve_temp_slots (result);
3592 free_temp_slots ();
3593 pop_temp_slots ();
3594 return want_value ? result : NULL_RTX;
3595 }
3596
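/* An illustrative note for expand_assignment above: for a component
   store such as `obj.field = val' where the field is a bit-field,
   get_inner_reference yields the containing object together with
   BITSIZE and BITPOS and the store is done with store_field, while a
   plain `var = f ()' whose value arrives in a PARALLEL goes through
   emit_group_load instead.  */
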
3597 /* Generate code for computing expression EXP,
3598 and storing the value into TARGET.
3599 TARGET may contain a QUEUED rtx.
3600
3601 If WANT_VALUE is nonzero, return a copy of the value
3602 not in TARGET, so that we can be sure to use the proper
3603 value in a containing expression even if TARGET has something
3604 else stored in it. If possible, we copy the value through a pseudo
3605 and return that pseudo. Or, if the value is constant, we try to
3606 return the constant. In some cases, we return a pseudo
3607 copied *from* TARGET.
3608
3609 If the mode is BLKmode then we may return TARGET itself.
3610    It turns out that in BLKmode it doesn't cause a problem,
3611 because C has no operators that could combine two different
3612 assignments into the same BLKmode object with different values
3613 with no sequence point. Will other languages need this to
3614 be more thorough?
3615
3616 If WANT_VALUE is 0, we return NULL, to make sure
3617 to catch quickly any cases where the caller uses the value
3618 and fails to set WANT_VALUE. */
3619
3620 rtx
3621 store_expr (exp, target, want_value)
3622 register tree exp;
3623 register rtx target;
3624 int want_value;
3625 {
3626 register rtx temp;
3627 int dont_return_target = 0;
3628
3629 if (TREE_CODE (exp) == COMPOUND_EXPR)
3630 {
3631 /* Perform first part of compound expression, then assign from second
3632 part. */
3633 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3634 emit_queue ();
3635 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3636 }
3637 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3638 {
3639 /* For conditional expression, get safe form of the target. Then
3640 test the condition, doing the appropriate assignment on either
3641 side. This avoids the creation of unnecessary temporaries.
3642 For non-BLKmode, it is more efficient not to do this. */
3643
3644 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3645
3646 emit_queue ();
3647 target = protect_from_queue (target, 1);
3648
3649 do_pending_stack_adjust ();
3650 NO_DEFER_POP;
3651 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3652 start_cleanup_deferral ();
3653 store_expr (TREE_OPERAND (exp, 1), target, 0);
3654 end_cleanup_deferral ();
3655 emit_queue ();
3656 emit_jump_insn (gen_jump (lab2));
3657 emit_barrier ();
3658 emit_label (lab1);
3659 start_cleanup_deferral ();
3660 store_expr (TREE_OPERAND (exp, 2), target, 0);
3661 end_cleanup_deferral ();
3662 emit_queue ();
3663 emit_label (lab2);
3664 OK_DEFER_POP;
3665
3666 return want_value ? target : NULL_RTX;
3667 }
3668 else if (queued_subexp_p (target))
3669 /* If target contains a postincrement, let's not risk
3670 using it as the place to generate the rhs. */
3671 {
3672 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3673 {
3674 /* Expand EXP into a new pseudo. */
3675 temp = gen_reg_rtx (GET_MODE (target));
3676 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3677 }
3678 else
3679 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3680
3681 /* If target is volatile, ANSI requires accessing the value
3682 *from* the target, if it is accessed. So make that happen.
3683 In no case return the target itself. */
3684 if (! MEM_VOLATILE_P (target) && want_value)
3685 dont_return_target = 1;
3686 }
3687 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3688 && GET_MODE (target) != BLKmode)
3689 /* If target is in memory and caller wants value in a register instead,
3690 arrange that. Pass TARGET as target for expand_expr so that,
3691 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3692 We know expand_expr will not use the target in that case.
3693 Don't do this if TARGET is volatile because we are supposed
3694 to write it and then read it. */
3695 {
3696 temp = expand_expr (exp, target, GET_MODE (target), 0);
3697 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3698 temp = copy_to_reg (temp);
3699 dont_return_target = 1;
3700 }
3701 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3702 /* If this is a scalar in a register that is stored in a wider mode
3703 than the declared mode, compute the result into its declared mode
3704 and then convert to the wider mode. Our value is the computed
3705 expression. */
3706 {
3707 /* If we don't want a value, we can do the conversion inside EXP,
3708 which will often result in some optimizations. Do the conversion
3709 in two steps: first change the signedness, if needed, then
3710 the extend. But don't do this if the type of EXP is a subtype
3711 of something else since then the conversion might involve
3712 more than just converting modes. */
3713 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3714 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3715 {
3716 if (TREE_UNSIGNED (TREE_TYPE (exp))
3717 != SUBREG_PROMOTED_UNSIGNED_P (target))
3718 exp
3719 = convert
3720 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3721 TREE_TYPE (exp)),
3722 exp);
3723
3724 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3725 SUBREG_PROMOTED_UNSIGNED_P (target)),
3726 exp);
3727 }
3728
3729 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3730
3731 /* If TEMP is a volatile MEM and we want a result value, make
3732 the access now so it gets done only once. Likewise if
3733 it contains TARGET. */
3734 if (GET_CODE (temp) == MEM && want_value
3735 && (MEM_VOLATILE_P (temp)
3736 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3737 temp = copy_to_reg (temp);
3738
3739 /* If TEMP is a VOIDmode constant, use convert_modes to make
3740 sure that we properly convert it. */
3741 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3742 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3743 TYPE_MODE (TREE_TYPE (exp)), temp,
3744 SUBREG_PROMOTED_UNSIGNED_P (target));
3745
3746 convert_move (SUBREG_REG (target), temp,
3747 SUBREG_PROMOTED_UNSIGNED_P (target));
3748
3749 /* If we promoted a constant, change the mode back down to match
3750 target. Otherwise, the caller might get confused by a result whose
3751 mode is larger than expected. */
3752
3753 if (want_value && GET_MODE (temp) != GET_MODE (target)
3754 && GET_MODE (temp) != VOIDmode)
3755 {
3756 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3757 SUBREG_PROMOTED_VAR_P (temp) = 1;
3758 SUBREG_PROMOTED_UNSIGNED_P (temp)
3759 = SUBREG_PROMOTED_UNSIGNED_P (target);
3760 }
3761
3762 return want_value ? temp : NULL_RTX;
3763 }
3764 else
3765 {
3766 temp = expand_expr (exp, target, GET_MODE (target), 0);
3767 /* Return TARGET if it's a specified hardware register.
3768 If TARGET is a volatile mem ref, either return TARGET
3769 or return a reg copied *from* TARGET; ANSI requires this.
3770
3771 Otherwise, if TEMP is not TARGET, return TEMP
3772 if it is constant (for efficiency),
3773 or if we really want the correct value. */
3774 if (!(target && GET_CODE (target) == REG
3775 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3776 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3777 && ! rtx_equal_p (temp, target)
3778 && (CONSTANT_P (temp) || want_value))
3779 dont_return_target = 1;
3780 }
3781
3782 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3783 the same as that of TARGET, adjust the constant. This is needed, for
3784 example, in case it is a CONST_DOUBLE and we want only a word-sized
3785 value. */
3786 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3787 && TREE_CODE (exp) != ERROR_MARK
3788 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3789 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3790 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3791
3792 if (current_function_check_memory_usage
3793 && GET_CODE (target) == MEM
3794 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3795 {
3796 if (GET_CODE (temp) == MEM)
3797 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3798 XEXP (target, 0), Pmode,
3799 XEXP (temp, 0), Pmode,
3800 expr_size (exp), TYPE_MODE (sizetype));
3801 else
3802 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3803 XEXP (target, 0), Pmode,
3804 expr_size (exp), TYPE_MODE (sizetype),
3805 GEN_INT (MEMORY_USE_WO),
3806 TYPE_MODE (integer_type_node));
3807 }
3808
3809 /* If value was not generated in the target, store it there.
3810 Convert the value to TARGET's type first if necessary. */
3811 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3812 one or both of them are volatile memory refs, we have to distinguish
3813 two cases:
3814 - expand_expr has used TARGET. In this case, we must not generate
3815 another copy. This can be detected by TEMP and TARGET comparing
3816 equal according to ==.
3817 - expand_expr has not used TARGET - that means that the source just
3818 happens to have the same RTX form. Since temp will have been created
3819 by expand_expr, it will compare unequal according to == .
3820 We must generate a copy in this case, to reach the correct number
3821 of volatile memory references. */
3822
3823 if ((! rtx_equal_p (temp, target)
3824 || (temp != target && (side_effects_p (temp)
3825 || side_effects_p (target))))
3826 && TREE_CODE (exp) != ERROR_MARK)
3827 {
3828 target = protect_from_queue (target, 1);
3829 if (GET_MODE (temp) != GET_MODE (target)
3830 && GET_MODE (temp) != VOIDmode)
3831 {
3832 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3833 if (dont_return_target)
3834 {
3835 /* In this case, we will return TEMP,
3836 so make sure it has the proper mode.
3837 But don't forget to store the value into TARGET. */
3838 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3839 emit_move_insn (target, temp);
3840 }
3841 else
3842 convert_move (target, temp, unsignedp);
3843 }
3844
3845 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3846 {
3847 /* Handle copying a string constant into an array.
3848 The string constant may be shorter than the array.
3849 So copy just the string's actual length, and clear the rest. */
3850 rtx size;
3851 rtx addr;
3852
3853 /* Get the size of the data type of the string,
3854 which is actually the size of the target. */
3855 size = expr_size (exp);
3856 if (GET_CODE (size) == CONST_INT
3857 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3858 emit_block_move (target, temp, size,
3859 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3860 else
3861 {
3862 /* Compute the size of the data to copy from the string. */
3863 tree copy_size
3864 = size_binop (MIN_EXPR,
3865 make_tree (sizetype, size),
3866 convert (sizetype,
3867 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3868 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3869 VOIDmode, 0);
3870 rtx label = 0;
3871
3872 /* Copy that much. */
3873 emit_block_move (target, temp, copy_size_rtx,
3874 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3875
3876 /* Figure out how much is left in TARGET that we have to clear.
3877 Do all calculations in ptr_mode. */
3878
3879 addr = XEXP (target, 0);
3880 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3881
3882 if (GET_CODE (copy_size_rtx) == CONST_INT)
3883 {
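/* Constant copy size: advance ADDR by the string length and decrease
SIZE by the same amount. */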
3884 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3885 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3886 }
3887 else
3888 {
3889 addr = force_reg (ptr_mode, addr);
3890 addr = expand_binop (ptr_mode, add_optab, addr,
3891 copy_size_rtx, NULL_RTX, 0,
3892 OPTAB_LIB_WIDEN);
3893
3894 size = expand_binop (ptr_mode, sub_optab, size,
3895 copy_size_rtx, NULL_RTX, 0,
3896 OPTAB_LIB_WIDEN);
3897
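/* If SIZE is now negative, there is nothing left to clear;
branch past the clearing code below. */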
3898 label = gen_label_rtx ();
3899 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3900 GET_MODE (size), 0, 0, label);
3901 }
3902
3903 if (size != const0_rtx)
3904 {
3905 /* Be sure we can write on ADDR. */
3906 if (current_function_check_memory_usage)
3907 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3908 addr, Pmode,
3909 size, TYPE_MODE (sizetype),
3910 GEN_INT (MEMORY_USE_WO),
3911 TYPE_MODE (integer_type_node));
3912 #ifdef TARGET_MEM_FUNCTIONS
3913 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3914 addr, ptr_mode,
3915 const0_rtx, TYPE_MODE (integer_type_node),
3916 convert_to_mode (TYPE_MODE (sizetype),
3917 size,
3918 TREE_UNSIGNED (sizetype)),
3919 TYPE_MODE (sizetype));
3920 #else
3921 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3922 addr, ptr_mode,
3923 convert_to_mode (TYPE_MODE (integer_type_node),
3924 size,
3925 TREE_UNSIGNED (integer_type_node)),
3926 TYPE_MODE (integer_type_node));
3927 #endif
3928 }
3929
3930 if (label)
3931 emit_label (label);
3932 }
3933 }
3934 /* Handle calls that return values in multiple non-contiguous locations.
3935 The Irix 6 ABI has examples of this. */
3936 else if (GET_CODE (target) == PARALLEL)
3937 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3938 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3939 else if (GET_MODE (temp) == BLKmode)
3940 emit_block_move (target, temp, expr_size (exp),
3941 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3942 else
3943 emit_move_insn (target, temp);
3944 }
3945
3946 /* If we don't want a value, return NULL_RTX. */
3947 if (! want_value)
3948 return NULL_RTX;
3949
3950 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3951 ??? The latter test doesn't seem to make sense. */
3952 else if (dont_return_target && GET_CODE (temp) != MEM)
3953 return temp;
3954
3955 /* Return TARGET itself if it is a hard register. */
3956 else if (want_value && GET_MODE (target) != BLKmode
3957 && ! (GET_CODE (target) == REG
3958 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3959 return copy_to_reg (target);
3960
3961 else
3962 return target;
3963 }
3964 \f
3965 /* Return 1 if EXP just contains zeros. */
3966
3967 static int
3968 is_zeros_p (exp)
3969 tree exp;
3970 {
3971 tree elt;
3972
3973 switch (TREE_CODE (exp))
3974 {
3975 case CONVERT_EXPR:
3976 case NOP_EXPR:
3977 case NON_LVALUE_EXPR:
3978 return is_zeros_p (TREE_OPERAND (exp, 0));
3979
3980 case INTEGER_CST:
3981 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3982
3983 case COMPLEX_CST:
3984 return
3985 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3986
3987 case REAL_CST:
3988 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3989
3990 case CONSTRUCTOR:
3991 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3992 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3993 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3994 if (! is_zeros_p (TREE_VALUE (elt)))
3995 return 0;
3996
3997 return 1;
3998
3999 default:
4000 return 0;
4001 }
4002 }
4003
4004 /* Return 1 if EXP contains mostly (3/4) zeros. */
4005
4006 static int
4007 mostly_zeros_p (exp)
4008 tree exp;
4009 {
4010 if (TREE_CODE (exp) == CONSTRUCTOR)
4011 {
4012 int elts = 0, zeros = 0;
4013 tree elt = CONSTRUCTOR_ELTS (exp);
4014 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4015 {
4016 /* If there are no ranges of true bits, it is all zero. */
4017 return elt == NULL_TREE;
4018 }
4019 for (; elt; elt = TREE_CHAIN (elt))
4020 {
4021 /* We do not handle the case where the index is a RANGE_EXPR,
4022 so the statistic will be somewhat inaccurate.
4023 We do make a more accurate count in store_constructor itself,
4024 and since this function is only used for nested array elements,
4025 this should be close enough. */
4026 if (mostly_zeros_p (TREE_VALUE (elt)))
4027 zeros++;
4028 elts++;
4029 }
4030
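/* Nonzero if at least 3/4 of the elements are themselves mostly zero. */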
4031 return 4 * zeros >= 3 * elts;
4032 }
4033
4034 return is_zeros_p (exp);
4035 }
4036 \f
4037 /* Helper function for store_constructor.
4038 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4039 TYPE is the type of the CONSTRUCTOR, not the element type.
4040 ALIGN and CLEARED are as for store_constructor.
4041
4042 This provides a recursive shortcut back to store_constructor when it isn't
4043 necessary to go through store_field. This is so that we can pass through
4044 the cleared field to let store_constructor know that we may not have to
4045 clear a substructure if the outer structure has already been cleared. */
4046
4047 static void
4048 store_constructor_field (target, bitsize, bitpos,
4049 mode, exp, type, align, cleared)
4050 rtx target;
4051 int bitsize, bitpos;
4052 enum machine_mode mode;
4053 tree exp, type;
4054 unsigned int align;
4055 int cleared;
4056 {
4057 if (TREE_CODE (exp) == CONSTRUCTOR
4058 && bitpos % BITS_PER_UNIT == 0
4059 /* If we have a non-zero bitpos for a register target, then we just
4060 let store_field do the bitfield handling. This is unlikely to
4061 generate unnecessary clear instructions anyway. */
4062 && (bitpos == 0 || GET_CODE (target) == MEM))
4063 {
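/* If BITPOS is nonzero, point TARGET at the sub-object, using BLKmode
when the byte offset is not suitably aligned for TARGET's mode. */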
4064 if (bitpos != 0)
4065 target
4066 = change_address (target,
4067 GET_MODE (target) == BLKmode
4068 || 0 != (bitpos
4069 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4070 ? BLKmode : VOIDmode,
4071 plus_constant (XEXP (target, 0),
4072 bitpos / BITS_PER_UNIT));
4073 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4074 }
4075 else
4076 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4077 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4078 int_size_in_bytes (type), 0);
4079 }
4080
4081 /* Store the value of constructor EXP into the rtx TARGET.
4082 TARGET is either a REG or a MEM.
4083 ALIGN is the maximum known alignment for TARGET, in bits.
4084 CLEARED is true if TARGET is known to have been zero'd.
4085 SIZE is the number of bytes of TARGET we are allowed to modify: this
4086 may not be the same as the size of EXP if we are assigning to a field
4087 which has been packed to exclude padding bits. */
4088
4089 static void
4090 store_constructor (exp, target, align, cleared, size)
4091 tree exp;
4092 rtx target;
4093 unsigned int align;
4094 int cleared;
4095 int size;
4096 {
4097 tree type = TREE_TYPE (exp);
4098 #ifdef WORD_REGISTER_OPERATIONS
4099 rtx exp_size = expr_size (exp);
4100 #endif
4101
4102 /* We know our target cannot conflict, since safe_from_p has been called. */
4103 #if 0
4104 /* Don't try copying piece by piece into a hard register
4105 since that is vulnerable to being clobbered by EXP.
4106 Instead, construct in a pseudo register and then copy it all. */
4107 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4108 {
4109 rtx temp = gen_reg_rtx (GET_MODE (target));
4110 store_constructor (exp, temp, align, cleared, size);
4111 emit_move_insn (target, temp);
4112 return;
4113 }
4114 #endif
4115
4116 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4117 || TREE_CODE (type) == QUAL_UNION_TYPE)
4118 {
4119 register tree elt;
4120
4121 /* Inform later passes that the whole union value is dead. */
4122 if ((TREE_CODE (type) == UNION_TYPE
4123 || TREE_CODE (type) == QUAL_UNION_TYPE)
4124 && ! cleared)
4125 {
4126 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4127
4128 /* If the constructor is empty, clear the union. */
4129 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4130 clear_storage (target, expr_size (exp),
4131 TYPE_ALIGN (type) / BITS_PER_UNIT);
4132 }
4133
4134 /* If we are building a static constructor into a register,
4135 set the initial value as zero so we can fold the value into
4136 a constant. But if more than one register is involved,
4137 this probably loses. */
4138 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4139 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4140 {
4141 if (! cleared)
4142 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4143
4144 cleared = 1;
4145 }
4146
4147 /* If the constructor has fewer fields than the structure
4148 or if we are initializing the structure to mostly zeros,
4149 clear the whole structure first. */
4150 else if (size > 0
4151 && ((list_length (CONSTRUCTOR_ELTS (exp))
4152 != list_length (TYPE_FIELDS (type)))
4153 || mostly_zeros_p (exp)))
4154 {
4155 if (! cleared)
4156 clear_storage (target, GEN_INT (size),
4157 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4158
4159 cleared = 1;
4160 }
4161 else if (! cleared)
4162 /* Inform later passes that the old value is dead. */
4163 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4164
4165 /* Store each element of the constructor into
4166 the corresponding field of TARGET. */
4167
4168 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4169 {
4170 register tree field = TREE_PURPOSE (elt);
4171 #ifdef WORD_REGISTER_OPERATIONS
4172 tree value = TREE_VALUE (elt);
4173 #endif
4174 register enum machine_mode mode;
4175 int bitsize;
4176 int bitpos = 0;
4177 int unsignedp;
4178 tree pos, constant = 0, offset = 0;
4179 rtx to_rtx = target;
4180
4181 /* Just ignore missing fields.
4182 We cleared the whole structure, above,
4183 if any fields are missing. */
4184 if (field == 0)
4185 continue;
4186
4187 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4188 continue;
4189
4190 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4191 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4192 else
4193 bitsize = -1;
4194
4195 unsignedp = TREE_UNSIGNED (field);
4196 mode = DECL_MODE (field);
4197 if (DECL_BIT_FIELD (field))
4198 mode = VOIDmode;
4199
4200 pos = DECL_FIELD_BITPOS (field);
4201 if (TREE_CODE (pos) == INTEGER_CST)
4202 constant = pos;
4203 else if (TREE_CODE (pos) == PLUS_EXPR
4204 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4205 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4206 else
4207 offset = pos;
4208
4209 if (constant)
4210 bitpos = TREE_INT_CST_LOW (constant);
4211
4212 if (offset)
4213 {
4214 rtx offset_rtx;
4215
4216 if (contains_placeholder_p (offset))
4217 offset = build (WITH_RECORD_EXPR, sizetype,
4218 offset, make_tree (TREE_TYPE (exp), target));
4219
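/* OFFSET, the variable part of the field position, is in bits;
convert it to a byte offset. */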
4220 offset = size_binop (EXACT_DIV_EXPR, offset,
4221 size_int (BITS_PER_UNIT));
4222
4223 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4224 if (GET_CODE (to_rtx) != MEM)
4225 abort ();
4226
4227 if (GET_MODE (offset_rtx) != ptr_mode)
4228 {
4229 #ifdef POINTERS_EXTEND_UNSIGNED
4230 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4231 #else
4232 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4233 #endif
4234 }
4235
4236 to_rtx
4237 = change_address (to_rtx, VOIDmode,
4238 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4239 force_reg (ptr_mode,
4240 offset_rtx)));
4241 }
4242
4243 if (TREE_READONLY (field))
4244 {
4245 if (GET_CODE (to_rtx) == MEM)
4246 to_rtx = copy_rtx (to_rtx);
4247
4248 RTX_UNCHANGING_P (to_rtx) = 1;
4249 }
4250
4251 #ifdef WORD_REGISTER_OPERATIONS
4252 /* If this initializes a field that is smaller than a word, at the
4253 start of a word, try to widen it to a full word.
4254 This special case allows us to output C++ member function
4255 initializations in a form that the optimizers can understand. */
4256 if (constant
4257 && GET_CODE (target) == REG
4258 && bitsize < BITS_PER_WORD
4259 && bitpos % BITS_PER_WORD == 0
4260 && GET_MODE_CLASS (mode) == MODE_INT
4261 && TREE_CODE (value) == INTEGER_CST
4262 && GET_CODE (exp_size) == CONST_INT
4263 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4264 {
4265 tree type = TREE_TYPE (value);
4266 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4267 {
4268 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4269 value = convert (type, value);
4270 }
4271 if (BYTES_BIG_ENDIAN)
4272 value
4273 = fold (build (LSHIFT_EXPR, type, value,
4274 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4275 bitsize = BITS_PER_WORD;
4276 mode = word_mode;
4277 }
4278 #endif
4279 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4280 TREE_VALUE (elt), type,
4281 MIN (align,
4282 DECL_ALIGN (TREE_PURPOSE (elt))),
4283 cleared);
4284 }
4285 }
4286 else if (TREE_CODE (type) == ARRAY_TYPE)
4287 {
4288 register tree elt;
4289 register int i;
4290 int need_to_clear;
4291 tree domain = TYPE_DOMAIN (type);
4292 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4293 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4294 tree elttype = TREE_TYPE (type);
4295
4296 /* If the constructor has fewer elements than the array,
4297 clear the whole array first. Similarly if this is
4298 a static constructor of a non-BLKmode object. */
4299 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4300 need_to_clear = 1;
4301 else
4302 {
4303 HOST_WIDE_INT count = 0, zero_count = 0;
4304 need_to_clear = 0;
4305 /* This loop is a more accurate version of the loop in
4306 mostly_zeros_p (it handles RANGE_EXPR in an index).
4307 It is also needed to check for missing elements. */
4308 for (elt = CONSTRUCTOR_ELTS (exp);
4309 elt != NULL_TREE;
4310 elt = TREE_CHAIN (elt))
4311 {
4312 tree index = TREE_PURPOSE (elt);
4313 HOST_WIDE_INT this_node_count;
4314 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4315 {
4316 tree lo_index = TREE_OPERAND (index, 0);
4317 tree hi_index = TREE_OPERAND (index, 1);
4318 if (TREE_CODE (lo_index) != INTEGER_CST
4319 || TREE_CODE (hi_index) != INTEGER_CST)
4320 {
4321 need_to_clear = 1;
4322 break;
4323 }
4324 this_node_count = TREE_INT_CST_LOW (hi_index)
4325 - TREE_INT_CST_LOW (lo_index) + 1;
4326 }
4327 else
4328 this_node_count = 1;
4329 count += this_node_count;
4330 if (mostly_zeros_p (TREE_VALUE (elt)))
4331 zero_count += this_node_count;
4332 }
4333 /* Clear the entire array first if there are any missing elements,
4334 or if the incidence of zero elements is >= 75%. */
4335 if (count < maxelt - minelt + 1
4336 || 4 * zero_count >= 3 * count)
4337 need_to_clear = 1;
4338 }
4339 if (need_to_clear && size > 0)
4340 {
4341 if (! cleared)
4342 clear_storage (target, GEN_INT (size),
4343 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4344 cleared = 1;
4345 }
4346 else
4347 /* Inform later passes that the old value is dead. */
4348 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4349
4350 /* Store each element of the constructor into
4351 the corresponding element of TARGET, determined
4352 by counting the elements. */
4353 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4354 elt;
4355 elt = TREE_CHAIN (elt), i++)
4356 {
4357 register enum machine_mode mode;
4358 int bitsize;
4359 int bitpos;
4360 int unsignedp;
4361 tree value = TREE_VALUE (elt);
4362 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4363 tree index = TREE_PURPOSE (elt);
4364 rtx xtarget = target;
4365
4366 if (cleared && is_zeros_p (value))
4367 continue;
4368
4369 unsignedp = TREE_UNSIGNED (elttype);
4370 mode = TYPE_MODE (elttype);
4371 if (mode == BLKmode)
4372 {
4373 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4374 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4375 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4376 else
4377 bitsize = -1;
4378 }
4379 else
4380 bitsize = GET_MODE_BITSIZE (mode);
4381
4382 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4383 {
4384 tree lo_index = TREE_OPERAND (index, 0);
4385 tree hi_index = TREE_OPERAND (index, 1);
4386 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4387 struct nesting *loop;
4388 HOST_WIDE_INT lo, hi, count;
4389 tree position;
4390
4391 /* If the range is constant and "small", unroll the loop. */
4392 if (TREE_CODE (lo_index) == INTEGER_CST
4393 && TREE_CODE (hi_index) == INTEGER_CST
4394 && (lo = TREE_INT_CST_LOW (lo_index),
4395 hi = TREE_INT_CST_LOW (hi_index),
4396 count = hi - lo + 1,
4397 (GET_CODE (target) != MEM
4398 || count <= 2
4399 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4400 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4401 <= 40 * 8))))
4402 {
4403 lo -= minelt; hi -= minelt;
4404 for (; lo <= hi; lo++)
4405 {
4406 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4407 store_constructor_field (target, bitsize, bitpos, mode,
4408 value, type, align, cleared);
4409 }
4410 }
4411 else
4412 {
4413 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4414 loop_top = gen_label_rtx ();
4415 loop_end = gen_label_rtx ();
4416
4417 unsignedp = TREE_UNSIGNED (domain);
4418
4419 index = build_decl (VAR_DECL, NULL_TREE, domain);
4420
4421 DECL_RTL (index) = index_r
4422 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4423 &unsignedp, 0));
4424
4425 if (TREE_CODE (value) == SAVE_EXPR
4426 && SAVE_EXPR_RTL (value) == 0)
4427 {
4428 /* Make sure value gets expanded once before the
4429 loop. */
4430 expand_expr (value, const0_rtx, VOIDmode, 0);
4431 emit_queue ();
4432 }
4433 store_expr (lo_index, index_r, 0);
4434 loop = expand_start_loop (0);
4435
4436 /* Assign value to element index. */
4437 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4438 size_int (BITS_PER_UNIT));
4439 position = size_binop (MULT_EXPR,
4440 size_binop (MINUS_EXPR, index,
4441 TYPE_MIN_VALUE (domain)),
4442 position);
4443 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4444 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4445 xtarget = change_address (target, mode, addr);
4446 if (TREE_CODE (value) == CONSTRUCTOR)
4447 store_constructor (value, xtarget, align, cleared,
4448 bitsize / BITS_PER_UNIT);
4449 else
4450 store_expr (value, xtarget, 0);
4451
4452 expand_exit_loop_if_false (loop,
4453 build (LT_EXPR, integer_type_node,
4454 index, hi_index));
4455
4456 expand_increment (build (PREINCREMENT_EXPR,
4457 TREE_TYPE (index),
4458 index, integer_one_node), 0, 0);
4459 expand_end_loop ();
4460 emit_label (loop_end);
4461
4462 /* Needed by stupid register allocation, to extend the
4463 lifetime of pseudo-regs used by target past the end
4464 of the loop. */
4465 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4466 }
4467 }
4468 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4469 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4470 {
4471 rtx pos_rtx, addr;
4472 tree position;
4473
4474 if (index == 0)
4475 index = size_int (i);
4476
4477 if (minelt)
4478 index = size_binop (MINUS_EXPR, index,
4479 TYPE_MIN_VALUE (domain));
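/* Byte offset of this element: the zero-based index times the
element size in bytes. */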
4480 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4481 size_int (BITS_PER_UNIT));
4482 position = size_binop (MULT_EXPR, index, position);
4483 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4484 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4485 xtarget = change_address (target, mode, addr);
4486 store_expr (value, xtarget, 0);
4487 }
4488 else
4489 {
4490 if (index != 0)
4491 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4492 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4493 else
4494 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4495 store_constructor_field (target, bitsize, bitpos, mode, value,
4496 type, align, cleared);
4497 }
4498 }
4499 }
4500 /* set constructor assignments */
4501 else if (TREE_CODE (type) == SET_TYPE)
4502 {
4503 tree elt = CONSTRUCTOR_ELTS (exp);
4504 int nbytes = int_size_in_bytes (type), nbits;
4505 tree domain = TYPE_DOMAIN (type);
4506 tree domain_min, domain_max, bitlength;
4507
4508 /* The default implementation strategy is to extract the constant
4509 parts of the constructor, use that to initialize the target,
4510 and then "or" in whatever non-constant ranges we need in addition.
4511
4512 If a large set is all zero or all ones, it is
4513 probably better to set it using memset (if available) or bzero.
4514 Also, if a large set has just a single range, it may be
4515 better to first clear the whole set (using
4516 bzero/memset), and then set the bits we want. */
4517
4518 /* Check for all zeros. */
4519 if (elt == NULL_TREE && size > 0)
4520 {
4521 if (!cleared)
4522 clear_storage (target, GEN_INT (size),
4523 TYPE_ALIGN (type) / BITS_PER_UNIT);
4524 return;
4525 }
4526
4527 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4528 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4529 bitlength = size_binop (PLUS_EXPR,
4530 size_binop (MINUS_EXPR, domain_max, domain_min),
4531 size_one_node);
4532
4533 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4534 abort ();
4535 nbits = TREE_INT_CST_LOW (bitlength);
4536
4537 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4538 are "complicated" (more than one range), initialize (the
4539 constant parts) by copying from a constant. */
4540 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4541 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4542 {
4543 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4544 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4545 char *bit_buffer = (char *) alloca (nbits);
4546 HOST_WIDE_INT word = 0;
4547 int bit_pos = 0;
4548 int ibit = 0;
4549 int offset = 0; /* In bytes from beginning of set. */
4550 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
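/* BIT_BUFFER holds one flag per bit of the set; pack the flags
SET_WORD_SIZE at a time into WORD and store each completed word
into the target. */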
4551 for (;;)
4552 {
4553 if (bit_buffer[ibit])
4554 {
4555 if (BYTES_BIG_ENDIAN)
4556 word |= (1 << (set_word_size - 1 - bit_pos));
4557 else
4558 word |= 1 << bit_pos;
4559 }
4560 bit_pos++; ibit++;
4561 if (bit_pos >= set_word_size || ibit == nbits)
4562 {
4563 if (word != 0 || ! cleared)
4564 {
4565 rtx datum = GEN_INT (word);
4566 rtx to_rtx;
4567 /* The assumption here is that it is safe to use
4568 XEXP if the set is multi-word, but not if
4569 it's single-word. */
4570 if (GET_CODE (target) == MEM)
4571 {
4572 to_rtx = plus_constant (XEXP (target, 0), offset);
4573 to_rtx = change_address (target, mode, to_rtx);
4574 }
4575 else if (offset == 0)
4576 to_rtx = target;
4577 else
4578 abort ();
4579 emit_move_insn (to_rtx, datum);
4580 }
4581 if (ibit == nbits)
4582 break;
4583 word = 0;
4584 bit_pos = 0;
4585 offset += set_word_size / BITS_PER_UNIT;
4586 }
4587 }
4588 }
4589 else if (!cleared)
4590 {
4591 /* Don't bother clearing storage if the set is all ones. */
4592 if (TREE_CHAIN (elt) != NULL_TREE
4593 || (TREE_PURPOSE (elt) == NULL_TREE
4594 ? nbits != 1
4595 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4596 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4597 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4598 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4599 != nbits))))
4600 clear_storage (target, expr_size (exp),
4601 TYPE_ALIGN (type) / BITS_PER_UNIT);
4602 }
4603
4604 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4605 {
4606 /* start of range of element or NULL */
4607 tree startbit = TREE_PURPOSE (elt);
4608 /* end of range of element, or element value */
4609 tree endbit = TREE_VALUE (elt);
4610 #ifdef TARGET_MEM_FUNCTIONS
4611 HOST_WIDE_INT startb, endb;
4612 #endif
4613 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4614
4615 bitlength_rtx = expand_expr (bitlength,
4616 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4617
4618 /* handle non-range tuple element like [ expr ] */
4619 if (startbit == NULL_TREE)
4620 {
4621 startbit = save_expr (endbit);
4622 endbit = startbit;
4623 }
4624 startbit = convert (sizetype, startbit);
4625 endbit = convert (sizetype, endbit);
4626 if (! integer_zerop (domain_min))
4627 {
4628 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4629 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4630 }
4631 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4632 EXPAND_CONST_ADDRESS);
4633 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4634 EXPAND_CONST_ADDRESS);
4635
4636 if (REG_P (target))
4637 {
4638 targetx = assign_stack_temp (GET_MODE (target),
4639 GET_MODE_SIZE (GET_MODE (target)),
4640 0);
4641 emit_move_insn (targetx, target);
4642 }
4643 else if (GET_CODE (target) == MEM)
4644 targetx = target;
4645 else
4646 abort ();
4647
4648 #ifdef TARGET_MEM_FUNCTIONS
4649 /* Optimization: If startbit and endbit are
4650 constants divisible by BITS_PER_UNIT,
4651 call memset instead. */
4652 if (TREE_CODE (startbit) == INTEGER_CST
4653 && TREE_CODE (endbit) == INTEGER_CST
4654 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4655 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4656 {
4657 emit_library_call (memset_libfunc, 0,
4658 VOIDmode, 3,
4659 plus_constant (XEXP (targetx, 0),
4660 startb / BITS_PER_UNIT),
4661 Pmode,
4662 constm1_rtx, TYPE_MODE (integer_type_node),
4663 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4664 TYPE_MODE (sizetype));
4665 }
4666 else
4667 #endif
4668 {
4669 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4670 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4671 bitlength_rtx, TYPE_MODE (sizetype),
4672 startbit_rtx, TYPE_MODE (sizetype),
4673 endbit_rtx, TYPE_MODE (sizetype));
4674 }
4675 if (REG_P (target))
4676 emit_move_insn (target, targetx);
4677 }
4678 }
4679
4680 else
4681 abort ();
4682 }
4683
4684 /* Store the value of EXP (an expression tree)
4685 into a subfield of TARGET which has mode MODE and occupies
4686 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4687 If MODE is VOIDmode, it means that we are storing into a bit-field.
4688
4689 If VALUE_MODE is VOIDmode, return nothing in particular.
4690 UNSIGNEDP is not used in this case.
4691
4692 Otherwise, return an rtx for the value stored. This rtx
4693 has mode VALUE_MODE if that is convenient to do.
4694 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4695
4696 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4697 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4698
4699 ALIAS_SET is the alias set for the destination. This value will
4700 (in general) be different from that for TARGET, since TARGET is a
4701 reference to the containing structure. */
4702
4703 static rtx
4704 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4705 unsignedp, align, total_size, alias_set)
4706 rtx target;
4707 int bitsize, bitpos;
4708 enum machine_mode mode;
4709 tree exp;
4710 enum machine_mode value_mode;
4711 int unsignedp;
4712 unsigned int align;
4713 int total_size;
4714 int alias_set;
4715 {
4716 HOST_WIDE_INT width_mask = 0;
4717
4718 if (TREE_CODE (exp) == ERROR_MARK)
4719 return const0_rtx;
4720
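/* WIDTH_MASK covers the low BITSIZE bits; it is used below to recover
the stored value without re-reading the bit-field. */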
4721 if (bitsize < HOST_BITS_PER_WIDE_INT)
4722 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4723
4724 /* If we are storing into an unaligned field of an aligned union that is
4725 in a register, we may have the mode of TARGET being an integer mode but
4726 MODE == BLKmode. In that case, get an aligned object whose size and
4727 alignment are the same as TARGET and store TARGET into it (we can avoid
4728 the store if the field being stored is the entire width of TARGET). Then
4729 call ourselves recursively to store the field into a BLKmode version of
4730 that object. Finally, load from the object into TARGET. This is not
4731 very efficient in general, but should only be slightly more expensive
4732 than the otherwise-required unaligned accesses. Perhaps this can be
4733 cleaned up later. */
4734
4735 if (mode == BLKmode
4736 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4737 {
4738 rtx object = assign_stack_temp (GET_MODE (target),
4739 GET_MODE_SIZE (GET_MODE (target)), 0);
4740 rtx blk_object = copy_rtx (object);
4741
4742 MEM_SET_IN_STRUCT_P (object, 1);
4743 MEM_SET_IN_STRUCT_P (blk_object, 1);
4744 PUT_MODE (blk_object, BLKmode);
4745
4746 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4747 emit_move_insn (object, target);
4748
4749 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4750 align, total_size, alias_set);
4751
4752 /* Even though we aren't returning target, we need to
4753 give it the updated value. */
4754 emit_move_insn (target, object);
4755
4756 return blk_object;
4757 }
4758
4759 /* If the structure is in a register or if the component
4760 is a bit field, we cannot use addressing to access it.
4761 Use bit-field techniques or SUBREG to store in it. */
4762
4763 if (mode == VOIDmode
4764 || (mode != BLKmode && ! direct_store[(int) mode]
4765 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4766 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4767 || GET_CODE (target) == REG
4768 || GET_CODE (target) == SUBREG
4769 /* If the field isn't aligned enough to store as an ordinary memref,
4770 store it as a bit field. */
4771 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4772 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4773 || bitpos % GET_MODE_ALIGNMENT (mode)))
4774 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4775 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4776 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4777 /* If the RHS and field are a constant size and the size of the
4778 RHS isn't the same size as the bitfield, we must use bitfield
4779 operations. */
4780 || ((bitsize >= 0
4781 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
4782 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
4783 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
4784 {
4785 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4786
4787 /* If BITSIZE is narrower than the size of the type of EXP
4788 we will be narrowing TEMP. Normally, what's wanted are the
4789 low-order bits. However, if EXP's type is a record and this is
4790 a big-endian machine, we want the upper BITSIZE bits. */
4791 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4792 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4793 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4794 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4795 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4796 - bitsize),
4797 temp, 1);
4798
4799 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4800 MODE. */
4801 if (mode != VOIDmode && mode != BLKmode
4802 && mode != TYPE_MODE (TREE_TYPE (exp)))
4803 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4804
4805 /* If the modes of TARGET and TEMP are both BLKmode, both
4806 must be in memory and BITPOS must be aligned on a byte
4807 boundary. If so, we simply do a block copy. */
4808 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4809 {
4810 unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
4811
4812 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4813 || bitpos % BITS_PER_UNIT != 0)
4814 abort ();
4815
4816 target = change_address (target, VOIDmode,
4817 plus_constant (XEXP (target, 0),
4818 bitpos / BITS_PER_UNIT));
4819
4820 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4821 align = MIN (exp_align, align);
4822
4823 /* Find an alignment that is consistent with the bit position. */
4824 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4825 align >>= 1;
4826
4827 emit_block_move (target, temp,
4828 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4829 / BITS_PER_UNIT),
4830 align);
4831
4832 return value_mode == VOIDmode ? const0_rtx : target;
4833 }
4834
4835 /* Store the value in the bitfield. */
4836 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4837 if (value_mode != VOIDmode)
4838 {
4839 /* The caller wants an rtx for the value. */
4840 /* If possible, avoid refetching from the bitfield itself. */
4841 if (width_mask != 0
4842 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4843 {
4844 tree count;
4845 enum machine_mode tmode;
4846
4847 if (unsignedp)
4848 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
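/* Signed value: sign-extend within BITSIZE bits by shifting the field
to the top of TMODE and arithmetically shifting it back down. */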
4849 tmode = GET_MODE (temp);
4850 if (tmode == VOIDmode)
4851 tmode = value_mode;
4852 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4853 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4854 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4855 }
4856 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4857 NULL_RTX, value_mode, 0, align,
4858 total_size);
4859 }
4860 return const0_rtx;
4861 }
4862 else
4863 {
4864 rtx addr = XEXP (target, 0);
4865 rtx to_rtx;
4866
4867 /* If a value is wanted, it must be the lhs;
4868 so make the address stable for multiple use. */
4869
4870 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4871 && ! CONSTANT_ADDRESS_P (addr)
4872 /* A frame-pointer reference is already stable. */
4873 && ! (GET_CODE (addr) == PLUS
4874 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4875 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4876 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4877 addr = copy_to_reg (addr);
4878
4879 /* Now build a reference to just the desired component. */
4880
4881 to_rtx = copy_rtx (change_address (target, mode,
4882 plus_constant (addr,
4883 (bitpos
4884 / BITS_PER_UNIT))));
4885 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4886 MEM_ALIAS_SET (to_rtx) = alias_set;
4887
4888 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4889 }
4890 }
4891 \f
4892 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4893 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4894 ARRAY_REFs and find the ultimate containing object, which we return.
4895
4896 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4897 bit position, and *PUNSIGNEDP to the signedness of the field.
4898 If the position of the field is variable, we store a tree
4899 giving the variable offset (in units) in *POFFSET.
4900 This offset is in addition to the bit position.
4901 If the position is not variable, we store 0 in *POFFSET.
4902 We set *PALIGNMENT to the alignment in bytes of the address that will be
4903 computed. This is the alignment of the thing we return if *POFFSET
4904 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4905
4906 If any of the extraction expressions is volatile,
4907 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4908
4909 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4910 is a mode that can be used to access the field. In that case, *PBITSIZE
4911 is redundant.
4912
4913 If the field describes a variable-sized object, *PMODE is set to
4914 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4915 this case, but the address of the object can be found. */
4916
4917 tree
4918 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4919 punsignedp, pvolatilep, palignment)
4920 tree exp;
4921 int *pbitsize;
4922 int *pbitpos;
4923 tree *poffset;
4924 enum machine_mode *pmode;
4925 int *punsignedp;
4926 int *pvolatilep;
4927 unsigned int *palignment;
4928 {
4929 tree orig_exp = exp;
4930 tree size_tree = 0;
4931 enum machine_mode mode = VOIDmode;
4932 tree offset = integer_zero_node;
4933 unsigned int alignment = BIGGEST_ALIGNMENT;
4934
4935 if (TREE_CODE (exp) == COMPONENT_REF)
4936 {
4937 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4938 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4939 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4940 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4941 }
4942 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4943 {
4944 size_tree = TREE_OPERAND (exp, 1);
4945 *punsignedp = TREE_UNSIGNED (exp);
4946 }
4947 else
4948 {
4949 mode = TYPE_MODE (TREE_TYPE (exp));
4950 if (mode == BLKmode)
4951 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4952
4953 *pbitsize = GET_MODE_BITSIZE (mode);
4954 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4955 }
4956
4957 if (size_tree)
4958 {
4959 if (TREE_CODE (size_tree) != INTEGER_CST)
4960 mode = BLKmode, *pbitsize = -1;
4961 else
4962 *pbitsize = TREE_INT_CST_LOW (size_tree);
4963 }
4964
4965 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4966 and find the ultimate containing object. */
4967
4968 *pbitpos = 0;
4969
4970 while (1)
4971 {
4972 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4973 {
4974 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4975 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4976 : TREE_OPERAND (exp, 2));
4977 tree constant = integer_zero_node, var = pos;
4978
4979 /* If this field hasn't been filled in yet, don't go
4980 past it. This should only happen when folding expressions
4981 made during type construction. */
4982 if (pos == 0)
4983 break;
4984
4985 /* Assume here that the offset is a multiple of a unit.
4986 If not, there should be an explicitly added constant. */
4987 if (TREE_CODE (pos) == PLUS_EXPR
4988 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4989 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4990 else if (TREE_CODE (pos) == INTEGER_CST)
4991 constant = pos, var = integer_zero_node;
4992
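/* Accumulate the constant part of the position in bits and the
variable part in bytes. */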
4993 *pbitpos += TREE_INT_CST_LOW (constant);
4994 offset = size_binop (PLUS_EXPR, offset,
4995 size_binop (EXACT_DIV_EXPR, var,
4996 size_int (BITS_PER_UNIT)));
4997 }
4998
4999 else if (TREE_CODE (exp) == ARRAY_REF)
5000 {
5001 /* This code is based on the code in case ARRAY_REF in expand_expr
5002 below. We assume here that the size of an array element is
5003 always an integral multiple of BITS_PER_UNIT. */
5004
5005 tree index = TREE_OPERAND (exp, 1);
5006 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5007 tree low_bound
5008 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5009 tree index_type = TREE_TYPE (index);
5010 tree xindex;
5011
5012 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5013 {
5014 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5015 index);
5016 index_type = TREE_TYPE (index);
5017 }
5018
5019 /* Optimize the special-case of a zero lower bound.
5020
5021 We convert the low_bound to sizetype to avoid some problems
5022 with constant folding. (E.g. suppose the lower bound is 1,
5023 and its mode is QI. Without the conversion, (ARRAY
5024 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5025 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5026
5027 But sizetype isn't quite right either (especially if
5028 the lowbound is negative). FIXME */
5029
5030 if (! integer_zerop (low_bound))
5031 index = fold (build (MINUS_EXPR, index_type, index,
5032 convert (sizetype, low_bound)));
5033
5034 if (TREE_CODE (index) == INTEGER_CST)
5035 {
5036 index = convert (sbitsizetype, index);
5037 index_type = TREE_TYPE (index);
5038 }
5039
5040 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5041 convert (sbitsizetype,
5042 TYPE_SIZE (TREE_TYPE (exp)))));
5043
5044 if (TREE_CODE (xindex) == INTEGER_CST
5045 && TREE_INT_CST_HIGH (xindex) == 0)
5046 *pbitpos += TREE_INT_CST_LOW (xindex);
5047 else
5048 {
5049 /* Either the bit offset calculated above is not constant, or
5050 it overflowed. In either case, redo the multiplication
5051 against the size in units. This is especially important
5052 in the non-constant case to avoid a division at runtime. */
5053 xindex = fold (build (MULT_EXPR, ssizetype, index,
5054 convert (ssizetype,
5055 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5056
5057 if (contains_placeholder_p (xindex))
5058 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5059
5060 offset = size_binop (PLUS_EXPR, offset, xindex);
5061 }
5062 }
5063 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5064 && ! ((TREE_CODE (exp) == NOP_EXPR
5065 || TREE_CODE (exp) == CONVERT_EXPR)
5066 && (TYPE_MODE (TREE_TYPE (exp))
5067 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5068 break;
5069
5070 /* If any reference in the chain is volatile, the effect is volatile. */
5071 if (TREE_THIS_VOLATILE (exp))
5072 *pvolatilep = 1;
5073
5074 /* If the offset is non-constant already, then we can't assume any
5075 alignment more than the alignment here. */
5076 if (! integer_zerop (offset))
5077 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5078
5079 exp = TREE_OPERAND (exp, 0);
5080 }
5081
5082 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5083 alignment = MIN (alignment, DECL_ALIGN (exp));
5084 else if (TREE_TYPE (exp) != 0)
5085 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5086
5087 if (integer_zerop (offset))
5088 offset = 0;
5089
5090 if (offset != 0 && contains_placeholder_p (offset))
5091 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5092
5093 *pmode = mode;
5094 *poffset = offset;
5095 *palignment = alignment / BITS_PER_UNIT;
5096 return exp;
5097 }
5098
5099 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5100 static enum memory_use_mode
5101 get_memory_usage_from_modifier (modifier)
5102 enum expand_modifier modifier;
5103 {
5104 switch (modifier)
5105 {
5106 case EXPAND_NORMAL:
5107 case EXPAND_SUM:
5108 return MEMORY_USE_RO;
5109 break;
5110 case EXPAND_MEMORY_USE_WO:
5111 return MEMORY_USE_WO;
5112 break;
5113 case EXPAND_MEMORY_USE_RW:
5114 return MEMORY_USE_RW;
5115 break;
5116 case EXPAND_MEMORY_USE_DONT:
5117 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5118 MEMORY_USE_DONT, because they are modifiers to a call of
5119 expand_expr in the ADDR_EXPR case of expand_expr. */
5120 case EXPAND_CONST_ADDRESS:
5121 case EXPAND_INITIALIZER:
5122 return MEMORY_USE_DONT;
5123 case EXPAND_MEMORY_USE_BAD:
5124 default:
5125 abort ();
5126 }
5127 }
5128 \f
5129 /* Given an rtx VALUE that may contain additions and multiplications,
5130 return an equivalent value that just refers to a register or memory.
5131 This is done by generating instructions to perform the arithmetic
5132 and returning a pseudo-register containing the value.
5133
5134 The returned value may be a REG, SUBREG, MEM or constant. */
5135
5136 rtx
5137 force_operand (value, target)
5138 rtx value, target;
5139 {
5140 register optab binoptab = 0;
5141 /* Use a temporary to force order of execution of calls to
5142 `force_operand'. */
5143 rtx tmp;
5144 register rtx op2;
5145 /* Use subtarget as the target for operand 0 of a binary operation. */
5146 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5147
5148 /* Check for a PIC address load. */
5149 if (flag_pic
5150 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5151 && XEXP (value, 0) == pic_offset_table_rtx
5152 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5153 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5154 || GET_CODE (XEXP (value, 1)) == CONST))
5155 {
5156 if (!subtarget)
5157 subtarget = gen_reg_rtx (GET_MODE (value));
5158 emit_move_insn (subtarget, value);
5159 return subtarget;
5160 }
5161
5162 if (GET_CODE (value) == PLUS)
5163 binoptab = add_optab;
5164 else if (GET_CODE (value) == MINUS)
5165 binoptab = sub_optab;
5166 else if (GET_CODE (value) == MULT)
5167 {
5168 op2 = XEXP (value, 1);
5169 if (!CONSTANT_P (op2)
5170 && !(GET_CODE (op2) == REG && op2 != subtarget))
5171 subtarget = 0;
5172 tmp = force_operand (XEXP (value, 0), subtarget);
5173 return expand_mult (GET_MODE (value), tmp,
5174 force_operand (op2, NULL_RTX),
5175 target, 0);
5176 }
5177
5178 if (binoptab)
5179 {
5180 op2 = XEXP (value, 1);
5181 if (!CONSTANT_P (op2)
5182 && !(GET_CODE (op2) == REG && op2 != subtarget))
5183 subtarget = 0;
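/* Rewrite subtraction of a constant as addition of its negation so
that the PLUS-specific handling below applies. */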
5184 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5185 {
5186 binoptab = add_optab;
5187 op2 = negate_rtx (GET_MODE (value), op2);
5188 }
5189
5190 /* Check for an addition with OP2 a constant integer and our first
5191 operand a PLUS of a virtual register and something else. In that
5192 case, we want to emit the sum of the virtual register and the
5193 constant first and then add the other value. This allows virtual
5194 register instantiation to simply modify the constant rather than
5195 creating another one around this addition. */
5196 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5197 && GET_CODE (XEXP (value, 0)) == PLUS
5198 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5199 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5200 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5201 {
5202 rtx temp = expand_binop (GET_MODE (value), binoptab,
5203 XEXP (XEXP (value, 0), 0), op2,
5204 subtarget, 0, OPTAB_LIB_WIDEN);
5205 return expand_binop (GET_MODE (value), binoptab, temp,
5206 force_operand (XEXP (XEXP (value, 0), 1), 0),
5207 target, 0, OPTAB_LIB_WIDEN);
5208 }
5209
5210 tmp = force_operand (XEXP (value, 0), subtarget);
5211 return expand_binop (GET_MODE (value), binoptab, tmp,
5212 force_operand (op2, NULL_RTX),
5213 target, 0, OPTAB_LIB_WIDEN);
5214 /* We give UNSIGNEDP = 0 to expand_binop
5215 because the only operations we are expanding here are signed ones. */
5216 }
5217 return value;
5218 }
5219 \f
5220 /* Subroutine of expand_expr:
5221 save the non-copied parts (LIST) of an expr (LHS), and return a list
5222 which can restore these values to their previous values,
5223 should something modify their storage. */
5224
5225 static tree
5226 save_noncopied_parts (lhs, list)
5227 tree lhs;
5228 tree list;
5229 {
5230 tree tail;
5231 tree parts = 0;
5232
5233 for (tail = list; tail; tail = TREE_CHAIN (tail))
5234 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5235 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5236 else
5237 {
5238 tree part = TREE_VALUE (tail);
5239 tree part_type = TREE_TYPE (part);
5240 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5241 rtx target = assign_temp (part_type, 0, 1, 1);
5242 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5243 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5244 parts = tree_cons (to_be_saved,
5245 build (RTL_EXPR, part_type, NULL_TREE,
5246 (tree) target),
5247 parts);
5248 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5249 }
5250 return parts;
5251 }
5252
5253 /* Subroutine of expand_expr:
5254 record the non-copied parts (LIST) of an expr (LHS), and return a list
5255 which specifies the initial values of these parts. */
5256
5257 static tree
5258 init_noncopied_parts (lhs, list)
5259 tree lhs;
5260 tree list;
5261 {
5262 tree tail;
5263 tree parts = 0;
5264
5265 for (tail = list; tail; tail = TREE_CHAIN (tail))
5266 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5267 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5268 else if (TREE_PURPOSE (tail))
5269 {
5270 tree part = TREE_VALUE (tail);
5271 tree part_type = TREE_TYPE (part);
5272 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5273 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5274 }
5275 return parts;
5276 }
5277
5278 /* Subroutine of expand_expr: return nonzero iff there is no way that
5279 EXP can reference X, which is being modified. TOP_P is nonzero if this
5280 call is going to be used to determine whether we need a temporary
5281 for EXP, as opposed to a recursive call to this function.
5282
5283 It is always safe for this routine to return zero since it merely
5284 searches for optimization opportunities. */
5285
5286 static int
5287 safe_from_p (x, exp, top_p)
5288 rtx x;
5289 tree exp;
5290 int top_p;
5291 {
5292 rtx exp_rtl = 0;
5293 int i, nops;
5294 static int save_expr_count;
5295 static int save_expr_size = 0;
5296 static tree *save_expr_rewritten;
5297 static tree save_expr_trees[256];
5298
5299 if (x == 0
5300 /* If EXP has varying size, we MUST use a target since we currently
5301 have no way of allocating temporaries of variable size
5302 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5303 So we assume here that something at a higher level has prevented a
5304 clash. This is somewhat bogus, but the best we can do. Only
5305 do this when X is BLKmode and when we are at the top level. */
5306 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5307 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5308 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5309 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5310 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5311 != INTEGER_CST)
5312 && GET_MODE (x) == BLKmode))
5313 return 1;
5314
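/* On the outermost call, set up the table that records SAVE_EXPRs
   temporarily rewritten to ERROR_MARK, run the real scan, and then
   restore every recorded node before returning.  */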
5315 if (top_p && save_expr_size == 0)
5316 {
5317 int rtn;
5318
5319 save_expr_count = 0;
5320 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5321 save_expr_rewritten = &save_expr_trees[0];
5322
5323 rtn = safe_from_p (x, exp, 1);
5324
5325 for (i = 0; i < save_expr_count; ++i)
5326 {
5327 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5328 abort ();
5329 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5330 }
5331
5332 save_expr_size = 0;
5333
5334 return rtn;
5335 }
5336
5337 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5338 find the underlying pseudo. */
5339 if (GET_CODE (x) == SUBREG)
5340 {
5341 x = SUBREG_REG (x);
5342 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5343 return 0;
5344 }
5345
5346 /* If X is a location in the outgoing argument area, it is always safe. */
5347 if (GET_CODE (x) == MEM
5348 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5349 || (GET_CODE (XEXP (x, 0)) == PLUS
5350 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5351 return 1;
5352
5353 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5354 {
5355 case 'd':
5356 exp_rtl = DECL_RTL (exp);
5357 break;
5358
5359 case 'c':
5360 return 1;
5361
5362 case 'x':
5363 if (TREE_CODE (exp) == TREE_LIST)
5364 return ((TREE_VALUE (exp) == 0
5365 || safe_from_p (x, TREE_VALUE (exp), 0))
5366 && (TREE_CHAIN (exp) == 0
5367 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5368 else if (TREE_CODE (exp) == ERROR_MARK)
5369 return 1; /* An already-visited SAVE_EXPR? */
5370 else
5371 return 0;
5372
5373 case '1':
5374 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5375
5376 case '2':
5377 case '<':
5378 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5379 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5380
5381 case 'e':
5382 case 'r':
5383 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5384 the expression. If it is set, we conflict iff we are that rtx or
5385 both are in memory. Otherwise, we check all operands of the
5386 expression recursively. */
5387
5388 switch (TREE_CODE (exp))
5389 {
5390 case ADDR_EXPR:
5391 return (staticp (TREE_OPERAND (exp, 0))
5392 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5393 || TREE_STATIC (exp));
5394
5395 case INDIRECT_REF:
5396 if (GET_CODE (x) == MEM)
5397 return 0;
5398 break;
5399
5400 case CALL_EXPR:
5401 exp_rtl = CALL_EXPR_RTL (exp);
5402 if (exp_rtl == 0)
5403 {
5404 /* Assume that the call will clobber all hard registers and
5405 all of memory. */
5406 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5407 || GET_CODE (x) == MEM)
5408 return 0;
5409 }
5410
5411 break;
5412
5413 case RTL_EXPR:
5414 /* If a sequence exists, we would have to scan every instruction
5415 in the sequence to see if it was safe. This is probably not
5416 worthwhile. */
5417 if (RTL_EXPR_SEQUENCE (exp))
5418 return 0;
5419
5420 exp_rtl = RTL_EXPR_RTL (exp);
5421 break;
5422
5423 case WITH_CLEANUP_EXPR:
5424 exp_rtl = RTL_EXPR_RTL (exp);
5425 break;
5426
5427 case CLEANUP_POINT_EXPR:
5428 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5429
5430 case SAVE_EXPR:
5431 exp_rtl = SAVE_EXPR_RTL (exp);
5432 if (exp_rtl)
5433 break;
5434
5435 /* This SAVE_EXPR might appear many times in the top-level
5436 safe_from_p() expression, and if it has a complex
5437 subexpression, examining it multiple times could result
5438 in a combinatorial explosion. E.g. on an Alpha
5439 running at least 200MHz, a Fortran test case compiled with
5440 optimization took about 28 minutes to compile -- even though
5441 it was only a few lines long, and the complicated line causing
5442 so much time to be spent in the earlier version of safe_from_p()
5443 had only 293 or so unique nodes.
5444
5445 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5446 where it is so we can turn it back in the top-level safe_from_p()
5447 when we're done. */
5448
5449 /* For now, don't bother re-sizing the array. */
5450 if (save_expr_count >= save_expr_size)
5451 return 0;
5452 save_expr_rewritten[save_expr_count++] = exp;
5453
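/* Scan each operand with EXP temporarily marked as an ERROR_MARK, so
   that a recursive occurrence of this SAVE_EXPR is treated as already
   visited.  EXP is left as an ERROR_MARK on exit; the top-level call
   switches the recorded nodes back to SAVE_EXPR when it finishes.  */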
5454 nops = tree_code_length[(int) SAVE_EXPR];
5455 for (i = 0; i < nops; i++)
5456 {
5457 tree operand = TREE_OPERAND (exp, i);
5458 if (operand == NULL_TREE)
5459 continue;
5460 TREE_SET_CODE (exp, ERROR_MARK);
5461 if (!safe_from_p (x, operand, 0))
5462 return 0;
5463 TREE_SET_CODE (exp, SAVE_EXPR);
5464 }
5465 TREE_SET_CODE (exp, ERROR_MARK);
5466 return 1;
5467
5468 case BIND_EXPR:
5469 /* The only operand we look at is operand 1. The rest aren't
5470 part of the expression. */
5471 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5472
5473 case METHOD_CALL_EXPR:
5474 /* This takes a rtx argument, but shouldn't appear here. */
5475 abort ();
5476
5477 default:
5478 break;
5479 }
5480
5481 /* If we have an rtx, we do not need to scan our operands. */
5482 if (exp_rtl)
5483 break;
5484
5485 nops = tree_code_length[(int) TREE_CODE (exp)];
5486 for (i = 0; i < nops; i++)
5487 if (TREE_OPERAND (exp, i) != 0
5488 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5489 return 0;
5490 }
5491
5492 /* If we have an rtl, find any enclosed object. Then see if we conflict
5493 with it. */
5494 if (exp_rtl)
5495 {
5496 if (GET_CODE (exp_rtl) == SUBREG)
5497 {
5498 exp_rtl = SUBREG_REG (exp_rtl);
5499 if (GET_CODE (exp_rtl) == REG
5500 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5501 return 0;
5502 }
5503
5504 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5505 are memory and EXP is not readonly. */
5506 return ! (rtx_equal_p (x, exp_rtl)
5507 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5508 && ! TREE_READONLY (exp)));
5509 }
5510
5511 /* If we reach here, it is safe. */
5512 return 1;
5513 }
5514
5515 /* Subroutine of expand_expr: return nonzero iff EXP is an
5516 expression whose type is statically determinable. */
5517
5518 static int
5519 fixed_type_p (exp)
5520 tree exp;
5521 {
5522 if (TREE_CODE (exp) == PARM_DECL
5523 || TREE_CODE (exp) == VAR_DECL
5524 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5525 || TREE_CODE (exp) == COMPONENT_REF
5526 || TREE_CODE (exp) == ARRAY_REF)
5527 return 1;
5528 return 0;
5529 }
5530
5531 /* Subroutine of expand_expr: return rtx if EXP is a
5532 variable or parameter; else return 0. */
5533
5534 static rtx
5535 var_rtx (exp)
5536 tree exp;
5537 {
5538 STRIP_NOPS (exp);
5539 switch (TREE_CODE (exp))
5540 {
5541 case PARM_DECL:
5542 case VAR_DECL:
5543 return DECL_RTL (exp);
5544 default:
5545 return 0;
5546 }
5547 }
5548
5549 #ifdef MAX_INTEGER_COMPUTATION_MODE
5550 void
5551 check_max_integer_computation_mode (exp)
5552 tree exp;
5553 {
5554 enum tree_code code;
5555 enum machine_mode mode;
5556
5557 /* Strip any NOPs that don't change the mode. */
5558 STRIP_NOPS (exp);
5559 code = TREE_CODE (exp);
5560
5561 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5562 if (code == NOP_EXPR
5563 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5564 return;
5565
5566 /* First check the type of the overall operation. We need only look at
5567 unary, binary and relational operations. */
5568 if (TREE_CODE_CLASS (code) == '1'
5569 || TREE_CODE_CLASS (code) == '2'
5570 || TREE_CODE_CLASS (code) == '<')
5571 {
5572 mode = TYPE_MODE (TREE_TYPE (exp));
5573 if (GET_MODE_CLASS (mode) == MODE_INT
5574 && mode > MAX_INTEGER_COMPUTATION_MODE)
5575 fatal ("unsupported wide integer operation");
5576 }
5577
5578 /* Check operand of a unary op. */
5579 if (TREE_CODE_CLASS (code) == '1')
5580 {
5581 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5582 if (GET_MODE_CLASS (mode) == MODE_INT
5583 && mode > MAX_INTEGER_COMPUTATION_MODE)
5584 fatal ("unsupported wide integer operation");
5585 }
5586
5587 /* Check operands of a binary/comparison op. */
5588 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5589 {
5590 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5591 if (GET_MODE_CLASS (mode) == MODE_INT
5592 && mode > MAX_INTEGER_COMPUTATION_MODE)
5593 fatal ("unsupported wide integer operation");
5594
5595 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5596 if (GET_MODE_CLASS (mode) == MODE_INT
5597 && mode > MAX_INTEGER_COMPUTATION_MODE)
5598 fatal ("unsupported wide integer operation");
5599 }
5600 }
5601 #endif
5602
5603 \f
5604 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5605 has any readonly fields. If any of the fields have types that
5606 contain readonly fields, return true as well. */
5607
5608 static int
5609 readonly_fields_p (type)
5610 tree type;
5611 {
5612 tree field;
5613
5614 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5615 if (TREE_CODE (field) == FIELD_DECL
5616 && (TREE_READONLY (field)
5617 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5618 && readonly_fields_p (TREE_TYPE (field)))))
5619 return 1;
5620
5621 return 0;
5622 }
5623 \f
5624 /* expand_expr: generate code for computing expression EXP.
5625 An rtx for the computed value is returned. The value is never null.
5626 In the case of a void EXP, const0_rtx is returned.
5627
5628 The value may be stored in TARGET if TARGET is nonzero.
5629 TARGET is just a suggestion; callers must assume that
5630 the rtx returned may not be the same as TARGET.
5631
5632 If TARGET is CONST0_RTX, it means that the value will be ignored.
5633
5634 If TMODE is not VOIDmode, it suggests generating the
5635 result in mode TMODE. But this is done only when convenient.
5636 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5637 TMODE is just a suggestion; callers must assume that
5638 the rtx returned may not have mode TMODE.
5639
5640 Note that TARGET may have neither TMODE nor MODE. In that case, it
5641 probably will not be used.
5642
5643 If MODIFIER is EXPAND_SUM then when EXP is an addition
5644 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5645 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5646 products as above, or REG or MEM, or constant.
5647 Ordinarily in such cases we would output mul or add instructions
5648 and then return a pseudo reg containing the sum.
5649
5650 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5651 it also marks a label as absolutely required (it can't be dead).
5652 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5653 This is used for outputting expressions used in initializers.
5654
5655 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5656 with a constant address even if that address is not normally legitimate.
5657 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5658
5659 rtx
5660 expand_expr (exp, target, tmode, modifier)
5661 register tree exp;
5662 rtx target;
5663 enum machine_mode tmode;
5664 enum expand_modifier modifier;
5665 {
5666 register rtx op0, op1, temp;
5667 tree type = TREE_TYPE (exp);
5668 int unsignedp = TREE_UNSIGNED (type);
5669 register enum machine_mode mode;
5670 register enum tree_code code = TREE_CODE (exp);
5671 optab this_optab;
5672 rtx subtarget, original_target;
5673 int ignore;
5674 tree context;
5675 /* Used by check-memory-usage to make modifier read only. */
5676 enum expand_modifier ro_modifier;
5677
5678 /* Handle ERROR_MARK before anybody tries to access its type. */
5679 if (TREE_CODE (exp) == ERROR_MARK)
5680 {
5681 op0 = CONST0_RTX (tmode);
5682 if (op0 != 0)
5683 return op0;
5684 return const0_rtx;
5685 }
5686
5687 mode = TYPE_MODE (type);
5688 /* Use subtarget as the target for operand 0 of a binary operation. */
5689 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5690 original_target = target;
5691 ignore = (target == const0_rtx
5692 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5693 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5694 || code == COND_EXPR)
5695 && TREE_CODE (type) == VOID_TYPE));
5696
5697 /* Make a read-only version of the modifier. */
5698 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5699 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5700 ro_modifier = modifier;
5701 else
5702 ro_modifier = EXPAND_NORMAL;
5703
5704 /* Don't use hard regs as subtargets, because the combiner
5705 can only handle pseudo regs. */
5706 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5707 subtarget = 0;
5708 /* Avoid subtargets inside loops,
5709 since they hide some invariant expressions. */
5710 if (preserve_subexpressions_p ())
5711 subtarget = 0;
5712
5713 /* If we are going to ignore this result, we need only do something
5714 if there is a side-effect somewhere in the expression. If there
5715 is, short-circuit the most common cases here. Note that we must
5716 not call expand_expr with anything but const0_rtx in case this
5717 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5718
5719 if (ignore)
5720 {
5721 if (! TREE_SIDE_EFFECTS (exp))
5722 return const0_rtx;
5723
5724 /* Ensure we reference a volatile object even if value is ignored, but
5725 don't do this if all we are doing is taking its address. */
5726 if (TREE_THIS_VOLATILE (exp)
5727 && TREE_CODE (exp) != FUNCTION_DECL
5728 && mode != VOIDmode && mode != BLKmode
5729 && modifier != EXPAND_CONST_ADDRESS)
5730 {
5731 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5732 if (GET_CODE (temp) == MEM)
5733 temp = copy_to_reg (temp);
5734 return const0_rtx;
5735 }
5736
5737 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5738 || code == INDIRECT_REF || code == BUFFER_REF)
5739 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5740 VOIDmode, ro_modifier);
5741 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5742 || code == ARRAY_REF)
5743 {
5744 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5745 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5746 return const0_rtx;
5747 }
5748 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5749 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5750 /* If the second operand has no side effects, just evaluate
5751 the first. */
5752 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5753 VOIDmode, ro_modifier);
5754 else if (code == BIT_FIELD_REF)
5755 {
5756 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5757 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5758 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5759 return const0_rtx;
5760 }
5762 target = 0;
5763 }
5764
5765 #ifdef MAX_INTEGER_COMPUTATION_MODE
5766 /* Only check stuff here if the mode we want is different from the mode
5767 of the expression; if it's the same, check_max_integer_computation_mode
5768 will handle it. Do we really need to check this stuff at all? */
5769
5770 if (target
5771 && GET_MODE (target) != mode
5772 && TREE_CODE (exp) != INTEGER_CST
5773 && TREE_CODE (exp) != PARM_DECL
5774 && TREE_CODE (exp) != ARRAY_REF
5775 && TREE_CODE (exp) != COMPONENT_REF
5776 && TREE_CODE (exp) != BIT_FIELD_REF
5777 && TREE_CODE (exp) != INDIRECT_REF
5778 && TREE_CODE (exp) != CALL_EXPR
5779 && TREE_CODE (exp) != VAR_DECL
5780 && TREE_CODE (exp) != RTL_EXPR)
5781 {
5782 enum machine_mode mode = GET_MODE (target);
5783
5784 if (GET_MODE_CLASS (mode) == MODE_INT
5785 && mode > MAX_INTEGER_COMPUTATION_MODE)
5786 fatal ("unsupported wide integer operation");
5787 }
5788
5789 if (tmode != mode
5790 && TREE_CODE (exp) != INTEGER_CST
5791 && TREE_CODE (exp) != PARM_DECL
5792 && TREE_CODE (exp) != ARRAY_REF
5793 && TREE_CODE (exp) != COMPONENT_REF
5794 && TREE_CODE (exp) != BIT_FIELD_REF
5795 && TREE_CODE (exp) != INDIRECT_REF
5796 && TREE_CODE (exp) != VAR_DECL
5797 && TREE_CODE (exp) != CALL_EXPR
5798 && TREE_CODE (exp) != RTL_EXPR
5799 && GET_MODE_CLASS (tmode) == MODE_INT
5800 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5801 fatal ("unsupported wide integer operation");
5802
5803 check_max_integer_computation_mode (exp);
5804 #endif
5805
5806 /* If will do cse, generate all results into pseudo registers
5807 since 1) that allows cse to find more things
5808 and 2) otherwise cse could produce an insn the machine
5809 cannot support. */
5810
5811 if (! cse_not_expected && mode != BLKmode && target
5812 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5813 target = subtarget;
5814
5815 switch (code)
5816 {
5817 case LABEL_DECL:
5818 {
5819 tree function = decl_function_context (exp);
5820 /* Handle using a label in a containing function. */
5821 if (function != current_function_decl
5822 && function != inline_function_decl && function != 0)
5823 {
5824 struct function *p = find_function_data (function);
5825 /* Allocate in the memory associated with the function
5826 that the label is in. */
5827 push_obstacks (p->function_obstack,
5828 p->function_maybepermanent_obstack);
5829
5830 p->expr->x_forced_labels
5831 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5832 p->expr->x_forced_labels);
5833 pop_obstacks ();
5834 }
5835 else
5836 {
5837 if (modifier == EXPAND_INITIALIZER)
5838 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5839 label_rtx (exp),
5840 forced_labels);
5841 }
5842
5843 temp = gen_rtx_MEM (FUNCTION_MODE,
5844 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5845 if (function != current_function_decl
5846 && function != inline_function_decl && function != 0)
5847 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5848 return temp;
5849 }
5850
5851 case PARM_DECL:
5852 if (DECL_RTL (exp) == 0)
5853 {
5854 error_with_decl (exp, "prior parameter's size depends on `%s'");
5855 return CONST0_RTX (mode);
5856 }
5857
5858 /* ... fall through ... */
5859
5860 case VAR_DECL:
5861 /* If a static var's type was incomplete when the decl was written,
5862 but the type is complete now, lay out the decl now. */
5863 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5864 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5865 {
5866 push_obstacks_nochange ();
5867 end_temporary_allocation ();
5868 layout_decl (exp, 0);
5869 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5870 pop_obstacks ();
5871 }
5872
5873 /* Although static-storage variables start off initialized, according to
5874 ANSI C, a memcpy could overwrite them with uninitialized values. So
5875 we check them too. This also lets us check for read-only variables
5876 accessed via a non-const declaration, in case it won't be detected
5877 any other way (e.g., in an embedded system or OS kernel without
5878 memory protection).
5879
5880 Aggregates are not checked here; they're handled elsewhere. */
5881 if (cfun && current_function_check_memory_usage
5882 && code == VAR_DECL
5883 && GET_CODE (DECL_RTL (exp)) == MEM
5884 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5885 {
5886 enum memory_use_mode memory_usage;
5887 memory_usage = get_memory_usage_from_modifier (modifier);
5888
5889 if (memory_usage != MEMORY_USE_DONT)
5890 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5891 XEXP (DECL_RTL (exp), 0), Pmode,
5892 GEN_INT (int_size_in_bytes (type)),
5893 TYPE_MODE (sizetype),
5894 GEN_INT (memory_usage),
5895 TYPE_MODE (integer_type_node));
5896 }
5897
5898 /* ... fall through ... */
5899
5900 case FUNCTION_DECL:
5901 case RESULT_DECL:
5902 if (DECL_RTL (exp) == 0)
5903 abort ();
5904
5905 /* Ensure the variable is marked as used even if it doesn't go through
5906 a parser. If it hasn't been used yet, write out an external
5907 definition. */
5908 if (! TREE_USED (exp))
5909 {
5910 assemble_external (exp);
5911 TREE_USED (exp) = 1;
5912 }
5913
5914 /* Show we haven't gotten RTL for this yet. */
5915 temp = 0;
5916
5917 /* Handle variables inherited from containing functions. */
5918 context = decl_function_context (exp);
5919
5920 /* We treat inline_function_decl as an alias for the current function
5921 because that is the inline function whose vars, types, etc.
5922 are being merged into the current function.
5923 See expand_inline_function. */
5924
5925 if (context != 0 && context != current_function_decl
5926 && context != inline_function_decl
5927 /* If var is static, we don't need a static chain to access it. */
5928 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5929 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5930 {
5931 rtx addr;
5932
5933 /* Mark as non-local and addressable. */
5934 DECL_NONLOCAL (exp) = 1;
5935 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5936 abort ();
5937 mark_addressable (exp);
5938 if (GET_CODE (DECL_RTL (exp)) != MEM)
5939 abort ();
5940 addr = XEXP (DECL_RTL (exp), 0);
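/* The variable's address may itself be stored in memory; in that case
   rewrite the inner address for access through the static chain,
   otherwise rewrite the address directly.  */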
5941 if (GET_CODE (addr) == MEM)
5942 addr = gen_rtx_MEM (Pmode,
5943 fix_lexical_addr (XEXP (addr, 0), exp));
5944 else
5945 addr = fix_lexical_addr (addr, exp);
5946 temp = change_address (DECL_RTL (exp), mode, addr);
5947 }
5948
5949 /* This is the case of an array whose size is to be determined
5950 from its initializer, while the initializer is still being parsed.
5951 See expand_decl. */
5952
5953 else if (GET_CODE (DECL_RTL (exp)) == MEM
5954 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5955 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5956 XEXP (DECL_RTL (exp), 0));
5957
5958 /* If DECL_RTL is memory, we are in the normal case and either
5959 the address is not valid or it is not a register and -fforce-addr
5960 is specified, get the address into a register. */
5961
5962 else if (GET_CODE (DECL_RTL (exp)) == MEM
5963 && modifier != EXPAND_CONST_ADDRESS
5964 && modifier != EXPAND_SUM
5965 && modifier != EXPAND_INITIALIZER
5966 && (! memory_address_p (DECL_MODE (exp),
5967 XEXP (DECL_RTL (exp), 0))
5968 || (flag_force_addr
5969 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5970 temp = change_address (DECL_RTL (exp), VOIDmode,
5971 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5972
5973 /* If we got something, return it. But first, set the alignment
5974 if the address is a register. */
5975 if (temp != 0)
5976 {
5977 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5978 mark_reg_pointer (XEXP (temp, 0),
5979 DECL_ALIGN (exp) / BITS_PER_UNIT);
5980
5981 return temp;
5982 }
5983
5984 /* If the mode of DECL_RTL does not match that of the decl, it
5985 must be a promoted value. We return a SUBREG of the wanted mode,
5986 but mark it so that we know that it was already extended. */
5987
5988 if (GET_CODE (DECL_RTL (exp)) == REG
5989 && GET_MODE (DECL_RTL (exp)) != mode)
5990 {
5991 /* Get the signedness used for this variable. Ensure we get the
5992 same mode we got when the variable was declared. */
5993 if (GET_MODE (DECL_RTL (exp))
5994 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5995 abort ();
5996
5997 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5998 SUBREG_PROMOTED_VAR_P (temp) = 1;
5999 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6000 return temp;
6001 }
6002
6003 return DECL_RTL (exp);
6004
6005 case INTEGER_CST:
6006 return immed_double_const (TREE_INT_CST_LOW (exp),
6007 TREE_INT_CST_HIGH (exp),
6008 mode);
6009
6010 case CONST_DECL:
6011 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6012 EXPAND_MEMORY_USE_BAD);
6013
6014 case REAL_CST:
6015 /* If optimized, generate immediate CONST_DOUBLE
6016 which will be turned into memory by reload if necessary.
6017
6018 We used to force a register so that loop.c could see it. But
6019 this does not allow gen_* patterns to perform optimizations with
6020 the constants. It also produces two insns in cases like "x = 1.0;".
6021 On most machines, floating-point constants are not permitted in
6022 many insns, so we'd end up copying it to a register in any case.
6023
6024 Now, we do the copying in expand_binop, if appropriate. */
6025 return immed_real_const (exp);
6026
6027 case COMPLEX_CST:
6028 case STRING_CST:
6029 if (! TREE_CST_RTL (exp))
6030 output_constant_def (exp);
6031
6032 /* TREE_CST_RTL probably contains a constant address.
6033 On RISC machines where a constant address isn't valid,
6034 make some insns to get that address into a register. */
6035 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6036 && modifier != EXPAND_CONST_ADDRESS
6037 && modifier != EXPAND_INITIALIZER
6038 && modifier != EXPAND_SUM
6039 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6040 || (flag_force_addr
6041 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6042 return change_address (TREE_CST_RTL (exp), VOIDmode,
6043 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6044 return TREE_CST_RTL (exp);
6045
6046 case EXPR_WITH_FILE_LOCATION:
6047 {
6048 rtx to_return;
6049 char *saved_input_filename = input_filename;
6050 int saved_lineno = lineno;
6051 input_filename = EXPR_WFL_FILENAME (exp);
6052 lineno = EXPR_WFL_LINENO (exp);
6053 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6054 emit_line_note (input_filename, lineno);
6055 /* Possibly avoid switching back and forth here. */
6056 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6057 input_filename = saved_input_filename;
6058 lineno = saved_lineno;
6059 return to_return;
6060 }
6061
6062 case SAVE_EXPR:
6063 context = decl_function_context (exp);
6064
6065 /* If this SAVE_EXPR was at global context, assume we are an
6066 initialization function and move it into our context. */
6067 if (context == 0)
6068 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6069
6070 /* We treat inline_function_decl as an alias for the current function
6071 because that is the inline function whose vars, types, etc.
6072 are being merged into the current function.
6073 See expand_inline_function. */
6074 if (context == current_function_decl || context == inline_function_decl)
6075 context = 0;
6076
6077 /* If this is non-local, handle it. */
6078 if (context)
6079 {
6080 /* The following call just exists to abort if the context is
6081 not of a containing function. */
6082 find_function_data (context);
6083
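/* A SAVE_EXPR belonging to a containing function must live in memory
   so that it can be reached through the static chain; if it is still
   in a register, push it into that function's stack frame first.  */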
6084 temp = SAVE_EXPR_RTL (exp);
6085 if (temp && GET_CODE (temp) == REG)
6086 {
6087 put_var_into_stack (exp);
6088 temp = SAVE_EXPR_RTL (exp);
6089 }
6090 if (temp == 0 || GET_CODE (temp) != MEM)
6091 abort ();
6092 return change_address (temp, mode,
6093 fix_lexical_addr (XEXP (temp, 0), exp));
6094 }
6095 if (SAVE_EXPR_RTL (exp) == 0)
6096 {
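/* A VOIDmode SAVE_EXPR yields no value; remember const0_rtx so the
   operand below is expanded only for its side effects.  */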
6097 if (mode == VOIDmode)
6098 temp = const0_rtx;
6099 else
6100 temp = assign_temp (type, 3, 0, 0);
6101
6102 SAVE_EXPR_RTL (exp) = temp;
6103 if (!optimize && GET_CODE (temp) == REG)
6104 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6105 save_expr_regs);
6106
6107 /* If the mode of TEMP does not match that of the expression, it
6108 must be a promoted value. We pass store_expr a SUBREG of the
6109 wanted mode but mark it so that we know that it was already
6110 extended. Note that `unsignedp' was modified above in
6111 this case. */
6112
6113 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6114 {
6115 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6116 SUBREG_PROMOTED_VAR_P (temp) = 1;
6117 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6118 }
6119
6120 if (temp == const0_rtx)
6121 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6122 EXPAND_MEMORY_USE_BAD);
6123 else
6124 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6125
6126 TREE_USED (exp) = 1;
6127 }
6128
6129 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6130 must be a promoted value. We return a SUBREG of the wanted mode,
6131 but mark it so that we know that it was already extended. */
6132
6133 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6134 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6135 {
6136 /* Compute the signedness and make the proper SUBREG. */
6137 promote_mode (type, mode, &unsignedp, 0);
6138 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6139 SUBREG_PROMOTED_VAR_P (temp) = 1;
6140 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6141 return temp;
6142 }
6143
6144 return SAVE_EXPR_RTL (exp);
6145
6146 case UNSAVE_EXPR:
6147 {
6148 rtx temp;
6149 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6150 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6151 return temp;
6152 }
6153
6154 case PLACEHOLDER_EXPR:
6155 {
6156 tree placeholder_expr;
6157
6158 /* If there is an object on the head of the placeholder list,
6159 see if some object in it is of type TYPE or a pointer to it. For
6160 further information, see tree.def. */
6161 for (placeholder_expr = placeholder_list;
6162 placeholder_expr != 0;
6163 placeholder_expr = TREE_CHAIN (placeholder_expr))
6164 {
6165 tree need_type = TYPE_MAIN_VARIANT (type);
6166 tree object = 0;
6167 tree old_list = placeholder_list;
6168 tree elt;
6169
6170 /* Find the outermost reference that is of the type we want.
6171 If none, see if any object has a type that is a pointer to
6172 the type we want. */
6173 for (elt = TREE_PURPOSE (placeholder_expr);
6174 elt != 0 && object == 0;
6175 elt
6176 = ((TREE_CODE (elt) == COMPOUND_EXPR
6177 || TREE_CODE (elt) == COND_EXPR)
6178 ? TREE_OPERAND (elt, 1)
6179 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6180 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6181 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6182 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6183 ? TREE_OPERAND (elt, 0) : 0))
6184 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6185 object = elt;
6186
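/* Nothing of the wanted type was found directly; look for an object
   whose type is a pointer to the wanted type and dereference it.  */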
6187 for (elt = TREE_PURPOSE (placeholder_expr);
6188 elt != 0 && object == 0;
6189 elt
6190 = ((TREE_CODE (elt) == COMPOUND_EXPR
6191 || TREE_CODE (elt) == COND_EXPR)
6192 ? TREE_OPERAND (elt, 1)
6193 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6194 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6195 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6196 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6197 ? TREE_OPERAND (elt, 0) : 0))
6198 if (POINTER_TYPE_P (TREE_TYPE (elt))
6199 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6200 == need_type))
6201 object = build1 (INDIRECT_REF, need_type, elt);
6202
6203 if (object != 0)
6204 {
6205 /* Expand this object skipping the list entries before
6206 it was found in case it is also a PLACEHOLDER_EXPR.
6207 In that case, we want to translate it using subsequent
6208 entries. */
6209 placeholder_list = TREE_CHAIN (placeholder_expr);
6210 temp = expand_expr (object, original_target, tmode,
6211 ro_modifier);
6212 placeholder_list = old_list;
6213 return temp;
6214 }
6215 }
6216 }
6217
6218 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6219 abort ();
6220
6221 case WITH_RECORD_EXPR:
6222 /* Put the object on the placeholder list, expand our first operand,
6223 and pop the list. */
6224 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6225 placeholder_list);
6226 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6227 tmode, ro_modifier);
6228 placeholder_list = TREE_CHAIN (placeholder_list);
6229 return target;
6230
6231 case GOTO_EXPR:
6232 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6233 expand_goto (TREE_OPERAND (exp, 0));
6234 else
6235 expand_computed_goto (TREE_OPERAND (exp, 0));
6236 return const0_rtx;
6237
6238 case EXIT_EXPR:
6239 expand_exit_loop_if_false (NULL_PTR,
6240 invert_truthvalue (TREE_OPERAND (exp, 0)));
6241 return const0_rtx;
6242
6243 case LABELED_BLOCK_EXPR:
6244 if (LABELED_BLOCK_BODY (exp))
6245 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6246 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6247 return const0_rtx;
6248
6249 case EXIT_BLOCK_EXPR:
6250 if (EXIT_BLOCK_RETURN (exp))
6251 sorry ("returned value in block_exit_expr");
6252 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6253 return const0_rtx;
6254
6255 case LOOP_EXPR:
6256 push_temp_slots ();
6257 expand_start_loop (1);
6258 expand_expr_stmt (TREE_OPERAND (exp, 0));
6259 expand_end_loop ();
6260 pop_temp_slots ();
6261
6262 return const0_rtx;
6263
6264 case BIND_EXPR:
6265 {
6266 tree vars = TREE_OPERAND (exp, 0);
6267 int vars_need_expansion = 0;
6268
6269 /* Need to open a binding contour here because
6270 if there are any cleanups they must be contained here. */
6271 expand_start_bindings (2);
6272
6273 /* Mark the corresponding BLOCK for output in its proper place. */
6274 if (TREE_OPERAND (exp, 2) != 0
6275 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6276 insert_block (TREE_OPERAND (exp, 2));
6277
6278 /* If VARS have not yet been expanded, expand them now. */
6279 while (vars)
6280 {
6281 if (DECL_RTL (vars) == 0)
6282 {
6283 vars_need_expansion = 1;
6284 expand_decl (vars);
6285 }
6286 expand_decl_init (vars);
6287 vars = TREE_CHAIN (vars);
6288 }
6289
6290 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6291
6292 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6293
6294 return temp;
6295 }
6296
6297 case RTL_EXPR:
6298 if (RTL_EXPR_SEQUENCE (exp))
6299 {
6300 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6301 abort ();
6302 emit_insns (RTL_EXPR_SEQUENCE (exp));
6303 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6304 }
6305 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6306 free_temps_for_rtl_expr (exp);
6307 return RTL_EXPR_RTL (exp);
6308
6309 case CONSTRUCTOR:
6310 /* If we don't need the result, just ensure we evaluate any
6311 subexpressions. */
6312 if (ignore)
6313 {
6314 tree elt;
6315 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6316 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6317 EXPAND_MEMORY_USE_BAD);
6318 return const0_rtx;
6319 }
6320
6321 /* All elts simple constants => refer to a constant in memory. But
6322 if this is a non-BLKmode mode, let it store a field at a time
6323 since that should make a CONST_INT or CONST_DOUBLE when we
6324 fold. Likewise, if we have a target we can use, it is best to
6325 store directly into the target unless the type is large enough
6326 that memcpy will be used. If we are making an initializer and
6327 all operands are constant, put it in memory as well. */
6328 else if ((TREE_STATIC (exp)
6329 && ((mode == BLKmode
6330 && ! (target != 0 && safe_from_p (target, exp, 1)))
6331 || TREE_ADDRESSABLE (exp)
6332 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6333 && (!MOVE_BY_PIECES_P
6334 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6335 TYPE_ALIGN (type) / BITS_PER_UNIT))
6336 && ! mostly_zeros_p (exp))))
6337 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6338 {
6339 rtx constructor = output_constant_def (exp);
6340 if (modifier != EXPAND_CONST_ADDRESS
6341 && modifier != EXPAND_INITIALIZER
6342 && modifier != EXPAND_SUM
6343 && (! memory_address_p (GET_MODE (constructor),
6344 XEXP (constructor, 0))
6345 || (flag_force_addr
6346 && GET_CODE (XEXP (constructor, 0)) != REG)))
6347 constructor = change_address (constructor, VOIDmode,
6348 XEXP (constructor, 0));
6349 return constructor;
6350 }
6351
6352 else
6353 {
6354 /* Handle calls that pass values in multiple non-contiguous
6355 locations. The Irix 6 ABI has examples of this. */
6356 if (target == 0 || ! safe_from_p (target, exp, 1)
6357 || GET_CODE (target) == PARALLEL)
6358 {
6359 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6360 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6361 else
6362 target = assign_temp (type, 0, 1, 1);
6363 }
6364
6365 if (TREE_READONLY (exp))
6366 {
6367 if (GET_CODE (target) == MEM)
6368 target = copy_rtx (target);
6369
6370 RTX_UNCHANGING_P (target) = 1;
6371 }
6372
6373 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6374 int_size_in_bytes (TREE_TYPE (exp)));
6375 return target;
6376 }
6377
6378 case INDIRECT_REF:
6379 {
6380 tree exp1 = TREE_OPERAND (exp, 0);
6381 tree exp2;
6382 tree index;
6383 tree string = string_constant (exp1, &index);
6384 int i;
6385
6386 /* Try to optimize reads from const strings. */
6387 if (string
6388 && TREE_CODE (string) == STRING_CST
6389 && TREE_CODE (index) == INTEGER_CST
6390 && !TREE_INT_CST_HIGH (index)
6391 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6392 && GET_MODE_CLASS (mode) == MODE_INT
6393 && GET_MODE_SIZE (mode) == 1
6394 && modifier != EXPAND_MEMORY_USE_WO)
6395 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6396
6397 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6398 op0 = memory_address (mode, op0);
6399
6400 if (cfun && current_function_check_memory_usage
6401 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6402 {
6403 enum memory_use_mode memory_usage;
6404 memory_usage = get_memory_usage_from_modifier (modifier);
6405
6406 if (memory_usage != MEMORY_USE_DONT)
6407 {
6408 in_check_memory_usage = 1;
6409 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6410 op0, Pmode,
6411 GEN_INT (int_size_in_bytes (type)),
6412 TYPE_MODE (sizetype),
6413 GEN_INT (memory_usage),
6414 TYPE_MODE (integer_type_node));
6415 in_check_memory_usage = 0;
6416 }
6417 }
6418
6419 temp = gen_rtx_MEM (mode, op0);
6420 /* If address was computed by addition,
6421 mark this as an element of an aggregate. */
6422 if (TREE_CODE (exp1) == PLUS_EXPR
6423 || (TREE_CODE (exp1) == SAVE_EXPR
6424 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6425 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6426 || (TREE_CODE (exp1) == ADDR_EXPR
6427 && (exp2 = TREE_OPERAND (exp1, 0))
6428 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6429 MEM_SET_IN_STRUCT_P (temp, 1);
6430
6431 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6432 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6433
6434 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6435 here, because, in C and C++, the fact that a location is accessed
6436 through a pointer to const does not mean that the value there can
6437 never change. Languages where it can never change should
6438 also set TREE_STATIC. */
6439 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6440
6441 /* If we are writing to this object and its type is a record with
6442 readonly fields, we must mark it as readonly so it will
6443 conflict with readonly references to those fields. */
6444 if (modifier == EXPAND_MEMORY_USE_WO
6445 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6446 RTX_UNCHANGING_P (temp) = 1;
6447
6448 return temp;
6449 }
6450
6451 case ARRAY_REF:
6452 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6453 abort ();
6454
6455 {
6456 tree array = TREE_OPERAND (exp, 0);
6457 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6458 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6459 tree index = TREE_OPERAND (exp, 1);
6460 tree index_type = TREE_TYPE (index);
6461 HOST_WIDE_INT i;
6462
6463 /* Optimize the special-case of a zero lower bound.
6464
6465 We convert the low_bound to sizetype to avoid some problems
6466 with constant folding. (E.g. suppose the lower bound is 1,
6467 and its mode is QI. Without the conversion, (ARRAY
6468 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6469 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6470
6471 But sizetype isn't quite right either (especially if
6472 the lowbound is negative). FIXME */
6473
6474 if (! integer_zerop (low_bound))
6475 index = fold (build (MINUS_EXPR, index_type, index,
6476 convert (sizetype, low_bound)));
6477
6478 /* Fold an expression like: "foo"[2].
6479 This is not done in fold so it won't happen inside &.
6480 Don't fold if this is for wide characters since it's too
6481 difficult to do correctly and this is a very rare case. */
6482
6483 if (TREE_CODE (array) == STRING_CST
6484 && TREE_CODE (index) == INTEGER_CST
6485 && !TREE_INT_CST_HIGH (index)
6486 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6487 && GET_MODE_CLASS (mode) == MODE_INT
6488 && GET_MODE_SIZE (mode) == 1)
6489 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6490
6491 /* If this is a constant index into a constant array,
6492 just get the value from the array. Handle both the cases when
6493 we have an explicit constructor and when our operand is a variable
6494 that was declared const. */
6495
6496 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6497 {
6498 if (TREE_CODE (index) == INTEGER_CST
6499 && TREE_INT_CST_HIGH (index) == 0)
6500 {
6501 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6502
6503 i = TREE_INT_CST_LOW (index);
6504 while (elem && i--)
6505 elem = TREE_CHAIN (elem);
6506 if (elem)
6507 return expand_expr (fold (TREE_VALUE (elem)), target,
6508 tmode, ro_modifier);
6509 }
6510 }
6511
6512 else if (optimize >= 1
6513 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6514 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6515 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6516 {
6517 if (TREE_CODE (index) == INTEGER_CST)
6518 {
6519 tree init = DECL_INITIAL (array);
6520
6521 i = TREE_INT_CST_LOW (index);
6522 if (TREE_CODE (init) == CONSTRUCTOR)
6523 {
6524 tree elem = CONSTRUCTOR_ELTS (init);
6525
6526 while (elem
6527 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6528 elem = TREE_CHAIN (elem);
6529 if (elem)
6530 return expand_expr (fold (TREE_VALUE (elem)), target,
6531 tmode, ro_modifier);
6532 }
6533 else if (TREE_CODE (init) == STRING_CST
6534 && TREE_INT_CST_HIGH (index) == 0
6535 && (TREE_INT_CST_LOW (index)
6536 < TREE_STRING_LENGTH (init)))
6537 return (GEN_INT
6538 (TREE_STRING_POINTER
6539 (init)[TREE_INT_CST_LOW (index)]));
6540 }
6541 }
6542 }
6543
6544 /* ... fall through ... */
6545
6546 case COMPONENT_REF:
6547 case BIT_FIELD_REF:
6548 /* If the operand is a CONSTRUCTOR, we can just extract the
6549 appropriate field if it is present. Don't do this if we have
6550 already written the data since we want to refer to that copy
6551 and varasm.c assumes that's what we'll do. */
6552 if (code != ARRAY_REF
6553 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6554 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6555 {
6556 tree elt;
6557
6558 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6559 elt = TREE_CHAIN (elt))
6560 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6561 /* We can normally use the value of the field in the
6562 CONSTRUCTOR. However, if this is a bitfield in
6563 an integral mode that we can fit in a HOST_WIDE_INT,
6564 we must mask only the number of bits in the bitfield,
6565 since this is done implicitly by the constructor. If
6566 the bitfield does not meet either of those conditions,
6567 we can't do this optimization. */
6568 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6569 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6570 == MODE_INT)
6571 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6572 <= HOST_BITS_PER_WIDE_INT))))
6573 {
6574 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6575 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6576 {
6577 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6578
6579 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6580 {
6581 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6582 op0 = expand_and (op0, op1, target);
6583 }
6584 else
6585 {
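/* For a signed bit-field, sign-extend the value: shift it left so the
   field's sign bit lands in the sign bit of IMODE, then shift it back
   down arithmetically.  */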
6586 enum machine_mode imode
6587 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6588 tree count
6589 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6590 0);
6591
6592 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6593 target, 0);
6594 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6595 target, 0);
6596 }
6597 }
6598
6599 return op0;
6600 }
6601 }
6602
6603 {
6604 enum machine_mode mode1;
6605 int bitsize;
6606 int bitpos;
6607 tree offset;
6608 int volatilep = 0;
6609 unsigned int alignment;
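/* Decompose the reference into the object ultimately referenced (TEM),
   a constant bit position and size, an optional variable offset, the
   field's mode, and its alignment.  */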
6610 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6611 &mode1, &unsignedp, &volatilep,
6612 &alignment);
6613
6614 /* If we got back the original object, something is wrong. Perhaps
6615 we are evaluating an expression too early. In any event, don't
6616 infinitely recurse. */
6617 if (tem == exp)
6618 abort ();
6619
6620 /* If TEM's type is a union of variable size, pass TARGET to the inner
6621 computation, since it will need a temporary and TARGET will
6622 have to do. This occurs in unchecked conversion in Ada. */
6623
6624 op0 = expand_expr (tem,
6625 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6626 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6627 != INTEGER_CST)
6628 ? target : NULL_RTX),
6629 VOIDmode,
6630 (modifier == EXPAND_INITIALIZER
6631 || modifier == EXPAND_CONST_ADDRESS)
6632 ? modifier : EXPAND_NORMAL);
6633
6634 /* If this is a constant, put it into a register if it is a
6635 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6636 if (CONSTANT_P (op0))
6637 {
6638 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6639 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6640 && offset == 0)
6641 op0 = force_reg (mode, op0);
6642 else
6643 op0 = validize_mem (force_const_mem (mode, op0));
6644 }
6645
6646 if (offset != 0)
6647 {
6648 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6649
6650 /* If this object is in memory, put it into a register.
6651 This case can't occur in C, but can in Ada if we have
6652 unchecked conversion of an expression from a scalar type to
6653 an array or record type. */
6654 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6655 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6656 {
6657 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6658
6659 mark_temp_addr_taken (memloc);
6660 emit_move_insn (memloc, op0);
6661 op0 = memloc;
6662 }
6663
6664 if (GET_CODE (op0) != MEM)
6665 abort ();
6666
6667 if (GET_MODE (offset_rtx) != ptr_mode)
6668 {
6669 #ifdef POINTERS_EXTEND_UNSIGNED
6670 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6671 #else
6672 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6673 #endif
6674 }
6675
6676 /* A constant address in OP0 can have VOIDmode; we must not try
6677 to call force_reg in that case, so avoid it here. */
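/* When OP0 is a BLKmode memory reference and the constant bit position
   and field size line up with MODE1's alignment, fold BITPOS into the
   address now so that only the variable offset remains to be added.  */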
6678 if (GET_CODE (op0) == MEM
6679 && GET_MODE (op0) == BLKmode
6680 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6681 && bitsize != 0
6682 && (bitpos % bitsize) == 0
6683 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6684 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6685 {
6686 rtx temp = change_address (op0, mode1,
6687 plus_constant (XEXP (op0, 0),
6688 (bitpos /
6689 BITS_PER_UNIT)));
6690 if (GET_CODE (XEXP (temp, 0)) == REG)
6691 op0 = temp;
6692 else
6693 op0 = change_address (op0, mode1,
6694 force_reg (GET_MODE (XEXP (temp, 0)),
6695 XEXP (temp, 0)));
6696 bitpos = 0;
6697 }
6698
6699
6700 op0 = change_address (op0, VOIDmode,
6701 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6702 force_reg (ptr_mode,
6703 offset_rtx)));
6704 }
6705
6706 /* Don't forget about volatility even if this is a bitfield. */
6707 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6708 {
6709 op0 = copy_rtx (op0);
6710 MEM_VOLATILE_P (op0) = 1;
6711 }
6712
6713 /* Check the access. */
6714 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6715 {
6716 enum memory_use_mode memory_usage;
6717 memory_usage = get_memory_usage_from_modifier (modifier);
6718
6719 if (memory_usage != MEMORY_USE_DONT)
6720 {
6721 rtx to;
6722 int size;
6723
6724 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6725 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6726
6727 /* Check the access right of the pointer. */
6728 if (size > BITS_PER_UNIT)
6729 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6730 to, Pmode,
6731 GEN_INT (size / BITS_PER_UNIT),
6732 TYPE_MODE (sizetype),
6733 GEN_INT (memory_usage),
6734 TYPE_MODE (integer_type_node));
6735 }
6736 }
6737
6738 /* In cases where an aligned union has an unaligned object
6739 as a field, we might be extracting a BLKmode value from
6740 an integer-mode (e.g., SImode) object. Handle this case
6741 by doing the extract into an object as wide as the field
6742 (which we know to be the width of a basic mode), then
6743 storing into memory, and changing the mode to BLKmode.
6744 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6745 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6746 if (mode1 == VOIDmode
6747 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6748 || (modifier != EXPAND_CONST_ADDRESS
6749 && modifier != EXPAND_INITIALIZER
6750 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6751 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6752 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6753 /* If the field isn't aligned enough to fetch as a memref,
6754 fetch it as a bit field. */
6755 || (mode1 != BLKmode
6756 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6757 && ((TYPE_ALIGN (TREE_TYPE (tem))
6758 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6759 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
6760 || (modifier != EXPAND_CONST_ADDRESS
6761 && modifier != EXPAND_INITIALIZER
6762 && mode == BLKmode
6763 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6764 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6765 || bitpos % TYPE_ALIGN (type) != 0)))
6766 {
6767 enum machine_mode ext_mode = mode;
6768
6769 if (ext_mode == BLKmode
6770 && ! (target != 0 && GET_CODE (op0) == MEM
6771 && GET_CODE (target) == MEM
6772 && bitpos % BITS_PER_UNIT == 0))
6773 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6774
6775 if (ext_mode == BLKmode)
6776 {
6777 /* In this case, BITPOS must start at a byte boundary and
6778 TARGET, if specified, must be a MEM. */
6779 if (GET_CODE (op0) != MEM
6780 || (target != 0 && GET_CODE (target) != MEM)
6781 || bitpos % BITS_PER_UNIT != 0)
6782 abort ();
6783
6784 op0 = change_address (op0, VOIDmode,
6785 plus_constant (XEXP (op0, 0),
6786 bitpos / BITS_PER_UNIT));
6787 if (target == 0)
6788 target = assign_temp (type, 0, 1, 1);
6789
6790 emit_block_move (target, op0,
6791 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6792 / BITS_PER_UNIT),
6793 1);
6794
6795 return target;
6796 }
6797
6798 op0 = validize_mem (op0);
6799
6800 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6801 mark_reg_pointer (XEXP (op0, 0), alignment);
6802
6803 op0 = extract_bit_field (op0, bitsize, bitpos,
6804 unsignedp, target, ext_mode, ext_mode,
6805 alignment,
6806 int_size_in_bytes (TREE_TYPE (tem)));
6807
6808 /* If the result is a record type and BITSIZE is narrower than
6809 the mode of OP0, an integral mode, and this is a big endian
6810 machine, we must put the field into the high-order bits. */
6811 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6812 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6813 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6814 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6815 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6816 - bitsize),
6817 op0, 1);
6818
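/* The caller wants a BLKmode value: spill the extracted word into a
   stack temporary and hand back a BLKmode reference to it.  */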
6819 if (mode == BLKmode)
6820 {
6821 rtx new = assign_stack_temp (ext_mode,
6822 bitsize / BITS_PER_UNIT, 0);
6823
6824 emit_move_insn (new, op0);
6825 op0 = copy_rtx (new);
6826 PUT_MODE (op0, BLKmode);
6827 MEM_SET_IN_STRUCT_P (op0, 1);
6828 }
6829
6830 return op0;
6831 }
6832
6833 /* If the result is BLKmode, use that to access the object
6834 now as well. */
6835 if (mode == BLKmode)
6836 mode1 = BLKmode;
6837
6838 /* Get a reference to just this component. */
6839 if (modifier == EXPAND_CONST_ADDRESS
6840 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6841 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6842 (bitpos / BITS_PER_UNIT)));
6843 else
6844 op0 = change_address (op0, mode1,
6845 plus_constant (XEXP (op0, 0),
6846 (bitpos / BITS_PER_UNIT)));
6847
6848 if (GET_CODE (op0) == MEM)
6849 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6850
6851 if (GET_CODE (XEXP (op0, 0)) == REG)
6852 mark_reg_pointer (XEXP (op0, 0), alignment);
6853
6854 MEM_SET_IN_STRUCT_P (op0, 1);
6855 MEM_VOLATILE_P (op0) |= volatilep;
6856 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6857 || modifier == EXPAND_CONST_ADDRESS
6858 || modifier == EXPAND_INITIALIZER)
6859 return op0;
6860 else if (target == 0)
6861 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6862
6863 convert_move (target, op0, unsignedp);
6864 return target;
6865 }
6866
6867 /* Intended for a reference to a buffer of a file-object in Pascal.
6868 But it's not certain that a special tree code will really be
6869 necessary for these. INDIRECT_REF might work for them. */
6870 case BUFFER_REF:
6871 abort ();
6872
6873 case IN_EXPR:
6874 {
6875 /* Pascal set IN expression.
6876
6877 Algorithm:
6878 rlo = set_low - (set_low%bits_per_word);
6879 the_word = set [ (index - rlo)/bits_per_word ];
6880 bit_index = index % bits_per_word;
6881 bitmask = 1 << bit_index;
6882 return !!(the_word & bitmask); */
6883
6884 tree set = TREE_OPERAND (exp, 0);
6885 tree index = TREE_OPERAND (exp, 1);
6886 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6887 tree set_type = TREE_TYPE (set);
6888 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6889 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6890 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6891 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6892 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6893 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6894 rtx setaddr = XEXP (setval, 0);
6895 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6896 rtx rlow;
6897 rtx diff, quo, rem, addr, bit, result;
6898
6899 preexpand_calls (exp);
6900
6901 /* If domain is empty, answer is no. Likewise if index is constant
6902 and out of bounds. */
6903 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6904 && TREE_CODE (set_low_bound) == INTEGER_CST
6905 && tree_int_cst_lt (set_high_bound, set_low_bound))
6906 || (TREE_CODE (index) == INTEGER_CST
6907 && TREE_CODE (set_low_bound) == INTEGER_CST
6908 && tree_int_cst_lt (index, set_low_bound))
6909 || (TREE_CODE (set_high_bound) == INTEGER_CST
6910 && TREE_CODE (index) == INTEGER_CST
6911 && tree_int_cst_lt (set_high_bound, index))))
6912 return const0_rtx;
6913
6914 if (target == 0)
6915 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6916
6917 /* If we get here, we have to generate the code for both cases
6918 (in range and out of range). */
6919
6920 op0 = gen_label_rtx ();
6921 op1 = gen_label_rtx ();
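/* OP1 receives jumps for the out-of-range case; OP0 is the label at
   which the two paths rejoin.  */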
6922
6923 if (! (GET_CODE (index_val) == CONST_INT
6924 && GET_CODE (lo_r) == CONST_INT))
6925 {
6926 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6927 GET_MODE (index_val), iunsignedp, 0, op1);
6928 }
6929
6930 if (! (GET_CODE (index_val) == CONST_INT
6931 && GET_CODE (hi_r) == CONST_INT))
6932 {
6933 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6934 GET_MODE (index_val), iunsignedp, 0, op1);
6935 }
6936
6937 /* Calculate the element number of bit zero in the first word
6938 of the set. */
6939 if (GET_CODE (lo_r) == CONST_INT)
6940 rlow = GEN_INT (INTVAL (lo_r)
6941 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6942 else
6943 rlow = expand_binop (index_mode, and_optab, lo_r,
6944 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6945 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6946
6947 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6948 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6949
6950 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6951 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6952 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6953 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6954
6955 addr = memory_address (byte_mode,
6956 expand_binop (index_mode, add_optab, diff,
6957 setaddr, NULL_RTX, iunsignedp,
6958 OPTAB_LIB_WIDEN));
6959
6960 /* Extract the bit we want to examine */
6961 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6962 gen_rtx_MEM (byte_mode, addr),
6963 make_tree (TREE_TYPE (index), rem),
6964 NULL_RTX, 1);
6965 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6966 GET_MODE (target) == byte_mode ? target : 0,
6967 1, OPTAB_LIB_WIDEN);
6968
6969 if (result != target)
6970 convert_move (target, result, 1);
6971
6972 /* Output the code to handle the out-of-range case. */
6973 emit_jump (op0);
6974 emit_label (op1);
6975 emit_move_insn (target, const0_rtx);
6976 emit_label (op0);
6977 return target;
6978 }
6979
6980 case WITH_CLEANUP_EXPR:
6981 if (RTL_EXPR_RTL (exp) == 0)
6982 {
6983 RTL_EXPR_RTL (exp)
6984 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6985 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6986
6987 /* That's it for this cleanup. */
6988 TREE_OPERAND (exp, 2) = 0;
6989 }
6990 return RTL_EXPR_RTL (exp);
6991
6992 case CLEANUP_POINT_EXPR:
6993 {
6994 /* Start a new binding layer that will keep track of all cleanup
6995 actions to be performed. */
6996 expand_start_bindings (2);
6997
6998 target_temp_slot_level = temp_slot_level;
6999
7000 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7001 /* If we're going to use this value, load it up now. */
7002 if (! ignore)
7003 op0 = force_not_mem (op0);
7004 preserve_temp_slots (op0);
7005 expand_end_bindings (NULL_TREE, 0, 0);
7006 }
7007 return op0;
7008
7009 case CALL_EXPR:
7010 /* Check for a built-in function. */
7011 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7012 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7013 == FUNCTION_DECL)
7014 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7015 return expand_builtin (exp, target, subtarget, tmode, ignore);
7016
7017 /* If this call was expanded already by preexpand_calls,
7018 just return the result we got. */
7019 if (CALL_EXPR_RTL (exp) != 0)
7020 return CALL_EXPR_RTL (exp);
7021
7022 return expand_call (exp, target, ignore);
7023
7024 case NON_LVALUE_EXPR:
7025 case NOP_EXPR:
7026 case CONVERT_EXPR:
7027 case REFERENCE_EXPR:
7028 if (TREE_CODE (type) == UNION_TYPE)
7029 {
7030 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7031
7032 /* If both input and output are BLKmode, this conversion
7033 isn't actually doing anything unless we need to make the
7034 alignment stricter. */
7035 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7036 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7037 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7038 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7039 modifier);
7040
7041 if (target == 0)
7042 {
7043 if (mode != BLKmode)
7044 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7045 else
7046 target = assign_temp (type, 0, 1, 1);
7047 }
7048
7049 if (GET_CODE (target) == MEM)
7050 /* Store data into beginning of memory target. */
7051 store_expr (TREE_OPERAND (exp, 0),
7052 change_address (target, TYPE_MODE (valtype), 0), 0);
7053
7054 else if (GET_CODE (target) == REG)
7055 /* Store this field into a union of the proper type. */
7056 store_field (target,
7057 MIN ((int_size_in_bytes (TREE_TYPE
7058 (TREE_OPERAND (exp, 0)))
7059 * BITS_PER_UNIT),
7060 GET_MODE_BITSIZE (mode)),
7061 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7062 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7063 else
7064 abort ();
7065
7066 /* Return the entire union. */
7067 return target;
7068 }
7069
7070 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7071 {
7072 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7073 ro_modifier);
7074
7075 /* If the signedness of the conversion differs and OP0 is
7076 a promoted SUBREG, clear that indication since we now
7077 have to do the proper extension. */
7078 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7079 && GET_CODE (op0) == SUBREG)
7080 SUBREG_PROMOTED_VAR_P (op0) = 0;
7081
7082 return op0;
7083 }
7084
7085 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7086 if (GET_MODE (op0) == mode)
7087 return op0;
7088
7089 /* If OP0 is a constant, just convert it into the proper mode. */
7090 if (CONSTANT_P (op0))
7091 return
7092 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7093 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7094
7095 if (modifier == EXPAND_INITIALIZER)
7096 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7097
7098 if (target == 0)
7099 return
7100 convert_to_mode (mode, op0,
7101 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7102 else
7103 convert_move (target, op0,
7104 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7105 return target;
7106
7107 case PLUS_EXPR:
7108 /* We come here from MINUS_EXPR when the second operand is a
7109 constant. */
7110 plus_expr:
7111 this_optab = add_optab;
7112
7113 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7114 something else, make sure we add the register to the constant and
7115 then to the other thing. This case can occur during strength
7116 reduction and doing it this way will produce better code if the
7117 frame pointer or argument pointer is eliminated.
7118
7119 fold-const.c will ensure that the constant is always in the inner
7120 PLUS_EXPR, so the only case we need to do anything about is if
7121 sp, ap, or fp is our second argument, in which case we must swap
7122 the innermost first argument and our second argument. */
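      /* So, for example, (X + C) + FP is rewritten below as (FP + C) + X;
	 the register and the constant then combine first, which folds away
	 nicely if FP is later eliminated.  */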
7123
7124 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7125 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7126 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7127 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7128 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7129 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7130 {
7131 tree t = TREE_OPERAND (exp, 1);
7132
7133 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7134 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7135 }
7136
7137 /* If the result is to be ptr_mode and we are adding an integer to
7138 something, we might be forming a constant. So try to use
7139 plus_constant. If it produces a sum and we can't accept it,
7140 use force_operand. This allows P = &ARR[const] to generate
7141 efficient code on machines where a SYMBOL_REF is not a valid
7142 address.
7143
7144 If this is an EXPAND_SUM call, always return the sum. */
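      /* For example, for P = &ARR[3] the address of ARR plus the byte
	 offset can fold into a single constant address (a CONST wrapping
	 the SYMBOL_REF plus an offset); if that is not a valid address on
	 the target, force_operand loads it into a register.  */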
7145 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7146 || mode == ptr_mode)
7147 {
7148 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7149 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7150 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7151 {
7152 rtx constant_part;
7153
7154 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7155 EXPAND_SUM);
7156 /* Use immed_double_const to ensure that the constant is
7157 truncated according to the mode of OP1, then sign extended
7158 to a HOST_WIDE_INT. Using the constant directly can result
7159 in non-canonical RTL in a 64x32 cross compile. */
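	    /* For instance, in a cross compile with a 64-bit HOST_WIDE_INT
	       and a 32-bit target mode, a low word of 0xffffffff must become
	       (const_int -1): CONST_INTs are kept sign-extended, so using the
	       raw low word here would yield non-canonical RTL.  */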
7160 constant_part
7161 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7162 (HOST_WIDE_INT) 0,
7163 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7164 op1 = plus_constant (op1, INTVAL (constant_part));
7165 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7166 op1 = force_operand (op1, target);
7167 return op1;
7168 }
7169
7170 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7171 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7172 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7173 {
7174 rtx constant_part;
7175
7176 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7177 EXPAND_SUM);
7178 if (! CONSTANT_P (op0))
7179 {
7180 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7181 VOIDmode, modifier);
7182 /* Don't go to both_summands if modifier
7183 says it's not right to return a PLUS. */
7184 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7185 goto binop2;
7186 goto both_summands;
7187 }
7188 /* Use immed_double_const to ensure that the constant is
7189 	       truncated according to the mode of OP0, then sign extended
7190 to a HOST_WIDE_INT. Using the constant directly can result
7191 in non-canonical RTL in a 64x32 cross compile. */
7192 constant_part
7193 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7194 (HOST_WIDE_INT) 0,
7195 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7196 op0 = plus_constant (op0, INTVAL (constant_part));
7197 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7198 op0 = force_operand (op0, target);
7199 return op0;
7200 }
7201 }
7202
7203 /* No sense saving up arithmetic to be done
7204 if it's all in the wrong mode to form part of an address.
7205 And force_operand won't know whether to sign-extend or
7206 zero-extend. */
7207 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7208 || mode != ptr_mode)
7209 goto binop;
7210
7211 preexpand_calls (exp);
7212 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7213 subtarget = 0;
7214
7215 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7216 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7217
7218 both_summands:
7219 /* Make sure any term that's a sum with a constant comes last. */
7220 if (GET_CODE (op0) == PLUS
7221 && CONSTANT_P (XEXP (op0, 1)))
7222 {
7223 temp = op0;
7224 op0 = op1;
7225 op1 = temp;
7226 }
7227 /* If adding to a sum including a constant,
7228 associate it to put the constant outside. */
7229 if (GET_CODE (op1) == PLUS
7230 && CONSTANT_P (XEXP (op1, 1)))
7231 {
7232 rtx constant_term = const0_rtx;
7233
7234 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7235 if (temp != 0)
7236 op0 = temp;
7237 /* Ensure that MULT comes first if there is one. */
7238 else if (GET_CODE (op0) == MULT)
7239 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7240 else
7241 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7242
7243 /* Let's also eliminate constants from op0 if possible. */
7244 op0 = eliminate_constant_term (op0, &constant_term);
7245
7246 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7247 their sum should be a constant. Form it into OP1, since the
7248 result we want will then be OP0 + OP1. */
7249
7250 temp = simplify_binary_operation (PLUS, mode, constant_term,
7251 XEXP (op1, 1));
7252 if (temp != 0)
7253 op1 = temp;
7254 else
7255 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7256 }
7257
7258 /* Put a constant term last and put a multiplication first. */
7259 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7260 temp = op1, op1 = op0, op0 = temp;
7261
7262 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7263 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7264
7265 case MINUS_EXPR:
7266 /* For initializers, we are allowed to return a MINUS of two
7267 symbolic constants. Here we handle all cases when both operands
7268 are constant. */
7269 /* Handle difference of two symbolic constants,
7270 for the sake of an initializer. */
7271 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7272 && really_constant_p (TREE_OPERAND (exp, 0))
7273 && really_constant_p (TREE_OPERAND (exp, 1)))
7274 {
7275 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7276 VOIDmode, ro_modifier);
7277 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7278 VOIDmode, ro_modifier);
7279
7280 /* If the last operand is a CONST_INT, use plus_constant of
7281 the negated constant. Else make the MINUS. */
7282 if (GET_CODE (op1) == CONST_INT)
7283 return plus_constant (op0, - INTVAL (op1));
7284 else
7285 return gen_rtx_MINUS (mode, op0, op1);
7286 }
7287 /* Convert A - const to A + (-const). */
7288 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7289 {
7290 tree negated = fold (build1 (NEGATE_EXPR, type,
7291 TREE_OPERAND (exp, 1)));
7292
7293 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7294 /* If we can't negate the constant in TYPE, leave it alone and
7295 expand_binop will negate it for us. We used to try to do it
7296 here in the signed version of TYPE, but that doesn't work
7297 on POINTER_TYPEs. */;
7298 else
7299 {
7300 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7301 goto plus_expr;
7302 }
7303 }
7304 this_optab = sub_optab;
7305 goto binop;
7306
7307 case MULT_EXPR:
7308 preexpand_calls (exp);
7309 /* If first operand is constant, swap them.
7310 Thus the following special case checks need only
7311 check the second operand. */
7312 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7313 {
7314 register tree t1 = TREE_OPERAND (exp, 0);
7315 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7316 TREE_OPERAND (exp, 1) = t1;
7317 }
7318
7319 /* Attempt to return something suitable for generating an
7320 indexed address, for machines that support that. */
7321
7322 if (modifier == EXPAND_SUM && mode == ptr_mode
7323 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7324 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7325 {
7326 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7327 EXPAND_SUM);
7328
7329 /* Apply distributive law if OP0 is x+c. */
7330 if (GET_CODE (op0) == PLUS
7331 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7332 return
7333 gen_rtx_PLUS
7334 (mode,
7335 gen_rtx_MULT
7336 (mode, XEXP (op0, 0),
7337 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7338 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7339 * INTVAL (XEXP (op0, 1))));
7340
7341 if (GET_CODE (op0) != REG)
7342 op0 = force_operand (op0, NULL_RTX);
7343 if (GET_CODE (op0) != REG)
7344 op0 = copy_to_mode_reg (mode, op0);
7345
7346 return
7347 gen_rtx_MULT (mode, op0,
7348 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7349 }
7350
7351 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7352 subtarget = 0;
7353
7354 /* Check for multiplying things that have been extended
7355 from a narrower type. If this machine supports multiplying
7356 in that narrower type with a result in the desired type,
7357 do it that way, and avoid the explicit type-conversion. */
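      /* For example, on a target where short is HImode and int is SImode
	 and a HImode-to-SImode widening multiply exists,
	 (int) (short) a * (int) (short) b can be done with one widening
	 multiply instead of two extensions plus an SImode multiply.  */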
7358 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7359 && TREE_CODE (type) == INTEGER_TYPE
7360 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7361 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7362 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7363 && int_fits_type_p (TREE_OPERAND (exp, 1),
7364 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7365 /* Don't use a widening multiply if a shift will do. */
7366 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7367 > HOST_BITS_PER_WIDE_INT)
7368 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7369 ||
7370 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7371 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7372 ==
7373 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7374 /* If both operands are extended, they must either both
7375 be zero-extended or both be sign-extended. */
7376 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7377 ==
7378 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7379 {
7380 enum machine_mode innermode
7381 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7382 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7383 ? smul_widen_optab : umul_widen_optab);
7384 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7385 ? umul_widen_optab : smul_widen_optab);
7386 if (mode == GET_MODE_WIDER_MODE (innermode))
7387 {
7388 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7389 {
7390 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7391 NULL_RTX, VOIDmode, 0);
7392 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7393 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7394 VOIDmode, 0);
7395 else
7396 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7397 NULL_RTX, VOIDmode, 0);
7398 goto binop2;
7399 }
7400 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7401 && innermode == word_mode)
7402 {
7403 rtx htem;
7404 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7405 NULL_RTX, VOIDmode, 0);
7406 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7407 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7408 VOIDmode, 0);
7409 else
7410 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7411 NULL_RTX, VOIDmode, 0);
7412 temp = expand_binop (mode, other_optab, op0, op1, target,
7413 unsignedp, OPTAB_LIB_WIDEN);
7414 htem = expand_mult_highpart_adjust (innermode,
7415 gen_highpart (innermode, temp),
7416 op0, op1,
7417 gen_highpart (innermode, temp),
7418 unsignedp);
7419 emit_move_insn (gen_highpart (innermode, temp), htem);
7420 return temp;
7421 }
7422 }
7423 }
7424 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7425 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7426 return expand_mult (mode, op0, op1, target, unsignedp);
7427
7428 case TRUNC_DIV_EXPR:
7429 case FLOOR_DIV_EXPR:
7430 case CEIL_DIV_EXPR:
7431 case ROUND_DIV_EXPR:
7432 case EXACT_DIV_EXPR:
7433 preexpand_calls (exp);
7434 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7435 subtarget = 0;
7436 /* Possible optimization: compute the dividend with EXPAND_SUM
7437 then if the divisor is constant can optimize the case
7438 where some terms of the dividend have coeffs divisible by it. */
7439 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7440 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7441 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7442
7443 case RDIV_EXPR:
7444 this_optab = flodiv_optab;
7445 goto binop;
7446
7447 case TRUNC_MOD_EXPR:
7448 case FLOOR_MOD_EXPR:
7449 case CEIL_MOD_EXPR:
7450 case ROUND_MOD_EXPR:
7451 preexpand_calls (exp);
7452 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7453 subtarget = 0;
7454 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7455 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7456 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7457
7458 case FIX_ROUND_EXPR:
7459 case FIX_FLOOR_EXPR:
7460 case FIX_CEIL_EXPR:
7461 abort (); /* Not used for C. */
7462
7463 case FIX_TRUNC_EXPR:
7464 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7465 if (target == 0)
7466 target = gen_reg_rtx (mode);
7467 expand_fix (target, op0, unsignedp);
7468 return target;
7469
7470 case FLOAT_EXPR:
7471 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7472 if (target == 0)
7473 target = gen_reg_rtx (mode);
7474 /* expand_float can't figure out what to do if FROM has VOIDmode.
7475 So give it the correct mode. With -O, cse will optimize this. */
7476 if (GET_MODE (op0) == VOIDmode)
7477 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7478 op0);
7479 expand_float (target, op0,
7480 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7481 return target;
7482
7483 case NEGATE_EXPR:
7484 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7485 temp = expand_unop (mode, neg_optab, op0, target, 0);
7486 if (temp == 0)
7487 abort ();
7488 return temp;
7489
7490 case ABS_EXPR:
7491 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7492
7493 /* Handle complex values specially. */
7494 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7495 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7496 return expand_complex_abs (mode, op0, target, unsignedp);
7497
7498 /* Unsigned abs is simply the operand. Testing here means we don't
7499 risk generating incorrect code below. */
7500 if (TREE_UNSIGNED (type))
7501 return op0;
7502
7503 return expand_abs (mode, op0, target,
7504 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7505
7506 case MAX_EXPR:
7507 case MIN_EXPR:
7508 target = original_target;
7509 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7510 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7511 || GET_MODE (target) != mode
7512 || (GET_CODE (target) == REG
7513 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7514 target = gen_reg_rtx (mode);
7515 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7516 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7517
7518 /* First try to do it with a special MIN or MAX instruction.
7519 If that does not win, use a conditional jump to select the proper
7520 value. */
7521 this_optab = (TREE_UNSIGNED (type)
7522 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7523 : (code == MIN_EXPR ? smin_optab : smax_optab));
7524
7525 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7526 OPTAB_WIDEN);
7527 if (temp != 0)
7528 return temp;
7529
7530 /* At this point, a MEM target is no longer useful; we will get better
7531 code without it. */
7532
7533 if (GET_CODE (target) == MEM)
7534 target = gen_reg_rtx (mode);
7535
7536 if (target != op0)
7537 emit_move_insn (target, op0);
7538
7539 op0 = gen_label_rtx ();
7540
7541 /* If this mode is an integer too wide to compare properly,
7542 compare word by word. Rely on cse to optimize constant cases. */
7543 if (GET_MODE_CLASS (mode) == MODE_INT
7544 && ! can_compare_p (GE, mode, ccp_jump))
7545 {
7546 if (code == MAX_EXPR)
7547 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7548 target, op1, NULL_RTX, op0);
7549 else
7550 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7551 op1, target, NULL_RTX, op0);
7552 }
7553 else
7554 {
7555 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7556 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7557 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7558 op0);
7559 }
7560 emit_move_insn (target, op1);
7561 emit_label (op0);
7562 return target;
7563
7564 case BIT_NOT_EXPR:
7565 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7566 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7567 if (temp == 0)
7568 abort ();
7569 return temp;
7570
7571 case FFS_EXPR:
7572 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7573 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7574 if (temp == 0)
7575 abort ();
7576 return temp;
7577
7578 /* ??? Can optimize bitwise operations with one arg constant.
7579 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7580 and (a bitwise1 b) bitwise2 b (etc)
7581 	 but that is probably not worthwhile.  */
7582
7583 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7584 boolean values when we want in all cases to compute both of them. In
7585 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7586 as actual zero-or-1 values and then bitwise anding. In cases where
7587 there cannot be any side effects, better code would be made by
7588 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7589 how to recognize those cases. */
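    /* That is, a && b as TRUTH_ANDIF_EXPR may branch around its second
       operand, whereas TRUTH_AND_EXPR reduces both operands to 0 or 1 and
       combines them with a plain AND.  */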
7590
7591 case TRUTH_AND_EXPR:
7592 case BIT_AND_EXPR:
7593 this_optab = and_optab;
7594 goto binop;
7595
7596 case TRUTH_OR_EXPR:
7597 case BIT_IOR_EXPR:
7598 this_optab = ior_optab;
7599 goto binop;
7600
7601 case TRUTH_XOR_EXPR:
7602 case BIT_XOR_EXPR:
7603 this_optab = xor_optab;
7604 goto binop;
7605
7606 case LSHIFT_EXPR:
7607 case RSHIFT_EXPR:
7608 case LROTATE_EXPR:
7609 case RROTATE_EXPR:
7610 preexpand_calls (exp);
7611 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7612 subtarget = 0;
7613 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7614 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7615 unsignedp);
7616
7617 /* Could determine the answer when only additive constants differ. Also,
7618 the addition of one can be handled by changing the condition. */
7619 case LT_EXPR:
7620 case LE_EXPR:
7621 case GT_EXPR:
7622 case GE_EXPR:
7623 case EQ_EXPR:
7624 case NE_EXPR:
7625 case UNORDERED_EXPR:
7626 case ORDERED_EXPR:
7627 case UNLT_EXPR:
7628 case UNLE_EXPR:
7629 case UNGT_EXPR:
7630 case UNGE_EXPR:
7631 case UNEQ_EXPR:
7632 preexpand_calls (exp);
7633 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7634 if (temp != 0)
7635 return temp;
7636
7637 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7638 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7639 && original_target
7640 && GET_CODE (original_target) == REG
7641 && (GET_MODE (original_target)
7642 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7643 {
7644 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7645 VOIDmode, 0);
7646
7647 if (temp != original_target)
7648 temp = copy_to_reg (temp);
7649
7650 op1 = gen_label_rtx ();
7651 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7652 GET_MODE (temp), unsignedp, 0, op1);
7653 emit_move_insn (temp, const1_rtx);
7654 emit_label (op1);
7655 return temp;
7656 }
7657
7658 /* If no set-flag instruction, must generate a conditional
7659 store into a temporary variable. Drop through
7660 and handle this like && and ||. */
7661
7662 case TRUTH_ANDIF_EXPR:
7663 case TRUTH_ORIF_EXPR:
7664 if (! ignore
7665 && (target == 0 || ! safe_from_p (target, exp, 1)
7666 /* Make sure we don't have a hard reg (such as function's return
7667 value) live across basic blocks, if not optimizing. */
7668 || (!optimize && GET_CODE (target) == REG
7669 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7670 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7671
7672 if (target)
7673 emit_clr_insn (target);
7674
7675 op1 = gen_label_rtx ();
7676 jumpifnot (exp, op1);
7677
7678 if (target)
7679 emit_0_to_1_insn (target);
7680
7681 emit_label (op1);
7682 return ignore ? const0_rtx : target;
7683
7684 case TRUTH_NOT_EXPR:
7685 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7686 /* The parser is careful to generate TRUTH_NOT_EXPR
7687 only with operands that are always zero or one. */
7688 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7689 target, 1, OPTAB_LIB_WIDEN);
7690 if (temp == 0)
7691 abort ();
7692 return temp;
7693
7694 case COMPOUND_EXPR:
7695 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7696 emit_queue ();
7697 return expand_expr (TREE_OPERAND (exp, 1),
7698 (ignore ? const0_rtx : target),
7699 VOIDmode, 0);
7700
7701 case COND_EXPR:
7702 /* If we would have a "singleton" (see below) were it not for a
7703 conversion in each arm, bring that conversion back out. */
7704 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7705 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7706 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7707 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7708 {
7709 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7710 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7711
7712 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7713 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7714 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7715 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7716 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7717 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7718 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7719 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7720 return expand_expr (build1 (NOP_EXPR, type,
7721 build (COND_EXPR, TREE_TYPE (true),
7722 TREE_OPERAND (exp, 0),
7723 true, false)),
7724 target, tmode, modifier);
7725 }
7726
7727 {
7728 /* Note that COND_EXPRs whose type is a structure or union
7729 are required to be constructed to contain assignments of
7730 a temporary variable, so that we can evaluate them here
7731 for side effect only. If type is void, we must do likewise. */
7732
7733 /* If an arm of the branch requires a cleanup,
7734 only that cleanup is performed. */
7735
7736 tree singleton = 0;
7737 tree binary_op = 0, unary_op = 0;
7738
7739 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7740 convert it to our mode, if necessary. */
7741 if (integer_onep (TREE_OPERAND (exp, 1))
7742 && integer_zerop (TREE_OPERAND (exp, 2))
7743 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7744 {
7745 if (ignore)
7746 {
7747 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7748 ro_modifier);
7749 return const0_rtx;
7750 }
7751
7752 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7753 if (GET_MODE (op0) == mode)
7754 return op0;
7755
7756 if (target == 0)
7757 target = gen_reg_rtx (mode);
7758 convert_move (target, op0, unsignedp);
7759 return target;
7760 }
7761
7762 /* Check for X ? A + B : A. If we have this, we can copy A to the
7763 output and conditionally add B. Similarly for unary operations.
7764 Don't do this if X has side-effects because those side effects
7765 might affect A or B and the "?" operation is a sequence point in
7766 ANSI. (operand_equal_p tests for side effects.) */
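	/* For instance, for P ? A + B : A we store A into the output
	   unconditionally and then add B only on the path where P is true.  */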
7767
7768 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7769 && operand_equal_p (TREE_OPERAND (exp, 2),
7770 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7771 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7772 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7773 && operand_equal_p (TREE_OPERAND (exp, 1),
7774 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7775 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7776 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7777 && operand_equal_p (TREE_OPERAND (exp, 2),
7778 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7779 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7780 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7781 && operand_equal_p (TREE_OPERAND (exp, 1),
7782 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7783 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7784
7785 /* If we are not to produce a result, we have no target. Otherwise,
7786 if a target was specified use it; it will not be used as an
7787 intermediate target unless it is safe. If no target, use a
7788 temporary. */
7789
7790 if (ignore)
7791 temp = 0;
7792 else if (original_target
7793 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7794 || (singleton && GET_CODE (original_target) == REG
7795 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7796 && original_target == var_rtx (singleton)))
7797 && GET_MODE (original_target) == mode
7798 #ifdef HAVE_conditional_move
7799 && (! can_conditionally_move_p (mode)
7800 || GET_CODE (original_target) == REG
7801 || TREE_ADDRESSABLE (type))
7802 #endif
7803 && ! (GET_CODE (original_target) == MEM
7804 && MEM_VOLATILE_P (original_target)))
7805 temp = original_target;
7806 else if (TREE_ADDRESSABLE (type))
7807 abort ();
7808 else
7809 temp = assign_temp (type, 0, 0, 1);
7810
7811 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7812 do the test of X as a store-flag operation, do this as
7813 A + ((X != 0) << log C). Similarly for other simple binary
7814 operators. Only do for C == 1 if BRANCH_COST is low. */
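	/* E.g. X ? A + 4 : A can become A + ((X != 0) << 2), avoiding the
	   branch entirely when the store-flag succeeds.  */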
7815 if (temp && singleton && binary_op
7816 && (TREE_CODE (binary_op) == PLUS_EXPR
7817 || TREE_CODE (binary_op) == MINUS_EXPR
7818 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7819 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7820 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7821 : integer_onep (TREE_OPERAND (binary_op, 1)))
7822 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7823 {
7824 rtx result;
7825 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7826 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7827 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7828 : xor_optab);
7829
7830 /* If we had X ? A : A + 1, do this as A + (X == 0).
7831
7832 We have to invert the truth value here and then put it
7833 back later if do_store_flag fails. We cannot simply copy
7834 TREE_OPERAND (exp, 0) to another variable and modify that
7835 because invert_truthvalue can modify the tree pointed to
7836 by its argument. */
7837 if (singleton == TREE_OPERAND (exp, 1))
7838 TREE_OPERAND (exp, 0)
7839 = invert_truthvalue (TREE_OPERAND (exp, 0));
7840
7841 result = do_store_flag (TREE_OPERAND (exp, 0),
7842 (safe_from_p (temp, singleton, 1)
7843 ? temp : NULL_RTX),
7844 mode, BRANCH_COST <= 1);
7845
7846 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7847 result = expand_shift (LSHIFT_EXPR, mode, result,
7848 build_int_2 (tree_log2
7849 (TREE_OPERAND
7850 (binary_op, 1)),
7851 0),
7852 (safe_from_p (temp, singleton, 1)
7853 ? temp : NULL_RTX), 0);
7854
7855 if (result)
7856 {
7857 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7858 return expand_binop (mode, boptab, op1, result, temp,
7859 unsignedp, OPTAB_LIB_WIDEN);
7860 }
7861 else if (singleton == TREE_OPERAND (exp, 1))
7862 TREE_OPERAND (exp, 0)
7863 = invert_truthvalue (TREE_OPERAND (exp, 0));
7864 }
7865
7866 do_pending_stack_adjust ();
7867 NO_DEFER_POP;
7868 op0 = gen_label_rtx ();
7869
7870 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7871 {
7872 if (temp != 0)
7873 {
7874 /* If the target conflicts with the other operand of the
7875 binary op, we can't use it. Also, we can't use the target
7876 if it is a hard register, because evaluating the condition
7877 might clobber it. */
7878 if ((binary_op
7879 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7880 || (GET_CODE (temp) == REG
7881 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7882 temp = gen_reg_rtx (mode);
7883 store_expr (singleton, temp, 0);
7884 }
7885 else
7886 expand_expr (singleton,
7887 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7888 if (singleton == TREE_OPERAND (exp, 1))
7889 jumpif (TREE_OPERAND (exp, 0), op0);
7890 else
7891 jumpifnot (TREE_OPERAND (exp, 0), op0);
7892
7893 start_cleanup_deferral ();
7894 if (binary_op && temp == 0)
7895 /* Just touch the other operand. */
7896 expand_expr (TREE_OPERAND (binary_op, 1),
7897 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7898 else if (binary_op)
7899 store_expr (build (TREE_CODE (binary_op), type,
7900 make_tree (type, temp),
7901 TREE_OPERAND (binary_op, 1)),
7902 temp, 0);
7903 else
7904 store_expr (build1 (TREE_CODE (unary_op), type,
7905 make_tree (type, temp)),
7906 temp, 0);
7907 op1 = op0;
7908 }
7909 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7910 comparison operator. If we have one of these cases, set the
7911 output to A, branch on A (cse will merge these two references),
7912 then set the output to FOO. */
7913 else if (temp
7914 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7915 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7916 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7917 TREE_OPERAND (exp, 1), 0)
7918 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7919 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7920 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7921 {
7922 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7923 temp = gen_reg_rtx (mode);
7924 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7925 jumpif (TREE_OPERAND (exp, 0), op0);
7926
7927 start_cleanup_deferral ();
7928 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7929 op1 = op0;
7930 }
7931 else if (temp
7932 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7933 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7934 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7935 TREE_OPERAND (exp, 2), 0)
7936 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7937 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7938 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7939 {
7940 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7941 temp = gen_reg_rtx (mode);
7942 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7943 jumpifnot (TREE_OPERAND (exp, 0), op0);
7944
7945 start_cleanup_deferral ();
7946 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7947 op1 = op0;
7948 }
7949 else
7950 {
7951 op1 = gen_label_rtx ();
7952 jumpifnot (TREE_OPERAND (exp, 0), op0);
7953
7954 start_cleanup_deferral ();
7955
7956 	    /* One branch of the cond can be void if it never returns.  For
7957 	       example, A ? throw : E.  */
7958 if (temp != 0
7959 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7960 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7961 else
7962 expand_expr (TREE_OPERAND (exp, 1),
7963 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7964 end_cleanup_deferral ();
7965 emit_queue ();
7966 emit_jump_insn (gen_jump (op1));
7967 emit_barrier ();
7968 emit_label (op0);
7969 start_cleanup_deferral ();
7970 if (temp != 0
7971 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7972 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7973 else
7974 expand_expr (TREE_OPERAND (exp, 2),
7975 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7976 }
7977
7978 end_cleanup_deferral ();
7979
7980 emit_queue ();
7981 emit_label (op1);
7982 OK_DEFER_POP;
7983
7984 return temp;
7985 }
7986
7987 case TARGET_EXPR:
7988 {
7989 /* Something needs to be initialized, but we didn't know
7990 where that thing was when building the tree. For example,
7991 it could be the return value of a function, or a parameter
7992 	 to a function which is laid down on the stack, or a temporary
7993 variable which must be passed by reference.
7994
7995 We guarantee that the expression will either be constructed
7996 or copied into our original target. */
7997
7998 tree slot = TREE_OPERAND (exp, 0);
7999 tree cleanups = NULL_TREE;
8000 tree exp1;
8001
8002 if (TREE_CODE (slot) != VAR_DECL)
8003 abort ();
8004
8005 if (! ignore)
8006 target = original_target;
8007
8008 /* Set this here so that if we get a target that refers to a
8009 register variable that's already been used, put_reg_into_stack
8010 knows that it should fix up those uses. */
8011 TREE_USED (slot) = 1;
8012
8013 if (target == 0)
8014 {
8015 if (DECL_RTL (slot) != 0)
8016 {
8017 target = DECL_RTL (slot);
8018 		/* If we have already expanded the slot, don't do
8019 		   it again.  (mrs)  */
8020 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8021 return target;
8022 }
8023 else
8024 {
8025 target = assign_temp (type, 2, 0, 1);
8026 /* All temp slots at this level must not conflict. */
8027 preserve_temp_slots (target);
8028 DECL_RTL (slot) = target;
8029 if (TREE_ADDRESSABLE (slot))
8030 {
8031 TREE_ADDRESSABLE (slot) = 0;
8032 mark_addressable (slot);
8033 }
8034
8035 /* Since SLOT is not known to the called function
8036 to belong to its stack frame, we must build an explicit
8037 cleanup. This case occurs when we must build up a reference
8038 to pass the reference as an argument. In this case,
8039 it is very likely that such a reference need not be
8040 built here. */
8041
8042 if (TREE_OPERAND (exp, 2) == 0)
8043 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8044 cleanups = TREE_OPERAND (exp, 2);
8045 }
8046 }
8047 else
8048 {
8049 	    /* This case does occur when expanding a parameter which
8050 needs to be constructed on the stack. The target
8051 is the actual stack address that we want to initialize.
8052 The function we call will perform the cleanup in this case. */
8053
8054 /* If we have already assigned it space, use that space,
8055 	       not the target that we were passed in, as our target
8056 parameter is only a hint. */
8057 if (DECL_RTL (slot) != 0)
8058 {
8059 target = DECL_RTL (slot);
8060 		/* If we have already expanded the slot, don't do
8061 		   it again.  (mrs)  */
8062 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8063 return target;
8064 }
8065 else
8066 {
8067 DECL_RTL (slot) = target;
8068 /* If we must have an addressable slot, then make sure that
8069 the RTL that we just stored in slot is OK. */
8070 if (TREE_ADDRESSABLE (slot))
8071 {
8072 TREE_ADDRESSABLE (slot) = 0;
8073 mark_addressable (slot);
8074 }
8075 }
8076 }
8077
8078 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8079 /* Mark it as expanded. */
8080 TREE_OPERAND (exp, 1) = NULL_TREE;
8081
8082 store_expr (exp1, target, 0);
8083
8084 expand_decl_cleanup (NULL_TREE, cleanups);
8085
8086 return target;
8087 }
8088
8089 case INIT_EXPR:
8090 {
8091 tree lhs = TREE_OPERAND (exp, 0);
8092 tree rhs = TREE_OPERAND (exp, 1);
8093 tree noncopied_parts = 0;
8094 tree lhs_type = TREE_TYPE (lhs);
8095
8096 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8097 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8098 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8099 TYPE_NONCOPIED_PARTS (lhs_type));
8100 while (noncopied_parts != 0)
8101 {
8102 expand_assignment (TREE_VALUE (noncopied_parts),
8103 TREE_PURPOSE (noncopied_parts), 0, 0);
8104 noncopied_parts = TREE_CHAIN (noncopied_parts);
8105 }
8106 return temp;
8107 }
8108
8109 case MODIFY_EXPR:
8110 {
8111 /* If lhs is complex, expand calls in rhs before computing it.
8112 That's so we don't compute a pointer and save it over a call.
8113 If lhs is simple, compute it first so we can give it as a
8114 	 target if the rhs is just a call.  This avoids an extra temp and copy,
8115 	 which in turn prevents a partial subsumption that makes bad code.
8116 Actually we could treat component_ref's of vars like vars. */
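	/* For example, for a simple `v = f (x)' we expand V first and hand
	   it to the assignment as the target for the call, so the call's
	   result is stored directly into V with no intermediate temporary.  */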
8117
8118 tree lhs = TREE_OPERAND (exp, 0);
8119 tree rhs = TREE_OPERAND (exp, 1);
8120 tree noncopied_parts = 0;
8121 tree lhs_type = TREE_TYPE (lhs);
8122
8123 temp = 0;
8124
8125 if (TREE_CODE (lhs) != VAR_DECL
8126 && TREE_CODE (lhs) != RESULT_DECL
8127 && TREE_CODE (lhs) != PARM_DECL
8128 && ! (TREE_CODE (lhs) == INDIRECT_REF
8129 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8130 preexpand_calls (exp);
8131
8132 /* Check for |= or &= of a bitfield of size one into another bitfield
8133 of size 1. In this case, (unless we need the result of the
8134 assignment) we can do this more efficiently with a
8135 test followed by an assignment, if necessary.
8136
8137 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8138 things change so we do, this code should be enhanced to
8139 support it. */
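	/* For example, for `x.a |= y.b' with one-bit fields we just test
	   Y.B and store 1 into X.A only when it is set; for `&=' we store 0
	   only when Y.B is clear.  */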
8140 if (ignore
8141 && TREE_CODE (lhs) == COMPONENT_REF
8142 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8143 || TREE_CODE (rhs) == BIT_AND_EXPR)
8144 && TREE_OPERAND (rhs, 0) == lhs
8145 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8146 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8147 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8148 {
8149 rtx label = gen_label_rtx ();
8150
8151 do_jump (TREE_OPERAND (rhs, 1),
8152 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8153 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8154 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8155 (TREE_CODE (rhs) == BIT_IOR_EXPR
8156 ? integer_one_node
8157 : integer_zero_node)),
8158 0, 0);
8159 do_pending_stack_adjust ();
8160 emit_label (label);
8161 return const0_rtx;
8162 }
8163
8164 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8165 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8166 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8167 TYPE_NONCOPIED_PARTS (lhs_type));
8168
8169 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8170 while (noncopied_parts != 0)
8171 {
8172 expand_assignment (TREE_PURPOSE (noncopied_parts),
8173 TREE_VALUE (noncopied_parts), 0, 0);
8174 noncopied_parts = TREE_CHAIN (noncopied_parts);
8175 }
8176 return temp;
8177 }
8178
8179 case RETURN_EXPR:
8180 if (!TREE_OPERAND (exp, 0))
8181 expand_null_return ();
8182 else
8183 expand_return (TREE_OPERAND (exp, 0));
8184 return const0_rtx;
8185
8186 case PREINCREMENT_EXPR:
8187 case PREDECREMENT_EXPR:
8188 return expand_increment (exp, 0, ignore);
8189
8190 case POSTINCREMENT_EXPR:
8191 case POSTDECREMENT_EXPR:
8192 /* Faster to treat as pre-increment if result is not used. */
8193 return expand_increment (exp, ! ignore, ignore);
8194
8195 case ADDR_EXPR:
8196 /* If nonzero, TEMP will be set to the address of something that might
8197 be a MEM corresponding to a stack slot. */
8198 temp = 0;
8199
8200 /* Are we taking the address of a nested function? */
8201 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8202 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8203 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8204 && ! TREE_STATIC (exp))
8205 {
8206 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8207 op0 = force_operand (op0, target);
8208 }
8209 /* If we are taking the address of something erroneous, just
8210 return a zero. */
8211 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8212 return const0_rtx;
8213 else
8214 {
8215 /* We make sure to pass const0_rtx down if we came in with
8216 ignore set, to avoid doing the cleanups twice for something. */
8217 op0 = expand_expr (TREE_OPERAND (exp, 0),
8218 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8219 (modifier == EXPAND_INITIALIZER
8220 ? modifier : EXPAND_CONST_ADDRESS));
8221
8222 /* If we are going to ignore the result, OP0 will have been set
8223 to const0_rtx, so just return it. Don't get confused and
8224 think we are taking the address of the constant. */
8225 if (ignore)
8226 return op0;
8227
8228 op0 = protect_from_queue (op0, 0);
8229
8230 /* We would like the object in memory. If it is a constant, we can
8231 have it be statically allocated into memory. For a non-constant,
8232 we need to allocate some memory and store the value into it. */
8233
8234 if (CONSTANT_P (op0))
8235 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8236 op0);
8237 else if (GET_CODE (op0) == MEM)
8238 {
8239 mark_temp_addr_taken (op0);
8240 temp = XEXP (op0, 0);
8241 }
8242
8243 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8244 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8245 {
8246 	      /* If this object is in a register, it must not
8247 		 be BLKmode.  */
8248 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8249 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8250
8251 mark_temp_addr_taken (memloc);
8252 emit_move_insn (memloc, op0);
8253 op0 = memloc;
8254 }
8255
8256 if (GET_CODE (op0) != MEM)
8257 abort ();
8258
8259 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8260 {
8261 temp = XEXP (op0, 0);
8262 #ifdef POINTERS_EXTEND_UNSIGNED
8263 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8264 && mode == ptr_mode)
8265 temp = convert_memory_address (ptr_mode, temp);
8266 #endif
8267 return temp;
8268 }
8269
8270 op0 = force_operand (XEXP (op0, 0), target);
8271 }
8272
8273 if (flag_force_addr && GET_CODE (op0) != REG)
8274 op0 = force_reg (Pmode, op0);
8275
8276 if (GET_CODE (op0) == REG
8277 && ! REG_USERVAR_P (op0))
8278 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8279
8280 /* If we might have had a temp slot, add an equivalent address
8281 for it. */
8282 if (temp != 0)
8283 update_temp_slot_address (temp, op0);
8284
8285 #ifdef POINTERS_EXTEND_UNSIGNED
8286 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8287 && mode == ptr_mode)
8288 op0 = convert_memory_address (ptr_mode, op0);
8289 #endif
8290
8291 return op0;
8292
8293 case ENTRY_VALUE_EXPR:
8294 abort ();
8295
8296 /* COMPLEX type for Extended Pascal & Fortran */
8297 case COMPLEX_EXPR:
8298 {
8299 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8300 rtx insns;
8301
8302 /* Get the rtx code of the operands. */
8303 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8304 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8305
8306 if (! target)
8307 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8308
8309 start_sequence ();
8310
8311 /* Move the real (op0) and imaginary (op1) parts to their location. */
8312 emit_move_insn (gen_realpart (mode, target), op0);
8313 emit_move_insn (gen_imagpart (mode, target), op1);
8314
8315 insns = get_insns ();
8316 end_sequence ();
8317
8318 /* Complex construction should appear as a single unit. */
8319 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8320 each with a separate pseudo as destination.
8321 It's not correct for flow to treat them as a unit. */
8322 if (GET_CODE (target) != CONCAT)
8323 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8324 else
8325 emit_insns (insns);
8326
8327 return target;
8328 }
8329
8330 case REALPART_EXPR:
8331 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8332 return gen_realpart (mode, op0);
8333
8334 case IMAGPART_EXPR:
8335 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8336 return gen_imagpart (mode, op0);
8337
8338 case CONJ_EXPR:
8339 {
8340 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8341 rtx imag_t;
8342 rtx insns;
8343
8344 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8345
8346 if (! target)
8347 target = gen_reg_rtx (mode);
8348
8349 start_sequence ();
8350
8351 /* Store the realpart and the negated imagpart to target. */
8352 emit_move_insn (gen_realpart (partmode, target),
8353 gen_realpart (partmode, op0));
8354
8355 imag_t = gen_imagpart (partmode, target);
8356 temp = expand_unop (partmode, neg_optab,
8357 gen_imagpart (partmode, op0), imag_t, 0);
8358 if (temp != imag_t)
8359 emit_move_insn (imag_t, temp);
8360
8361 insns = get_insns ();
8362 end_sequence ();
8363
8364 	/* Conjugate should appear as a single unit.
8365 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8366 each with a separate pseudo as destination.
8367 It's not correct for flow to treat them as a unit. */
8368 if (GET_CODE (target) != CONCAT)
8369 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8370 else
8371 emit_insns (insns);
8372
8373 return target;
8374 }
8375
8376 case TRY_CATCH_EXPR:
8377 {
8378 tree handler = TREE_OPERAND (exp, 1);
8379
8380 expand_eh_region_start ();
8381
8382 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8383
8384 expand_eh_region_end (handler);
8385
8386 return op0;
8387 }
8388
8389 case TRY_FINALLY_EXPR:
8390 {
8391 tree try_block = TREE_OPERAND (exp, 0);
8392 tree finally_block = TREE_OPERAND (exp, 1);
8393 rtx finally_label = gen_label_rtx ();
8394 rtx done_label = gen_label_rtx ();
8395 rtx return_link = gen_reg_rtx (Pmode);
8396 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8397 (tree) finally_label, (tree) return_link);
8398 TREE_SIDE_EFFECTS (cleanup) = 1;
8399
8400 /* Start a new binding layer that will keep track of all cleanup
8401 actions to be performed. */
8402 expand_start_bindings (2);
8403
8404 target_temp_slot_level = temp_slot_level;
8405
8406 expand_decl_cleanup (NULL_TREE, cleanup);
8407 op0 = expand_expr (try_block, target, tmode, modifier);
8408
8409 preserve_temp_slots (op0);
8410 expand_end_bindings (NULL_TREE, 0, 0);
8411 emit_jump (done_label);
8412 emit_label (finally_label);
8413 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8414 emit_indirect_jump (return_link);
8415 emit_label (done_label);
8416 return op0;
8417 }
8418
8419 case GOTO_SUBROUTINE_EXPR:
8420 {
8421 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8422 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8423 rtx return_address = gen_label_rtx ();
8424 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8425 emit_jump (subr);
8426 emit_label (return_address);
8427 return const0_rtx;
8428 }
8429
8430 case POPDCC_EXPR:
8431 {
8432 rtx dcc = get_dynamic_cleanup_chain ();
8433 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8434 return const0_rtx;
8435 }
8436
8437 case POPDHC_EXPR:
8438 {
8439 rtx dhc = get_dynamic_handler_chain ();
8440 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8441 return const0_rtx;
8442 }
8443
8444 case VA_ARG_EXPR:
8445 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8446
8447 default:
8448 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8449 }
8450
8451 /* Here to do an ordinary binary operator, generating an instruction
8452 from the optab already placed in `this_optab'. */
8453 binop:
8454 preexpand_calls (exp);
8455 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8456 subtarget = 0;
8457 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8458 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8459 binop2:
8460 temp = expand_binop (mode, this_optab, op0, op1, target,
8461 unsignedp, OPTAB_LIB_WIDEN);
8462 if (temp == 0)
8463 abort ();
8464 return temp;
8465 }
8466 \f
8467 /* Similar to expand_expr, except that we don't specify a target, target
8468 mode, or modifier and we return the alignment of the inner type. This is
8469 used in cases where it is not necessary to align the result to the
8470 alignment of its type as long as we know the alignment of the result, for
8471 example for comparisons of BLKmode values. */
8472
8473 static rtx
8474 expand_expr_unaligned (exp, palign)
8475 register tree exp;
8476 unsigned int *palign;
8477 {
8478 register rtx op0;
8479 tree type = TREE_TYPE (exp);
8480 register enum machine_mode mode = TYPE_MODE (type);
8481
8482 /* Default the alignment we return to that of the type. */
8483 *palign = TYPE_ALIGN (type);
8484
8485 /* The only cases in which we do anything special is if the resulting mode
8486 is BLKmode. */
8487 if (mode != BLKmode)
8488 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8489
8490 switch (TREE_CODE (exp))
8491 {
8492 case CONVERT_EXPR:
8493 case NOP_EXPR:
8494 case NON_LVALUE_EXPR:
8495 /* Conversions between BLKmode values don't change the underlying
8496 alignment or value. */
8497 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8498 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8499 break;
8500
8501 case ARRAY_REF:
8502 /* Much of the code for this case is copied directly from expand_expr.
8503 We need to duplicate it here because we will do something different
8504 in the fall-through case, so we need to handle the same exceptions
8505 it does. */
8506 {
8507 tree array = TREE_OPERAND (exp, 0);
8508 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8509 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8510 tree index = TREE_OPERAND (exp, 1);
8511 tree index_type = TREE_TYPE (index);
8512 HOST_WIDE_INT i;
8513
8514 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8515 abort ();
8516
8517 /* Optimize the special-case of a zero lower bound.
8518
8519 We convert the low_bound to sizetype to avoid some problems
8520 with constant folding. (E.g. suppose the lower bound is 1,
8521 and its mode is QI. Without the conversion, (ARRAY
8522 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8523 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
8524
8525 But sizetype isn't quite right either (especially if
8526 the lowbound is negative). FIXME */
8527
8528 if (! integer_zerop (low_bound))
8529 index = fold (build (MINUS_EXPR, index_type, index,
8530 convert (sizetype, low_bound)));
8531
8532 /* If this is a constant index into a constant array,
8533 just get the value from the array. Handle both the cases when
8534 we have an explicit constructor and when our operand is a variable
8535 that was declared const. */
8536
8537 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
8538 {
8539 if (TREE_CODE (index) == INTEGER_CST
8540 && TREE_INT_CST_HIGH (index) == 0)
8541 {
8542 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
8543
8544 i = TREE_INT_CST_LOW (index);
8545 while (elem && i--)
8546 elem = TREE_CHAIN (elem);
8547 if (elem)
8548 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8549 palign);
8550 }
8551 }
8552
8553 else if (optimize >= 1
8554 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8555 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8556 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8557 {
8558 if (TREE_CODE (index) == INTEGER_CST)
8559 {
8560 tree init = DECL_INITIAL (array);
8561
8562 i = TREE_INT_CST_LOW (index);
8563 if (TREE_CODE (init) == CONSTRUCTOR)
8564 {
8565 tree elem = CONSTRUCTOR_ELTS (init);
8566
8567 while (elem
8568 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
8569 elem = TREE_CHAIN (elem);
8570 if (elem)
8571 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8572 palign);
8573 }
8574 }
8575 }
8576 }
8577
8578 /* ... fall through ... */
8579
8580 case COMPONENT_REF:
8581 case BIT_FIELD_REF:
8582 /* If the operand is a CONSTRUCTOR, we can just extract the
8583 appropriate field if it is present. Don't do this if we have
8584 already written the data since we want to refer to that copy
8585 and varasm.c assumes that's what we'll do. */
8586 if (TREE_CODE (exp) != ARRAY_REF
8587 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8588 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8589 {
8590 tree elt;
8591
8592 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8593 elt = TREE_CHAIN (elt))
8594 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8595 /* Note that unlike the case in expand_expr, we know this is
8596 BLKmode and hence not an integer. */
8597 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8598 }
8599
8600 {
8601 enum machine_mode mode1;
8602 int bitsize;
8603 int bitpos;
8604 tree offset;
8605 int volatilep = 0;
8606 unsigned int alignment;
8607 int unsignedp;
8608 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8609 &mode1, &unsignedp, &volatilep,
8610 &alignment);
8611
8612 /* If we got back the original object, something is wrong. Perhaps
8613 we are evaluating an expression too early. In any event, don't
8614 infinitely recurse. */
8615 if (tem == exp)
8616 abort ();
8617
8618 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8619
8620 /* If this is a constant, put it into a register if it is a
8621 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8622 if (CONSTANT_P (op0))
8623 {
8624 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8625
8626 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8627 && offset == 0)
8628 op0 = force_reg (inner_mode, op0);
8629 else
8630 op0 = validize_mem (force_const_mem (inner_mode, op0));
8631 }
8632
8633 if (offset != 0)
8634 {
8635 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8636
8637 /* If this object is in a register, put it into memory.
8638 This case can't occur in C, but can in Ada if we have
8639 unchecked conversion of an expression from a scalar type to
8640 an array or record type. */
8641 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8642 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8643 {
8644 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8645
8646 mark_temp_addr_taken (memloc);
8647 emit_move_insn (memloc, op0);
8648 op0 = memloc;
8649 }
8650
8651 if (GET_CODE (op0) != MEM)
8652 abort ();
8653
8654 if (GET_MODE (offset_rtx) != ptr_mode)
8655 {
8656 #ifdef POINTERS_EXTEND_UNSIGNED
8657 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8658 #else
8659 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8660 #endif
8661 }
8662
8663 op0 = change_address (op0, VOIDmode,
8664 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8665 force_reg (ptr_mode,
8666 offset_rtx)));
8667 }
8668
8669 /* Don't forget about volatility even if this is a bitfield. */
8670 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8671 {
8672 op0 = copy_rtx (op0);
8673 MEM_VOLATILE_P (op0) = 1;
8674 }
8675
8676 /* Check the access. */
8677 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8678 {
8679 rtx to;
8680 int size;
8681
8682 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8683 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8684
8685 /* Check the access rights of the pointer. */
8686 if (size > BITS_PER_UNIT)
8687 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8688 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8689 TYPE_MODE (sizetype),
8690 GEN_INT (MEMORY_USE_RO),
8691 TYPE_MODE (integer_type_node));
8692 }
8693
8694 /* In cases where an aligned union has an unaligned object
8695 as a field, we might be extracting a BLKmode value from
8696 an integer-mode (e.g., SImode) object. Handle this case
8697 by doing the extract into an object as wide as the field
8698 (which we know to be the width of a basic mode), then
8699 storing into memory, and changing the mode to BLKmode.
8700 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8701 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8702 if (mode1 == VOIDmode
8703 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8704 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8705 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8706 || bitpos % TYPE_ALIGN (type) != 0)))
8707 {
8708 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8709
8710 if (ext_mode == BLKmode)
8711 {
8712 /* In this case, BITPOS must start at a byte boundary. */
8713 if (GET_CODE (op0) != MEM
8714 || bitpos % BITS_PER_UNIT != 0)
8715 abort ();
8716
8717 op0 = change_address (op0, VOIDmode,
8718 plus_constant (XEXP (op0, 0),
8719 bitpos / BITS_PER_UNIT));
8720 }
8721 else
8722 {
8723 rtx new = assign_stack_temp (ext_mode,
8724 bitsize / BITS_PER_UNIT, 0);
8725
8726 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8727 unsignedp, NULL_RTX, ext_mode,
8728 ext_mode, alignment,
8729 int_size_in_bytes (TREE_TYPE (tem)));
8730
8731 /* If the result is a record type and BITSIZE is narrower than
8732 the mode of OP0, an integral mode, and this is a big endian
8733 machine, we must put the field into the high-order bits. */
8734 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8735 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8736 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8737 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8738 size_int (GET_MODE_BITSIZE
8739 (GET_MODE (op0))
8740 - bitsize),
8741 op0, 1);
8742
8743
8744 emit_move_insn (new, op0);
8745 op0 = copy_rtx (new);
8746 PUT_MODE (op0, BLKmode);
8747 }
8748 }
8749 else
8750 /* Get a reference to just this component. */
8751 op0 = change_address (op0, mode1,
8752 plus_constant (XEXP (op0, 0),
8753 (bitpos / BITS_PER_UNIT)));
8754
8755 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8756
8757 /* Adjust the alignment in case the bit position is not
8758 a multiple of the alignment of the inner object. */
8759 while (bitpos % alignment != 0)
8760 alignment >>= 1;
8761
8762 if (GET_CODE (XEXP (op0, 0)) == REG)
8763 mark_reg_pointer (XEXP (op0, 0), alignment);
8764
8765 MEM_IN_STRUCT_P (op0) = 1;
8766 MEM_VOLATILE_P (op0) |= volatilep;
8767
8768 *palign = alignment;
8769 return op0;
8770 }
8771
8772 default:
8773 break;
8774
8775 }
8776
8777 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8778 }
8779 \f
8780 /* If ARG corresponds to a string constant, return the STRING_CST node
8781 and set *PTR_OFFSET to the offset into it; otherwise return 0. */
8782
8783 tree
8784 string_constant (arg, ptr_offset)
8785 tree arg;
8786 tree *ptr_offset;
8787 {
8788 STRIP_NOPS (arg);
8789
8790 if (TREE_CODE (arg) == ADDR_EXPR
8791 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8792 {
8793 *ptr_offset = integer_zero_node;
8794 return TREE_OPERAND (arg, 0);
8795 }
8796 else if (TREE_CODE (arg) == PLUS_EXPR)
8797 {
8798 tree arg0 = TREE_OPERAND (arg, 0);
8799 tree arg1 = TREE_OPERAND (arg, 1);
8800
8801 STRIP_NOPS (arg0);
8802 STRIP_NOPS (arg1);
8803
8804 if (TREE_CODE (arg0) == ADDR_EXPR
8805 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8806 {
8807 *ptr_offset = arg1;
8808 return TREE_OPERAND (arg0, 0);
8809 }
8810 else if (TREE_CODE (arg1) == ADDR_EXPR
8811 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8812 {
8813 *ptr_offset = arg0;
8814 return TREE_OPERAND (arg1, 0);
8815 }
8816 }
8817
8818 return 0;
8819 }
8820 \f
8821 /* Expand code for a post- or pre- increment or decrement
8822 and return the RTX for the result.
8823 POST is 1 for postinc/decrements and 0 for preinc/decrements.
IGNORE is nonzero if the value of the expression is not needed. */
8824
8825 static rtx
8826 expand_increment (exp, post, ignore)
8827 register tree exp;
8828 int post, ignore;
8829 {
8830 register rtx op0, op1;
8831 register rtx temp, value;
8832 register tree incremented = TREE_OPERAND (exp, 0);
8833 optab this_optab = add_optab;
8834 int icode;
8835 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8836 int op0_is_copy = 0;
8837 int single_insn = 0;
8838 /* 1 means we can't store into OP0 directly,
8839 because it is a subreg narrower than a word,
8840 and we don't dare clobber the rest of the word. */
8841 int bad_subreg = 0;
8842
8843 /* Stabilize any component ref that might need to be
8844 evaluated more than once below. */
8845 if (!post
8846 || TREE_CODE (incremented) == BIT_FIELD_REF
8847 || (TREE_CODE (incremented) == COMPONENT_REF
8848 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8849 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8850 incremented = stabilize_reference (incremented);
8851 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8852 ones into save exprs so that they don't accidentally get evaluated
8853 more than once by the code below. */
8854 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8855 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8856 incremented = save_expr (incremented);
8857
8858 /* Compute the operands as RTX.
8859 Note whether OP0 is the actual lvalue or a copy of it:
8860 I believe it is a copy iff it is a register or subreg
8861 and insns were generated in computing it. */
8862
8863 temp = get_last_insn ();
8864 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8865
8866 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8867 in place but instead must do sign- or zero-extension during assignment,
8868 so we copy it into a new register and let the code below use it as
8869 a copy.
8870
8871 Note that we can safely modify this SUBREG since it is known not to be
8872 shared (it was made by the expand_expr call above). */
8873
8874 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8875 {
8876 if (post)
8877 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8878 else
8879 bad_subreg = 1;
8880 }
8881 else if (GET_CODE (op0) == SUBREG
8882 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8883 {
8884 /* We cannot increment this SUBREG in place. If we are
8885 post-incrementing, get a copy of the old value. Otherwise,
8886 just mark that we cannot increment in place. */
8887 if (post)
8888 op0 = copy_to_reg (op0);
8889 else
8890 bad_subreg = 1;
8891 }
8892
8893 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8894 && temp != get_last_insn ());
8895 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8896 EXPAND_MEMORY_USE_BAD);
8897
8898 /* Decide whether incrementing or decrementing. */
8899 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8900 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8901 this_optab = sub_optab;
8902
8903 /* Convert decrement by a constant into a negative increment. */
8904 if (this_optab == sub_optab
8905 && GET_CODE (op1) == CONST_INT)
8906 {
8907 op1 = GEN_INT (- INTVAL (op1));
8908 this_optab = add_optab;
8909 }
8910
8911 /* For a preincrement, see if we can do this with a single instruction. */
8912 if (!post)
8913 {
8914 icode = (int) this_optab->handlers[(int) mode].insn_code;
8915 if (icode != (int) CODE_FOR_nothing
8916 /* Make sure that OP0 is valid for operands 0 and 1
8917 of the insn we want to queue. */
8918 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8919 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8920 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8921 single_insn = 1;
8922 }
8923
8924 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8925 then we cannot just increment OP0. We must therefore contrive to
8926 increment the original value. Then, for postincrement, we can return
8927 OP0 since it is a copy of the old value. For preincrement, expand here
8928 unless we can do it with a single insn.
8929
8930 Likewise if storing directly into OP0 would clobber high bits
8931 we need to preserve (bad_subreg). */
8932 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8933 {
8934 /* This is the easiest way to increment the value wherever it is.
8935 Problems with multiple evaluation of INCREMENTED are prevented
8936 because either (1) it is a component_ref or preincrement,
8937 in which case it was stabilized above, or (2) it is an array_ref
8938 with constant index in an array in a register, which is
8939 safe to reevaluate. */
8940 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8941 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8942 ? MINUS_EXPR : PLUS_EXPR),
8943 TREE_TYPE (exp),
8944 incremented,
8945 TREE_OPERAND (exp, 1));
8946
8947 while (TREE_CODE (incremented) == NOP_EXPR
8948 || TREE_CODE (incremented) == CONVERT_EXPR)
8949 {
8950 newexp = convert (TREE_TYPE (incremented), newexp);
8951 incremented = TREE_OPERAND (incremented, 0);
8952 }
8953
8954 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8955 return post ? op0 : temp;
8956 }
8957
8958 if (post)
8959 {
8960 /* We have a true reference to the value in OP0.
8961 If there is an insn to add or subtract in this mode, queue it.
8962 Queueing the increment insn avoids the register shuffling
8963 that often results if we must increment now and first save
8964 the old value for subsequent use. */
8965
8966 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8967 op0 = stabilize (op0);
8968 #endif
8969
8970 icode = (int) this_optab->handlers[(int) mode].insn_code;
8971 if (icode != (int) CODE_FOR_nothing
8972 /* Make sure that OP0 is valid for operands 0 and 1
8973 of the insn we want to queue. */
8974 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8975 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8976 {
8977 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8978 op1 = force_reg (mode, op1);
8979
8980 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8981 }
8982 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8983 {
8984 rtx addr = (general_operand (XEXP (op0, 0), mode)
8985 ? force_reg (Pmode, XEXP (op0, 0))
8986 : copy_to_reg (XEXP (op0, 0)));
8987 rtx temp, result;
8988
8989 op0 = change_address (op0, VOIDmode, addr);
8990 temp = force_reg (GET_MODE (op0), op0);
8991 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8992 op1 = force_reg (mode, op1);
8993
8994 /* The increment queue is LIFO, thus we have to `queue'
8995 the instructions in reverse order. */
8996 enqueue_insn (op0, gen_move_insn (op0, temp));
8997 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8998 return result;
8999 }
9000 }
9001
9002 /* Preincrement, or we can't increment with one simple insn. */
9003 if (post)
9004 /* Save a copy of the value before inc or dec, to return it later. */
9005 temp = value = copy_to_reg (op0);
9006 else
9007 /* Arrange to return the incremented value. */
9008 /* Copy the rtx because expand_binop will protect from the queue,
9009 and the results of that would be invalid for us to return
9010 if our caller does emit_queue before using our result. */
9011 temp = copy_rtx (value = op0);
9012
9013 /* Increment however we can. */
9014 op1 = expand_binop (mode, this_optab, value, op1,
9015 current_function_check_memory_usage ? NULL_RTX : op0,
9016 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9017 /* Make sure the value is stored into OP0. */
9018 if (op1 != op0)
9019 emit_move_insn (op0, op1);
9020
9021 return temp;
9022 }
9023 \f
9024 /* Expand all function calls contained within EXP, innermost ones first.
9025 But don't look within expressions that have sequence points.
9026 For each CALL_EXPR, record the rtx for its value
9027 in the CALL_EXPR_RTL field. */
9028
9029 static void
9030 preexpand_calls (exp)
9031 tree exp;
9032 {
9033 register int nops, i;
9034 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9035
9036 if (! do_preexpand_calls)
9037 return;
9038
9039 /* Only expressions and references can contain calls. */
9040
9041 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9042 return;
9043
9044 switch (TREE_CODE (exp))
9045 {
9046 case CALL_EXPR:
9047 /* Do nothing if already expanded. */
9048 if (CALL_EXPR_RTL (exp) != 0
9049 /* Do nothing if the call returns a variable-sized object. */
9050 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9051 /* Do nothing to built-in functions. */
9052 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9053 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9054 == FUNCTION_DECL)
9055 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9056 return;
9057
9058 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9059 return;
9060
9061 case COMPOUND_EXPR:
9062 case COND_EXPR:
9063 case TRUTH_ANDIF_EXPR:
9064 case TRUTH_ORIF_EXPR:
9065 /* If we find one of these, then we can be sure
9066 the stack adjustment will be done for it (since it makes jumps).
9067 Do it now, so that if this is inside an argument
9068 of a function, we don't get the stack adjustment
9069 after some other args have already been pushed. */
9070 do_pending_stack_adjust ();
9071 return;
9072
9073 case BLOCK:
9074 case RTL_EXPR:
9075 case WITH_CLEANUP_EXPR:
9076 case CLEANUP_POINT_EXPR:
9077 case TRY_CATCH_EXPR:
9078 return;
9079
9080 case SAVE_EXPR:
9081 if (SAVE_EXPR_RTL (exp) != 0)
9082 return;
9083
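/* ... fall through ... */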
9084 default:
9085 break;
9086 }
9087
9088 nops = tree_code_length[(int) TREE_CODE (exp)];
9089 for (i = 0; i < nops; i++)
9090 if (TREE_OPERAND (exp, i) != 0)
9091 {
9092 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9093 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9094 It doesn't happen before the call is made. */
9095 ;
9096 else
9097 {
9098 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9099 if (type == 'e' || type == '<' || type == '1' || type == '2'
9100 || type == 'r')
9101 preexpand_calls (TREE_OPERAND (exp, i));
9102 }
9103 }
9104 }
9105 \f
9106 /* At the start of a function, record that we have no previously-pushed
9107 arguments waiting to be popped. */
9108
9109 void
9110 init_pending_stack_adjust ()
9111 {
9112 pending_stack_adjust = 0;
9113 }
9114
9115 /* When exiting from a function, if safe, clear out any pending stack adjust
9116 so the adjustment won't get done.
9117
9118 Note, if the current function calls alloca, then it must have a
9119 frame pointer regardless of the value of flag_omit_frame_pointer. */
9120
9121 void
9122 clear_pending_stack_adjust ()
9123 {
9124 #ifdef EXIT_IGNORE_STACK
9125 if (optimize > 0
9126 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9127 && EXIT_IGNORE_STACK
9128 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9129 && ! flag_inline_functions)
9130 pending_stack_adjust = 0;
9131 #endif
9132 }
9133
9134 /* Pop any previously-pushed arguments that have not been popped yet. */
9135
9136 void
9137 do_pending_stack_adjust ()
9138 {
9139 if (inhibit_defer_pop == 0)
9140 {
9141 if (pending_stack_adjust != 0)
9142 adjust_stack (GEN_INT (pending_stack_adjust));
9143 pending_stack_adjust = 0;
9144 }
9145 }
9146 \f
9147 /* Expand conditional expressions. */
9148
9149 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9150 LABEL is an rtx of code CODE_LABEL, in this function and all the
9151 functions here. */
9152
9153 void
9154 jumpifnot (exp, label)
9155 tree exp;
9156 rtx label;
9157 {
9158 do_jump (exp, label, NULL_RTX);
9159 }
9160
9161 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9162
9163 void
9164 jumpif (exp, label)
9165 tree exp;
9166 rtx label;
9167 {
9168 do_jump (exp, NULL_RTX, label);
9169 }
9170
9171 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9172 the result is zero, or IF_TRUE_LABEL if the result is one.
9173 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9174 meaning fall through in that case.
9175
9176 do_jump always does any pending stack adjust except when it does not
9177 actually perform a jump. An example where there is no jump
9178 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9179
9180 This function is responsible for optimizing cases such as
9181 &&, || and comparison operators in EXP. */
9182
9183 void
9184 do_jump (exp, if_false_label, if_true_label)
9185 tree exp;
9186 rtx if_false_label, if_true_label;
9187 {
9188 register enum tree_code code = TREE_CODE (exp);
9189 /* Some cases need to create a label to jump to
9190 in order to properly fall through.
9191 These cases set DROP_THROUGH_LABEL nonzero. */
9192 rtx drop_through_label = 0;
9193 rtx temp;
9194 int i;
9195 tree type;
9196 enum machine_mode mode;
9197
9198 #ifdef MAX_INTEGER_COMPUTATION_MODE
9199 check_max_integer_computation_mode (exp);
9200 #endif
9201
9202 emit_queue ();
9203
9204 switch (code)
9205 {
9206 case ERROR_MARK:
9207 break;
9208
9209 case INTEGER_CST:
9210 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9211 if (temp)
9212 emit_jump (temp);
9213 break;
9214
9215 #if 0
9216 /* This is not true with #pragma weak */
9217 case ADDR_EXPR:
9218 /* The address of something can never be zero. */
9219 if (if_true_label)
9220 emit_jump (if_true_label);
9221 break;
9222 #endif
9223
9224 case NOP_EXPR:
9225 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9226 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9227 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9228 goto normal;
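/* ... fall through ... */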
9229 case CONVERT_EXPR:
9230 /* If we are narrowing the operand, we have to do the compare in the
9231 narrower mode. */
9232 if ((TYPE_PRECISION (TREE_TYPE (exp))
9233 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9234 goto normal;
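/* ... fall through ... */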
9235 case NON_LVALUE_EXPR:
9236 case REFERENCE_EXPR:
9237 case ABS_EXPR:
9238 case NEGATE_EXPR:
9239 case LROTATE_EXPR:
9240 case RROTATE_EXPR:
9241 /* These cannot change zero->non-zero or vice versa. */
9242 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9243 break;
9244
9245 case WITH_RECORD_EXPR:
9246 /* Put the object on the placeholder list, recurse through our first
9247 operand, and pop the list. */
9248 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9249 placeholder_list);
9250 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9251 placeholder_list = TREE_CHAIN (placeholder_list);
9252 break;
9253
9254 #if 0
9255 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9256 a test and can be longer if the test is eliminated. */
9257 case PLUS_EXPR:
9258 /* Reduce to minus. */
9259 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9260 TREE_OPERAND (exp, 0),
9261 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9262 TREE_OPERAND (exp, 1))));
9263 /* Process as MINUS. */
9264 #endif
9265
9266 case MINUS_EXPR:
9267 /* Non-zero iff operands of minus differ. */
9268 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9269 TREE_OPERAND (exp, 0),
9270 TREE_OPERAND (exp, 1)),
9271 NE, NE, if_false_label, if_true_label);
9272 break;
9273
9274 case BIT_AND_EXPR:
9275 /* If we are AND'ing with a small constant, do this comparison in the
9276 smallest type that fits. If the machine doesn't have comparisons
9277 that small, it will be converted back to the wider comparison.
9278 This helps if we are testing the sign bit of a narrower object.
9279 combine can't do this for us because it can't know whether a
9280 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9281
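/* For example, a test of (x & 0x80) on a 32-bit X can be done as a QImode
   comparison, provided byte access is not slow and the target has a QImode
   compare insn. */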
9282 if (! SLOW_BYTE_ACCESS
9283 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9284 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9285 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9286 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9287 && (type = type_for_mode (mode, 1)) != 0
9288 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9289 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9290 != CODE_FOR_nothing))
9291 {
9292 do_jump (convert (type, exp), if_false_label, if_true_label);
9293 break;
9294 }
9295 goto normal;
9296
9297 case TRUTH_NOT_EXPR:
9298 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9299 break;
9300
9301 case TRUTH_ANDIF_EXPR:
9302 if (if_false_label == 0)
9303 if_false_label = drop_through_label = gen_label_rtx ();
9304 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9305 start_cleanup_deferral ();
9306 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9307 end_cleanup_deferral ();
9308 break;
9309
9310 case TRUTH_ORIF_EXPR:
9311 if (if_true_label == 0)
9312 if_true_label = drop_through_label = gen_label_rtx ();
9313 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9314 start_cleanup_deferral ();
9315 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9316 end_cleanup_deferral ();
9317 break;
9318
9319 case COMPOUND_EXPR:
9320 push_temp_slots ();
9321 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9322 preserve_temp_slots (NULL_RTX);
9323 free_temp_slots ();
9324 pop_temp_slots ();
9325 emit_queue ();
9326 do_pending_stack_adjust ();
9327 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9328 break;
9329
9330 case COMPONENT_REF:
9331 case BIT_FIELD_REF:
9332 case ARRAY_REF:
9333 {
9334 int bitsize, bitpos, unsignedp;
9335 enum machine_mode mode;
9336 tree type;
9337 tree offset;
9338 int volatilep = 0;
9339 unsigned int alignment;
9340
9341 /* Get a description of this reference. We don't actually care
9342 about the underlying object here. */
9343 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9344 &mode, &unsignedp, &volatilep,
9345 &alignment);
9346
9347 type = type_for_size (bitsize, unsignedp);
9348 if (! SLOW_BYTE_ACCESS
9349 && type != 0 && bitsize >= 0
9350 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9351 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9352 != CODE_FOR_nothing))
9353 {
9354 do_jump (convert (type, exp), if_false_label, if_true_label);
9355 break;
9356 }
9357 goto normal;
9358 }
9359
9360 case COND_EXPR:
9361 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9362 if (integer_onep (TREE_OPERAND (exp, 1))
9363 && integer_zerop (TREE_OPERAND (exp, 2)))
9364 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9365
9366 else if (integer_zerop (TREE_OPERAND (exp, 1))
9367 && integer_onep (TREE_OPERAND (exp, 2)))
9368 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9369
9370 else
9371 {
9372 register rtx label1 = gen_label_rtx ();
9373 drop_through_label = gen_label_rtx ();
9374
9375 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9376
9377 start_cleanup_deferral ();
9378 /* Now the THEN-expression. */
9379 do_jump (TREE_OPERAND (exp, 1),
9380 if_false_label ? if_false_label : drop_through_label,
9381 if_true_label ? if_true_label : drop_through_label);
9382 /* In case the do_jump just above never jumps. */
9383 do_pending_stack_adjust ();
9384 emit_label (label1);
9385
9386 /* Now the ELSE-expression. */
9387 do_jump (TREE_OPERAND (exp, 2),
9388 if_false_label ? if_false_label : drop_through_label,
9389 if_true_label ? if_true_label : drop_through_label);
9390 end_cleanup_deferral ();
9391 }
9392 break;
9393
9394 case EQ_EXPR:
9395 {
9396 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9397
9398 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9399 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9400 {
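/* Two complex values are equal iff both their real parts and their
   imaginary parts are equal, so build that test explicitly. */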
9401 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9402 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9403 do_jump
9404 (fold
9405 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9406 fold (build (EQ_EXPR, TREE_TYPE (exp),
9407 fold (build1 (REALPART_EXPR,
9408 TREE_TYPE (inner_type),
9409 exp0)),
9410 fold (build1 (REALPART_EXPR,
9411 TREE_TYPE (inner_type),
9412 exp1)))),
9413 fold (build (EQ_EXPR, TREE_TYPE (exp),
9414 fold (build1 (IMAGPART_EXPR,
9415 TREE_TYPE (inner_type),
9416 exp0)),
9417 fold (build1 (IMAGPART_EXPR,
9418 TREE_TYPE (inner_type),
9419 exp1)))))),
9420 if_false_label, if_true_label);
9421 }
9422
9423 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9424 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9425
9426 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9427 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9428 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9429 else
9430 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9431 break;
9432 }
9433
9434 case NE_EXPR:
9435 {
9436 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9437
9438 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9439 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9440 {
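/* Two complex values differ iff either their real parts or their
   imaginary parts differ, so build that test explicitly. */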
9441 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9442 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9443 do_jump
9444 (fold
9445 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9446 fold (build (NE_EXPR, TREE_TYPE (exp),
9447 fold (build1 (REALPART_EXPR,
9448 TREE_TYPE (inner_type),
9449 exp0)),
9450 fold (build1 (REALPART_EXPR,
9451 TREE_TYPE (inner_type),
9452 exp1)))),
9453 fold (build (NE_EXPR, TREE_TYPE (exp),
9454 fold (build1 (IMAGPART_EXPR,
9455 TREE_TYPE (inner_type),
9456 exp0)),
9457 fold (build1 (IMAGPART_EXPR,
9458 TREE_TYPE (inner_type),
9459 exp1)))))),
9460 if_false_label, if_true_label);
9461 }
9462
9463 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9464 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9465
9466 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9467 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9468 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9469 else
9470 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9471 break;
9472 }
9473
9474 case LT_EXPR:
9475 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9476 if (GET_MODE_CLASS (mode) == MODE_INT
9477 && ! can_compare_p (LT, mode, ccp_jump))
9478 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9479 else
9480 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9481 break;
9482
9483 case LE_EXPR:
9484 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9485 if (GET_MODE_CLASS (mode) == MODE_INT
9486 && ! can_compare_p (LE, mode, ccp_jump))
9487 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9488 else
9489 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9490 break;
9491
9492 case GT_EXPR:
9493 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9494 if (GET_MODE_CLASS (mode) == MODE_INT
9495 && ! can_compare_p (GT, mode, ccp_jump))
9496 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9497 else
9498 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9499 break;
9500
9501 case GE_EXPR:
9502 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9503 if (GET_MODE_CLASS (mode) == MODE_INT
9504 && ! can_compare_p (GE, mode, ccp_jump))
9505 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9506 else
9507 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9508 break;
9509
9510 case UNORDERED_EXPR:
9511 case ORDERED_EXPR:
9512 {
9513 enum rtx_code cmp, rcmp;
9514 int do_rev;
9515
9516 if (code == UNORDERED_EXPR)
9517 cmp = UNORDERED, rcmp = ORDERED;
9518 else
9519 cmp = ORDERED, rcmp = UNORDERED;
9520 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9521
9522 do_rev = 0;
9523 if (! can_compare_p (cmp, mode, ccp_jump)
9524 && (can_compare_p (rcmp, mode, ccp_jump)
9525 /* If the target doesn't provide either UNORDERED or ORDERED
9526 comparisons, canonicalize on UNORDERED for the library. */
9527 || rcmp == UNORDERED))
9528 do_rev = 1;
9529
9530 if (! do_rev)
9531 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9532 else
9533 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9534 }
9535 break;
9536
9537 {
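/* Declarations shared by the unordered comparison cases below; control
   enters this block only through the case labels. */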
9538 enum rtx_code rcode1;
9539 enum tree_code tcode2;
9540
9541 case UNLT_EXPR:
9542 rcode1 = UNLT;
9543 tcode2 = LT_EXPR;
9544 goto unordered_bcc;
9545 case UNLE_EXPR:
9546 rcode1 = UNLE;
9547 tcode2 = LE_EXPR;
9548 goto unordered_bcc;
9549 case UNGT_EXPR:
9550 rcode1 = UNGT;
9551 tcode2 = GT_EXPR;
9552 goto unordered_bcc;
9553 case UNGE_EXPR:
9554 rcode1 = UNGE;
9555 tcode2 = GE_EXPR;
9556 goto unordered_bcc;
9557 case UNEQ_EXPR:
9558 rcode1 = UNEQ;
9559 tcode2 = EQ_EXPR;
9560 goto unordered_bcc;
9561
9562 unordered_bcc:
9563 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9564 if (can_compare_p (rcode1, mode, ccp_jump))
9565 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9566 if_true_label);
9567 else
9568 {
9569 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9570 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9571 tree cmp0, cmp1;
9572
9573 /* If the target doesn't support combined unordered
9574 compares, decompose into UNORDERED + comparison. */
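/* For example, UNLT_EXPR (a, b) becomes
   UNORDERED_EXPR (a, b) || LT_EXPR (a, b). */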
9575 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9576 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9577 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9578 do_jump (exp, if_false_label, if_true_label);
9579 }
9580 }
9581 break;
9582
9583 default:
9584 normal:
9585 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9586 #if 0
9587 /* This is not needed any more and causes poor code since it causes
9588 comparisons and tests from non-SI objects to have different code
9589 sequences. */
9590 /* Copy to register to avoid generating bad insns by cse
9591 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9592 if (!cse_not_expected && GET_CODE (temp) == MEM)
9593 temp = copy_to_reg (temp);
9594 #endif
9595 do_pending_stack_adjust ();
9596 /* Do any postincrements in the expression that was tested. */
9597 emit_queue ();
9598
9599 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9600 {
9601 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9602 if (target)
9603 emit_jump (target);
9604 }
9605 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9606 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9607 /* Note swapping the labels gives us not-equal. */
9608 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9609 else if (GET_MODE (temp) != VOIDmode)
9610 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9611 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9612 GET_MODE (temp), NULL_RTX, 0,
9613 if_false_label, if_true_label);
9614 else
9615 abort ();
9616 }
9617
9618 if (drop_through_label)
9619 {
9620 /* If do_jump produces code that might be jumped around,
9621 do any stack adjusts from that code, before the place
9622 where control merges in. */
9623 do_pending_stack_adjust ();
9624 emit_label (drop_through_label);
9625 }
9626 }
9627 \f
9628 /* Given a comparison expression EXP for values too wide to be compared
9629 with one insn, test the comparison and jump to the appropriate label.
9630 The code of EXP is ignored; we always test GT if SWAP is 0,
9631 and LT if SWAP is 1. */
9632
9633 static void
9634 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9635 tree exp;
9636 int swap;
9637 rtx if_false_label, if_true_label;
9638 {
9639 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9640 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9641 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9642 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9643
9644 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
if_false_label, if_true_label);
9645 }
9646
9647 /* Compare OP0 with OP1, word at a time, in mode MODE.
9648 UNSIGNEDP says to do unsigned comparison.
9649 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9650
9651 void
9652 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
if_false_label, if_true_label)
9653 enum machine_mode mode;
9654 int unsignedp;
9655 rtx op0, op1;
9656 rtx if_false_label, if_true_label;
9657 {
9658 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9659 rtx drop_through_label = 0;
9660 int i;
9661
9662 if (! if_true_label || ! if_false_label)
9663 drop_through_label = gen_label_rtx ();
9664 if (! if_true_label)
9665 if_true_label = drop_through_label;
9666 if (! if_false_label)
9667 if_false_label = drop_through_label;
9668
9669 /* Compare a word at a time, high order first. */
9670 for (i = 0; i < nwords; i++)
9671 {
9672 rtx op0_word, op1_word;
9673
9674 if (WORDS_BIG_ENDIAN)
9675 {
9676 op0_word = operand_subword_force (op0, i, mode);
9677 op1_word = operand_subword_force (op1, i, mode);
9678 }
9679 else
9680 {
9681 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9682 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9683 }
9684
9685 /* All but the high-order word must be compared as unsigned.
9686 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9687 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9688 NULL_RTX, if_true_label);
9689
9690 /* Consider lower words only if these are equal. */
9691 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9692 NULL_RTX, 0, NULL_RTX, if_false_label);
9693 }
9694
9695 if (if_false_label)
9696 emit_jump (if_false_label);
9697 if (drop_through_label)
9698 emit_label (drop_through_label);
9699 }
9700
9701 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9702 with one insn, test the comparison and jump to the appropriate label. */
9703
9704 static void
9705 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9706 tree exp;
9707 rtx if_false_label, if_true_label;
9708 {
9709 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9710 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9711 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9712 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9713 int i;
9714 rtx drop_through_label = 0;
9715
9716 if (! if_false_label)
9717 drop_through_label = if_false_label = gen_label_rtx ();
9718
9719 for (i = 0; i < nwords; i++)
9720 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9721 operand_subword_force (op1, i, mode),
9722 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9723 word_mode, NULL_RTX, 0, if_false_label,
9724 NULL_RTX);
9725
9726 if (if_true_label)
9727 emit_jump (if_true_label);
9728 if (drop_through_label)
9729 emit_label (drop_through_label);
9730 }
9731 \f
9732 /* Jump according to whether OP0 is 0.
9733 We assume that OP0 has an integer mode that is too wide
9734 for the available compare insns. */
9735
9736 void
9737 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9738 rtx op0;
9739 rtx if_false_label, if_true_label;
9740 {
9741 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9742 rtx part;
9743 int i;
9744 rtx drop_through_label = 0;
9745
9746 /* The fastest way of doing this comparison on almost any machine is to
9747 "or" all the words and compare the result. If all have to be loaded
9748 from memory and this is a very wide item, it's possible this may
9749 be slower, but that's highly unlikely. */
9750
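/* For example, a DImode value on a 32-bit target is tested by IORing its
   two SImode words and comparing the single result against zero. */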
9751 part = gen_reg_rtx (word_mode);
9752 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9753 for (i = 1; i < nwords && part != 0; i++)
9754 part = expand_binop (word_mode, ior_optab, part,
9755 operand_subword_force (op0, i, GET_MODE (op0)),
9756 part, 1, OPTAB_WIDEN);
9757
9758 if (part != 0)
9759 {
9760 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9761 NULL_RTX, 0, if_false_label, if_true_label);
9762
9763 return;
9764 }
9765
9766 /* If we couldn't do the "or" simply, do this with a series of compares. */
9767 if (! if_false_label)
9768 drop_through_label = if_false_label = gen_label_rtx ();
9769
9770 for (i = 0; i < nwords; i++)
9771 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9772 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9773 if_false_label, NULL_RTX);
9774
9775 if (if_true_label)
9776 emit_jump (if_true_label);
9777
9778 if (drop_through_label)
9779 emit_label (drop_through_label);
9780 }
9781 \f
9782 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9783 (including code to compute the values to be compared)
9784 and set (CC0) according to the result.
9785 The decision as to signed or unsigned comparison must be made by the caller.
9786
9787 We force a stack adjustment unless there are currently
9788 things pushed on the stack that aren't yet used.
9789
9790 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9791 compared.
9792
9793 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9794 size of MODE should be used. */
9795
9796 rtx
9797 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9798 register rtx op0, op1;
9799 enum rtx_code code;
9800 int unsignedp;
9801 enum machine_mode mode;
9802 rtx size;
9803 unsigned int align;
9804 {
9805 rtx tem;
9806
9807 /* If one operand is constant, make it the second one. Only do this
9808 if the other operand is not constant as well. */
9809
9810 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9811 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9812 {
9813 tem = op0;
9814 op0 = op1;
9815 op1 = tem;
9816 code = swap_condition (code);
9817 }
9818
9819 if (flag_force_mem)
9820 {
9821 op0 = force_not_mem (op0);
9822 op1 = force_not_mem (op1);
9823 }
9824
9825 do_pending_stack_adjust ();
9826
9827 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9828 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9829 return tem;
9830
9831 #if 0
9832 /* There's no need to do this now that combine.c can eliminate lots of
9833 sign extensions. This can be less efficient in certain cases on other
9834 machines. */
9835
9836 /* If this is a signed equality comparison, we can do it as an
9837 unsigned comparison since zero-extension is cheaper than sign
9838 extension and comparisons with zero are done as unsigned. This is
9839 the case even on machines that can do fast sign extension, since
9840 zero-extension is easier to combine with other operations than
9841 sign-extension is. If we are comparing against a constant, we must
9842 convert it to what it would look like unsigned. */
9843 if ((code == EQ || code == NE) && ! unsignedp
9844 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9845 {
9846 if (GET_CODE (op1) == CONST_INT
9847 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9848 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9849 unsignedp = 1;
9850 }
9851 #endif
9852
9853 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9854
9855 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9856 }
9857
9858 /* Like do_compare_and_jump but the values to compare are passed as two rtx's.
9859 The decision as to signed or unsigned comparison must be made by the caller.
9860
9861 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9862 compared.
9863
9864 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9865 size of MODE should be used. */
9866
9867 void
9868 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9869 if_false_label, if_true_label)
9870 register rtx op0, op1;
9871 enum rtx_code code;
9872 int unsignedp;
9873 enum machine_mode mode;
9874 rtx size;
9875 unsigned int align;
9876 rtx if_false_label, if_true_label;
9877 {
9878 rtx tem;
9879 int dummy_true_label = 0;
9880
9881 /* Reverse the comparison if that is safe and we want to jump if it is
9882 false. */
9883 if (! if_true_label && ! FLOAT_MODE_P (mode))
9884 {
9885 if_true_label = if_false_label;
9886 if_false_label = 0;
9887 code = reverse_condition (code);
9888 }
9889
9890 /* If one operand is constant, make it the second one. Only do this
9891 if the other operand is not constant as well. */
9892
9893 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9894 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9895 {
9896 tem = op0;
9897 op0 = op1;
9898 op1 = tem;
9899 code = swap_condition (code);
9900 }
9901
9902 if (flag_force_mem)
9903 {
9904 op0 = force_not_mem (op0);
9905 op1 = force_not_mem (op1);
9906 }
9907
9908 do_pending_stack_adjust ();
9909
9910 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9911 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9912 {
9913 if (tem == const_true_rtx)
9914 {
9915 if (if_true_label)
9916 emit_jump (if_true_label);
9917 }
9918 else
9919 {
9920 if (if_false_label)
9921 emit_jump (if_false_label);
9922 }
9923 return;
9924 }
9925
9926 #if 0
9927 /* There's no need to do this now that combine.c can eliminate lots of
9928 sign extensions. This can be less efficient in certain cases on other
9929 machines. */
9930
9931 /* If this is a signed equality comparison, we can do it as an
9932 unsigned comparison since zero-extension is cheaper than sign
9933 extension and comparisons with zero are done as unsigned. This is
9934 the case even on machines that can do fast sign extension, since
9935 zero-extension is easier to combine with other operations than
9936 sign-extension is. If we are comparing against a constant, we must
9937 convert it to what it would look like unsigned. */
9938 if ((code == EQ || code == NE) && ! unsignedp
9939 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9940 {
9941 if (GET_CODE (op1) == CONST_INT
9942 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9943 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9944 unsignedp = 1;
9945 }
9946 #endif
9947
9948 if (! if_true_label)
9949 {
9950 dummy_true_label = 1;
9951 if_true_label = gen_label_rtx ();
9952 }
9953
9954 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9955 if_true_label);
9956
9957 if (if_false_label)
9958 emit_jump (if_false_label);
9959 if (dummy_true_label)
9960 emit_label (if_true_label);
9961 }
9962
9963 /* Generate code for a comparison expression EXP (including code to compute
9964 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9965 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9966 generated code will drop through.
9967 SIGNED_CODE should be the rtx operation for this comparison for
9968 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9969
9970 We force a stack adjustment unless there are currently
9971 things pushed on the stack that aren't yet used. */
9972
9973 static void
9974 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9975 if_true_label)
9976 register tree exp;
9977 enum rtx_code signed_code, unsigned_code;
9978 rtx if_false_label, if_true_label;
9979 {
9980 unsigned int align0, align1;
9981 register rtx op0, op1;
9982 register tree type;
9983 register enum machine_mode mode;
9984 int unsignedp;
9985 enum rtx_code code;
9986
9987 /* Don't crash if the comparison was erroneous. */
9988 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9989 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9990 return;
9991
9992 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9993 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9994 mode = TYPE_MODE (type);
9995 unsignedp = TREE_UNSIGNED (type);
9996 code = unsignedp ? unsigned_code : signed_code;
9997
9998 #ifdef HAVE_canonicalize_funcptr_for_compare
9999 /* If function pointers need to be "canonicalized" before they can
10000 be reliably compared, then canonicalize them. */
10001 if (HAVE_canonicalize_funcptr_for_compare
10002 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10003 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10004 == FUNCTION_TYPE))
10005 {
10006 rtx new_op0 = gen_reg_rtx (mode);
10007
10008 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10009 op0 = new_op0;
10010 }
10011
10012 if (HAVE_canonicalize_funcptr_for_compare
10013 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10014 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10015 == FUNCTION_TYPE))
10016 {
10017 rtx new_op1 = gen_reg_rtx (mode);
10018
10019 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10020 op1 = new_op1;
10021 }
10022 #endif
10023
10024 /* Do any postincrements in the expression that was tested. */
10025 emit_queue ();
10026
10027 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10028 ((mode == BLKmode)
10029 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10030 MIN (align0, align1) / BITS_PER_UNIT,
10031 if_false_label, if_true_label);
10032 }
10033 \f
10034 /* Generate code to calculate EXP using a store-flag instruction
10035 and return an rtx for the result. EXP is either a comparison
10036 or a TRUTH_NOT_EXPR whose operand is a comparison.
10037
10038 If TARGET is nonzero, store the result there if convenient.
10039
10040 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10041 cheap.
10042
10043 Return zero if there is no suitable set-flag instruction
10044 available on this machine.
10045
10046 Once expand_expr has been called on the arguments of the comparison,
10047 we are committed to doing the store flag, since it is not safe to
10048 re-evaluate the expression. We emit the store-flag insn by calling
10049 emit_store_flag, but only expand the arguments if we have a reason
10050 to believe that emit_store_flag will be successful. If we think that
10051 it will, but it isn't, we have to simulate the store-flag with a
10052 set/jump/set sequence. */
10053
10054 static rtx
10055 do_store_flag (exp, target, mode, only_cheap)
10056 tree exp;
10057 rtx target;
10058 enum machine_mode mode;
10059 int only_cheap;
10060 {
10061 enum rtx_code code;
10062 tree arg0, arg1, type;
10063 tree tem;
10064 enum machine_mode operand_mode;
10065 int invert = 0;
10066 int unsignedp;
10067 rtx op0, op1;
10068 enum insn_code icode;
10069 rtx subtarget = target;
10070 rtx result, label;
10071
10072 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10073 result at the end. We can't simply invert the test since it would
10074 have already been inverted if it were valid. This case occurs for
10075 some floating-point comparisons. */
10076
10077 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10078 invert = 1, exp = TREE_OPERAND (exp, 0);
10079
10080 arg0 = TREE_OPERAND (exp, 0);
10081 arg1 = TREE_OPERAND (exp, 1);
10082 type = TREE_TYPE (arg0);
10083 operand_mode = TYPE_MODE (type);
10084 unsignedp = TREE_UNSIGNED (type);
10085
10086 /* We won't bother with BLKmode store-flag operations because it would mean
10087 passing a lot of information to emit_store_flag. */
10088 if (operand_mode == BLKmode)
10089 return 0;
10090
10091 /* We won't bother with store-flag operations involving function pointers
10092 when function pointers must be canonicalized before comparisons. */
10093 #ifdef HAVE_canonicalize_funcptr_for_compare
10094 if (HAVE_canonicalize_funcptr_for_compare
10095 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10096 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10097 == FUNCTION_TYPE))
10098 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10099 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10100 == FUNCTION_TYPE))))
10101 return 0;
10102 #endif
10103
10104 STRIP_NOPS (arg0);
10105 STRIP_NOPS (arg1);
10106
10107 /* Get the rtx comparison code to use. We know that EXP is a comparison
10108 operation of some type. Some comparisons against 1 and -1 can be
10109 converted to comparisons with zero. Do so here so that the tests
10110 below will be aware that we have a comparison with zero. These
10111 tests will not catch constants in the first operand, but constants
10112 are rarely passed as the first operand. */
10113
10114 switch (TREE_CODE (exp))
10115 {
10116 case EQ_EXPR:
10117 code = EQ;
10118 break;
10119 case NE_EXPR:
10120 code = NE;
10121 break;
10122 case LT_EXPR:
10123 if (integer_onep (arg1))
10124 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10125 else
10126 code = unsignedp ? LTU : LT;
10127 break;
10128 case LE_EXPR:
10129 if (! unsignedp && integer_all_onesp (arg1))
10130 arg1 = integer_zero_node, code = LT;
10131 else
10132 code = unsignedp ? LEU : LE;
10133 break;
10134 case GT_EXPR:
10135 if (! unsignedp && integer_all_onesp (arg1))
10136 arg1 = integer_zero_node, code = GE;
10137 else
10138 code = unsignedp ? GTU : GT;
10139 break;
10140 case GE_EXPR:
10141 if (integer_onep (arg1))
10142 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10143 else
10144 code = unsignedp ? GEU : GE;
10145 break;
10146
10147 case UNORDERED_EXPR:
10148 code = UNORDERED;
10149 break;
10150 case ORDERED_EXPR:
10151 code = ORDERED;
10152 break;
10153 case UNLT_EXPR:
10154 code = UNLT;
10155 break;
10156 case UNLE_EXPR:
10157 code = UNLE;
10158 break;
10159 case UNGT_EXPR:
10160 code = UNGT;
10161 break;
10162 case UNGE_EXPR:
10163 code = UNGE;
10164 break;
10165 case UNEQ_EXPR:
10166 code = UNEQ;
10167 break;
10168
10169 default:
10170 abort ();
10171 }
10172
10173 /* Put a constant second. */
10174 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10175 {
10176 tem = arg0; arg0 = arg1; arg1 = tem;
10177 code = swap_condition (code);
10178 }
10179
10180 /* If this is an equality or inequality test of a single bit, we can
10181 do this by shifting the bit being tested to the low-order bit and
10182 masking the result with the constant 1. If the condition was EQ,
10183 we xor it with 1. This does not require an scc insn and is faster
10184 than an scc insn even if we have it. */
10185
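/* For example, (x & 4) != 0 can be computed as (x >> 2) & 1,
   and (x & 4) == 0 as ((x >> 2) ^ 1) & 1. */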
10186 if ((code == NE || code == EQ)
10187 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10188 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10189 {
10190 tree inner = TREE_OPERAND (arg0, 0);
10191 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10192 int ops_unsignedp;
10193
10194 /* If INNER is a right shift by a constant and the shift count plus
10195 BITNUM does not overflow the precision of TYPE, adjust BITNUM and INNER. */
10196
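/* For example, ((x >> 3) & 1) tests bit 3 of X, so the shift count can be
   folded into BITNUM and X tested directly. */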
10197 if (TREE_CODE (inner) == RSHIFT_EXPR
10198 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10199 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10200 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10201 < TYPE_PRECISION (type)))
10202 {
10203 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10204 inner = TREE_OPERAND (inner, 0);
10205 }
10206
10207 /* If we are going to be able to omit the AND below, we must do our
10208 operations as unsigned. If we must use the AND, we have a choice.
10209 Normally unsigned is faster, but for some machines signed is. */
10210 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10211 #ifdef LOAD_EXTEND_OP
10212 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10213 #else
10214 : 1
10215 #endif
10216 );
10217
10218 if (subtarget == 0 || GET_CODE (subtarget) != REG
10219 || GET_MODE (subtarget) != operand_mode
10220 || ! safe_from_p (subtarget, inner, 1))
10221 subtarget = 0;
10222
10223 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10224
10225 if (bitnum != 0)
10226 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10227 size_int (bitnum), subtarget, ops_unsignedp);
10228
10229 if (GET_MODE (op0) != mode)
10230 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10231
10232 if ((code == EQ && ! invert) || (code == NE && invert))
10233 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10234 ops_unsignedp, OPTAB_LIB_WIDEN);
10235
10236 /* Put the AND last so it can combine with more things. */
10237 if (bitnum != TYPE_PRECISION (type) - 1)
10238 op0 = expand_and (op0, const1_rtx, subtarget);
10239
10240 return op0;
10241 }
10242
10243 /* Now see if we are likely to be able to do this. Return if not. */
10244 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10245 return 0;
10246
10247 icode = setcc_gen_code[(int) code];
10248 if (icode == CODE_FOR_nothing
10249 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10250 {
10251 /* We can only do this if it is one of the special cases that
10252 can be handled without an scc insn. */
10253 if ((code == LT && integer_zerop (arg1))
10254 || (! only_cheap && code == GE && integer_zerop (arg1)))
10255 ;
10256 else if (BRANCH_COST >= 0
10257 && ! only_cheap && (code == NE || code == EQ)
10258 && TREE_CODE (type) != REAL_TYPE
10259 && ((abs_optab->handlers[(int) operand_mode].insn_code
10260 != CODE_FOR_nothing)
10261 || (ffs_optab->handlers[(int) operand_mode].insn_code
10262 != CODE_FOR_nothing)))
10263 ;
10264 else
10265 return 0;
10266 }
10267
10268 preexpand_calls (exp);
10269 if (subtarget == 0 || GET_CODE (subtarget) != REG
10270 || GET_MODE (subtarget) != operand_mode
10271 || ! safe_from_p (subtarget, arg1, 1))
10272 subtarget = 0;
10273
10274 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10275 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10276
10277 if (target == 0)
10278 target = gen_reg_rtx (mode);
10279
10280 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10281 because, if emit_store_flag does anything, it will succeed and
10282 OP0 and OP1 will not be used subsequently. */
10283
10284 result = emit_store_flag (target, code,
10285 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10286 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10287 operand_mode, unsignedp, 1);
10288
10289 if (result)
10290 {
10291 if (invert)
10292 result = expand_binop (mode, xor_optab, result, const1_rtx,
10293 result, 0, OPTAB_LIB_WIDEN);
10294 return result;
10295 }
10296
10297 /* If this failed, we have to do this with set/compare/jump/set code. */
10298 if (GET_CODE (target) != REG
10299 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10300 target = gen_reg_rtx (GET_MODE (target));
10301
10302 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10303 result = compare_from_rtx (op0, op1, code, unsignedp,
10304 operand_mode, NULL_RTX, 0);
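  /* If the comparison folded to a compile-time constant, return the
     corresponding flag value directly.  */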
10305 if (GET_CODE (result) == CONST_INT)
10306 return (((result == const0_rtx && ! invert)
10307 || (result != const0_rtx && invert))
10308 ? const0_rtx : const1_rtx);
10309
10310 label = gen_label_rtx ();
10311 if (bcc_gen_fctn[(int) code] == 0)
10312 abort ();
10313
10314 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10315 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10316 emit_label (label);
10317
10318 return target;
10319 }
10320 \f
10321 /* Generate a tablejump instruction (used for switch statements). */
10322
10323 #ifdef HAVE_tablejump
10324
10325 /* INDEX is the value being switched on, with the lowest value
10326 in the table already subtracted.
10327 MODE is its expected mode (needed if INDEX is constant).
10328 RANGE is the length of the jump table.
10329 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10330
10331 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10332 index value is out of range. */
10333
10334 void
10335 do_tablejump (index, mode, range, table_label, default_label)
10336 rtx index, range, table_label, default_label;
10337 enum machine_mode mode;
10338 {
10339 register rtx temp, vector;
10340
10341 /* Do an unsigned comparison (in the proper mode) between the index
10342 expression and the value which represents the length of the range.
10343 Since we just finished subtracting the lower bound of the range
10344 from the index expression, this comparison allows us to simultaneously
10345 check that the original index expression value is both greater than
10346 or equal to the minimum value of the range and less than or equal to
10347 the maximum value of the range. */
10348
10349 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10350 0, default_label);
10351
10352 /* If index is in range, it must fit in Pmode.
10353 Convert to Pmode so we can index with it. */
10354 if (mode != Pmode)
10355 index = convert_to_mode (Pmode, index, 1);
10356
10357 /* Don't let a MEM slip through, because then the INDEX that comes
10358 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10359 and break_out_memory_refs will go to work on it and mess it up. */
10360 #ifdef PIC_CASE_VECTOR_ADDRESS
10361 if (flag_pic && GET_CODE (index) != REG)
10362 index = copy_to_mode_reg (Pmode, index);
10363 #endif
10364
10365 /* If flag_force_addr were to affect this address,
10366 it could interfere with the tricky assumptions made
10367 about addresses that contain label-refs,
10368 which may be valid only very near the tablejump itself. */
10369 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10370 GET_MODE_SIZE, because this indicates how large insns are. The other
10371 uses should all be Pmode, because they are addresses. This code
10372 could fail if addresses and insns are not the same size. */
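  /* Form the address of the table entry:
     TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE).  */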
10373 index = gen_rtx_PLUS (Pmode,
10374 gen_rtx_MULT (Pmode, index,
10375 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10376 gen_rtx_LABEL_REF (Pmode, table_label));
10377 #ifdef PIC_CASE_VECTOR_ADDRESS
10378 if (flag_pic)
10379 index = PIC_CASE_VECTOR_ADDRESS (index);
10380 else
10381 #endif
10382 index = memory_address_noforce (CASE_VECTOR_MODE, index);
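  /* Load the table entry (marked unchanging, since jump tables are
     read-only) into a fresh register and jump through it.  */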
10383 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10384 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10385 RTX_UNCHANGING_P (vector) = 1;
10386 convert_move (temp, vector, 0);
10387
10388 emit_jump_insn (gen_tablejump (temp, table_label));
10389
10390 /* If we are generating PIC code or if the table is PC-relative, the
10391 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10392 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10393 emit_barrier ();
10394 }
10395
10396 #endif /* HAVE_tablejump */