/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
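/* Illustrative note (not part of the original source): CEIL implements
   division rounded toward positive infinity for nonnegative operands.
   For example, splitting a 10-byte block into word-sized (4-byte) chunks
   requires CEIL (10, 4) == (10 + 4 - 1) / 4 == 3 moves, whereas plain
   integer division 10 / 4 == 2 would undercount:

       CEIL (8, 4)  == 2      exact multiple, no rounding
       CEIL (9, 4)  == 3      one extra partial chunk
       CEIL (10, 4) == 3

   Both arguments are assumed positive; the macro evaluates each of them
   twice, so arguments with side effects must be avoided.  */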
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;
/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx,
					 rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat, reg;
  int regno, num_clobbers;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
}
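/* Illustrative sketch (not from the original source): the probe above
   reuses one scratch SET pattern and asks recog whether the target has
   a mov insn matching each (mode, hard register) pair.  Conceptually:

       PUT_MODE (mem, SImode);            // (mem:SI (reg sp))
       reg = gen_rtx_REG (SImode, 0);     // hard register 0 in SImode
       SET_SRC (pat) = mem;               // (set (reg:SI 0) (mem:SI ...))
       SET_DEST (pat) = reg;
       if (recog (pat, insn, &num_clobbers) >= 0)
	 direct_load[(int) SImode] = 1;   // a direct load insn exists

   recog returns an insn code number, or -1 if no pattern in the machine
   description matches, which is why ">= 0" means "supported".  The use
   of SImode and register 0 here is invented for the example.  */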
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
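/* Illustrative sketch (not from the original source): the intended
   calling sequence for the queue.  For a post-increment such as V++ the
   expander queues the increment, keeps using the QUEUED rtx, and later
   either flushes the queue or asks for the pre-increment value:

       rtx q = enqueue_insn (v, gen_add2_insn (v, const1_rtx));
       ...
       rtx val = protect_from_queue (q, 0); // V, or a copy of its old value
       emit_queue ();                       // now emit the queued increments

   gen_add2_insn and const1_rtx are real rtl helpers; whether they form
   the body actually queued by the callers of enqueue_insn is an
   assumption made for this example.  */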
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
    {
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, 0))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, UNKNOWN);
	  return;
	}
    }
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
	  libcall = extendsfdf2_libfunc;
	  libcall = extendsfxf2_libfunc;
	  libcall = extendsftf2_libfunc;

	  libcall = truncdfsf2_libfunc;
	  libcall = extenddfxf2_libfunc;
	  libcall = extenddftf2_libfunc;

	  libcall = truncxfsf2_libfunc;
	  libcall = truncxfdf2_libfunc;

	  libcall = trunctfsf2_libfunc;
	  libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode,
					     copy_rtx (from)));
      return;
    }
918 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
919 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
921 if (!((GET_CODE (from
) == MEM
922 && ! MEM_VOLATILE_P (from
)
923 && direct_load
[(int) to_mode
]
924 && ! mode_dependent_address_p (XEXP (from
, 0)))
925 || GET_CODE (from
) == REG
926 || GET_CODE (from
) == SUBREG
))
927 from
= force_reg (from_mode
, from
);
928 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
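/* Illustrative sketch (not from the original source): a typical call.
   To widen a QImode pseudo into an SImode pseudo with sign extension,
   a caller in the expander would write something like:

       rtx narrow = gen_reg_rtx (QImode);
       rtx wide = gen_reg_rtx (SImode);
       convert_move (wide, narrow, 0);   // 0 = signed, so SIGN_EXTEND

   convert_move then picks, in order: a direct extend insn if
   can_extend_p says one exists, an extend via word_mode, a multi-word
   expansion, or a plain gen_lowpart for no-op truncations.  */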
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
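/* Worked example (not from the original source) of the sign/zero
   extension arithmetic above, with width == 8 (QImode) and a 32-bit
   HOST_WIDE_INT, converting the QImode constant whose INTVAL is -1:

       val  = -1                    all bits set on the host
       val &= (1 << 8) - 1          -> 0x000000FF   zero-extended
       val & (1 << 7)               -> nonzero, QImode sign bit is set
       val |= -1 << 8               -> 0xFFFFFFFF   sign-extended again

   So with UNSIGNEDP the result is GEN_INT (255), and without it the
   result is GEN_INT (-1), each correct for the wider mode.  */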
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
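/* Worked example (not from the original source) of the greedy loop
   above, moving len == 13 bytes on a target whose widest integer move
   is 4 bytes (MOVE_MAX == 4, so max_size starts at 5):

       pass 1: widest mode smaller than 5 is SImode (4 bytes)
	       -> three SImode moves, 12 bytes done, 1 left; max_size = 4
       pass 2: widest mode smaller than 4 is HImode (2 bytes)
	       -> 1 < 2, no move emitted; max_size = 2
       pass 3: QImode (1 byte) -> one QImode move, 0 left; max_size = 1

   The loop terminates with data.len == 0.  The mode sizes assumed here
   (SI = 4, HI = 2, QI = 1) are the usual ones but are target-defined,
   and the trace assumes sufficient alignment at every step.  */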
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx pat;
	      rtx last = get_last_insn ();

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
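/* Illustrative sketch (not from the original source): the three
   strategies above, in the order they are tried for a structure copy.

       rtx dst = ..., src = ...;            // BLKmode MEMs
       emit_block_move (dst, src, GEN_INT (24), 4);

   1. 24 is a CONST_INT and move_by_pieces_ninsns (24, 4) == 6 insns on
      a 4-byte-word target; if 6 < MOVE_RATIO the copy becomes six word
      moves emitted inline.
   2. Otherwise each movstrM pattern is tried, narrowest mode first,
      checking its operand predicates before emitting.
   3. Otherwise the copy falls back to a memcpy (or bcopy) libcall.

   The constants 24 and 4 are invented for the example.  */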
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
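/* Worked example (not from the original source) of the left-alignment
   shift above on a 32-bit BYTES_BIG_ENDIAN target (UNITS_PER_WORD == 4),
   storing a SIZE == 3 byte value that sits in the low-order bits of a
   register:

       shift count = (UNITS_PER_WORD - size) * BITS_PER_UNIT
		   = (4 - 3) * 8 = 8

   Shifting left by 8 moves the three significant bytes into the
   most-significant positions, so the subsequent full-word store places
   them at the lowest addresses, where big-endian memory expects them.  */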
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
{
  rtx retval = 0;

  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= (GET_MODE_MASK (mode) >> 1)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx pat;
		  rtx last = get_last_insn ();

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  retval
	    = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
				       ptr_mode, 3,
				       XEXP (object, 0), Pmode,
				       const0_rtx,
				       TYPE_MODE (integer_type_node),
				       convert_to_mode
				       (TYPE_MODE (sizetype), size,
					TREE_UNSIGNED (sizetype)),
				       TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode
			     (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));

  return retval;
}
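/* Illustrative sketch (not from the original source): clearing a
   BLKmode aggregate versus a scalar.

       clear_storage (struct_mem, GEN_INT (32), 4);   // 32-byte struct
       clear_storage (int_reg, const0_rtx, 0);        // non-BLKmode case

   For the BLKmode call the same inline-vs-clrstrM-vs-libcall cascade as
   emit_block_move is used; for anything else the whole object is set
   with a single move of CONST0_RTX of its mode.  The variable names and
   constants shown are invented for the example.  */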
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
2236 /* Low level part of emit_move_insn.
2237 Called just like emit_move_insn, but assumes X and Y
2238 are basically valid. */
rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
                                                    * BITS_PER_UNIT),
                                                   (class == MODE_COMPLEX_INT
                                                    ? MODE_INT : MODE_FLOAT),
                                                   0))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_realpart (submode, x), gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
        }

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          x = change_address (x, VOIDmode, stack_pointer_rtx);
        }
#endif

      /* Show the output dies here.  */
      if (x != y)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }

      return last_insn;
    }
  else
    abort ();
}
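
/* Illustrative sketch; not part of the original source.  On a 32-bit
   target that defines no movdi pattern, a DImode register-to-register
   move falls into the multi-word loop above and becomes two word_mode
   moves of the subwords returned by operand_subword:

        emit_move_insn_1 (gen_reg_rtx (DImode), gen_reg_rtx (DImode));

   Machines get better code by providing the pattern themselves, as the
   comment above notes.  */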
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                         negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                         negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
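
/* Illustrative sketch; not part of the original source.  Pushing a
   16-byte block with 4 bytes of padding at low addresses:

        rtx addr = push_block (GEN_INT (16), 4, 1);

   adjusts the stack by 20 bytes and returns an address for the low end
   of the 16-byte block, based on virtual_outgoing_args_rtx when the
   stack grows downward.  */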
/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */
rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
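
/* Illustrative note; not part of the original source.  With a
   pre-decrement push the stack pointer already points at the data just
   pushed, so get_push_address (n) simply copies stack_pointer_rtx to a
   pseudo; only the post-update push codes need the PLUS/MINUS
   correction above.  */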
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument:  `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);
  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;
#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);

          if (flag_check_memory_usage && ! in_check_memory_usage)
            {
              rtx temp;

              in_check_memory_usage = 1;
              temp = get_push_address (INTVAL (size) - used);
              if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
                emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                                   temp, ptr_mode,
                                   XEXP (xinner, 0), ptr_mode,
                                   GEN_INT (INTVAL (size) - used),
                                   TYPE_MODE (sizetype));
              else
                emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                                   temp, ptr_mode,
                                   GEN_INT (INTVAL (size) - used),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              in_check_memory_usage = 0;
            }

          goto ret;
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));
          if (flag_check_memory_usage && ! in_check_memory_usage)
            {
              rtx target;

              in_check_memory_usage = 1;
              target = copy_to_reg (temp);
              if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
                emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                                   target, ptr_mode,
                                   XEXP (xinner, 0), ptr_mode,
                                   size, TYPE_MODE (sizetype));
              else
                emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                                   target, ptr_mode,
                                   size, TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              in_check_memory_usage = 0;
            }
          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          else
            {
              rtx opalign = GEN_INT (align);
              enum machine_mode mode;
              rtx target = gen_rtx (MEM, BLKmode, temp);

              for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                {
                  enum insn_code code = movstr_optab[(int) mode];

                  if (code != CODE_FOR_nothing
                      && ((GET_CODE (size) == CONST_INT
                           && ((unsigned HOST_WIDE_INT) INTVAL (size)
                               <= (GET_MODE_MASK (mode) >> 1)))
                          || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                      && (insn_operand_predicate[(int) code][0] == 0
                          || ((*insn_operand_predicate[(int) code][0])
                              (target, BLKmode)))
                      && (insn_operand_predicate[(int) code][1] == 0
                          || ((*insn_operand_predicate[(int) code][1])
                              (xinner, BLKmode)))
                      && (insn_operand_predicate[(int) code][3] == 0
                          || ((*insn_operand_predicate[(int) code][3])
                              (opalign, VOIDmode))))
                    {
                      rtx op2 = convert_to_mode (mode, size, 1);
                      rtx last = get_last_insn ();
                      rtx pat;

                      if (insn_operand_predicate[(int) code][2] != 0
                          && ! ((*insn_operand_predicate[(int) code][2])
                                (op2, mode)))
                        op2 = copy_to_mode_reg (mode, op2);

                      pat = GEN_FCN ((int) code) (target, xinner,
                                                  op2, opalign);
                      if (pat)
                        {
                          emit_insn (pat);
                          goto ret;
                        }
                      else
                        delete_insns_since (last);
                    }
                }
            }

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 0,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             convert_to_mode (TYPE_MODE (sizetype),
                                              size, TREE_UNSIGNED (sizetype)),
                             TYPE_MODE (sizetype));
#else
          emit_library_call (bcopy_libfunc, 0,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             convert_to_mode (TYPE_MODE (integer_type_node),
                                              size,
                                              TREE_UNSIGNED (integer_type_node)),
                             TYPE_MODE (integer_type_node));
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          target = addr;
        }

      emit_move_insn (gen_rtx_MEM (mode, addr), x);

      if (flag_check_memory_usage && ! in_check_memory_usage)
        {
          in_check_memory_usage = 1;
          if (target == 0)
            target = get_push_address (GET_MODE_SIZE (mode));

          if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
            emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                               target, ptr_mode,
                               XEXP (x, 0), ptr_mode,
                               GEN_INT (GET_MODE_SIZE (mode)),
                               TYPE_MODE (sizetype));
          else
            emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                               target, ptr_mode,
                               GEN_INT (GET_MODE_SIZE (mode)),
                               TYPE_MODE (sizetype),
                               GEN_INT (MEMORY_USE_RW),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }
    }
 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x);
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
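
/* Illustrative sketch; not part of the original source.  The
   partial-in-registers case above recursively pushes each stack word
   with a call of roughly this shape (word_offset stands for the
   computed byte offset):

        emit_push_insn (operand_subword_force (x, i, mode),
                        word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                        0, args_addr, GEN_INT (word_offset),
                        reg_parm_stack_space);

   i.e. one word, no type, no partial part, and no extra padding.  */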
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */
rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF)
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

          if (GET_CODE (to_rtx) != MEM)
            abort ();
          to_rtx = change_address (to_rtx, VOIDmode,
                                   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
                                                 force_reg (ptr_mode,
                                                            offset_rtx)));
        }
      if (volatilep)
        {
          if (GET_CODE (to_rtx) == MEM)
            {
              /* When the offset is zero, to_rtx is the address of the
                 structure we are storing into, and hence may be shared.
                 We must make a new MEM before setting the volatile bit.  */
              if (offset == 0)
                to_rtx = copy_rtx (to_rtx);

              MEM_VOLATILE_P (to_rtx) = 1;
            }
#if 0  /* This was turned off because, when a field is volatile
          in an object which is not volatile, the object may be in a register,
          and then we would abort over here.  */
          else
            abort ();
#endif
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1)))
        {
          if (offset == 0)
            to_rtx = copy_rtx (to_rtx);

          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      /* Check the access.  */
      if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
        {
          rtx to_addr;
          int size;
          int best_mode_size;
          enum machine_mode best_mode;

          best_mode = get_best_mode (bitsize, bitpos,
                                     TYPE_ALIGN (TREE_TYPE (tem)),
                                     mode1, volatilep);
          if (best_mode == VOIDmode)
            best_mode = QImode;

          best_mode_size = GET_MODE_BITSIZE (best_mode);
          to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
          size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
          size *= GET_MODE_SIZE (best_mode);

          /* Check the access right of the pointer.  */
          if (size)
            emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                               to_addr, ptr_mode,
                               GEN_INT (size), TYPE_MODE (sizetype),
                               GEN_INT (MEMORY_USE_WO),
                               TYPE_MODE (integer_type_node));
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast makes HPUX compiler happy.  */
                             ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
                             : VOIDmode),
                            unsignedp,
                            /* Required alignment of containing datum.  */
                            alignment,
                            int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value);
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from),
                         TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else
        emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
                              EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (flag_check_memory_usage)
        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                           XEXP (to_rtx, 0), ptr_mode,
                           XEXP (from_rtx, 0), ptr_mode,
                           convert_to_mode (TYPE_MODE (sizetype),
                                            size, TREE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                         XEXP (to_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node),
                                          size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
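
/* Illustrative note; not part of the original source.  For C code like
   "val = setjmp (buf)", FROM is a CALL_EXPR, so the branch above
   evaluates the call before touching VAL's address; a plain "x = y"
   falls through to the store_expr tail, and the rtx result is only
   meaningful when WANT_VALUE was nonzero.  */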
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */
rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
                          GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target), 0);
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
        dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0)
        {
          if (TREE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp
              = convert
                (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
                                          TREE_TYPE (exp)),
                 exp);

          exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
                                        SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);
        }

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
         the access now so it gets done only once.  Likewise if
         it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
          && (MEM_VOLATILE_P (temp)
              || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                              TYPE_MODE (TREE_TYPE (exp)), temp,
                              SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || want_value))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TREE_UNSIGNED (TREE_TYPE (exp)));
  if (flag_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      if (GET_CODE (temp) == MEM)
        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                           XEXP (target, 0), ptr_mode,
                           XEXP (temp, 0), ptr_mode,
                           expr_size (exp), TYPE_MODE (sizetype));
      else
        emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                           XEXP (target, 0), ptr_mode,
                           expr_size (exp), TYPE_MODE (sizetype),
                           GEN_INT (MEMORY_USE_WO),
                           TYPE_MODE (integer_type_node));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.
             The string constant may be shorter than the array.
             So copy just the string's actual length, and clear the rest.  */
          rtx size;
          rtx addr;

          /* Get the size of the data type of the string,
             which is actually the size of the target.  */
          size = expr_size (exp);
          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              convert (sizetype,
                                       build_int_2 (TREE_STRING_LENGTH (exp),
                                                    0)));
              rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
                                               VOIDmode, 0);
              rtx label = 0;

              /* Copy that much.  */
              emit_block_move (target, temp, copy_size_rtx,
                               TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */

              addr = XEXP (target, 0);
              addr = convert_modes (ptr_mode, Pmode, addr, 1);

              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
                  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
                }
              else
                {
                  addr = force_reg (ptr_mode, addr);
                  addr = expand_binop (ptr_mode, add_optab, addr,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  size = expand_binop (ptr_mode, sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
                                 GET_MODE (size), 0, 0);
                  label = gen_label_rtx ();
                  emit_jump_insn (gen_blt (label));
                }

              if (size != const0_rtx)
                {
                  /* Be sure we can write on ADDR.  */
                  if (flag_check_memory_usage)
                    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                                       addr, ptr_mode,
                                       size, TYPE_MODE (sizetype),
                                       GEN_INT (MEMORY_USE_WO),
                                       TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
                  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
                                     addr, ptr_mode,
                                     const0_rtx, TYPE_MODE (integer_type_node),
                                     convert_to_mode (TYPE_MODE (sizetype),
                                                      size,
                                                      TREE_UNSIGNED (sizetype)),
                                     TYPE_MODE (sizetype));
#else
                  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
                                     addr, ptr_mode,
                                     convert_to_mode (TYPE_MODE (integer_type_node),
                                                      size,
                                                      TREE_UNSIGNED (integer_type_node)),
                                     TYPE_MODE (integer_type_node));
#endif
                }

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp);
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
        emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
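
/* Illustrative note; not part of the original source.  Callers that
   only need the side effect pass WANT_VALUE == 0 and must ignore the
   result:

        store_expr (exp, target, 0);

   whereas chained assignments such as "a = b = c" use want_value == 1
   so the inner assignment yields a usable rtx.  */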
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
        is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        if (! is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        {
          /* If there are no ranges of true bits, it is all zero.  */
          return elt == NULL_TREE;
        }
      for (; elt; elt = TREE_CHAIN (elt))
        {
          /* We do not handle the case where the index is a RANGE_EXPR,
             so the statistic will be somewhat inaccurate.
             We do make a more accurate count in store_constructor itself,
             so since this function is only used for nested array elements,
             this should be close enough.  */
          if (mostly_zeros_p (TREE_VALUE (elt)))
            zeros++;
          elts++;
        }

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
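
/* Worked example; not part of the original source.  For a constructor
   with elements {0, 0, 0, 5}, the loop above ends with elts == 4 and
   zeros == 3; since 4 * 3 >= 3 * 4, the constructor counts as mostly
   zero and callers will clear the whole object first.  */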
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
        target = change_address (target, VOIDmode,
                                 plus_constant (XEXP (target, 0),
                                                bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
                 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
                 int_size_in_bytes (type));
}
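
/* Illustrative note; not part of the original source.  For a nested
   aggregate initializer such as

        struct outer { struct inner { int a, b; } i; } v = { { 0, 1 } };

   the inner CONSTRUCTOR is byte-aligned, so the shortcut above hands it
   back to store_constructor at the adjusted address instead of going
   through store_field.  */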
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   CLEARED is true if TARGET is known to have been zero'd.  */
static void
store_constructor (exp, target, cleared)
     tree exp;
     rtx target;
     int cleared;
{
  tree type = TREE_TYPE (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */

  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, 0);
      emit_move_insn (target, temp);
      return;
    }
  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
          || TREE_CODE (type) == QUAL_UNION_TYPE)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* If we are building a static constructor into a register,
         set the initial value as zero so we can fold the value into
         a constant.  But if more than one register is involved,
         this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
               && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
        {
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
          cleared = 1;
        }

      /* If the constructor has fewer fields than the structure
         or if we are initializing the structure to mostly zeros,
         clear the whole structure first.  */
      else if ((list_length (CONSTRUCTOR_ELTS (exp))
                != list_length (TYPE_FIELDS (type)))
               || mostly_zeros_p (exp))
        {
          clear_storage (target, expr_size (exp),
                         TYPE_ALIGN (type) / BITS_PER_UNIT);
          cleared = 1;
        }
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos = 0;
          int unsignedp;
          tree pos, constant = 0, offset = 0;
          rtx to_rtx = target;

          /* Just ignore missing fields.
             We cleared the whole structure, above,
             if any fields are missing.  */
          if (field == 0)
            continue;

          if (cleared && is_zeros_p (TREE_VALUE (elt)))
            continue;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
          unsignedp = TREE_UNSIGNED (field);
          mode = DECL_MODE (field);
          if (DECL_BIT_FIELD (field))
            mode = VOIDmode;

          pos = DECL_FIELD_BITPOS (field);
          if (TREE_CODE (pos) == INTEGER_CST)
            constant = pos;
          else if (TREE_CODE (pos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
            constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
          else
            offset = pos;

          if (constant)
            bitpos = TREE_INT_CST_LOW (constant);

          if (offset)
            {
              rtx offset_rtx;

              if (contains_placeholder_p (offset))
                offset = build (WITH_RECORD_EXPR, sizetype,
                                offset, make_tree (TREE_TYPE (exp), target));

              offset = size_binop (FLOOR_DIV_EXPR, offset,
                                   size_int (BITS_PER_UNIT));

              offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
              if (GET_CODE (to_rtx) != MEM)
                abort ();

              to_rtx
                = change_address (to_rtx, VOIDmode,
                                  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
                                                force_reg (ptr_mode,
                                                           offset_rtx)));
            }
          if (TREE_READONLY (field))
            {
              if (GET_CODE (to_rtx) == MEM)
                to_rtx = copy_rtx (to_rtx);

              RTX_UNCHANGING_P (to_rtx) = 1;
            }

          store_constructor_field (to_rtx, bitsize, bitpos,
                                   mode, TREE_VALUE (elt), type, cleared);
        }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is a
         static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
        need_to_clear = 1;
      else
        {
          HOST_WIDE_INT count = 0, zero_count = 0;
          need_to_clear = 0;
          /* This loop is a more accurate version of the loop in
             mostly_zeros_p (it handles RANGE_EXPR in an index).
             It is also needed to check for missing elements.  */
          for (elt = CONSTRUCTOR_ELTS (exp);
               elt != NULL_TREE;
               elt = TREE_CHAIN (elt))
            {
              tree index = TREE_PURPOSE (elt);
              HOST_WIDE_INT this_node_count;
              if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                {
                  tree lo_index = TREE_OPERAND (index, 0);
                  tree hi_index = TREE_OPERAND (index, 1);
                  if (TREE_CODE (lo_index) != INTEGER_CST
                      || TREE_CODE (hi_index) != INTEGER_CST)
                    {
                      need_to_clear = 1;
                      break;
                    }
                  this_node_count = TREE_INT_CST_LOW (hi_index)
                    - TREE_INT_CST_LOW (lo_index) + 1;
                }
              else
                this_node_count = 1;
              count += this_node_count;
              if (mostly_zeros_p (TREE_VALUE (elt)))
                zero_count += this_node_count;
            }
          /* Clear the entire array first if there are any missing elements,
             or if the incidence of zero elements is >= 75%.  */
          if (count < maxelt - minelt + 1
              || 4 * zero_count >= 3 * count)
            need_to_clear = 1;
        }
      if (need_to_clear)
        {
          if (! cleared)
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
          cleared = 1;
        }
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
           elt;
           elt = TREE_CHAIN (elt), i++)
        {
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;
          tree value = TREE_VALUE (elt);
          tree index = TREE_PURPOSE (elt);
          rtx xtarget = target;

          if (cleared && is_zeros_p (value))
            continue;

          mode = TYPE_MODE (elttype);
          bitsize = GET_MODE_BITSIZE (mode);
          unsignedp = TREE_UNSIGNED (elttype);

          if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
            {
              tree lo_index = TREE_OPERAND (index, 0);
              tree hi_index = TREE_OPERAND (index, 1);
              rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
              struct nesting *loop;
              HOST_WIDE_INT lo, hi, count;
              tree position;

              /* If the range is constant and "small", unroll the loop.  */
              if (TREE_CODE (lo_index) == INTEGER_CST
                  && TREE_CODE (hi_index) == INTEGER_CST
                  && (lo = TREE_INT_CST_LOW (lo_index),
                      hi = TREE_INT_CST_LOW (hi_index),
                      count = hi - lo + 1,
                      (GET_CODE (target) != MEM
                       || count <= 2
                       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
                           && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
                           <= 40 * 8))))
                {
                  lo -= minelt;  hi -= minelt;
                  for (; lo <= hi; lo++)
                    {
                      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
                      store_constructor_field (target, bitsize, bitpos,
                                               mode, value, type, cleared);
                    }
                }
              else
                {
                  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
                  loop_top = gen_label_rtx ();
                  loop_end = gen_label_rtx ();

                  unsignedp = TREE_UNSIGNED (domain);

                  index = build_decl (VAR_DECL, NULL_TREE, domain);

                  DECL_RTL (index) = index_r
                    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                 &unsignedp, 0));

                  if (TREE_CODE (value) == SAVE_EXPR
                      && SAVE_EXPR_RTL (value) == 0)
                    {
                      /* Make sure value gets expanded once before the
                         loop.  */
                      expand_expr (value, const0_rtx, VOIDmode, 0);
                      emit_queue ();
                    }
                  store_expr (lo_index, index_r, 0);
                  loop = expand_start_loop (0);

                  /* Assign value to element index.  */
                  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
                                         size_int (BITS_PER_UNIT));
                  position = size_binop (MULT_EXPR,
                                         size_binop (MINUS_EXPR, index,
                                                     TYPE_MIN_VALUE (domain)),
                                         position);
                  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
                  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
                  xtarget = change_address (target, mode, addr);
                  if (TREE_CODE (value) == CONSTRUCTOR)
                    store_constructor (value, xtarget, cleared);
                  else
                    store_expr (value, xtarget, 0);

                  expand_exit_loop_if_false (loop,
                                             build (LT_EXPR, integer_type_node,
                                                    index, hi_index));

                  expand_increment (build (PREINCREMENT_EXPR,
                                           TREE_TYPE (index),
                                           index, integer_one_node), 0, 0);
                  expand_end_loop ();
                  emit_label (loop_end);

                  /* Needed by stupid register allocation, to extend the
                     lifetime of pseudo-regs used by target past the end
                     of the loop.  */
                  emit_insn (gen_rtx_USE (GET_MODE (target), target));
                }
            }
          else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
                   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
            {
              rtx pos_rtx, addr;
              tree position;

              if (index == 0)
                index = size_int (i);

              if (minelt)
                index = size_binop (MINUS_EXPR, index,
                                    TYPE_MIN_VALUE (domain));
              position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
                                     size_int (BITS_PER_UNIT));
              position = size_binop (MULT_EXPR, index, position);
              pos_rtx = expand_expr (position, 0, VOIDmode, 0);
              addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
              xtarget = change_address (target, mode, addr);
              store_expr (value, xtarget, 0);
            }
          else
            {
              if (index != 0)
                bitpos = ((TREE_INT_CST_LOW (index) - minelt)
                          * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
              else
                bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
              store_constructor_field (target, bitsize, bitpos,
                                       mode, value, type, cleared);
            }
        }
    }
  /* set constructor assignments */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the whole set (using bzero/memset),
         and then set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE)
        {
          if (!cleared)
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
          return;
        }

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
                              size_binop (MINUS_EXPR, domain_max, domain_min),
                              size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
        abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
         are "complicated" (more than one range), initialize (the
         constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
          || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
        {
          int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
          enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
          char *bit_buffer = (char *) alloca (nbits);
          HOST_WIDE_INT word = 0;
          int bit_pos = 0;
          int ibit = 0;
          int offset = 0;  /* In bytes from beginning of set.  */
          elt = get_set_constructor_bits (exp, bit_buffer, nbits);
          for (;;)
            {
              if (bit_buffer[ibit])
                {
                  if (BYTES_BIG_ENDIAN)
                    word |= (1 << (set_word_size - 1 - bit_pos));
                  else
                    word |= 1 << bit_pos;
                }
              bit_pos++;  ibit++;
              if (bit_pos >= set_word_size || ibit == nbits)
                {
                  if (word != 0 || ! cleared)
                    {
                      rtx datum = GEN_INT (word);
                      rtx to_rtx;
                      /* The assumption here is that it is safe to use
                         XEXP if the set is multi-word, but not if
                         it's single-word.  */
                      if (GET_CODE (target) == MEM)
                        {
                          to_rtx = plus_constant (XEXP (target, 0), offset);
                          to_rtx = change_address (target, mode, to_rtx);
                        }
                      else if (offset == 0)
                        to_rtx = target;
                      else
                        abort ();
                      emit_move_insn (to_rtx, datum);
                    }
                  if (ibit == nbits)
                    break;
                  word = 0;
                  bit_pos = 0;
                  offset += set_word_size / BITS_PER_UNIT;
                }
            }
        }
      else if (!cleared)
        {
          /* Don't bother clearing storage if the set is all ones.  */
          if (TREE_CHAIN (elt) != NULL_TREE
              || (TREE_PURPOSE (elt) == NULL_TREE
                  ? nbits != 1
                  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
                     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
                     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
                         - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
                         != nbits))))
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
        }

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
        {
          /* start of range of element or NULL */
          tree startbit = TREE_PURPOSE (elt);
          /* end of range of element, or element value */
          tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
          HOST_WIDE_INT startb, endb;
#endif
          rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

          bitlength_rtx = expand_expr (bitlength,
                                       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

          /* handle non-range tuple element like [ expr ]  */
          if (startbit == NULL_TREE)
            {
              startbit = save_expr (endbit);
              endbit = startbit;
            }
          startbit = convert (sizetype, startbit);
          endbit = convert (sizetype, endbit);
          if (! integer_zerop (domain_min))
            {
              startbit = size_binop (MINUS_EXPR, startbit, domain_min);
              endbit = size_binop (MINUS_EXPR, endbit, domain_min);
            }
          startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
                                      EXPAND_CONST_ADDRESS);
          endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
                                    EXPAND_CONST_ADDRESS);

          if (REG_P (target))
            {
              targetx = assign_stack_temp (GET_MODE (target),
                                           GET_MODE_SIZE (GET_MODE (target)),
                                           0);
              emit_move_insn (targetx, target);
            }
          else if (GET_CODE (target) == MEM)
            targetx = target;
          else
            abort ();

#ifdef TARGET_MEM_FUNCTIONS
          /* Optimization:  If startbit and endbit are
             constants divisible by BITS_PER_UNIT,
             call memset instead.  */
          if (TREE_CODE (startbit) == INTEGER_CST
              && TREE_CODE (endbit) == INTEGER_CST
              && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
              && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
            {
              emit_library_call (memset_libfunc, 0,
                                 VOIDmode, 3,
                                 plus_constant (XEXP (targetx, 0),
                                                startb / BITS_PER_UNIT),
                                 Pmode,
                                 constm1_rtx, TYPE_MODE (integer_type_node),
                                 GEN_INT ((endb - startb) / BITS_PER_UNIT),
                                 TYPE_MODE (sizetype));
            }
          else
#endif
            {
              emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
                                 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
                                 bitlength_rtx, TYPE_MODE (sizetype),
                                 startbit_rtx, TYPE_MODE (sizetype),
                                 endbit_rtx, TYPE_MODE (sizetype));
            }

          if (REG_P (target))
            emit_move_insn (target, targetx);
        }
    }

  else
    abort ();
}
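
/* Worked example; not part of the original source.  A Pascal-style set
   [2, 5..7] over a domain of 0..31 arrives as the element ranges
   (2 . 2) and (5 . 7); with 8-bit units the constant path above builds
   the word with bits 2, 5, 6 and 7 set (0xe4 in little-endian bit
   order) and stores it with a single move instead of calling
   __setbits.  */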
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */
4098 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
4099 unsignedp
, align
, total_size
)
4101 int bitsize
, bitpos
;
4102 enum machine_mode mode
;
4104 enum machine_mode value_mode
;
4109 HOST_WIDE_INT width_mask
= 0;
4111 if (TREE_CODE (exp
) == ERROR_MARK
)
4114 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
4115 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
4117 /* If we are storing into an unaligned field of an aligned union that is
4118 in a register, we may have the mode of TARGET being an integer mode but
4119 MODE == BLKmode. In that case, get an aligned object whose size and
4120 alignment are the same as TARGET and store TARGET into it (we can avoid
4121 the store if the field being stored is the entire width of TARGET). Then
4122 call ourselves recursively to store the field into a BLKmode version of
4123 that object. Finally, load from the object into TARGET. This is not
4124 very efficient in general, but should only be slightly more expensive
4125 than the otherwise-required unaligned accesses. Perhaps this can be
4126 cleaned up later. */
4129 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
4131 rtx object
= assign_stack_temp (GET_MODE (target
),
4132 GET_MODE_SIZE (GET_MODE (target
)), 0);
4133 rtx blk_object
= copy_rtx (object
);
4135 MEM_IN_STRUCT_P (object
) = 1;
4136 MEM_IN_STRUCT_P (blk_object
) = 1;
4137 PUT_MODE (blk_object
, BLKmode
);
4139 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
4140 emit_move_insn (object
, target
);
4142 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4145 /* Even though we aren't returning target, we need to
4146 give it the updated value. */
4147 emit_move_insn (target
, object
);
4152 /* If the structure is in a register or if the component
4153 is a bit field, we cannot use addressing to access it.
4154 Use bit-field techniques or SUBREG to store in it. */
4156 if (mode
== VOIDmode
4157 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
])
4158 || GET_CODE (target
) == REG
4159 || GET_CODE (target
) == SUBREG
4160 /* If the field isn't aligned enough to store as an ordinary memref,
4161 store it as a bit field. */
4162 || (SLOW_UNALIGNED_ACCESS
4163 && align
* BITS_PER_UNIT
< GET_MODE_ALIGNMENT (mode
))
4164 || (SLOW_UNALIGNED_ACCESS
&& bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
4166 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4168 /* If BITSIZE is narrower than the size of the type of EXP
4169 we will be narrowing TEMP. Normally, what's wanted are the
4170 low-order bits. However, if EXP's type is a record and this is
4171 big-endian machine, we want the upper BITSIZE bits. */
4172 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
4173 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
4174 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
4175 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
4176 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
4180 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4182 if (mode
!= VOIDmode
&& mode
!= BLKmode
4183 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
4184 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
4186 /* If the modes of TARGET and TEMP are both BLKmode, both
4187 must be in memory and BITPOS must be aligned on a byte
4188 boundary. If so, we simply do a block copy. */
4189 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
4191 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
4192 || bitpos
% BITS_PER_UNIT
!= 0)
4195 target
= change_address (target
, VOIDmode
,
4196 plus_constant (XEXP (target
, 0),
4197 bitpos
/ BITS_PER_UNIT
));
4199 emit_block_move (target
, temp
,
4200 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
4204 return value_mode
== VOIDmode
? const0_rtx
: target
;
4207 /* Store the value in the bitfield. */
4208 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
      if (value_mode != VOIDmode)
        {
          /* The caller wants an rtx for the value.  */
          /* If possible, avoid refetching from the bitfield itself.  */
          if (width_mask != 0
              && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
            {
              tree count;
              enum machine_mode tmode;

              if (unsignedp)
                return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
              tmode = GET_MODE (temp);
              if (tmode == VOIDmode)
                tmode = value_mode;
              count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
              temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
              return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
            }

          return extract_bit_field (target, bitsize, bitpos, unsignedp,
                                    NULL_RTX, value_mode, 0, align,
                                    total_size);
        }

      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
         so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
          && ! CONSTANT_ADDRESS_P (addr)
          /* A frame-pointer reference is already stable.  */
          && ! (GET_CODE (addr) == PLUS
                && GET_CODE (XEXP (addr, 1)) == CONST_INT
                && (XEXP (addr, 0) == virtual_incoming_args_rtx
                    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
        addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (change_address (target, mode,
                                         plus_constant (addr,
                                                        (bitpos
                                                         / BITS_PER_UNIT))));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
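
/* Illustrative sketch only (not called anywhere in this file): a front
   end storing RHS into a 3-bit field at bit offset 2 of the word at
   TO_RTX would arrive here roughly as

        store_field (to_rtx, 3, 2, VOIDmode, rhs, VOIDmode, 0,
                     align, total_size);

   and take the bit-field path above, ending in store_bit_field.
   TO_RTX and RHS are hypothetical names; the argument values assume a
   bit-field, for which get_inner_reference reports VOIDmode.  */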
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment in bytes of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be more strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
                     punsignedp, pvolatilep, palignment)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     int *palignment;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;
  int alignment = BIGGEST_ALIGNMENT;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
        mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = TREE_INT_CST_LOW (size_tree);
    }
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
        {
          tree pos = (TREE_CODE (exp) == COMPONENT_REF
                      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
                      : TREE_OPERAND (exp, 2));
          tree constant = integer_zero_node, var = pos;

          /* If this field hasn't been filled in yet, don't go
             past it.  This should only happen when folding expressions
             made during type construction.  */
          if (pos == 0)
            break;

          /* Assume here that the offset is a multiple of a unit.
             If not, there should be an explicitly added constant.  */
          if (TREE_CODE (pos) == PLUS_EXPR
              && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
            constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
          else if (TREE_CODE (pos) == INTEGER_CST)
            constant = pos, var = integer_zero_node;

          *pbitpos += TREE_INT_CST_LOW (constant);
          offset = size_binop (PLUS_EXPR, offset,
                               size_binop (EXACT_DIV_EXPR, var,
                                           size_int (BITS_PER_UNIT)));
        }
      else if (TREE_CODE (exp) == ARRAY_REF)
        {
          /* This code is based on the code in case ARRAY_REF in expand_expr
             below.  We assume here that the size of an array element is
             always an integral multiple of BITS_PER_UNIT.  */

          tree index = TREE_OPERAND (exp, 1);
          tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
          tree low_bound
            = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
          tree index_type = TREE_TYPE (index);

          if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
            {
              index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
                               index);
              index_type = TREE_TYPE (index);
            }

          if (! integer_zerop (low_bound))
            index = fold (build (MINUS_EXPR, index_type, index, low_bound));

          if (TREE_CODE (index) == INTEGER_CST)
            {
              index = convert (sbitsizetype, index);
              index_type = TREE_TYPE (index);
            }

          index = fold (build (MULT_EXPR, sbitsizetype, index,
                               convert (sbitsizetype,
                                        TYPE_SIZE (TREE_TYPE (exp)))));

          if (TREE_CODE (index) == INTEGER_CST
              && TREE_INT_CST_HIGH (index) == 0)
            *pbitpos += TREE_INT_CST_LOW (index);
          else
            {
              if (contains_placeholder_p (index))
                index = build (WITH_RECORD_EXPR, sizetype, index, exp);

              offset = size_binop (PLUS_EXPR, offset,
                                   size_binop (FLOOR_DIV_EXPR, index,
                                               size_int (BITS_PER_UNIT)));
            }
        }
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
               && ! ((TREE_CODE (exp) == NOP_EXPR
                      || TREE_CODE (exp) == CONVERT_EXPR)
                     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
                           && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
                               != UNION_TYPE))
                     && (TYPE_MODE (TREE_TYPE (exp))
                         == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
         alignment more than the alignment here.  */
      if (! integer_zerop (offset))
        alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
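
/* Illustrative sketch of a typical call (the COMPONENT_REF handling in
   expand_expr below does essentially this):

        int bitsize, bitpos, unsignedp, volatilep = 0, alignment;
        tree offset;
        enum machine_mode mode1;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep,
                                        &alignment);

   For a reference S.F where F is a non-bit-field int at byte offset 4,
   TEM is the tree for S, *PBITSIZE is the width of int, *PBITPOS is
   4 * BITS_PER_UNIT, *POFFSET is 0 and *PMODE is F's mode.  S and F are
   hypothetical names used only for this example.  */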
/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;
    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;
    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
         MEMORY_USE_DONT, because they are modifiers to a call of
         expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
                          force_operand (op2, NULL_RTX),
                          target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
        {
          binoptab = add_optab;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_binop (GET_MODE (value), binoptab,
                                   XEXP (XEXP (value, 0), 0), op2,
                                   subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_binop (GET_MODE (value), binoptab, temp,
                               force_operand (XEXP (XEXP (value, 0), 1), 0),
                               target, 0, OPTAB_LIB_WIDEN);
        }

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
                           force_operand (op2, NULL_RTX),
                           target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
         because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
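
/* Illustrative example (with a hypothetical pseudo register REG100):
   given the address-style rtx

        rtx sum = gen_rtx_PLUS (SImode, reg100, GEN_INT (4));
        rtx val = force_operand (sum, NULL_RTX);

   VAL holds REG100 + 4 in a new pseudo; the addition has been emitted
   as a real add insn instead of being left as a PLUS expression.  */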
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
        rtx target = assign_temp (part_type, 0, 1, 1);
        if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
          target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
        parts = tree_cons (to_be_saved,
                           build (RTL_EXPR, part_type, NULL_TREE,
                                  (tree) target),
                           parts);
        store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
        parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.  */

static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
              != INTEGER_CST)
          && GET_MODE (x) == BLKmode))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
          || (GET_CODE (XEXP (x, 0)) == PLUS
              && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
        return ((TREE_VALUE (exp) == 0
                 || safe_from_p (x, TREE_VALUE (exp), 0))
                && (TREE_CHAIN (exp) == 0
                    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else
        return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
              && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          return (staticp (TREE_OPERAND (exp, 0))
                  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
                  || TREE_STATIC (exp));

        case INDIRECT_REF:
          if (GET_CODE (x) == MEM)
            return 0;
          break;

        case CALL_EXPR:
          exp_rtl = CALL_EXPR_RTL (exp);
          if (exp_rtl == 0)
            {
              /* Assume that the call will clobber all hard registers and
                 all of memory.  */
              if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
                  || GET_CODE (x) == MEM)
                return 0;
            }
          break;

        case RTL_EXPR:
          /* If a sequence exists, we would have to scan every instruction
             in the sequence to see if it was safe.  This is probably not
             worthwhile.  */
          if (RTL_EXPR_SEQUENCE (exp))
            return 0;

          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case CLEANUP_POINT_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

        case SAVE_EXPR:
          exp_rtl = SAVE_EXPR_RTL (exp);
          break;

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

        case METHOD_CALL_EXPR:
          /* This takes a rtx argument, but shouldn't appear here.  */
          abort ();

        default:
          break;
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
          return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (GET_CODE (exp_rtl) == REG
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
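
/* Example of the intended use (expand_expr's binary-operator path below
   does exactly this): before letting operand 0 be computed into
   SUBTARGET, verify that operand 1 cannot read SUBTARGET:

        if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
          subtarget = 0;

   so storing operand 0's value there cannot clobber anything that
   operand 1 still needs.  */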
/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
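
/* Caller's-eye sketch (illustrative only): to evaluate a tree EXP for
   its value in its natural mode, most callers write

        rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   and to evaluate purely for side effects, they pass const0_rtx:

        expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

   As described above, TARGET and TMODE are only suggestions; the
   returned rtx must always be checked.  */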
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  int ignore = (target == const0_rtx
                || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                     || code == CONVERT_EXPR || code == REFERENCE_EXPR
                     || code == COND_EXPR)
                    && TREE_CODE (type) == VOID_TYPE));
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1')
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2'
               || TREE_CODE_CLASS (code) == '<')
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, ro_modifier);

      target = 0;
    }
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            /* Allocate in the memory associated with the function
               that the label is in.  */
            push_obstacks (p->function_obstack,
                           p->function_maybepermanent_obstack);

            p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                                  label_rtx (exp),
                                                  p->forced_labels);
            pop_obstacks ();
          }
        else if (modifier == EXPAND_INITIALIZER)
          forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                             label_rtx (exp), forced_labels);
        temp = gen_rtx_MEM (FUNCTION_MODE,
                            gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }
      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          push_obstacks_nochange ();
          end_temporary_allocation ();
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
          pop_obstacks ();
        }

      /* Only check automatic variables.  Currently, function arguments are
         not checked (this can be done at compile-time with prototypes).
         Aggregates are not checked.  */
      if (flag_check_memory_usage && code == VAR_DECL
          && GET_CODE (DECL_RTL (exp)) == MEM
          && DECL_CONTEXT (exp) != NULL_TREE
          && ! TREE_STATIC (exp)
          && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
        {
          enum memory_use_mode memory_usage;
          memory_usage = get_memory_usage_from_modifier (modifier);

          if (memory_usage != MEMORY_USE_DONT)
            emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                               XEXP (DECL_RTL (exp), 0), ptr_mode,
                               GEN_INT (int_size_in_bytes (type)),
                               TYPE_MODE (sizetype),
                               GEN_INT (memory_usage),
                               TYPE_MODE (integer_type_node));
        }
      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = gen_rtx_MEM (Pmode,
                                fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);
          temp = change_address (DECL_RTL (exp), mode, addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = change_address (DECL_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0),
                              DECL_ALIGN (exp) / BITS_PER_UNIT);

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
            abort ();

          temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp),
                                 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
                          EXPAND_MEMORY_USE_BAD);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
         initialization function and move it into our context.  */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          /* The following call just exists to abort if the context is
             not of a containing function.  */
          find_function_data (context);

          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return change_address (temp, mode,
                                 fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (type, 3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
                                                save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          else
            store_expr (TREE_OPERAND (exp, 0), temp, 0);

          TREE_USED (exp) = 1;
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
      return temp;
    case PLACEHOLDER_EXPR:
      {
        tree placeholder_expr;

        /* If there is an object on the head of the placeholder list,
           see if some object in it is of type TYPE or a pointer to it.  For
           further information, see tree.def.  */
        for (placeholder_expr = placeholder_list;
             placeholder_expr != 0;
             placeholder_expr = TREE_CHAIN (placeholder_expr))
          {
            tree need_type = TYPE_MAIN_VARIANT (type);
            tree object = 0;
            tree old_list = placeholder_list;
            tree elt;

            /* Find the outermost reference that is of the type we want.
               If none, see if any object has a type that is a pointer to
               the type we want.  */
            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
                object = elt;

            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (POINTER_TYPE_P (TREE_TYPE (elt))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                      == need_type))
                object = build1 (INDIRECT_REF, need_type, elt);

            if (object != 0)
              {
                /* Expand this object skipping the list entries before
                   it was found in case it is also a PLACEHOLDER_EXPR.
                   In that case, we want to translate it using subsequent
                   entries.  */
                placeholder_list = TREE_CHAIN (placeholder_expr);
                temp = expand_expr (object, original_target, tmode,
                                    ro_modifier);
                placeholder_list = old_list;
                return temp;
              }
          }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (0);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          insert_block (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (DECL_RTL (vars) == 0)
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
        {
          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
            abort ();
          emit_insns (RTL_EXPR_SEQUENCE (exp));
          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
        }
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;
          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
                        && (move_by_pieces_ninsns
                            (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
                             TYPE_ALIGN (type) / BITS_PER_UNIT)
                            > MOVE_RATIO)
                        && ! mostly_zeros_p (exp))))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM
              && (! memory_address_p (GET_MODE (constructor),
                                      XEXP (constructor, 0))
                  || (flag_force_addr
                      && GET_CODE (XEXP (constructor, 0)) != REG)))
            constructor = change_address (constructor, VOIDmode,
                                          XEXP (constructor, 0));
          return constructor;
        }
      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL)
            {
              if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
                target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (TREE_READONLY (exp))
            {
              if (GET_CODE (target) == MEM)
                target = copy_rtx (target);

              RTX_UNCHANGING_P (target) = 1;
            }

          store_constructor (exp, target, 0);
          return target;
        }
    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree exp2;
        tree index;
        tree string = string_constant (exp1, &index);
        int i;

        /* Try to optimize reads from const strings.  */
        if (string
            && TREE_CODE (string) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && !TREE_INT_CST_HIGH (index)
            && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1)
          return GEN_INT (TREE_STRING_POINTER (string)[i]);

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);

        if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
          {
            enum memory_use_mode memory_usage;
            memory_usage = get_memory_usage_from_modifier (modifier);

            if (memory_usage != MEMORY_USE_DONT)
              {
                in_check_memory_usage = 1;
                emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                                   op0, ptr_mode,
                                   GEN_INT (int_size_in_bytes (type)),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (memory_usage),
                                   TYPE_MODE (integer_type_node));
                in_check_memory_usage = 0;
              }
          }

        temp = gen_rtx_MEM (mode, op0);
        /* If address was computed by addition,
           mark this as an element of an aggregate.  */
        if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
            || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
            || AGGREGATE_TYPE_P (TREE_TYPE (exp))
            || (TREE_CODE (exp1) == ADDR_EXPR
                && (exp2 = TREE_OPERAND (exp1, 0))
                && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
          MEM_IN_STRUCT_P (temp) = 1;

        MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;

        /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
           here, because, in C and C++, the fact that a location is accessed
           through a pointer to const does not mean that the value there can
           never change.  Languages where it can never change should
           also set TREE_STATIC.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
        return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = TREE_OPERAND (exp, 1);
        tree index_type = TREE_TYPE (index);
        HOST_WIDE_INT i;

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion,  (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

           But sizetype isn't quite right either (especially if
           the lowbound is negative).  FIXME */

        if (! integer_zerop (low_bound))
          index = fold (build (MINUS_EXPR, index_type, index,
                               convert (sizetype, low_bound)));

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && !TREE_INT_CST_HIGH (index)
            && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1)
          return GEN_INT (TREE_STRING_POINTER (array)[i]);

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

                i = TREE_INT_CST_LOW (index);
                while (elem && i--)
                  elem = TREE_CHAIN (elem);
                if (elem)
                  return expand_expr (fold (TREE_VALUE (elem)), target,
                                      tmode, ro_modifier);
              }
          }

        else if (optimize >= 1
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                i = TREE_INT_CST_LOW (index);
                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem = CONSTRUCTOR_ELTS (init);

                    while (elem
                           && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
                      elem = TREE_CHAIN (elem);
                    if (elem)
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, ro_modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && TREE_INT_CST_HIGH (index) == 0
                         && (TREE_INT_CST_LOW (index)
                             < TREE_STRING_LENGTH (init)))
                  return GEN_INT
                    (TREE_STRING_POINTER
                     (init)[TREE_INT_CST_LOW (index)]);
              }
          }
      }

      /* ... fall through ...  */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  Don't do this if we have
         already written the data since we want to refer to that copy
         and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
          && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));

                    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (op0, op1, target);
                      }
                    else
                      {
                        enum machine_mode imode
                          = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
                        tree count
                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
                                         0);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      {
        enum machine_mode mode1;
        int bitsize;
        int bitpos;
        tree offset;
        int volatilep = 0;
        int alignment;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep,
                                        &alignment);

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to have to do.  This occurs in unchecked conversion in Ada.  */

        op0 = expand_expr (tem,
                           (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                            && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                != INTEGER_CST)
                            ? target : NULL_RTX),
                           VOIDmode,
                           modifier == EXPAND_INITIALIZER
                           ? modifier : EXPAND_NORMAL);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }
        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

            if (GET_CODE (op0) != MEM)
              abort ();

            if (GET_MODE (offset_rtx) != ptr_mode)
              {
#ifdef POINTERS_EXTEND_UNSIGNED
                offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
#else
                offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
              }

            op0 = change_address (op0, VOIDmode,
                                  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
                                                force_reg (ptr_mode, offset_rtx)));
          }

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            op0 = copy_rtx (op0);
            MEM_VOLATILE_P (op0) = 1;
          }
        /* Check the access.  */
        if (flag_check_memory_usage && GET_CODE (op0) == MEM)
          {
            enum memory_use_mode memory_usage;
            memory_usage = get_memory_usage_from_modifier (modifier);

            if (memory_usage != MEMORY_USE_DONT)
              {
                rtx to;
                int size;

                to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
                size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

                /* Check the access right of the pointer.  */
                if (size > BITS_PER_UNIT)
                  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                                     to, ptr_mode,
                                     GEN_INT (size / BITS_PER_UNIT),
                                     TYPE_MODE (sizetype),
                                     GEN_INT (memory_usage),
                                     TYPE_MODE (integer_type_node));
              }
          }
        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.
           If we ultimately want the address (EXPAND_CONST_ADDRESS or
           EXPAND_INITIALIZER), then we must not copy to a temporary.  */
        if (mode1 == VOIDmode
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || (modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER
                && ((mode1 != BLKmode && ! direct_load[(int) mode1]
                     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
                    /* If the field isn't aligned enough to fetch as a memref,
                       fetch it as a bit field.  */
                    || (SLOW_UNALIGNED_ACCESS
                        && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
                            || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode)
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                if (GET_CODE (op0) != MEM
                    || (target != 0 && GET_CODE (target) != MEM)
                    || bitpos % BITS_PER_UNIT != 0)
                  abort ();

                op0 = change_address (op0, VOIDmode,
                                      plus_constant (XEXP (op0, 0),
                                                     bitpos / BITS_PER_UNIT));
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                emit_block_move (target, op0,
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT),
                                 1);

                return target;
              }

            op0 = validize_mem (op0);

            if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
              mark_reg_pointer (XEXP (op0, 0), alignment);

            op0 = extract_bit_field (op0, bitsize, bitpos,
                                     unsignedp, target, ext_mode, ext_mode,
                                     alignment,
                                     int_size_in_bytes (TREE_TYPE (tem)));

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);

            if (mode == BLKmode)
              {
                rtx new = assign_stack_temp (ext_mode,
                                             bitsize / BITS_PER_UNIT, 0);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                MEM_IN_STRUCT_P (op0) = 1;
              }

            return op0;
          }
        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
                                                   (bitpos / BITS_PER_UNIT)));
        else
          op0 = change_address (op0, mode1,
                                plus_constant (XEXP (op0, 0),
                                               (bitpos / BITS_PER_UNIT)));
        if (GET_CODE (XEXP (op0, 0)) == REG)
          mark_reg_pointer (XEXP (op0, 0), alignment);

        MEM_IN_STRUCT_P (op0) = 1;
        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low%bits_per_word);
               the_word  = set [ (index - rlo)/bits_per_word ];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */
        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;

        preexpand_calls (exp);

        /* If domain is empty, answer is no.  Likewise if index is constant
           and out of bounds.  */
        if (((TREE_CODE (set_high_bound) == INTEGER_CST
             && TREE_CODE (set_low_bound) == INTEGER_CST
             && tree_int_cst_lt (set_high_bound, set_low_bound))
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
                           GET_MODE (index_val), iunsignedp, 0);
            emit_jump_insn (gen_blt (op1));
          }

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
                           GET_MODE (index_val), iunsignedp, 0);
            emit_jump_insn (gen_bgt (op1));
          }

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx_MEM (byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }
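
      /* Worked instance of the algorithm in the comment above, with
         illustrative numbers: if bits_per_word == 8, set_low == 3 and
         index == 13, then rlo = 3 - (3 % 8) = 0, the_word =
         set[(13 - 0) / 8] = set[1], bit_index = 13 % 8 = 5, bitmask =
         1 << 5 = 32, and the result is nonzero iff bit 5 of set[1] is
         set.  */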
    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
        {
          RTL_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
          expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 2) = 0;
        }
      return RTL_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
        extern int temp_slot_level;
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (0);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
         just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
          if (target == 0)
            {
              if (mode != BLKmode)
                target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        change_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
                         TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, 1,
                         int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             ro_modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        return
          convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                         op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }
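      /* Illustration (not from the original sources): with the swap above,
         a tree like (x + 4) + fp, where fp is the frame-pointer RTL_EXPR,
         becomes (fp + 4) + x, so the register and the constant are grouped
         together and can fold after frame-pointer elimination.  */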
      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || mode == ptr_mode)
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }
      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op0, op0 = op1, op1 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
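      /* Illustration (not from the original sources): the reassociation
         above turns (a + 4) + (b + 6) into (a + b) + 10 -- the constants
         are pulled out of both summands and folded into a single constant
         term, which is the shape memory-address formation wants.  */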
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                                 VOIDmode, ro_modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                 VOIDmode, ro_modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx_MINUS (mode, op0, op1);
        }
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          tree negated = fold (build1 (NEGATE_EXPR, type,
                                       TREE_OPERAND (exp, 1)));

          /* Deal with the case where we can't negate the constant
             in TYPE.  */
          if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
            {
              tree newtype = signed_type (type);
              tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
              tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
              tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));

              if (! TREE_OVERFLOW (newneg))
                return expand_expr (convert (type,
                                             build (PLUS_EXPR, newtype,
                                                    newop0, newneg)),
                                    target, tmode, ro_modifier);
            }
          else
            {
              exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
              goto plus_expr;
            }
        }
      this_optab = sub_optab;
      goto binop;
    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          register tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          /* Apply distributive law if OP0 is x+c.  */
          if (GET_CODE (op0) == PLUS
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            return gen_rtx_PLUS (mode,
                                 gen_rtx_MULT (mode, XEXP (op0, 0),
                                               GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
                                 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
                                          * INTVAL (XEXP (op0, 1))));

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return gen_rtx_MULT (mode, op0,
                               GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
        }
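      /* Illustration (not from the original sources): under EXPAND_SUM the
         distributive law above rewrites (x + 8) * 4 as (x * 4) + 32, which
         is exactly the shape an indexed-address pattern can accept.  */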
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                               ? smul_widen_optab : umul_widen_optab);
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  goto binop2;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem;
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  htem = expand_mult_highpart_adjust (innermode,
                                                      gen_highpart (innermode, temp),
                                                      op0, op1,
                                                      gen_highpart (innermode, temp),
                                                      unsignedp);
                  emit_move_insn (gen_highpart (innermode, temp), htem);
                  return temp;
                }
            }
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
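      /* Illustration (not from the original sources): on a machine with a
         widening-multiply pattern such as mulhisi3, the product
         (int) (short) a * (int) (short) b is expanded on the HImode
         operands directly, instead of extending both to SImode first.  */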
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (GET_CODE (target) == MEM)
        target = gen_reg_rtx (mode);

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL_RTX, op0);
          emit_move_insn (target, op1);
        }
      else
        {
          if (code == MAX_EXPR)
            temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
                    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
                    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
          else
            temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
                    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
                    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
          if (temp == const0_rtx)
            emit_move_insn (target, op1);
          else if (temp != const_true_rtx)
            {
              if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
                emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
              else
                abort ();
              emit_move_insn (target, op1);
            }
        }
      emit_label (op0);
      return target;
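      /* Illustration (not from the original sources): for MAX_EXPR the
         fallback above emits, in effect,
             target = op0;  if (target >= op1) goto lab;  target = op1;  lab:
         i.e. a compare, a conditional branch, and a store done by hand;
         cse later merges the two references to the operand.  */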
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          if (temp != original_target)
            temp = copy_to_reg (temp);

          op1 = gen_label_rtx ();
          emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
                         GET_MODE (temp), unsignedp, 0);
          emit_jump_insn (gen_beq (op1));
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0 || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && GET_CODE (target) == REG
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;
    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, modifier);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
         conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
          && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
              == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
        {
          tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
          tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

          if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
               && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
                  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
                  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
                  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
            return expand_expr (build1 (NOP_EXPR, type,
                                        build (COND_EXPR, TREE_TYPE (true),
                                               TREE_OPERAND (exp, 0),
                                               true, false)),
                                target, tmode, modifier);
        }
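      /* Illustration (not from the original sources): c ? (int) (a + b)
         : (int) a, with a and b of the same narrower type, is rewritten as
         (int) (c ? a + b : a), so the code below can recognize the
         "singleton" form X ? A + B : A.  */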
      {
        /* Note that COND_EXPRs whose type is a structure or union
           are required to be constructed to contain assignments of
           a temporary variable, so that we can evaluate them here
           for side effect only.  If type is void, we must do likewise.  */

        /* If an arm of the branch requires a cleanup,
           only that cleanup is performed.  */

        tree singleton = 0;
        tree binary_op = 0, unary_op = 0;

        /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
           convert it to our mode, if necessary.  */
        if (integer_onep (TREE_OPERAND (exp, 1))
            && integer_zerop (TREE_OPERAND (exp, 2))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            if (ignore)
              {
                expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                             ro_modifier);
                return const0_rtx;
              }

            op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
            if (GET_MODE (op0) == mode)
              return op0;

            if (target == 0)
              target = gen_reg_rtx (mode);
            convert_move (target, op0, unsignedp);
            return target;
          }
        /* Check for X ? A + B : A.  If we have this, we can copy A to the
           output and conditionally add B.  Similarly for unary operations.
           Don't do this if X has side-effects because those side effects
           might affect A or B and the "?" operation is a sequence point in
           ANSI.  (operand_equal_p tests for side effects.)  */

        if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
            && operand_equal_p (TREE_OPERAND (exp, 2),
                                TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 2),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */

        if (ignore)
          temp = 0;
        else if (original_target
                 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
                     || (singleton && GET_CODE (original_target) == REG
                         && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
                         && original_target == var_rtx (singleton)))
                 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
                 && (! can_conditionally_move_p (mode)
                     || GET_CODE (original_target) == REG
                     || TREE_ADDRESSABLE (type))
#endif
                 && ! (GET_CODE (original_target) == MEM
                       && MEM_VOLATILE_P (original_target)))
          temp = original_target;
        else if (TREE_ADDRESSABLE (type))
          abort ();
        else
          temp = assign_temp (type, 0, 0, 1);
        /* If we had X ? A + C : A, with C a constant power of 2, and we can
           do the test of X as a store-flag operation, do this as
           A + ((X != 0) << log C).  Similarly for other simple binary
           operators.  Only do for C == 1 if BRANCH_COST is low.  */
        if (temp && singleton && binary_op
            && (TREE_CODE (binary_op) == PLUS_EXPR
                || TREE_CODE (binary_op) == MINUS_EXPR
                || TREE_CODE (binary_op) == BIT_IOR_EXPR
                || TREE_CODE (binary_op) == BIT_XOR_EXPR)
            && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
                : integer_onep (TREE_OPERAND (binary_op, 1)))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            rtx result;
            optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
                            : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
                            : xor_optab);

            /* If we had X ? A : A + 1, do this as A + (X == 0).

               We have to invert the truth value here and then put it
               back later if do_store_flag fails.  We cannot simply copy
               TREE_OPERAND (exp, 0) to another variable and modify that
               because invert_truthvalue can modify the tree pointed to
               by its argument.  */
            if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));

            result = do_store_flag (TREE_OPERAND (exp, 0),
                                    (safe_from_p (temp, singleton, 1)
                                     ? temp : NULL_RTX),
                                    mode, BRANCH_COST <= 1);

            if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
              result = expand_shift (LSHIFT_EXPR, mode, result,
                                     build_int_2 (tree_log2
                                                  (TREE_OPERAND
                                                   (binary_op, 1)),
                                                  0),
                                     (safe_from_p (temp, singleton, 1)
                                      ? temp : NULL_RTX), 0);

            if (result)
              {
                op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
                return expand_binop (mode, boptab, op1, result, temp,
                                     unsignedp, OPTAB_LIB_WIDEN);
              }
            else if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));
          }
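        /* Illustration (not from the original sources): x ? a + 4 : a with
           a store-flag-capable condition becomes a + ((x != 0) << 2):
           do_store_flag yields the 0/1 value and expand_shift scales it by
           log2 of the constant, avoiding any branch.  */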
        do_pending_stack_adjust ();
        NO_DEFER_POP;
        op0 = gen_label_rtx ();

        if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
          {
            if (temp != 0)
              {
                /* If the target conflicts with the other operand of the
                   binary op, we can't use it.  Also, we can't use the target
                   if it is a hard register, because evaluating the condition
                   might clobber it.  */
                if ((binary_op
                     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
                    || (GET_CODE (temp) == REG
                        && REGNO (temp) < FIRST_PSEUDO_REGISTER))
                  temp = gen_reg_rtx (mode);
                store_expr (singleton, temp, 0);
              }
            else
              expand_expr (singleton,
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            if (singleton == TREE_OPERAND (exp, 1))
              jumpif (TREE_OPERAND (exp, 0), op0);
            else
              jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            if (binary_op && temp == 0)
              /* Just touch the other operand.  */
              expand_expr (TREE_OPERAND (binary_op, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            else if (binary_op)
              store_expr (build (TREE_CODE (binary_op), type,
                                 make_tree (type, temp),
                                 TREE_OPERAND (binary_op, 1)),
                          temp, 0);
            else
              store_expr (build1 (TREE_CODE (unary_op), type,
                                  make_tree (type, temp)),
                          temp, 0);
            op1 = op0;
          }
        /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
           comparison operator.  If we have one of these cases, set the
           output to A, branch on A (cse will merge these two references),
           then set the output to FOO.  */
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1), 0)
                 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
                 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            jumpif (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            op1 = op0;
          }
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 2), 0)
                 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
                 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            op1 = op0;
          }
        else
          {
            op1 = gen_label_rtx ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            if (temp != 0)
              store_expr (TREE_OPERAND (exp, 1), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            end_cleanup_deferral ();
            emit_queue ();
            emit_jump_insn (gen_jump (op1));
            emit_barrier ();
            emit_label (op0);
            start_cleanup_deferral ();
            if (temp != 0)
              store_expr (TREE_OPERAND (exp, 2), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 2),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
          }

        end_cleanup_deferral ();

        emit_queue ();
        emit_label (op1);
        OK_DEFER_POP;

        return temp;
      }
    case TARGET_EXPR:
      {
        /* Something needs to be initialized, but we didn't know
           where that thing was when building the tree.  For example,
           it could be the return value of a function, or a parameter
           to a function which lays down in the stack, or a temporary
           variable which must be passed by reference.

           We guarantee that the expression will either be constructed
           or copied into our original target.  */

        tree slot = TREE_OPERAND (exp, 0);
        tree cleanups = NULL_TREE;
        tree exp1;

        if (TREE_CODE (slot) != VAR_DECL)
          abort ();

        if (! ignore)
          target = original_target;

        if (target == 0)
          {
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                target = assign_temp (type, 2, 0, 1);
                /* All temp slots at this level must not conflict.  */
                preserve_temp_slots (target);
                DECL_RTL (slot) = target;
                if (TREE_ADDRESSABLE (slot))
                  {
                    TREE_ADDRESSABLE (slot) = 0;
                    mark_addressable (slot);
                  }

                /* Since SLOT is not known to the called function
                   to belong to its stack frame, we must build an explicit
                   cleanup.  This case occurs when we must build up a reference
                   to pass the reference as an argument.  In this case,
                   it is very likely that such a reference need not be
                   built here.  */

                if (TREE_OPERAND (exp, 2) == 0)
                  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
                cleanups = TREE_OPERAND (exp, 2);
              }
          }
        else
          {
            /* This case does occur, when expanding a parameter which
               needs to be constructed on the stack.  The target
               is the actual stack address that we want to initialize.
               The function we call will perform the cleanup in this case.  */

            /* If we have already assigned it space, use that space,
               not target that we were passed in, as our target
               parameter is only a hint.  */
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                DECL_RTL (slot) = target;
                /* If we must have an addressable slot, then make sure that
                   the RTL that we just stored in slot is OK.  */
                if (TREE_ADDRESSABLE (slot))
                  {
                    TREE_ADDRESSABLE (slot) = 0;
                    mark_addressable (slot);
                  }
              }
          }

        exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
        /* Mark it as expanded.  */
        TREE_OPERAND (exp, 1) = NULL_TREE;

        TREE_USED (slot) = 1;
        store_expr (exp1, target, 0);

        expand_decl_cleanup (NULL_TREE, cleanups);

        return target;
      }
    case INIT_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
          noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_VALUE (noncopied_parts),
                               TREE_PURPOSE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }
    case MODIFY_EXPR:
      {
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a call.
           If lhs is simple, compute it first so we can give it as a
           target if the rhs is just a call.  This avoids an extra temp and copy
           and that prevents a partial-subsumption which makes bad code.
           Actually we could treat component_ref's of vars like vars.  */

        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = 0;

        if (TREE_CODE (lhs) != VAR_DECL
            && TREE_CODE (lhs) != RESULT_DECL
            && TREE_CODE (lhs) != PARM_DECL
            && ! (TREE_CODE (lhs) == INDIRECT_REF
                  && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
          preexpand_calls (exp);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (ignore
            && TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
            && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)),
                               0, 0);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
            && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
          noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_PURPOSE (noncopied_parts),
                               TREE_VALUE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
         be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
          && decl_function_context (TREE_OPERAND (exp, 0)) != 0
          && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
          && ! TREE_STATIC (exp))
        {
          op0 = trampoline_address (TREE_OPERAND (exp, 0));
          op0 = force_operand (op0, target);
        }
      /* If we are taking the address of something erroneous, just
         return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
        return const0_rtx;
      else
        {
          /* We make sure to pass const0_rtx down if we came in with
             ignore set, to avoid doing the cleanups twice for something.  */
          op0 = expand_expr (TREE_OPERAND (exp, 0),
                             ignore ? const0_rtx : NULL_RTX, VOIDmode,
                             (modifier == EXPAND_INITIALIZER
                              ? modifier : EXPAND_CONST_ADDRESS));

          /* If we are going to ignore the result, OP0 will have been set
             to const0_rtx, so just return it.  Don't get confused and
             think we are taking the address of the constant.  */
          if (ignore)
            return op0;

          op0 = protect_from_queue (op0, 0);

          /* We would like the object in memory.  If it is a constant,
             we can have it be statically allocated into memory.  For
             a non-constant (REG, SUBREG or CONCAT), we need to allocate some
             memory and store the value into it.  */

          if (CONSTANT_P (op0))
            op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                   op0);
          else if (GET_CODE (op0) == MEM)
            {
              mark_temp_addr_taken (op0);
              temp = XEXP (op0, 0);
            }

          else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                   || GET_CODE (op0) == CONCAT)
            {
              /* If this object is in a register, it must not
                 be BLKmode.  */
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              rtx memloc = assign_temp (inner_type, 1, 1, 1);

              mark_temp_addr_taken (memloc);
              emit_move_insn (memloc, op0);
              op0 = memloc;
            }

          if (GET_CODE (op0) != MEM)
            abort ();

          if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
            {
              temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
                  && mode == ptr_mode)
                temp = convert_memory_address (ptr_mode, temp);
#endif
              return temp;
            }

          op0 = force_operand (XEXP (op0, 0), target);
        }

      if (flag_force_addr && GET_CODE (op0) != REG)
        op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
          && ! REG_USERVAR_P (op0))
        mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);

      /* If we might have had a temp slot, add an equivalent address
         for it.  */
      if (temp != 0)
        update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
          && mode == ptr_mode)
        op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx insns;

        /* Get the rtx code of the operands.  */
        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
        op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        start_sequence ();

        /* Move the real (op0) and imaginary (op1) parts to their location.  */
        emit_move_insn (gen_realpart (mode, target), op0);
        emit_move_insn (gen_imagpart (mode, target), op1);

        insns = get_insns ();
        end_sequence ();

        /* Complex construction should appear as a single unit.  */
        /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
        else
          emit_insns (insns);

        return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
        enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx imag_t;
        rtx insns;

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (mode);

        start_sequence ();

        /* Store the realpart and the negated imagpart to target.  */
        emit_move_insn (gen_realpart (partmode, target),
                        gen_realpart (partmode, op0));

        imag_t = gen_imagpart (partmode, target);
        temp = expand_unop (partmode, neg_optab,
                            gen_imagpart (partmode, op0), imag_t, 0);
        if (temp != imag_t)
          emit_move_insn (imag_t, temp);

        insns = get_insns ();
        end_sequence ();

        /* Conjugate should appear as a single unit
           If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
        else
          emit_insns (insns);

        return target;
      }
    case TRY_CATCH_EXPR:
      {
        tree handler = TREE_OPERAND (exp, 1);

        expand_eh_region_start ();

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        expand_eh_region_end (handler);

        return op0;
      }

    case POPDCC_EXPR:
      {
        rtx dcc = get_dynamic_cleanup_chain ();
        emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
        return const0_rtx;
      }

    case POPDHC_EXPR:
      {
        rtx dhc = get_dynamic_handler_chain ();
        emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
        return const0_rtx;
      }

    case ERROR_MARK:
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
        return op0;
      return const0_rtx;

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case NON_LVALUE_EXPR:
          exp = TREE_OPERAND (exp, 0);
          if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
            return align;
          inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
          align = MIN (inner, max_align);
          break;

        case PLUS_EXPR:
          /* If sum of pointer + int, restrict our maximum alignment to that
             imposed by the integer.  If not, we can't do any better than
             ALIGN.  */
          if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
            return align;

          while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
                  & (max_align - 1))
                 != 0)
            max_align >>= 1;

          exp = TREE_OPERAND (exp, 0);
          break;

        case ADDR_EXPR:
          /* See what we are pointing at and look at its alignment.  */
          exp = TREE_OPERAND (exp, 0);
          if (TREE_CODE (exp) == FUNCTION_DECL)
            align = FUNCTION_BOUNDARY;
          else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
            align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
          else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
            align = CONSTANT_ALIGNMENT (exp, align);
#endif
          return MIN (align, max_align);

        default:
          return align;
        }
    }
}
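/* Illustration (not from the original sources): for char buf[8] declared
   with 32-bit alignment, get_pointer_alignment on buf + 2 returns 16: the
   PLUS_EXPR case caps MAX_ALIGN at the largest power of two dividing the
   byte offset (2 bytes = 16 bits), and the ADDR_EXPR case then takes the
   minimum of that and DECL_ALIGN.  */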
/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = arg1;
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = arg0;
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;
      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return 0;
      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  */
      /* This would perhaps not be valid if we were dealing with named
         arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
        return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
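/* Illustration (not from the original sources): for the argument
   "hello" + 2, string_constant yields the STRING_CST with offset 2, and
   c_strlen returns size_int (3) -- the compile-time strlen of "llo".  */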
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}
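/* Illustration (not from the original sources): __builtin_return_address (2)
   reaches here with COUNT == 2; the loop dereferences the saved-frame chain
   twice (current frame -> caller -> caller's caller), and the return address
   is then read one word past that frame pointer unless the target defines
   RETURN_ADDR_RTX to say otherwise.  */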
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

rtx
expand_builtin_setjmp (buf_addr, target, first_label, next_label)
     rtx buf_addr;
     rtx target;
     rtx first_label, next_label;
{
  rtx lab1 = gen_label_rtx ();
  enum machine_mode sa_mode = Pmode, value_mode;
  rtx stack_save;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */
  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
                  virtual_stack_vars_rtx);
  emit_move_insn (validize_mem
                  (gen_rtx_MEM (Pmode,
                                plus_constant (buf_addr,
                                               GET_MODE_SIZE (Pmode)))),
                  gen_rtx_LABEL_REF (Pmode, lab1));

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
#endif

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Set TARGET to zero and branch to the first-time-through label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (first_label));
  emit_barrier ();
  emit_label (lab1);

  /* Tell flow about the strange goings on.  */
  current_function_has_nonlocal_label = 1;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      int i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.
             If there hasn't been space allocated for it yet, make
             some now.  */
          if (arg_pointer_save_area == 0)
            arg_pointer_save_area
              = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
          emit_move_insn (virtual_incoming_args_rtx,
                          copy_to_reg (arg_pointer_save_area));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (lab1));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
        ; /* Nothing */
      }

  /* Set TARGET, and branch to the next-time-through label.  */
  emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
  emit_jump_insn (gen_jump (next_label));

  return lab1;
}
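/* Illustration (not from the original sources): the jmp_buf layout assumed
   above is word 0 = frame pointer, word 1 = resume label, words 2 and up =
   machine-dependent stack save area; expand_builtin_longjmp below reads the
   three pieces back from the same offsets.  */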
void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode;

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* The value sent by longjmp is not allowed to be zero.  Force it
     to one if so.  */
  if (GET_CODE (value) == CONST_INT)
    {
      if (INTVAL (value) == 0)
        value = const1_rtx;
    }
  else
    {
      lab = gen_label_rtx ();

      emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
      emit_jump_insn (gen_bne (lab));
      emit_move_insn (value, const1_rtx);
      emit_label (lab);
    }

  /* Make sure the value is in the right mode to be copied to the chain.  */
  if (GET_MODE (value) != VOIDmode)
    value = gen_lowpart (GET_MODE (static_chain_rtx), value);

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    {
      /* Copy the "return value" to the static chain reg.  */
      emit_move_insn (static_chain_rtx, value);
      emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
      emit_insn (gen_builtin_longjmp (buf_addr));
    }
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
                                               GET_MODE_SIZE (Pmode)));

#ifdef HAVE_save_stack_nonlocal
      sa_mode = (HAVE_save_stack_nonlocal
                 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
                 : Pmode);
#else
      sa_mode = Pmode;
#endif

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
        {
          lab = copy_to_reg (lab);

          /* Copy the "return value" to the static chain reg.  */
          emit_move_insn (static_chain_rtx, value);

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
          emit_indirect_jump (lab);
        }
    }
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))

static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
        break;
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
        break;

      if (arglist == 0
          /* Arg could be wrong type if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
        break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
          && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
        {
          exp = copy_node (exp);
          arglist = copy_node (arglist);
          TREE_OPERAND (exp, 1) = arglist;
          TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
        }
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_SIN:
          builtin_optab = sin_optab; break;
        case BUILT_IN_COS:
          builtin_optab = cos_optab; break;
        case BUILT_IN_FSQRT:
          builtin_optab = sqrt_optab; break;
        default:
          abort ();
        }

      /* Compute into TARGET.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
                            builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
         sequence (without outputting the insns) and break, causing
         a call to the library function.  */
      if (target == 0)
        {
          end_sequence ();
          break;
        }
      /* Check the results by default.  But if flag_fast_math is turned on,
         then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
        {
          /* Don't define the builtin FP instructions
             if your machine is not IEEE.  */
          if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
            abort ();

          lab1 = gen_label_rtx ();

          /* Test the result; if it is NaN, set errno=EDOM because
             the argument was not in the domain.  */
          emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
          emit_jump_insn (gen_beq (lab1));

#ifdef TARGET_EDOM
          {
#ifdef GEN_ERRNO_RTX
            rtx errno_rtx = GEN_ERRNO_RTX;
#else
            rtx errno_rtx
              = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

            emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
          }
#else
          /* We can't set errno=EDOM directly; let the library call do it.
             Pop the arguments right away in case the call gets deleted.  */
          NO_DEFER_POP;
          expand_call (exp, target, 0);
          OK_DEFER_POP;
#endif

          emit_label (lab1);
        }

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;
      /* __builtin_apply_args returns block of memory allocated on
         the stack into which is stored the arg pointer, structure
         value address, static chain, and all the registers that might
         possibly be used in performing a function call.  The code is
         moved to the start of the function so the incoming values are
         saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
         Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
        return apply_args_value;
      {
        /* When this function is called, it means that registers must be
           saved on entry to this function.  So we migrate the
           call to the first insn of this function.  */
        rtx temp;
        rtx seq;

        start_sequence ();
        temp = expand_builtin_apply_args ();
        seq = get_insns ();
        end_sequence ();

        apply_args_value = temp;

        /* Put the sequence after the NOTE that starts the function.
           If this is inside a SEQUENCE, make the outer-level insn
           chain current, so the code is placed at the start of the
           function.  */
        push_topmost_sequence ();
        emit_insns_before (seq, NEXT_INSN (get_insns ()));
        pop_topmost_sequence ();
        return temp;
      }
8044 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8045 FUNCTION with a copy of the parameters described by
8046 ARGUMENTS, and ARGSIZE. It returns a block of memory
8047 allocated on the stack into which is stored all the registers
8048 that might possibly be used for returning the result of a
8049 function. ARGUMENTS is the value returned by
8050 __builtin_apply_args. ARGSIZE is the number of bytes of
8051 arguments that must be copied. ??? How should this value be
8052 computed? We'll also need a safe worst case value for varargs
      case BUILT_IN_APPLY:
        if (arglist == 0
            /* Arg could be non-pointer if user redeclared this fcn wrong.  */
            || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
            || TREE_CHAIN (arglist) == 0
            || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
            || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
            || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
          return const0_rtx;
        else
          {
            for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
              ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

            return expand_builtin_apply (ops[0], ops[1], ops[2]);
          }
      /* __builtin_return (RESULT) causes the function to return the
         value described by RESULT.  RESULT is address of the block of
         memory returned by __builtin_apply.  */
      case BUILT_IN_RETURN:
        if (arglist
            /* Arg could be non-pointer if user redeclared this fcn wrong.  */
            && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
          expand_builtin_return (expand_expr (TREE_VALUE (arglist),
                                              NULL_RTX, VOIDmode, 0));
        break;
      case BUILT_IN_SAVEREGS:
        /* Don't do __builtin_saveregs more than once in a function.
           Save the result of the first call and reuse it.  */
        if (saveregs_value != 0)
          return saveregs_value;

        /* When this function is called, it means that registers must be
           saved on entry to this function.  So we migrate the
           call to the first insn of this function.  */

        /* Now really call the function.  `expand_call' does not call
           expand_builtin, so there is no danger of infinite recursion here.  */

#ifdef EXPAND_BUILTIN_SAVEREGS
        /* Do whatever the machine needs done in this case.  */
        temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
        /* The register where the function returns its value
           is likely to have something else in it, such as an argument.
           So preserve that register around the call.  */
        if (value_mode != VOIDmode)
          {
            rtx valreg = hard_libcall_value (value_mode);
            rtx saved_valreg = gen_reg_rtx (value_mode);

            emit_move_insn (saved_valreg, valreg);
            temp = expand_call (exp, target, ignore);
            emit_move_insn (valreg, saved_valreg);
          }
        else
          /* Generate the call, putting the value in a pseudo.  */
          temp = expand_call (exp, target, ignore);
#endif

        saveregs_value = temp;

        /* Put the sequence after the NOTE that starts the function.
           If this is inside a SEQUENCE, make the outer-level insn
           chain current, so the code is placed at the start of the
           function.  */
        push_topmost_sequence ();
        emit_insns_before (seq, NEXT_INSN (get_insns ()));
        pop_topmost_sequence ();
      /* __builtin_args_info (N) returns word N of the arg space info
         for the current function.  The number and meanings of words
         is controlled by the definition of CUMULATIVE_ARGS.  */
      case BUILT_IN_ARGS_INFO:
        {
          int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
          int *word_ptr = (int *) &current_function_args_info;
#if 0
          /* These are used by the code below that is if 0'ed away */
          int i;
          tree type, elts, result;
#endif

          if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
            fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
                   __FILE__, __LINE__);

          if (arglist != 0)
            {
              tree arg = TREE_VALUE (arglist);
              if (TREE_CODE (arg) != INTEGER_CST)
                error ("argument of `__builtin_args_info' must be constant");
              else
                {
                  int wordnum = TREE_INT_CST_LOW (arg);

                  if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
                    error ("argument of `__builtin_args_info' out of range");
                  else
                    return GEN_INT (word_ptr[wordnum]);
                }
            }
          else
            error ("missing argument in `__builtin_args_info'");

          return const0_rtx;

#if 0
          for (i = 0; i < nwords; i++)
            elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

          type = build_array_type (integer_type_node,
                                   build_index_type (build_int_2 (nwords, 0)));
          result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
          TREE_CONSTANT (result) = 1;
          TREE_STATIC (result) = 1;
          result = build (INDIRECT_REF, build_pointer_type (type), result);
          TREE_CONSTANT (result) = 1;
          return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
        }
      /* Return the address of the first anonymous stack arg.  */
      case BUILT_IN_NEXT_ARG:
        {
          tree fntype = TREE_TYPE (current_function_decl);

          if ((TYPE_ARG_TYPES (fntype) == 0
               || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
                   == void_type_node))
              && ! current_function_varargs)
            {
              error ("`va_start' used in function with fixed args");
              return const0_rtx;
            }

          if (arglist)
            {
              tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
              tree arg = TREE_VALUE (arglist);

              /* Strip off all nops for the sake of the comparison.  This
                 is not quite the same as STRIP_NOPS.  It does more.
                 We must also strip off INDIRECT_EXPR for C++ reference
                 parameters.  */
              while (TREE_CODE (arg) == NOP_EXPR
                     || TREE_CODE (arg) == CONVERT_EXPR
                     || TREE_CODE (arg) == NON_LVALUE_EXPR
                     || TREE_CODE (arg) == INDIRECT_REF)
                arg = TREE_OPERAND (arg, 0);
              if (arg != last_parm)
                warning ("second parameter of `va_start' not last named argument");
            }
          else if (! current_function_varargs)
            /* Evidently an out of date version of <stdarg.h>; can't validate
               va_start's second argument, but can still work as intended.  */
            warning ("`__builtin_next_arg' called without an argument");
        }

        return expand_binop (Pmode, add_optab,
                             current_function_internal_arg_pointer,
                             current_function_arg_offset_rtx,
                             NULL_RTX, 0, OPTAB_LIB_WIDEN);
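      /* Illustrative note (not part of the compiler): with <stdarg.h>,
         va_start typically expands to a use of this builtin, e.g.

           #define va_start(ap, last)  ((ap) = (va_list) __builtin_next_arg (last))

         so the address computed above is where the anonymous arguments
         begin.  The exact macro definition varies by target and libc.  */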
      case BUILT_IN_CLASSIFY_TYPE:
        if (arglist != 0)
          {
            tree type = TREE_TYPE (TREE_VALUE (arglist));
            enum tree_code code = TREE_CODE (type);
            if (code == VOID_TYPE)
              return GEN_INT (void_type_class);
            if (code == INTEGER_TYPE)
              return GEN_INT (integer_type_class);
            if (code == CHAR_TYPE)
              return GEN_INT (char_type_class);
            if (code == ENUMERAL_TYPE)
              return GEN_INT (enumeral_type_class);
            if (code == BOOLEAN_TYPE)
              return GEN_INT (boolean_type_class);
            if (code == POINTER_TYPE)
              return GEN_INT (pointer_type_class);
            if (code == REFERENCE_TYPE)
              return GEN_INT (reference_type_class);
            if (code == OFFSET_TYPE)
              return GEN_INT (offset_type_class);
            if (code == REAL_TYPE)
              return GEN_INT (real_type_class);
            if (code == COMPLEX_TYPE)
              return GEN_INT (complex_type_class);
            if (code == FUNCTION_TYPE)
              return GEN_INT (function_type_class);
            if (code == METHOD_TYPE)
              return GEN_INT (method_type_class);
            if (code == RECORD_TYPE)
              return GEN_INT (record_type_class);
            if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
              return GEN_INT (union_type_class);
            if (code == ARRAY_TYPE)
              {
                if (TYPE_STRING_FLAG (type))
                  return GEN_INT (string_type_class);
                else
                  return GEN_INT (array_type_class);
              }
            if (code == SET_TYPE)
              return GEN_INT (set_type_class);
            if (code == FILE_TYPE)
              return GEN_INT (file_type_class);
            if (code == LANG_TYPE)
              return GEN_INT (lang_type_class);
          }
        return GEN_INT (no_type_class);
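      /* Illustrative note (not part of the compiler): at the source level
         this chain is what makes, e.g., __builtin_classify_type (3.14) fold
         to the value of real_type_class and __builtin_classify_type (n) for
         an int variable n fold to integer_type_class.  */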
      case BUILT_IN_CONSTANT_P:
        if (arglist == 0)
          return const0_rtx;
        else
          {
            tree arg = TREE_VALUE (arglist);

            return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
                    || (TREE_CODE (arg) == ADDR_EXPR
                        && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
                    ? const1_rtx : const0_rtx);
          }
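      /* Illustrative note (not part of the compiler): given the test above,
         __builtin_constant_p (42) and __builtin_constant_p ("hi") expand to
         1 here, while __builtin_constant_p (n) for a variable n expands to
         0, since n is neither a constant node nor the address of a string
         constant.  */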
      case BUILT_IN_FRAME_ADDRESS:
        /* The argument must be a nonnegative integer constant.
           It counts the number of frames to scan up the stack.
           The value is the address of that frame.  */
      case BUILT_IN_RETURN_ADDRESS:
        /* The argument must be a nonnegative integer constant.
           It counts the number of frames to scan up the stack.
           The value is the return address saved in that frame.  */
        if (arglist == 0)
          /* Warning about missing arg was already issued.  */
          return const0_rtx;
        else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
                 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
          {
            if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
              error ("invalid arg to `__builtin_frame_address'");
            else
              error ("invalid arg to `__builtin_return_address'");
            return const0_rtx;
          }
        else
          {
            rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
                                                  TREE_INT_CST_LOW (TREE_VALUE (arglist)),
                                                  hard_frame_pointer_rtx);

            /* Some ports cannot access arbitrary stack frames.  */
            if (tem == NULL)
              {
                if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
                  warning ("unsupported arg to `__builtin_frame_address'");
                else
                  warning ("unsupported arg to `__builtin_return_address'");
                return const0_rtx;
              }

            /* For __builtin_frame_address, return what we've got.  */
            if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
              return tem;

            if (GET_CODE (tem) != REG)
              tem = copy_to_reg (tem);
            return tem;
          }
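      /* Illustrative note (not part of the compiler): in user code,
         __builtin_return_address (0) yields the current function's own
         return address, and __builtin_frame_address (1) the caller's
         frame, subject to the port restrictions warned about above.  */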
      /* Returns the address of the area where the structure is returned.
         0 otherwise.  */
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
        if (arglist != 0
            || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
            || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
          return const0_rtx;
        else
          return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

      case BUILT_IN_ALLOCA:
        if (arglist == 0
            /* Arg could be non-integer if user redeclared this fcn wrong.  */
            || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
          break;

        /* Compute the argument.  */
        op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

        /* Allocate the desired space.  */
        return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
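      /* Illustrative note (not part of the compiler): this is why
         p = __builtin_alloca (n) needs no library call; the expansion just
         adjusts the stack pointer by n bytes in the current frame (the
         BITS_PER_UNIT argument says the size is only known byte-aligned),
         and the space vanishes when the function returns.  */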
      case BUILT_IN_FFS:
        /* If not optimizing, call the library function.  */
        if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
          break;

        if (arglist == 0
            /* Arg could be non-integer if user redeclared this fcn wrong.  */
            || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
          break;

        /* Compute the argument.  */
        op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
        /* Compute ffs, into TARGET if possible.
           Set TARGET to wherever the result comes back.  */
        target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
                              ffs_optab, op0, target, 1);
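      /* Illustrative note (not part of the compiler): ffs returns one plus
         the index of the least significant set bit, or 0 for a zero
         argument; e.g. ffs (0) == 0, ffs (1) == 1, ffs (8) == 4, which is
         what the ffs_optab pattern used above must implement.  */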
      case BUILT_IN_STRLEN:
        /* If not optimizing, call the library function.  */
        if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
          break;

        if (arglist == 0
            /* Arg could be non-pointer if user redeclared this fcn wrong.  */
            || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
          break;
        else
          {
            tree src = TREE_VALUE (arglist);
            tree len = c_strlen (src);

            int align
              = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

            rtx result, src_rtx, char_rtx;
            enum machine_mode insn_mode = value_mode, char_mode;
            enum insn_code icode;

            /* If the length is known, just return it.  */
            if (len != 0)
              return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

            /* If SRC is not a pointer type, don't do this operation inline.  */
            if (align == 0)
              break;

            /* Call a function if we can't compute strlen in the right mode.  */
            while (insn_mode != VOIDmode)
              {
                icode = strlen_optab->handlers[(int) insn_mode].insn_code;
                if (icode != CODE_FOR_nothing)
                  break;

                insn_mode = GET_MODE_WIDER_MODE (insn_mode);
              }
            if (insn_mode == VOIDmode)
              break;

            /* Make a place to write the result of the instruction.  */
            result = target;
            if (! (result != 0
                   && GET_CODE (result) == REG
                   && GET_MODE (result) == insn_mode
                   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
              result = gen_reg_rtx (insn_mode);

            /* Make sure the operands are acceptable to the predicates.  */

            if (! (*insn_operand_predicate[(int) icode][0]) (result, insn_mode))
              result = gen_reg_rtx (insn_mode);
            src_rtx = memory_address (BLKmode,
                                      expand_expr (src, NULL_RTX, ptr_mode,
                                                   EXPAND_NORMAL));

            if (! (*insn_operand_predicate[(int) icode][1]) (src_rtx, Pmode))
              src_rtx = copy_to_mode_reg (Pmode, src_rtx);

            /* Check the string is readable and has an end.  */
            if (flag_check_memory_usage)
              emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
                                 src_rtx, ptr_mode,
                                 GEN_INT (MEMORY_USE_RO),
                                 TYPE_MODE (integer_type_node));

            char_rtx = const0_rtx;
            char_mode = insn_operand_mode[(int) icode][2];
            if (! (*insn_operand_predicate[(int) icode][2]) (char_rtx, char_mode))
              char_rtx = copy_to_mode_reg (char_mode, char_rtx);

            emit_insn (GEN_FCN (icode) (result,
                                        gen_rtx_MEM (BLKmode, src_rtx),
                                        char_rtx, GEN_INT (align)));

            /* Return the value in the proper mode for this function.  */
            if (GET_MODE (result) == value_mode)
              return result;
            else if (target != 0)
              {
                convert_move (target, result, 0);
                return target;
              }
            else
              return convert_to_mode (value_mode, result, 0);
          }
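      /* Illustrative note (not part of the compiler): the c_strlen fast
         path above is what folds strlen ("abc") to the constant 3 at
         compile time; only a non-constant string argument reaches the
         strlen_optab instruction or falls back to the library call.  */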
      case BUILT_IN_STRCPY:
        /* If not optimizing, call the library function.  */
        if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
          break;

        if (arglist == 0
            /* Arg could be non-pointer if user redeclared this fcn wrong.  */
            || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
            || TREE_CHAIN (arglist) == 0
            || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
          break;
        else
          {
            tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

            if (len == 0)
              break;

            len = size_binop (PLUS_EXPR, len, integer_one_node);

            chainon (arglist, build_tree_list (NULL_TREE, len));
          }

        /* Drops in.  */
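      /* Illustrative note (not part of the compiler): when the source
         length is known, strcpy (dst, "hi") is rewritten here into the
         equivalent of memcpy (dst, "hi", 3) -- the length plus one for the
         terminating NUL -- and then falls through to the memcpy case.  */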
      case BUILT_IN_MEMCPY:
        /* If not optimizing, call the library function.  */
        if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
          break;

        if (arglist == 0
            /* Arg could be non-pointer if user redeclared this fcn wrong.  */
            || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
            || TREE_CHAIN (arglist) == 0
            || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
                != POINTER_TYPE)
            || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
            || (TREE_CODE (TREE_TYPE (TREE_VALUE
                                      (TREE_CHAIN (TREE_CHAIN (arglist)))))
                != INTEGER_TYPE))
          break;
        else
          {
            tree dest = TREE_VALUE (arglist);
            tree src = TREE_VALUE (TREE_CHAIN (arglist));
            tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

            int src_align
              = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
            int dest_align
              = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
            rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;

            /* If either SRC or DEST is not a pointer type, don't do
               this operation in-line.  */
            if (src_align == 0 || dest_align == 0)
              {
                if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
                  TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
                break;
              }

            dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
            dest_mem = gen_rtx_MEM (BLKmode,
                                    memory_address (BLKmode, dest_rtx));
            /* There could be a void* cast on top of the object.  */
            while (TREE_CODE (dest) == NOP_EXPR)
              dest = TREE_OPERAND (dest, 0);
            type = TREE_TYPE (TREE_TYPE (dest));
            MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
            src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
            src_mem = gen_rtx_MEM (BLKmode,
                                   memory_address (BLKmode, src_rtx));
            len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

            /* Just copy the rights of SRC to the rights of DEST.  */
            if (flag_check_memory_usage)
              emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                                 dest_rtx, ptr_mode,
                                 src_rtx, ptr_mode,
                                 len_rtx, TYPE_MODE (sizetype));

            /* There could be a void* cast on top of the object.  */
            while (TREE_CODE (src) == NOP_EXPR)
              src = TREE_OPERAND (src, 0);
            type = TREE_TYPE (TREE_TYPE (src));
            MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);

            /* Copy word part most expediently.  */
            dest_addr
              = emit_block_move (dest_mem, src_mem, len_rtx,
                                 MIN (src_align, dest_align));

            if (dest_addr == 0)
              dest_addr = force_operand (dest_rtx, NULL_RTX);

            return dest_addr;
          }
      case BUILT_IN_MEMSET:
        /* If not optimizing, call the library function.  */
        if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
          break;

        if (arglist == 0
            /* Arg could be non-pointer if user redeclared this fcn wrong.  */
            || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
            || TREE_CHAIN (arglist) == 0
            || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
                != INTEGER_TYPE)
            || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
            || (INTEGER_TYPE
                != (TREE_CODE (TREE_TYPE
                               (TREE_VALUE
                                (TREE_CHAIN (TREE_CHAIN (arglist))))))))
          break;
        else
          {
            tree dest = TREE_VALUE (arglist);
            tree val = TREE_VALUE (TREE_CHAIN (arglist));
            tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

            int dest_align
              = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
            rtx dest_rtx, dest_mem, dest_addr, len_rtx;

            /* If DEST is not a pointer type, don't do this
               operation in-line.  */
            if (dest_align == 0)
              break;

            /* If VAL is not 0, don't do this operation in-line.  */
            if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
              break;

            /* If LEN does not expand to a constant, don't do this
               operation in-line.  */
            len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
            if (GET_CODE (len_rtx) != CONST_INT)
              break;

            dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
            dest_mem = gen_rtx_MEM (BLKmode,
                                    memory_address (BLKmode, dest_rtx));

            /* Just check DST is writable and mark it as readable.  */
            if (flag_check_memory_usage)
              emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                                 dest_rtx, ptr_mode,
                                 len_rtx, TYPE_MODE (sizetype),
                                 GEN_INT (MEMORY_USE_WO),
                                 TYPE_MODE (integer_type_node));

            /* There could be a void* cast on top of the object.  */
            while (TREE_CODE (dest) == NOP_EXPR)
              dest = TREE_OPERAND (dest, 0);
            type = TREE_TYPE (TREE_TYPE (dest));
            MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);

            dest_addr = clear_storage (dest_mem, len_rtx, dest_align);

            if (dest_addr == 0)
              dest_addr = force_operand (dest_rtx, NULL_RTX);

            return dest_addr;
          }
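      /* Illustrative note (not part of the compiler): given the three
         guards above, only calls of the shape memset (p, 0, 32) -- zero
         fill with a constant length -- are expanded in-line via
         clear_storage; memset (p, 0xff, n) still becomes a library call.  */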
      /* These comparison functions need an instruction that returns an
         actual index.  An ordinary compare that just sets the condition
         codes is not enough.  */
#ifdef HAVE_cmpstrsi
      case BUILT_IN_STRCMP:
        /* If not optimizing, call the library function.  */
        if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
          break;

        /* If we need to check memory accesses, call the library function.  */
        if (flag_check_memory_usage)
          break;

        if (arglist == 0
            /* Arg could be non-pointer if user redeclared this fcn wrong.  */
            || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
            || TREE_CHAIN (arglist) == 0
            || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
          break;
        else if (!HAVE_cmpstrsi)
          break;
        {
          tree arg1 = TREE_VALUE (arglist);
          tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
          tree len, len2;

          len = c_strlen (arg1);
          if (len)
            len = size_binop (PLUS_EXPR, integer_one_node, len);
          len2 = c_strlen (arg2);
          if (len2)
            len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

          /* If we don't have a constant length for the first, use the length
             of the second, if we know it.  We don't require a constant for
             this case; some cost analysis could be done if both are available
             but neither is constant.  For now, assume they're equally cheap.

             If both strings have constant lengths, use the smaller.  This
             could arise if optimization results in strcmp being called with
             two fixed strings, or if the code was machine-generated.  We should
             add some code to the `memcmp' handler below to deal with such
             situations, someday.  */
          if (!len || TREE_CODE (len) != INTEGER_CST)
            {
              if (len2)
                len = len2;
              else if (len == 0)
                break;
            }
          else if (len2 && TREE_CODE (len2) == INTEGER_CST)
            {
              if (tree_int_cst_lt (len2, len))
                len = len2;
            }

          chainon (arglist, build_tree_list (NULL_TREE, len));
        }

        /* Drops in.  */
      case BUILT_IN_MEMCMP:
        /* If not optimizing, call the library function.  */
        if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
          break;

        /* If we need to check memory accesses, call the library function.  */
        if (flag_check_memory_usage)
          break;

        if (arglist == 0
            /* Arg could be non-pointer if user redeclared this fcn wrong.  */
            || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
            || TREE_CHAIN (arglist) == 0
            || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
            || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
            || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
          break;
        else if (!HAVE_cmpstrsi)
          break;
        {
          tree arg1 = TREE_VALUE (arglist);
          tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
          rtx result;

          int arg1_align
            = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          int arg2_align
            = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          enum machine_mode insn_mode
            = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

          /* If we don't have POINTER_TYPE, call the function.  */
          if (arg1_align == 0 || arg2_align == 0)
            {
              if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
                TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
              break;
            }

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          emit_insn (gen_cmpstrsi (result,
                                   gen_rtx_MEM (BLKmode,
                                                expand_expr (arg1, NULL_RTX,
                                                             ptr_mode,
                                                             EXPAND_NORMAL)),
                                   gen_rtx_MEM (BLKmode,
                                                expand_expr (arg2, NULL_RTX,
                                                             ptr_mode,
                                                             EXPAND_NORMAL)),
                                   expand_expr (len, NULL_RTX, VOIDmode, 0),
                                   GEN_INT (MIN (arg1_align, arg2_align))));

          /* Return the value in the proper mode for this function.  */
          mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          else if (target != 0)
            {
              convert_move (target, result, 0);
              return target;
            }
          else
            return convert_to_mode (mode, result, 0);
        }
#else
      case BUILT_IN_STRCMP:
      case BUILT_IN_MEMCMP:
        break;
#endif

      case BUILT_IN_SETJMP:
        if (arglist == 0
            || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
          break;
        else
          {
            rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
                                        VOIDmode, 0);
            rtx lab = gen_label_rtx ();
            rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
            emit_label (lab);
            return ret;
          }

      /* __builtin_longjmp is passed a pointer to an array of five words.
         It's similar to the C library longjmp function but works with
         __builtin_setjmp above.  */
      case BUILT_IN_LONGJMP:
        if (arglist == 0 || TREE_CHAIN (arglist) == 0
            || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
          break;
        else
          {
            rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
                                        VOIDmode, 0);
            rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
                                     const0_rtx, VOIDmode, 0);
            expand_builtin_longjmp (buf_addr, value);
            return const0_rtx;
          }
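      /* Illustrative sketch (not part of the compiler): the two builtins
         form a matched pair, lighter-weight than the library versions:

           void *buf[5];            (five words, per the comment above)
           if (__builtin_setjmp (buf) == 0)
             ... normal path, which may call __builtin_longjmp (buf, 1) ...

         where buf and the second argument follow the usual setjmp/longjmp
         convention.  */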
      /* Various hooks for the DWARF 2 __throw routine.  */
      case BUILT_IN_UNWIND_INIT:
        expand_builtin_unwind_init ();
        return const0_rtx;
      case BUILT_IN_FP:
        return frame_pointer_rtx;
      case BUILT_IN_SP:
        return stack_pointer_rtx;
#ifdef DWARF2_UNWIND_INFO
      case BUILT_IN_DWARF_FP_REGNUM:
        return expand_builtin_dwarf_fp_regnum ();
      case BUILT_IN_DWARF_REG_SIZE:
        return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
#endif
      case BUILT_IN_FROB_RETURN_ADDR:
        return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
      case BUILT_IN_EXTRACT_RETURN_ADDR:
        return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
      case BUILT_IN_SET_RETURN_ADDR_REG:
        expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
        return const0_rtx;
      case BUILT_IN_EH_STUB:
        return expand_builtin_eh_stub ();
      case BUILT_IN_SET_EH_REGS:
        expand_builtin_set_eh_regs (TREE_VALUE (arglist),
                                    TREE_VALUE (TREE_CHAIN (arglist)));
        return const0_rtx;

      default:                  /* just do library call, if unknown builtin */
        error ("built-in function `%s' not currently supported",
               IDENTIFIER_POINTER (DECL_NAME (fndecl)));
      }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int
apply_args_register_offset (regno)
     int regno;
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO (regno);
#endif
  return apply_args_reg_offset[regno];
}
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (struct_value_rtx)
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != VOIDmode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode)
                  && HARD_REGNO_NREGS (regno, mode) == 1)
                best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
          }
    }
  return size;
}
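/* Worked example (not part of the compiler): in the layout loop above,
   with size = 4 so far and a register whose mode is 8 bytes, align = 8 and
   CEIL (4, 8) * 8 rounds the offset up to 8 before that register gets its
   slot; size then advances to 16, so a following 4-byte register lands at
   offset 16 with no further padding.  */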
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != VOIDmode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode))
                best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = change_address (result, mode,
                              plus_constant (XEXP (result, 0), size));
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        rtx tem;

        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
        /* For reg-stack.c's stack register household.
           Compare with a similar piece of code in function.c.  */

        emit_insn (gen_rtx_USE (mode, tem));
#endif

        emit_move_insn (change_address (registers, mode,
                                        plus_constant (XEXP (registers, 0),
                                                       size)),
                        tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
                  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
                                      plus_constant (XEXP (registers, 0),
                                                     size)),
                      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
                  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
                                incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
                   gen_rtx_MEM (BLKmode, incoming_args),
                   argsize,
                   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg,
                        change_address (arguments, mode,
                                        plus_constant (XEXP (arguments, 0),
                                                       size)));

        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
                      change_address (arguments, Pmode,
                                      plus_constant (XEXP (arguments, 0),
                                                     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            if (valreg)
              abort (); /* HAVE_untyped_call required.  */
            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (gen_call_value (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
                                      XEXP (result, 0)),
                      valreg);
    }
#endif

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg,
                        change_address (result, mode,
                                        plus_constant (XEXP (result, 0),
                                                       size)));

        push_to_sequence (call_fusage);
        emit_insn (gen_rtx_USE (VOIDmode, reg));
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
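/* Illustrative note (not part of the compiler): for j = i++ POST is 1 and
   the RTX returned is a copy of i taken before the add; for j = ++i POST
   is 0 and the returned RTX holds the already-incremented value.  */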
static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
        SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
        bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
      if (post)
        op0 = copy_to_reg (op0);
      else
        bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
                     EXPAND_MEMORY_USE_BAD);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode)
          && (*insn_operand_predicate[icode][2]) (op1, mode))
        single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
        {
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);
        }

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queueing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode))
        {
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
        {
          rtx addr = (general_operand (XEXP (op0, 0), mode)
                      ? force_reg (Pmode, XEXP (op0, 0))
                      : copy_to_reg (XEXP (op0, 0)));
          rtx temp, result;

          op0 = change_address (op0, VOIDmode, addr);
          temp = force_reg (GET_MODE (op0), op0);
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          /* The increment queue is LIFO, thus we have to `queue'
             the instructions in reverse order.  */
          enqueue_insn (op0, gen_move_insn (op0, temp));
          result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
          return result;
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1,
                      flag_check_memory_usage ? NULL_RTX : op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0
          /* Do nothing if the call returns a variable-sized object.  */
          || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          /* Do nothing to built-in functions.  */
          || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
              && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                  == FUNCTION_DECL)
              && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
        return;

      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
         the adjust will be done for it (since it makes jumps).
         Do it now, so that if this is inside an argument
         of a function, we don't get the stack adjustment
         after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TRY_CATCH_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return;

    default:
      break;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
        type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
        if (type == 'e' || type == '<' || type == '1' || type == '2'
            || type == 'r')
          preexpand_calls (TREE_OPERAND (exp, i));
      }
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
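/* Illustrative note (not part of the compiler): for `if (a && b)' do_jump
   receives the TRUTH_ANDIF_EXPR and emits the short-circuit form directly:
   jump to the false label if a is zero, then again if b is zero, with no
   boolean value ever materialized in a register.  */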
9693 do_jump (exp
, if_false_label
, if_true_label
)
9695 rtx if_false_label
, if_true_label
;
9697 register enum tree_code code
= TREE_CODE (exp
);
9698 /* Some cases need to create a label to jump to
9699 in order to properly fall through.
9700 These cases set DROP_THROUGH_LABEL nonzero. */
9701 rtx drop_through_label
= 0;
9706 enum machine_mode mode
;
9716 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9722 /* This is not true with #pragma weak */
9724 /* The address of something can never be zero. */
9726 emit_jump (if_true_label
);
9731 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9732 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9733 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
9736 /* If we are narrowing the operand, we have to do the compare in the
9738 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9739 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9741 case NON_LVALUE_EXPR
:
9742 case REFERENCE_EXPR
:
9747 /* These cannot change zero->non-zero or vice versa. */
9748 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9752 /* This is never less insns than evaluating the PLUS_EXPR followed by
9753 a test and can be longer if the test is eliminated. */
9755 /* Reduce to minus. */
9756 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9757 TREE_OPERAND (exp
, 0),
9758 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9759 TREE_OPERAND (exp
, 1))));
9760 /* Process as MINUS. */
9764 /* Non-zero iff operands of minus differ. */
9765 comparison
= compare (build (NE_EXPR
, TREE_TYPE (exp
),
9766 TREE_OPERAND (exp
, 0),
9767 TREE_OPERAND (exp
, 1)),
9772 /* If we are AND'ing with a small constant, do this comparison in the
9773 smallest type that fits. If the machine doesn't have comparisons
9774 that small, it will be converted back to the wider comparison.
9775 This helps if we are testing the sign bit of a narrower object.
9776 combine can't do this for us because it can't know whether a
9777 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9779 if (! SLOW_BYTE_ACCESS
9780 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9781 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9782 && (i
= floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))) >= 0
9783 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9784 && (type
= type_for_mode (mode
, 1)) != 0
9785 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9786 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9787 != CODE_FOR_nothing
))
9789 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9794 case TRUTH_NOT_EXPR
:
9795 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9798 case TRUTH_ANDIF_EXPR
:
9799 if (if_false_label
== 0)
9800 if_false_label
= drop_through_label
= gen_label_rtx ();
9801 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9802 start_cleanup_deferral ();
9803 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9804 end_cleanup_deferral ();
9807 case TRUTH_ORIF_EXPR
:
9808 if (if_true_label
== 0)
9809 if_true_label
= drop_through_label
= gen_label_rtx ();
9810 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9811 start_cleanup_deferral ();
9812 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9813 end_cleanup_deferral ();
9818 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9819 preserve_temp_slots (NULL_RTX
);
9823 do_pending_stack_adjust ();
9824 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9831 int bitsize
, bitpos
, unsignedp
;
9832 enum machine_mode mode
;
9838 /* Get description of this reference. We don't actually care
9839 about the underlying object here. */
9840 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
9841 &mode
, &unsignedp
, &volatilep
,
9844 type
= type_for_size (bitsize
, unsignedp
);
9845 if (! SLOW_BYTE_ACCESS
9846 && type
!= 0 && bitsize
>= 0
9847 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9848 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9849 != CODE_FOR_nothing
))
9851 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9858 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9859 if (integer_onep (TREE_OPERAND (exp
, 1))
9860 && integer_zerop (TREE_OPERAND (exp
, 2)))
9861 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9863 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9864 && integer_onep (TREE_OPERAND (exp
, 2)))
9865 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9869 register rtx label1
= gen_label_rtx ();
9870 drop_through_label
= gen_label_rtx ();
9872 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9874 start_cleanup_deferral ();
9875 /* Now the THEN-expression. */
9876 do_jump (TREE_OPERAND (exp
, 1),
9877 if_false_label
? if_false_label
: drop_through_label
,
9878 if_true_label
? if_true_label
: drop_through_label
);
9879 /* In case the do_jump just above never jumps. */
9880 do_pending_stack_adjust ();
9881 emit_label (label1
);
9883 /* Now the ELSE-expression. */
9884 do_jump (TREE_OPERAND (exp
, 2),
9885 if_false_label
? if_false_label
: drop_through_label
,
9886 if_true_label
? if_true_label
: drop_through_label
);
9887 end_cleanup_deferral ();
9893 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9895 if (integer_zerop (TREE_OPERAND (exp
, 1)))
9896 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9897 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9898 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9901 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9902 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9903 fold (build1 (REALPART_EXPR
,
9904 TREE_TYPE (inner_type
),
9905 TREE_OPERAND (exp
, 0))),
9906 fold (build1 (REALPART_EXPR
,
9907 TREE_TYPE (inner_type
),
9908 TREE_OPERAND (exp
, 1))))),
9909 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9910 fold (build1 (IMAGPART_EXPR
,
9911 TREE_TYPE (inner_type
),
9912 TREE_OPERAND (exp
, 0))),
9913 fold (build1 (IMAGPART_EXPR
,
9914 TREE_TYPE (inner_type
),
9915 TREE_OPERAND (exp
, 1))))))),
9916 if_false_label
, if_true_label
);
9917 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9918 && !can_compare_p (TYPE_MODE (inner_type
)))
9919 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9921 comparison
= compare (exp
, EQ
, EQ
);
9927 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9929 if (integer_zerop (TREE_OPERAND (exp
, 1)))
9930 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9931 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9932 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9935 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9936 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9937 fold (build1 (REALPART_EXPR
,
9938 TREE_TYPE (inner_type
),
9939 TREE_OPERAND (exp
, 0))),
9940 fold (build1 (REALPART_EXPR
,
9941 TREE_TYPE (inner_type
),
9942 TREE_OPERAND (exp
, 1))))),
9943 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9944 fold (build1 (IMAGPART_EXPR
,
9945 TREE_TYPE (inner_type
),
9946 TREE_OPERAND (exp
, 0))),
9947 fold (build1 (IMAGPART_EXPR
,
9948 TREE_TYPE (inner_type
),
9949 TREE_OPERAND (exp
, 1))))))),
9950 if_false_label
, if_true_label
);
9951 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9952 && !can_compare_p (TYPE_MODE (inner_type
)))
9953 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9955 comparison
= compare (exp
, NE
, NE
);
9960 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9962 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9963 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9965 comparison
= compare (exp
, LT
, LTU
);
9969 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9971 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9972 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9974 comparison
= compare (exp
, LE
, LEU
);
    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;
    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
    }
  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
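/* Illustrative sketch only (editorial note, not emitted literally): for a
   two-word value, A > B is tested roughly as

       if (A.high > B.high)  goto if_true_label;   [signedness of the type]
       if (A.high != B.high) goto if_false_label;  [then A.high < B.high]
       if (A.low > B.low)    goto if_true_label;   [always unsigned]
       goto if_false_label;

   Only the most significant word is compared with the type's signedness;
   the lower words carry magnitude only, so they compare unsigned.  */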
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */
static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      /* A word pair known unequal decides the whole comparison; a pair
         that might be unequal gets a conditional branch; a pair known
         equal needs no test at all.  */
      if (comp == const0_rtx)
        emit_jump (if_false_label);
      else if (comp != const_true_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */
static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;
  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
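  /* Illustrative sketch (editorial note): for a two-word OP0 this emits
     the equivalent of

         part = op0.word[0] | op0.word[1];
         if (part == 0) ...

     so a single compare of PART against zero replaces one test per word.  */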
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
                                   NULL_RTX, 0);

      /* OP0 == 0 is the "true" outcome of this function, so send a
         comparison known true to the true label and one known false to
         the false label.  */
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp == const0_rtx)
        emit_jump (if_false_label);
      else
        do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      /* A word known nonzero decides the test; a word that might be
         nonzero gets a conditional branch; a word known zero needs none.  */
      if (comp == const0_rtx)
        emit_jump (if_false_label);
      else if (comp != const_true_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */
static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;
      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */
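      /* Illustrative sketch (editorial note) of that fallback, using the
         fresh label assigned to if_true_label below when invert_jump fails:

             bCC if_true_label     [the original, un-inverted condition]
             jmp if_false_label
           if_true_label:
       */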
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
        insn = get_insns ();
      else if (INSN_DELETED_P (prev))
        abort ();
      else
        insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
static rtx
compare (exp, signed_code, unsigned_code)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
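  /* For example (editorial note), "x < 1" becomes "x <= 0" and "x >= 1"
     becomes "x > 0"; for signed x, "x <= -1" becomes "x < 0" and "x > -1"
     becomes "x >= 0".  The all-ones cases below are guarded by ! unsignedp
     because in unsigned arithmetic -1 is the maximum value, not a neighbor
     of zero.  */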
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
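  /* E.g. (illustrative, editorial note): "(x & 8) != 0" becomes
     "(x >> 3) & 1", and "(x & 8) == 0" becomes "((x >> 3) & 1) ^ 1".  */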
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
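      /* E.g. (editorial note) testing bit 1 of (x >> 3) is the same as
         testing bit 4 of x, provided bit 4 lies within the precision of
         the type, which is what the overflow check below verifies.  */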
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
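  /* Illustrative sketch (editorial note) of the sequence emitted below,
     with the two constants swapped when INVERT is set:

         target = 1;
         if (op0 <cond> op1) goto label;
         target = 0;
       label:
   */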
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
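  /* E.g. (editorial note): for "switch (x)" with cases 5 through 12, INDEX
     holds x - 5 and RANGE is 7; the single unsigned test (x - 5) > 7 rejects
     both x < 5 (the subtraction wraps to a huge unsigned value) and x > 12.  */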
  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */