1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
31 #include "hard-reg-set.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
49 #ifndef ACCUMULATE_OUTGOING_ARGS
50 #define ACCUMULATE_OUTGOING_ARGS 0
53 /* Supply a default definition for PUSH_ARGS. */
56 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first */
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
80 #define STACK_PUSH_CODE PRE_INC
84 /* Assume that case vectors are not pc-relative. */
85 #ifndef CASE_VECTOR_PC_RELATIVE
86 #define CASE_VECTOR_PC_RELATIVE 0
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
97 /* Nonzero to generate code for all the subroutines within an
98 expression before generating the upper levels of the expression.
99 Nowadays this is never zero. */
100 int do_preexpand_calls
= 1;
102 /* Don't check memory usage, since code is being emitted to check a memory
103 usage. Used when current_function_check_memory_usage is true, to avoid
104 infinite recursion. */
105 static int in_check_memory_usage
;
107 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
108 static tree placeholder_list
= 0;
110 /* This structure is used by move_by_pieces to describe the move to
112 struct move_by_pieces
123 int explicit_inc_from
;
131 /* This structure is used by clear_by_pieces to describe the clear to
134 struct clear_by_pieces
146 extern struct obstack permanent_obstack
;
148 static rtx get_push_address
PARAMS ((int));
150 static rtx enqueue_insn
PARAMS ((rtx
, rtx
));
151 static int move_by_pieces_ninsns
PARAMS ((unsigned int, unsigned int));
152 static void move_by_pieces_1
PARAMS ((rtx (*) (rtx
, ...), enum machine_mode
,
153 struct move_by_pieces
*));
154 static void clear_by_pieces
PARAMS ((rtx
, int, unsigned int));
155 static void clear_by_pieces_1
PARAMS ((rtx (*) (rtx
, ...),
157 struct clear_by_pieces
*));
158 static int is_zeros_p
PARAMS ((tree
));
159 static int mostly_zeros_p
PARAMS ((tree
));
160 static void store_constructor_field
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
161 HOST_WIDE_INT
, enum machine_mode
,
162 tree
, tree
, unsigned int, int));
163 static void store_constructor
PARAMS ((tree
, rtx
, unsigned int, int,
165 static rtx store_field
PARAMS ((rtx
, HOST_WIDE_INT
,
166 HOST_WIDE_INT
, enum machine_mode
,
167 tree
, enum machine_mode
, int,
168 unsigned int, HOST_WIDE_INT
, int));
169 static enum memory_use_mode
170 get_memory_usage_from_modifier
PARAMS ((enum expand_modifier
));
171 static tree save_noncopied_parts
PARAMS ((tree
, tree
));
172 static tree init_noncopied_parts
PARAMS ((tree
, tree
));
173 static int safe_from_p
PARAMS ((rtx
, tree
, int));
174 static int fixed_type_p
PARAMS ((tree
));
175 static rtx var_rtx
PARAMS ((tree
));
176 static int readonly_fields_p
PARAMS ((tree
));
177 static rtx expand_expr_unaligned
PARAMS ((tree
, unsigned int *));
178 static rtx expand_increment
PARAMS ((tree
, int, int));
179 static void preexpand_calls
PARAMS ((tree
));
180 static void do_jump_by_parts_greater
PARAMS ((tree
, int, rtx
, rtx
));
181 static void do_jump_by_parts_equality
PARAMS ((tree
, rtx
, rtx
));
182 static void do_compare_and_jump
PARAMS ((tree
, enum rtx_code
, enum rtx_code
,
184 static rtx do_store_flag
PARAMS ((tree
, rtx
, enum machine_mode
, int));
186 /* Record for each mode whether we can move a register directly to or
187 from an object of that mode in memory. If we can't, we won't try
188 to use that mode directly when accessing a field of that mode. */
190 static char direct_load
[NUM_MACHINE_MODES
];
191 static char direct_store
[NUM_MACHINE_MODES
];
193 /* If a memory-to-memory move would take MOVE_RATIO or more simple
194 move-instruction sequences, we will do a movstr or libcall instead. */
197 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
200 /* If we are optimizing for space (-Os), cut down the default move ratio */
201 #define MOVE_RATIO (optimize_size ? 3 : 15)
205 /* This macro is used to determine whether move_by_pieces should be called
206 to perform a structure copy. */
207 #ifndef MOVE_BY_PIECES_P
208 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
209 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
212 /* This array records the insn_code of insns to perform block moves. */
213 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
215 /* This array records the insn_code of insns to perform block clears. */
216 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
218 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
231 enum machine_mode mode
;
238 /* Since we are on the permanent obstack, we must be sure we save this
239 spot AFTER we call start_sequence, since it will reuse the rtl it
241 free_point
= (char *) oballoc (0);
243 /* Try indexing by frame ptr and try by stack ptr.
244 It is known that on the Convex the stack ptr isn't a valid index.
245 With luck, one or the other is valid on any machine. */
246 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
247 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
249 insn
= emit_insn (gen_rtx_SET (0, NULL_RTX
, NULL_RTX
));
250 pat
= PATTERN (insn
);
252 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
253 mode
= (enum machine_mode
) ((int) mode
+ 1))
258 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
259 PUT_MODE (mem
, mode
);
260 PUT_MODE (mem1
, mode
);
262 /* See if there is some register that can be used in this mode and
263 directly loaded or stored from memory. */
265 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
266 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
267 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
270 if (! HARD_REGNO_MODE_OK (regno
, mode
))
273 reg
= gen_rtx_REG (mode
, regno
);
276 SET_DEST (pat
) = reg
;
277 if (recog (pat
, insn
, &num_clobbers
) >= 0)
278 direct_load
[(int) mode
] = 1;
280 SET_SRC (pat
) = mem1
;
281 SET_DEST (pat
) = reg
;
282 if (recog (pat
, insn
, &num_clobbers
) >= 0)
283 direct_load
[(int) mode
] = 1;
286 SET_DEST (pat
) = mem
;
287 if (recog (pat
, insn
, &num_clobbers
) >= 0)
288 direct_store
[(int) mode
] = 1;
291 SET_DEST (pat
) = mem1
;
292 if (recog (pat
, insn
, &num_clobbers
) >= 0)
293 direct_store
[(int) mode
] = 1;
301 /* This is run at the start of compiling a function. */
306 cfun
->expr
= (struct expr_status
*) xmalloc (sizeof (struct expr_status
));
309 pending_stack_adjust
= 0;
310 stack_pointer_delta
= 0;
311 inhibit_defer_pop
= 0;
313 apply_args_value
= 0;
319 struct expr_status
*p
;
324 ggc_mark_rtx (p
->x_saveregs_value
);
325 ggc_mark_rtx (p
->x_apply_args_value
);
326 ggc_mark_rtx (p
->x_forced_labels
);
337 /* Small sanity check that the queue is empty at the end of a function. */
339 finish_expr_for_function ()
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
356 enqueue_insn (var
, body
)
359 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
360 body
, pending_chain
);
361 return pending_chain
;
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
380 protect_from_queue (x
, modify
)
384 register RTX_CODE code
= GET_CODE (x
);
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain
== 0)
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
399 if (code
== MEM
&& GET_MODE (x
) != BLKmode
400 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
402 register rtx y
= XEXP (x
, 0);
403 register rtx
new = gen_rtx_MEM (GET_MODE (x
), QUEUED_VAR (y
));
405 MEM_COPY_ATTRIBUTES (new, x
);
409 register rtx temp
= gen_reg_rtx (GET_MODE (new));
410 emit_insn_before (gen_move_insn (temp
, new),
416 /* Otherwise, recursively protect the subexpressions of all
417 the kinds of rtx's that can contain a QUEUED. */
420 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
421 if (tem
!= XEXP (x
, 0))
427 else if (code
== PLUS
|| code
== MULT
)
429 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
430 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
431 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
440 /* If the increment has not happened, use the variable itself. */
441 if (QUEUED_INSN (x
) == 0)
442 return QUEUED_VAR (x
);
443 /* If the increment has happened and a pre-increment copy exists,
445 if (QUEUED_COPY (x
) != 0)
446 return QUEUED_COPY (x
);
447 /* The increment has happened but we haven't set up a pre-increment copy.
448 Set one up now, and use it. */
449 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
450 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
452 return QUEUED_COPY (x
);
455 /* Return nonzero if X contains a QUEUED expression:
456 if it contains anything that will be altered by a queued increment.
457 We handle only combinations of MEM, PLUS, MINUS and MULT operators
458 since memory addresses generally contain only those. */
464 register enum rtx_code code
= GET_CODE (x
);
470 return queued_subexp_p (XEXP (x
, 0));
474 return (queued_subexp_p (XEXP (x
, 0))
475 || queued_subexp_p (XEXP (x
, 1)));
481 /* Perform all the pending incrementations. */
487 while ((p
= pending_chain
))
489 rtx body
= QUEUED_BODY (p
);
491 if (GET_CODE (body
) == SEQUENCE
)
493 QUEUED_INSN (p
) = XVECEXP (QUEUED_BODY (p
), 0, 0);
494 emit_insn (QUEUED_BODY (p
));
497 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
498 pending_chain
= QUEUED_NEXT (p
);
502 /* Copy data from FROM to TO, where the machine modes are not the same.
503 Both modes may be integer, or both may be floating.
504 UNSIGNEDP should be nonzero if FROM is an unsigned type.
505 This causes zero-extension instead of sign-extension. */
508 convert_move (to
, from
, unsignedp
)
509 register rtx to
, from
;
512 enum machine_mode to_mode
= GET_MODE (to
);
513 enum machine_mode from_mode
= GET_MODE (from
);
514 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
515 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
519 /* rtx code for making an equivalent value. */
520 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
522 to
= protect_from_queue (to
, 1);
523 from
= protect_from_queue (from
, 0);
525 if (to_real
!= from_real
)
528 /* If FROM is a SUBREG that indicates that we have already done at least
529 the required extension, strip it. We don't handle such SUBREGs as
532 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
533 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
534 >= GET_MODE_SIZE (to_mode
))
535 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
536 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
538 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
541 if (to_mode
== from_mode
542 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
544 emit_move_insn (to
, from
);
552 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
554 /* Try converting directly if the insn is supported. */
555 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
558 emit_unop_insn (code
, to
, from
, UNKNOWN
);
563 #ifdef HAVE_trunchfqf2
564 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
566 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
570 #ifdef HAVE_trunctqfqf2
571 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
573 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
577 #ifdef HAVE_truncsfqf2
578 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
580 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
584 #ifdef HAVE_truncdfqf2
585 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
587 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
591 #ifdef HAVE_truncxfqf2
592 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
594 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
598 #ifdef HAVE_trunctfqf2
599 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
601 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
606 #ifdef HAVE_trunctqfhf2
607 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
609 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
613 #ifdef HAVE_truncsfhf2
614 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
616 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
620 #ifdef HAVE_truncdfhf2
621 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
623 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
627 #ifdef HAVE_truncxfhf2
628 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
630 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
634 #ifdef HAVE_trunctfhf2
635 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
637 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
642 #ifdef HAVE_truncsftqf2
643 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
645 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
649 #ifdef HAVE_truncdftqf2
650 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
652 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
656 #ifdef HAVE_truncxftqf2
657 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
659 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
663 #ifdef HAVE_trunctftqf2
664 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
666 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
671 #ifdef HAVE_truncdfsf2
672 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
674 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
678 #ifdef HAVE_truncxfsf2
679 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
681 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
685 #ifdef HAVE_trunctfsf2
686 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
688 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
692 #ifdef HAVE_truncxfdf2
693 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
695 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
699 #ifdef HAVE_trunctfdf2
700 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
702 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
714 libcall
= extendsfdf2_libfunc
;
718 libcall
= extendsfxf2_libfunc
;
722 libcall
= extendsftf2_libfunc
;
734 libcall
= truncdfsf2_libfunc
;
738 libcall
= extenddfxf2_libfunc
;
742 libcall
= extenddftf2_libfunc
;
754 libcall
= truncxfsf2_libfunc
;
758 libcall
= truncxfdf2_libfunc
;
770 libcall
= trunctfsf2_libfunc
;
774 libcall
= trunctfdf2_libfunc
;
786 if (libcall
== (rtx
) 0)
787 /* This conversion is not implemented yet. */
790 value
= emit_library_call_value (libcall
, NULL_RTX
, 1, to_mode
,
792 emit_move_insn (to
, value
);
796 /* Now both modes are integers. */
798 /* Handle expanding beyond a word. */
799 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
800 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
807 enum machine_mode lowpart_mode
;
808 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
810 /* Try converting directly if the insn is supported. */
811 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
814 /* If FROM is a SUBREG, put it into a register. Do this
815 so that we always generate the same set of insns for
816 better cse'ing; if an intermediate assignment occurred,
817 we won't be doing the operation directly on the SUBREG. */
818 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
819 from
= force_reg (from_mode
, from
);
820 emit_unop_insn (code
, to
, from
, equiv_code
);
823 /* Next, try converting via full word. */
824 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
825 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
826 != CODE_FOR_nothing
))
828 if (GET_CODE (to
) == REG
)
829 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
830 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
831 emit_unop_insn (code
, to
,
832 gen_lowpart (word_mode
, to
), equiv_code
);
836 /* No special multiword conversion insn; do it by hand. */
839 /* Since we will turn this into a no conflict block, we must ensure
840 that the source does not overlap the target. */
842 if (reg_overlap_mentioned_p (to
, from
))
843 from
= force_reg (from_mode
, from
);
845 /* Get a copy of FROM widened to a word, if necessary. */
846 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
847 lowpart_mode
= word_mode
;
849 lowpart_mode
= from_mode
;
851 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
853 lowpart
= gen_lowpart (lowpart_mode
, to
);
854 emit_move_insn (lowpart
, lowfrom
);
856 /* Compute the value to put in each remaining word. */
858 fill_value
= const0_rtx
;
863 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
864 && STORE_FLAG_VALUE
== -1)
866 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
868 fill_value
= gen_reg_rtx (word_mode
);
869 emit_insn (gen_slt (fill_value
));
875 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
876 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
878 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
882 /* Fill the remaining words. */
883 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
885 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
886 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
891 if (fill_value
!= subword
)
892 emit_move_insn (subword
, fill_value
);
895 insns
= get_insns ();
898 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
899 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
903 /* Truncating multi-word to a word or less. */
904 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
905 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
907 if (!((GET_CODE (from
) == MEM
908 && ! MEM_VOLATILE_P (from
)
909 && direct_load
[(int) to_mode
]
910 && ! mode_dependent_address_p (XEXP (from
, 0)))
911 || GET_CODE (from
) == REG
912 || GET_CODE (from
) == SUBREG
))
913 from
= force_reg (from_mode
, from
);
914 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
918 /* Handle pointer conversion */ /* SPEE 900220 */
919 if (to_mode
== PQImode
)
921 if (from_mode
!= QImode
)
922 from
= convert_to_mode (QImode
, from
, unsignedp
);
924 #ifdef HAVE_truncqipqi2
925 if (HAVE_truncqipqi2
)
927 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
930 #endif /* HAVE_truncqipqi2 */
934 if (from_mode
== PQImode
)
936 if (to_mode
!= QImode
)
938 from
= convert_to_mode (QImode
, from
, unsignedp
);
943 #ifdef HAVE_extendpqiqi2
944 if (HAVE_extendpqiqi2
)
946 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
949 #endif /* HAVE_extendpqiqi2 */
954 if (to_mode
== PSImode
)
956 if (from_mode
!= SImode
)
957 from
= convert_to_mode (SImode
, from
, unsignedp
);
959 #ifdef HAVE_truncsipsi2
960 if (HAVE_truncsipsi2
)
962 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
965 #endif /* HAVE_truncsipsi2 */
969 if (from_mode
== PSImode
)
971 if (to_mode
!= SImode
)
973 from
= convert_to_mode (SImode
, from
, unsignedp
);
978 #ifdef HAVE_extendpsisi2
979 if (HAVE_extendpsisi2
)
981 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
984 #endif /* HAVE_extendpsisi2 */
989 if (to_mode
== PDImode
)
991 if (from_mode
!= DImode
)
992 from
= convert_to_mode (DImode
, from
, unsignedp
);
994 #ifdef HAVE_truncdipdi2
995 if (HAVE_truncdipdi2
)
997 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1000 #endif /* HAVE_truncdipdi2 */
1004 if (from_mode
== PDImode
)
1006 if (to_mode
!= DImode
)
1008 from
= convert_to_mode (DImode
, from
, unsignedp
);
1013 #ifdef HAVE_extendpdidi2
1014 if (HAVE_extendpdidi2
)
1016 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1019 #endif /* HAVE_extendpdidi2 */
1024 /* Now follow all the conversions between integers
1025 no more than a word long. */
1027 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1028 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1029 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1030 GET_MODE_BITSIZE (from_mode
)))
1032 if (!((GET_CODE (from
) == MEM
1033 && ! MEM_VOLATILE_P (from
)
1034 && direct_load
[(int) to_mode
]
1035 && ! mode_dependent_address_p (XEXP (from
, 0)))
1036 || GET_CODE (from
) == REG
1037 || GET_CODE (from
) == SUBREG
))
1038 from
= force_reg (from_mode
, from
);
1039 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1040 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1041 from
= copy_to_reg (from
);
1042 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1046 /* Handle extension. */
1047 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1049 /* Convert directly if that works. */
1050 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1051 != CODE_FOR_nothing
)
1053 emit_unop_insn (code
, to
, from
, equiv_code
);
1058 enum machine_mode intermediate
;
1062 /* Search for a mode to convert via. */
1063 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1064 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1065 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1066 != CODE_FOR_nothing
)
1067 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1068 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1069 GET_MODE_BITSIZE (intermediate
))))
1070 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1071 != CODE_FOR_nothing
))
1073 convert_move (to
, convert_to_mode (intermediate
, from
,
1074 unsignedp
), unsignedp
);
1078 /* No suitable intermediate mode.
1079 Generate what we need with shifts. */
1080 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1081 - GET_MODE_BITSIZE (from_mode
), 0);
1082 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1083 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1085 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1088 emit_move_insn (to
, tmp
);
1093 /* Support special truncate insns for certain modes. */
1095 if (from_mode
== DImode
&& to_mode
== SImode
)
1097 #ifdef HAVE_truncdisi2
1098 if (HAVE_truncdisi2
)
1100 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1104 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1108 if (from_mode
== DImode
&& to_mode
== HImode
)
1110 #ifdef HAVE_truncdihi2
1111 if (HAVE_truncdihi2
)
1113 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1117 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1121 if (from_mode
== DImode
&& to_mode
== QImode
)
1123 #ifdef HAVE_truncdiqi2
1124 if (HAVE_truncdiqi2
)
1126 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1130 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1134 if (from_mode
== SImode
&& to_mode
== HImode
)
1136 #ifdef HAVE_truncsihi2
1137 if (HAVE_truncsihi2
)
1139 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1143 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1147 if (from_mode
== SImode
&& to_mode
== QImode
)
1149 #ifdef HAVE_truncsiqi2
1150 if (HAVE_truncsiqi2
)
1152 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1156 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1160 if (from_mode
== HImode
&& to_mode
== QImode
)
1162 #ifdef HAVE_trunchiqi2
1163 if (HAVE_trunchiqi2
)
1165 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1169 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1173 if (from_mode
== TImode
&& to_mode
== DImode
)
1175 #ifdef HAVE_trunctidi2
1176 if (HAVE_trunctidi2
)
1178 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1182 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1186 if (from_mode
== TImode
&& to_mode
== SImode
)
1188 #ifdef HAVE_trunctisi2
1189 if (HAVE_trunctisi2
)
1191 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1195 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1199 if (from_mode
== TImode
&& to_mode
== HImode
)
1201 #ifdef HAVE_trunctihi2
1202 if (HAVE_trunctihi2
)
1204 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1208 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1212 if (from_mode
== TImode
&& to_mode
== QImode
)
1214 #ifdef HAVE_trunctiqi2
1215 if (HAVE_trunctiqi2
)
1217 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1221 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1225 /* Handle truncation of volatile memrefs, and so on;
1226 the things that couldn't be truncated directly,
1227 and for which there was no special instruction. */
1228 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1230 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1231 emit_move_insn (to
, temp
);
1235 /* Mode combination is not recognized. */
1239 /* Return an rtx for a value that would result
1240 from converting X to mode MODE.
1241 Both X and MODE may be floating, or both integer.
1242 UNSIGNEDP is nonzero if X is an unsigned value.
1243 This can be done by referring to a part of X in place
1244 or by copying to a new temporary with conversion.
1246 This function *must not* call protect_from_queue
1247 except when putting X into an insn (in which case convert_move does it). */
1250 convert_to_mode (mode
, x
, unsignedp
)
1251 enum machine_mode mode
;
1255 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1258 /* Return an rtx for a value that would result
1259 from converting X from mode OLDMODE to mode MODE.
1260 Both modes may be floating, or both integer.
1261 UNSIGNEDP is nonzero if X is an unsigned value.
1263 This can be done by referring to a part of X in place
1264 or by copying to a new temporary with conversion.
1266 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1268 This function *must not* call protect_from_queue
1269 except when putting X into an insn (in which case convert_move does it). */
1272 convert_modes (mode
, oldmode
, x
, unsignedp
)
1273 enum machine_mode mode
, oldmode
;
1279 /* If FROM is a SUBREG that indicates that we have already done at least
1280 the required extension, strip it. */
1282 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1283 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1284 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1285 x
= gen_lowpart (mode
, x
);
1287 if (GET_MODE (x
) != VOIDmode
)
1288 oldmode
= GET_MODE (x
);
1290 if (mode
== oldmode
)
1293 /* There is one case that we must handle specially: If we are converting
1294 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1295 we are to interpret the constant as unsigned, gen_lowpart will do
1296 the wrong if the constant appears negative. What we want to do is
1297 make the high-order word of the constant zero, not all ones. */
1299 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1300 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1301 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1303 HOST_WIDE_INT val
= INTVAL (x
);
1305 if (oldmode
!= VOIDmode
1306 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1308 int width
= GET_MODE_BITSIZE (oldmode
);
1310 /* We need to zero extend VAL. */
1311 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1314 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1317 /* We can do this with a gen_lowpart if both desired and current modes
1318 are integer, and this is either a constant integer, a register, or a
1319 non-volatile MEM. Except for the constant case where MODE is no
1320 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1322 if ((GET_CODE (x
) == CONST_INT
1323 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1324 || (GET_MODE_CLASS (mode
) == MODE_INT
1325 && GET_MODE_CLASS (oldmode
) == MODE_INT
1326 && (GET_CODE (x
) == CONST_DOUBLE
1327 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1328 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1329 && direct_load
[(int) mode
])
1330 || (GET_CODE (x
) == REG
1331 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1332 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1334 /* ?? If we don't know OLDMODE, we have to assume here that
1335 X does not need sign- or zero-extension. This may not be
1336 the case, but it's the best we can do. */
1337 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1338 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1340 HOST_WIDE_INT val
= INTVAL (x
);
1341 int width
= GET_MODE_BITSIZE (oldmode
);
1343 /* We must sign or zero-extend in this case. Start by
1344 zero-extending, then sign extend if we need to. */
1345 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1347 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1348 val
|= (HOST_WIDE_INT
) (-1) << width
;
1350 return GEN_INT (val
);
1353 return gen_lowpart (mode
, x
);
1356 temp
= gen_reg_rtx (mode
);
1357 convert_move (temp
, x
, unsignedp
);
1362 /* This macro is used to determine what the largest unit size that
1363 move_by_pieces can use is. */
1365 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1366 move efficiently, as opposed to MOVE_MAX which is the maximum
1367 number of bytes we can move with a single instruction. */
1369 #ifndef MOVE_MAX_PIECES
1370 #define MOVE_MAX_PIECES MOVE_MAX
1373 /* Generate several move instructions to copy LEN bytes
1374 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1375 The caller must pass FROM and TO
1376 through protect_from_queue before calling.
1377 ALIGN is maximum alignment we can assume. */
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO (per the doc comment above: both are BLKmode MEMs already passed
   through protect_from_queue; ALIGN is the maximum alignment we can assume).
   Fills in a struct move_by_pieces descriptor, optionally copies the two
   addresses into registers (possibly with pre-dec/post-inc addressing), then
   walks integer modes from widest to narrowest, delegating each mode's moves
   to move_by_pieces_1.
   NOTE(review): the text of this region is line-mangled by extraction and
   several original lines (parameter declarations, braces, some left-hand
   sides such as the data.autinc_* assignments) are elided. Code tokens below
   are preserved verbatim; do not reflow without consulting a pristine copy. */
1380 move_by_pieces (to
, from
, len
, align
)
1385 struct move_by_pieces data
;
1386 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
1387 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1388 enum machine_mode mode
= VOIDmode
, tmode
;
1389 enum insn_code icode
;
1392 data
.to_addr
= to_addr
;
1393 data
.from_addr
= from_addr
;
/* The two predicates below detect auto-increment/decrement address forms;
   the assigned fields (presumably data.autinc_to / data.autinc_from — the
   left-hand sides are elided here) gate the address-copying logic further
   down.  TODO(review): confirm against an intact copy of this file. */
1397 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1398 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1400 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1401 || GET_CODE (from_addr
) == POST_INC
1402 || GET_CODE (from_addr
) == POST_DEC
);
1404 data
.explicit_inc_from
= 0;
1405 data
.explicit_inc_to
= 0;
/* A decrementing destination address means we copy high-to-low, so start
   the running offset at LEN instead of 0.  */
1407 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1408 if (data
.reverse
) data
.offset
= len
;
1411 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1412 data
.from_struct
= MEM_IN_STRUCT_P (from
);
1413 data
.to_readonly
= RTX_UNCHANGING_P (to
);
1414 data
.from_readonly
= RTX_UNCHANGING_P (from
);
1416 /* If copying requires more than two move insns,
1417 copy addresses to registers (to make displacements shorter)
1418 and use post-increment if available. */
1419 if (!(data
.autinc_from
&& data
.autinc_to
)
1420 && move_by_pieces_ninsns (len
, align
) > 2)
1422 /* Find the mode of the largest move... */
1423 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1424 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1425 if (GET_MODE_SIZE (tmode
) < max_size
)
/* Set up source-address auto-inc emulation: pre-decrement when copying in
   reverse, otherwise post-increment; explicit_inc_from records which
   (-1 = pre-dec, +1 = post-inc, 0 = neither).  */
1428 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1430 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1431 data
.autinc_from
= 1;
1432 data
.explicit_inc_from
= -1;
1434 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1436 data
.from_addr
= copy_addr_to_reg (from_addr
);
1437 data
.autinc_from
= 1;
1438 data
.explicit_inc_from
= 1;
1440 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1441 data
.from_addr
= copy_addr_to_reg (from_addr
);
/* Same setup for the destination address.  */
1442 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1444 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1446 data
.explicit_inc_to
= -1;
1448 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1450 data
.to_addr
= copy_addr_to_reg (to_addr
);
1452 data
.explicit_inc_to
= 1;
1454 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1455 data
.to_addr
= copy_addr_to_reg (to_addr
);
/* On targets where unaligned access is cheap (or ALIGN is out of range),
   pretend the operands are word-aligned so wide modes are usable.  */
1458 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1459 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1460 align
= MOVE_MAX
* BITS_PER_UNIT
;
1462 /* First move what we can in the largest integer mode, then go to
1463 successively smaller modes. */
1465 while (max_size
> 1)
1467 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1468 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1469 if (GET_MODE_SIZE (tmode
) < max_size
)
1472 if (mode
== VOIDmode
)
1475 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1476 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1477 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1479 max_size
= GET_MODE_SIZE (mode
);
1482 /* The code above should have handled everything. */
1487 /* Return number of insns required to move L bytes by pieces.
1488 ALIGN (in bytes) is maximum alignment we can assume. */
/* Return the number of move insns move_by_pieces would need to copy L bytes
   at alignment ALIGN (per the doc comment above).  Mirrors the mode-walking
   loop of move_by_pieces: for each usable mode, widest first, it counts
   l / mode_size insns and keeps the remainder.
   NOTE(review): text is line-mangled by extraction; tokens preserved
   verbatim. */
1491 move_by_pieces_ninsns (l
, align
)
1495 register int n_insns
= 0;
1496 unsigned int max_size
= MOVE_MAX
+ 1;
/* Same alignment widening as in move_by_pieces.  */
1498 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1499 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1500 align
= MOVE_MAX
* BITS_PER_UNIT
;
1502 while (max_size
> 1)
1504 enum machine_mode mode
= VOIDmode
, tmode
;
1505 enum insn_code icode
;
/* Pick the widest integer mode strictly narrower than max_size.  */
1507 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1508 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1509 if (GET_MODE_SIZE (tmode
) < max_size
)
1512 if (mode
== VOIDmode
)
1515 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1516 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1517 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1519 max_size
= GET_MODE_SIZE (mode
);
1525 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1526 with move instructions for mode MODE. GENFUN is the gen_... function
1527 to make a move insn for that mode. DATA has all the other info. */
/* Subroutine of move_by_pieces (see doc comment above): emit as many
   MODE-sized moves as fit in DATA->len, using GENFUN to build each move
   insn, and emit explicit address adjustments when the target lacks real
   auto-inc/dec addressing (DATA->explicit_inc_{to,from} is -1 for pre-dec,
   +1 for post-inc).
   NOTE(review): text is line-mangled by extraction; tokens preserved
   verbatim. */
1530 move_by_pieces_1 (genfun
, mode
, data
)
1531 rtx (*genfun
) PARAMS ((rtx
, ...));
1532 enum machine_mode mode
;
1533 struct move_by_pieces
*data
;
1535 register int size
= GET_MODE_SIZE (mode
);
1536 register rtx to1
, from1
;
1538 while (data
->len
>= size
)
/* When copying high-to-low, step the offset down before each move.  */
1540 if (data
->reverse
) data
->offset
-= size
;
/* Build the destination MEM: raw auto-inc address, or the base MEM
   re-addressed at the current offset.  */
1542 to1
= (data
->autinc_to
1543 ? gen_rtx_MEM (mode
, data
->to_addr
)
1544 : copy_rtx (change_address (data
->to
, mode
,
1545 plus_constant (data
->to_addr
,
1547 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
1548 RTX_UNCHANGING_P (to1
) = data
->to_readonly
;
/* Same for the source MEM.  */
1551 = (data
->autinc_from
1552 ? gen_rtx_MEM (mode
, data
->from_addr
)
1553 : copy_rtx (change_address (data
->from
, mode
,
1554 plus_constant (data
->from_addr
,
1556 MEM_IN_STRUCT_P (from1
) = data
->from_struct
;
1557 RTX_UNCHANGING_P (from1
) = data
->from_readonly
;
/* Pre-decrement adjustments before the move, post-increment after.  */
1559 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1560 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1561 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1562 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1564 emit_insn ((*genfun
) (to1
, from1
));
1565 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1566 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1567 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1568 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1570 if (! data
->reverse
) data
->offset
+= size
;
1576 /* Emit code to move a block Y to a block X.
1577 This may be done with string-move instructions,
1578 with multiple scalar move instructions, or with a library call.
1580 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1582 SIZE is an rtx that says how long they are.
1583 ALIGN is the maximum alignment we can assume they have.
1585 Return the address of the new block, if memcpy is called and returns it,
/* Emit code to move block Y to block X (see doc comment above): first try
   move_by_pieces for small constant sizes, then a target movstr pattern
   (narrowest usable mode first), and finally fall back to calling memcpy
   (when TARGET_MEM_FUNCTIONS) or bcopy.  Returns the memcpy return value
   rtx when that path is taken, per the doc comment above.
   NOTE(review): text is line-mangled by extraction and several lines are
   elided; tokens preserved verbatim. */
1589 emit_block_move (x
, y
, size
, align
)
1595 #ifdef TARGET_MEM_FUNCTIONS
1597 tree call_expr
, arg_list
;
/* Both operands must be BLKmode MEMs.  */
1600 if (GET_MODE (x
) != BLKmode
)
1603 if (GET_MODE (y
) != BLKmode
)
1606 x
= protect_from_queue (x
, 1);
1607 y
= protect_from_queue (y
, 0);
1608 size
= protect_from_queue (size
, 0);
1610 if (GET_CODE (x
) != MEM
)
1612 if (GET_CODE (y
) != MEM
)
/* Strategy 1: small constant-size copy expanded inline.  */
1617 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1618 move_by_pieces (x
, y
, INTVAL (size
), align
);
/* Strategy 2: a target-specific block-move (movstr) insn.  */
1621 /* Try the most limited insn first, because there's no point
1622 including more than one in the machine description unless
1623 the more limited one has some advantage. */
1625 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1626 enum machine_mode mode
;
1628 /* Since this is a move insn, we don't care about volatility. */
1631 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1632 mode
= GET_MODE_WIDER_MODE (mode
))
1634 enum insn_code code
= movstr_optab
[(int) mode
];
1635 insn_operand_predicate_fn pred
;
1637 if (code
!= CODE_FOR_nothing
1638 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1639 here because if SIZE is less than the mode mask, as it is
1640 returned by the macro, it will definitely be less than the
1641 actual mode mask. */
1642 && ((GET_CODE (size
) == CONST_INT
1643 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1644 <= (GET_MODE_MASK (mode
) >> 1)))
1645 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1646 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1647 || (*pred
) (x
, BLKmode
))
1648 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1649 || (*pred
) (y
, BLKmode
))
1650 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1651 || (*pred
) (opalign
, VOIDmode
)))
1654 rtx last
= get_last_insn ();
1657 op2
= convert_to_mode (mode
, size
, 1);
1658 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1659 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1660 op2
= copy_to_mode_reg (mode
, op2
);
1662 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
/* Pattern expansion failed; discard partial RTL and try a wider mode.  */
1670 delete_insns_since (last
);
/* Strategy 3: call the library routine.  */
1676 /* X, Y, or SIZE may have been passed through protect_from_queue.
1678 It is unsafe to save the value generated by protect_from_queue
1679 and reuse it later. Consider what happens if emit_queue is
1680 called before the return value from protect_from_queue is used.
1682 Expansion of the CALL_EXPR below will call emit_queue before
1683 we are finished emitting RTL for argument setup. So if we are
1684 not careful we could get the wrong value for an argument.
1686 To avoid this problem we go ahead and emit code to copy X, Y &
1687 SIZE into new pseudos. We can then place those new pseudos
1688 into an RTL_EXPR and use them later, even after a call to
1691 Note this is not strictly needed for library calls since they
1692 do not call emit_queue before loading their arguments. However,
1693 we may need to have library calls call emit_queue in the future
1694 since failing to do so could cause problems for targets which
1695 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1696 x
= copy_to_mode_reg (Pmode
, XEXP (x
, 0));
1697 y
= copy_to_mode_reg (Pmode
, XEXP (y
, 0));
1699 #ifdef TARGET_MEM_FUNCTIONS
1700 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
1702 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1703 TREE_UNSIGNED (integer_type_node
));
1704 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
1707 #ifdef TARGET_MEM_FUNCTIONS
1708 /* It is incorrect to use the libcall calling conventions to call
1709 memcpy in this context.
1711 This could be a user call to memcpy and the user may wish to
1712 examine the return value from memcpy.
1714 For targets where libcalls and normal calls have different conventions
1715 for returning pointers, we could end up generating incorrect code.
1717 So instead of using a libcall sequence we build up a suitable
1718 CALL_EXPR and expand the call in the normal fashion. */
/* Lazily build and cache a FUNCTION_DECL for memcpy.  */
1719 if (fn
== NULL_TREE
)
1723 /* This was copied from except.c, I don't know if all this is
1724 necessary in this context or not. */
1725 fn
= get_identifier ("memcpy");
1726 push_obstacks_nochange ();
1727 end_temporary_allocation ();
1728 fntype
= build_pointer_type (void_type_node
);
1729 fntype
= build_function_type (fntype
, NULL_TREE
);
1730 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
1731 ggc_add_tree_root (&fn
, 1);
1732 DECL_EXTERNAL (fn
) = 1;
1733 TREE_PUBLIC (fn
) = 1;
1734 DECL_ARTIFICIAL (fn
) = 1;
1735 make_decl_rtl (fn
, NULL_PTR
, 1);
1736 assemble_external (fn
);
1740 /* We need to make an argument list for the function call.
1742 memcpy has three arguments, the first two are void * addresses and
1743 the last is a size_t byte count for the copy. */
1745 = build_tree_list (NULL_TREE
,
1746 make_tree (build_pointer_type (void_type_node
), x
));
1747 TREE_CHAIN (arg_list
)
1748 = build_tree_list (NULL_TREE
,
1749 make_tree (build_pointer_type (void_type_node
), y
));
1750 TREE_CHAIN (TREE_CHAIN (arg_list
))
1751 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
1752 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
1754 /* Now we have to build up the CALL_EXPR itself. */
1755 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1756 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1757 call_expr
, arg_list
, NULL_TREE
);
1758 TREE_SIDE_EFFECTS (call_expr
) = 1;
1760 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
/* Non-TARGET_MEM_FUNCTIONS fallback: bcopy, note (src, dst) argument
   order is the reverse of memcpy's.  */
1762 emit_library_call (bcopy_libfunc
, 0,
1763 VOIDmode
, 3, y
, Pmode
, x
, Pmode
,
1764 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1765 TREE_UNSIGNED (integer_type_node
)),
1766 TYPE_MODE (integer_type_node
));
1773 /* Copy all or part of a value X into registers starting at REGNO.
1774 The number of registers to be filled is NREGS. */
/* Copy all or part of value X into NREGS consecutive hard registers
   starting at REGNO (see doc comment above).  Tries a target
   load_multiple insn first, otherwise moves one word at a time.
   NOTE(review): text is line-mangled by extraction; tokens preserved
   verbatim. */
1777 move_block_to_reg (regno
, x
, nregs
, mode
)
1781 enum machine_mode mode
;
1784 #ifdef HAVE_load_multiple
/* Constants the target cannot load directly go through the constant pool. */
1792 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1793 x
= validize_mem (force_const_mem (mode
, x
));
1795 /* See if the machine can do this with a load multiple insn. */
1796 #ifdef HAVE_load_multiple
1797 if (HAVE_load_multiple
)
1799 last
= get_last_insn ();
1800 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
/* load_multiple failed; remove partial RTL and fall through.  */
1808 delete_insns_since (last
);
/* Fallback: one word-mode move per register.  */
1812 for (i
= 0; i
< nregs
; i
++)
1813 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1814 operand_subword_force (x
, i
, mode
));
1817 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1818 The number of registers to be filled is NREGS. SIZE indicates the number
1819 of bytes in the object X. */
/* Copy a BLKmode value out of NREGS registers starting at REGNO into X;
   SIZE is the byte size of the object (see doc comment above).  Uses a
   single scalar store for sub-word sizes with a matching integer mode,
   a store_multiple insn when available, else word-at-a-time moves.
   NOTE(review): text is line-mangled by extraction; tokens preserved
   verbatim. */
1823 move_block_from_reg (regno
, x
, nregs
, size
)
1830 #ifdef HAVE_store_multiple
1834 enum machine_mode mode
;
1836 /* If SIZE is that of a mode no bigger than a word, just use that
1837 mode's store operation. */
1838 if (size
<= UNITS_PER_WORD
1839 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
1841 emit_move_insn (change_address (x
, mode
, NULL
),
1842 gen_rtx_REG (mode
, regno
));
1846 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1847 to the left before storing to memory. Note that the previous test
1848 doesn't handle all cases (e.g. SIZE == 3). */
1849 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1851 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
/* Left-shift the register value so the significant bytes land at the
   low memory addresses, then store the whole word.  */
1857 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1858 gen_rtx_REG (word_mode
, regno
),
1859 build_int_2 ((UNITS_PER_WORD
- size
)
1860 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1861 emit_move_insn (tem
, shift
);
1865 /* See if the machine can do this with a store multiple insn. */
1866 #ifdef HAVE_store_multiple
1867 if (HAVE_store_multiple
)
1869 last
= get_last_insn ();
1870 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
/* store_multiple failed; remove partial RTL and fall through.  */
1878 delete_insns_since (last
);
/* Fallback: one word-mode store per register.  */
1882 for (i
= 0; i
< nregs
; i
++)
1884 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1889 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1893 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1894 registers represented by a PARALLEL. SSIZE represents the total size of
1895 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1897 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1898 the balance will be in what would be the low-order memory addresses, i.e.
1899 left justified for big endian, right justified for little endian. This
1900 happens to be true for the targets currently using this support. If this
1901 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
/* Load block ORIG_SRC into DST, a PARALLEL of non-consecutive registers
   (see the two doc comments above; SSIZE is the total byte size of the
   source block or -1, ALIGN its known alignment).  Extracts each piece
   into a temporary pseudo first, then copies all temporaries into the
   (probable) hard registers, so partially-overlapping sources are safe.
   NOTE(review): text is line-mangled by extraction and several lines are
   elided; tokens preserved verbatim. */
1905 emit_group_load (dst
, orig_src
, ssize
, align
)
1913 if (GET_CODE (dst
) != PARALLEL
)
1916 /* Check for a NULL entry, used to indicate that the parameter goes
1917 both on the stack and in registers. */
1918 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1923 tmps
= (rtx
*) alloca (sizeof(rtx
) * XVECLEN (dst
, 0));
1925 /* If we won't be loading directly from memory, protect the real source
1926 from strange tricks we might play. */
1928 if (GET_CODE (src
) != MEM
)
1930 if (GET_CODE (src
) == VOIDmode
)
1931 src
= gen_reg_rtx (GET_MODE (dst
));
1933 src
= gen_reg_rtx (GET_MODE (orig_src
));
1934 emit_move_insn (src
, orig_src
);
1937 /* Process the pieces. */
1938 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1940 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1941 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1942 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1945 /* Handle trailing fragments that run over the size of the struct. */
1946 if (ssize
>= 0 && bytepos
+ bytelen
> ssize
)
1948 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1949 bytelen
= ssize
- bytepos
;
1954 /* Optimize the access just a bit. */
1955 if (GET_CODE (src
) == MEM
1956 && align
>= GET_MODE_ALIGNMENT (mode
)
1957 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1958 && bytelen
== GET_MODE_SIZE (mode
))
1960 tmps
[i
] = gen_reg_rtx (mode
);
1961 emit_move_insn (tmps
[i
],
1962 change_address (src
, mode
,
1963 plus_constant (XEXP (src
, 0),
/* A CONCAT source whose halves line up with the requested piece can be
   picked apart directly without bit-field extraction.  */
1966 else if (GET_CODE (src
) == CONCAT
)
1969 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0))))
1970 tmps
[i
] = XEXP (src
, 0);
1971 else if (bytepos
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
1972 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 1))))
1973 tmps
[i
] = XEXP (src
, 1);
/* General case: pull the piece out with a bit-field extract.  */
1978 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1979 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1980 mode
, mode
, align
, ssize
);
/* Big-endian trailing fragment: shift left so the bytes sit where the
   register convention expects them (see the ??? comment above this
   function).  */
1982 if (BYTES_BIG_ENDIAN
&& shift
)
1983 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
1984 tmps
[i
], 0, OPTAB_WIDEN
);
1989 /* Copy the extracted pieces into the proper (probable) hard regs. */
1990 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1991 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
1994 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1995 registers represented by a PARALLEL. SSIZE represents the total size of
1996 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
/* Store SRC, a PARALLEL of non-consecutive registers, into block ORIG_DST
   (see doc comment above; SSIZE is the total byte size of DST or -1, ALIGN
   its known alignment).  Copies the hard regs into pseudos first, prepares
   a storable destination, then stores each piece with a plain move when
   aligned or store_bit_field otherwise.
   NOTE(review): text is line-mangled by extraction and several lines are
   elided; tokens preserved verbatim. */
1999 emit_group_store (orig_dst
, src
, ssize
, align
)
2007 if (GET_CODE (src
) != PARALLEL
)
2010 /* Check for a NULL entry, used to indicate that the parameter goes
2011 both on the stack and in registers. */
2012 if (XEXP (XVECEXP (src
, 0, 0), 0))
2017 tmps
= (rtx
*) alloca (sizeof(rtx
) * XVECLEN (src
, 0));
2019 /* Copy the (probable) hard regs into pseudos. */
2020 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2022 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2023 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2024 emit_move_insn (tmps
[i
], reg
);
2028 /* If we won't be storing directly into memory, protect the real destination
2029 from strange tricks we might play. */
2031 if (GET_CODE (dst
) == PARALLEL
)
2035 /* We can get a PARALLEL dst if there is a conditional expression in
2036 a return statement. In that case, the dst and src are the same,
2037 so no action is necessary. */
2038 if (rtx_equal_p (dst
, src
))
2041 /* It is unclear if we can ever reach here, but we may as well handle
2042 it. Allocate a temporary, and split this into a store/load to/from
2045 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2046 emit_group_store (temp
, src
, ssize
, align
);
2047 emit_group_load (dst
, temp
, ssize
, align
);
2050 else if (GET_CODE (dst
) != MEM
)
2052 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2053 /* Make life a bit easier for combine. */
2054 emit_move_insn (dst
, const0_rtx
);
2056 else if (! MEM_IN_STRUCT_P (dst
))
2058 /* store_bit_field requires that memory operations have
2059 mem_in_struct_p set; we might not. */
2061 dst
= copy_rtx (orig_dst
);
2062 MEM_SET_IN_STRUCT_P (dst
, 1);
2065 /* Process the pieces. */
2066 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2068 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2069 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2070 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2072 /* Handle trailing fragments that run over the size of the struct. */
2073 if (ssize
>= 0 && bytepos
+ bytelen
> ssize
)
/* Big-endian trailing fragment: shift right to undo the left
   justification applied by emit_group_load's counterpart.  */
2075 if (BYTES_BIG_ENDIAN
)
2077 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2078 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2079 tmps
[i
], 0, OPTAB_WIDEN
);
2081 bytelen
= ssize
- bytepos
;
2084 /* Optimize the access just a bit. */
2085 if (GET_CODE (dst
) == MEM
2086 && align
>= GET_MODE_ALIGNMENT (mode
)
2087 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2088 && bytelen
== GET_MODE_SIZE (mode
))
2089 emit_move_insn (change_address (dst
, mode
,
2090 plus_constant (XEXP (dst
, 0),
2094 store_bit_field (dst
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2095 mode
, tmps
[i
], align
, ssize
);
2100 /* Copy from the pseudo into the (probable) hard reg. */
2101 if (GET_CODE (dst
) == REG
)
2102 emit_move_insn (orig_dst
, dst
);
2105 /* Generate code to copy a BLKmode object of TYPE out of a
2106 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2107 is null, a stack temporary is created. TGTBLK is returned.
2109 The primary purpose of this routine is to handle functions
2110 that return BLKmode structures in registers. Some machines
2111 (the PA for example) want to return all small structures
2112 in registers regardless of the structure's alignment. */
/* Copy a BLKmode object of TYPE out of registers starting with SRCREG into
   TGTBLK, allocating a stack temporary when TGTBLK is null; returns TGTBLK
   (see doc comment above).  Primarily for functions that return BLKmode
   structures in registers.
   NOTE(review): text is line-mangled by extraction; tokens preserved
   verbatim. */
2115 copy_blkmode_from_reg (tgtblk
, srcreg
, type
)
2120 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2121 rtx src
= NULL
, dst
= NULL
;
2122 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2123 unsigned HOST_WIDE_INT bitpos
, xbitpos
, big_endian_correction
= 0;
/* No destination supplied: build a preserved stack temporary.  */
2127 tgtblk
= assign_stack_temp (BLKmode
, bytes
, 0);
2128 MEM_SET_IN_STRUCT_P (tgtblk
, AGGREGATE_TYPE_P (type
));
2129 preserve_temp_slots (tgtblk
);
2132 /* This code assumes srcreg is at least a full word. If it isn't,
2133 copy it into a new pseudo which is a full word. */
2134 if (GET_MODE (srcreg
) != BLKmode
2135 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2136 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2138 /* Structures whose size is not a multiple of a word are aligned
2139 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2140 machine, this means we must skip the empty high order bytes when
2141 calculating the bit offset. */
2142 if (BYTES_BIG_ENDIAN
&& bytes
% UNITS_PER_WORD
)
2143 big_endian_correction
2144 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2146 /* Copy the structure BITSIZE bits at a time.
2148 We could probably emit more efficient code for machines which do not use
2149 strict alignment, but it doesn't seem worth the effort at the current
2151 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2152 bitpos
< bytes
* BITS_PER_UNIT
;
2153 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2155 /* We need a new source operand each time xbitpos is on a
2156 word boundary and when xbitpos == big_endian_correction
2157 (the first time through). */
2158 if (xbitpos
% BITS_PER_WORD
== 0
2159 || xbitpos
== big_endian_correction
)
2160 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
, BLKmode
)
;
2162 /* We need a new destination operand each time bitpos is on
2164 if (bitpos
% BITS_PER_WORD
== 0)
2165 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2167 /* Use xbitpos for the source extraction (right justified) and
2168 bitpos for the destination store (left justified). */
2169 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2170 extract_bit_field (src
, bitsize
,
2171 xbitpos
% BITS_PER_WORD
, 1,
2172 NULL_RTX
, word_mode
, word_mode
,
2173 bitsize
, BITS_PER_WORD
),
2174 bitsize
, BITS_PER_WORD
);
2181 /* Add a USE expression for REG to the (possibly empty) list pointed
2182 to by CALL_FUSAGE. REG must denote a hard register. */
/* Prepend a (USE reg) EXPR_LIST entry to *CALL_FUSAGE; REG must be a hard
   register (see doc comment above).
   NOTE(review): text is line-mangled by extraction; tokens preserved
   verbatim. */
2185 use_reg (call_fusage
, reg
)
2186 rtx
*call_fusage
, reg
;
/* Reject non-REGs and pseudo registers.  */
2188 if (GET_CODE (reg
) != REG
2189 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2193 = gen_rtx_EXPR_LIST (VOIDmode
,
2194 gen_rtx_USE (VOIDmode
, reg
), *call_fusage
);
2197 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2198 starting at REGNO. All of these registers must be hard registers. */
/* Add USE entries to *CALL_FUSAGE for NREGS consecutive hard registers
   starting at REGNO, each in its raw mode (see doc comment above).
   NOTE(review): text is line-mangled by extraction; tokens preserved
   verbatim. */
2201 use_regs (call_fusage
, regno
, nregs
)
/* The whole range must lie within the hard registers.  */
2208 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
2211 for (i
= 0; i
< nregs
; i
++)
2212 use_reg (call_fusage
, gen_rtx_REG (reg_raw_mode
[regno
+ i
], regno
+ i
));
2215 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2216 PARALLEL REGS. This is for calls that pass values in multiple
2217 non-contiguous locations. The Irix 6 ABI has examples of this. */
/* Add USE entries to *CALL_FUSAGE for every REG inside PARALLEL REGS —
   used for calls that pass values in multiple non-contiguous locations
   (see doc comment above).
   NOTE(review): text is line-mangled by extraction; tokens preserved
   verbatim. */
2220 use_group_regs (call_fusage
, regs
)
2226 for (i
= 0; i
< XVECLEN (regs
, 0); i
++)
2228 rtx reg
= XEXP (XVECEXP (regs
, 0, i
), 0);
2230 /* A NULL entry means the parameter goes both on the stack and in
2231 registers. This can also be a MEM for targets that pass values
2232 partially on the stack and partially in registers. */
2233 if (reg
!= 0 && GET_CODE (reg
) == REG
)
2234 use_reg (call_fusage
, reg
);
2238 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2239 rtx with BLKmode). The caller must pass TO through protect_from_queue
2240 before calling. ALIGN is maximum alignment we can assume. */
/* Generate several move instructions to clear LEN bytes of block TO, a
   BLKmode MEM already passed through protect_from_queue; ALIGN is the
   maximum alignment we can assume (see doc comment above).  Structured
   like move_by_pieces but with only a destination, delegating each mode's
   stores to clear_by_pieces_1.
   NOTE(review): the text of this region is line-mangled by extraction and
   several lines (including some assignment left-hand sides) are elided;
   code tokens below are preserved verbatim. */
2243 clear_by_pieces (to
, len
, align
)
2248 struct clear_by_pieces data
;
2249 rtx to_addr
= XEXP (to
, 0);
2250 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
2251 enum machine_mode mode
= VOIDmode
, tmode
;
2252 enum insn_code icode
;
2255 data
.to_addr
= to_addr
;
/* Auto-inc/dec detection; the assigned field (presumably data.autinc_to —
   the left-hand side is elided here) gates the address setup below.
   TODO(review): confirm against an intact copy of this file.  */
2258 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2259 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2261 data
.explicit_inc_to
= 0;
/* Decrementing address means we clear high-to-low: start offset at LEN.  */
2263 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2264 if (data
.reverse
) data
.offset
= len
;
2267 data
.to_struct
= MEM_IN_STRUCT_P (to
);
2269 /* If copying requires more than two move insns,
2270 copy addresses to registers (to make displacements shorter)
2271 and use post-increment if available. */
2273 && move_by_pieces_ninsns (len
, align
) > 2)
2275 /* Determine the main mode we'll be using */
2276 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2277 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2278 if (GET_MODE_SIZE (tmode
) < max_size
)
/* Emulated pre-decrement when clearing in reverse, post-increment
   otherwise; explicit_inc_to records which (-1 / +1 / 0).  */
2281 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
2283 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
2285 data
.explicit_inc_to
= -1;
2287 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
2289 data
.to_addr
= copy_addr_to_reg (to_addr
);
2291 data
.explicit_inc_to
= 1;
2293 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
2294 data
.to_addr
= copy_addr_to_reg (to_addr
);
/* Same alignment widening as in move_by_pieces.  */
2297 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2298 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2299 align
= MOVE_MAX
* BITS_PER_UNIT
;
2301 /* First move what we can in the largest integer mode, then go to
2302 successively smaller modes. */
2304 while (max_size
> 1)
2306 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2307 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2308 if (GET_MODE_SIZE (tmode
) < max_size
)
2311 if (mode
== VOIDmode
)
2314 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2315 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2316 clear_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
2318 max_size
= GET_MODE_SIZE (mode
);
2321 /* The code above should have handled everything. */
2326 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2327 with move instructions for mode MODE. GENFUN is the gen_... function
2328 to make a move insn for that mode. DATA has all the other info. */
/* Subroutine of clear_by_pieces (see doc comment above): emit as many
   MODE-sized stores of zero (const0_rtx) as fit in DATA->len, using GENFUN
   to build each store insn, with explicit pre-dec/post-inc address
   adjustments as recorded in DATA->explicit_inc_to.
   NOTE(review): text is line-mangled by extraction; tokens preserved
   verbatim. */
2331 clear_by_pieces_1 (genfun
, mode
, data
)
2332 rtx (*genfun
) PARAMS ((rtx
, ...));
2333 enum machine_mode mode
;
2334 struct clear_by_pieces
*data
;
2336 register int size
= GET_MODE_SIZE (mode
);
2339 while (data
->len
>= size
)
/* High-to-low clearing steps the offset down before each store.  */
2341 if (data
->reverse
) data
->offset
-= size
;
/* Destination MEM: raw auto-inc address, or base MEM re-addressed at the
   current offset.  */
2343 to1
= (data
->autinc_to
2344 ? gen_rtx_MEM (mode
, data
->to_addr
)
2345 : copy_rtx (change_address (data
->to
, mode
,
2346 plus_constant (data
->to_addr
,
2348 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
2350 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2351 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
2353 emit_insn ((*genfun
) (to1
, const0_rtx
));
2354 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2355 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2357 if (! data
->reverse
) data
->offset
+= size
;
2363 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2364 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2366 If we call a function that returns the length of the block, return it. */
2369 clear_storage (object
, size
, align
)
2374 #ifdef TARGET_MEM_FUNCTIONS
2376 tree call_expr
, arg_list
;
2380 if (GET_MODE (object
) == BLKmode
)
2382 object
= protect_from_queue (object
, 1);
2383 size
= protect_from_queue (size
, 0);
2385 if (GET_CODE (size
) == CONST_INT
2386 && MOVE_BY_PIECES_P (INTVAL (size
), align
))
2387 clear_by_pieces (object
, INTVAL (size
), align
);
2390 /* Try the most limited insn first, because there's no point
2391 including more than one in the machine description unless
2392 the more limited one has some advantage. */
2394 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2395 enum machine_mode mode
;
2397 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2398 mode
= GET_MODE_WIDER_MODE (mode
))
2400 enum insn_code code
= clrstr_optab
[(int) mode
];
2401 insn_operand_predicate_fn pred
;
2403 if (code
!= CODE_FOR_nothing
2404 /* We don't need MODE to be narrower than
2405 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2406 the mode mask, as it is returned by the macro, it will
2407 definitely be less than the actual mode mask. */
2408 && ((GET_CODE (size
) == CONST_INT
2409 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2410 <= (GET_MODE_MASK (mode
) >> 1)))
2411 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2412 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2413 || (*pred
) (object
, BLKmode
))
2414 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2415 || (*pred
) (opalign
, VOIDmode
)))
2418 rtx last
= get_last_insn ();
2421 op1
= convert_to_mode (mode
, size
, 1);
2422 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2423 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2424 op1
= copy_to_mode_reg (mode
, op1
);
2426 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2433 delete_insns_since (last
);
2437 /* OBJECT or SIZE may have been passed through protect_from_queue.
2439 It is unsafe to save the value generated by protect_from_queue
2440 and reuse it later. Consider what happens if emit_queue is
2441 called before the return value from protect_from_queue is used.
2443 Expansion of the CALL_EXPR below will call emit_queue before
2444 we are finished emitting RTL for argument setup. So if we are
2445 not careful we could get the wrong value for an argument.
2447 To avoid this problem we go ahead and emit code to copy OBJECT
2448 and SIZE into new pseudos. We can then place those new pseudos
2449 into an RTL_EXPR and use them later, even after a call to
2452 Note this is not strictly needed for library calls since they
2453 do not call emit_queue before loading their arguments. However,
2454 we may need to have library calls call emit_queue in the future
2455 since failing to do so could cause problems for targets which
2456 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2457 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2459 #ifdef TARGET_MEM_FUNCTIONS
2460 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
2462 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
2463 TREE_UNSIGNED (integer_type_node
));
2464 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
2468 #ifdef TARGET_MEM_FUNCTIONS
2469 /* It is incorrect to use the libcall calling conventions to call
2470 memset in this context.
2472 This could be a user call to memset and the user may wish to
2473 examine the return value from memset.
2475 For targets where libcalls and normal calls have different
2476 conventions for returning pointers, we could end up generating
2479 So instead of using a libcall sequence we build up a suitable
2480 CALL_EXPR and expand the call in the normal fashion. */
2481 if (fn
== NULL_TREE
)
2485 /* This was copied from except.c, I don't know if all this is
2486 necessary in this context or not. */
2487 fn
= get_identifier ("memset");
2488 push_obstacks_nochange ();
2489 end_temporary_allocation ();
2490 fntype
= build_pointer_type (void_type_node
);
2491 fntype
= build_function_type (fntype
, NULL_TREE
);
2492 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
2493 ggc_add_tree_root (&fn
, 1);
2494 DECL_EXTERNAL (fn
) = 1;
2495 TREE_PUBLIC (fn
) = 1;
2496 DECL_ARTIFICIAL (fn
) = 1;
2497 make_decl_rtl (fn
, NULL_PTR
, 1);
2498 assemble_external (fn
);
2502 /* We need to make an argument list for the function call.
2504 memset has three arguments, the first is a void * addresses, the
2505 second a integer with the initialization value, the last is a
2506 size_t byte count for the copy. */
2508 = build_tree_list (NULL_TREE
,
2509 make_tree (build_pointer_type (void_type_node
),
2511 TREE_CHAIN (arg_list
)
2512 = build_tree_list (NULL_TREE
,
2513 make_tree (integer_type_node
, const0_rtx
));
2514 TREE_CHAIN (TREE_CHAIN (arg_list
))
2515 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
2516 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
2518 /* Now we have to build up the CALL_EXPR itself. */
2519 call_expr
= build1 (ADDR_EXPR
,
2520 build_pointer_type (TREE_TYPE (fn
)), fn
);
2521 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2522 call_expr
, arg_list
, NULL_TREE
);
2523 TREE_SIDE_EFFECTS (call_expr
) = 1;
2525 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2527 emit_library_call (bzero_libfunc
, 0,
2528 VOIDmode
, 2, object
, Pmode
, size
,
2529 TYPE_MODE (integer_type_node
));
2534 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2539 /* Generate code to copy Y into X.
2540 Both Y and X must have the same mode, except that
2541 Y can be a constant with VOIDmode.
2542 This mode cannot be BLKmode; use emit_block_move for that.
2544 Return the last instruction emitted. */
/* NOTE(review): this extraction has dropped interior source lines (gaps in
   the embedded original line numbers, e.g. 2547 -> 2550); the fragments
   below are preserved verbatim.  Compare against the pristine GCC expr.c
   before acting on any of them.  */
2547 emit_move_insn (x
, y
)
/* MODE caches the destination's machine mode; all validity checks
   below are made against it.  */
2550 enum machine_mode mode
= GET_MODE (x
);
/* Flush any queued autoincrement side effects; X is a store target
   (second arg 1), Y is only read (second arg 0).  */
2552 x
= protect_from_queue (x
, 1);
2553 y
= protect_from_queue (y
, 0);
/* Modes must agree, except that Y may be a VOIDmode constant
   (e.g. a CONST_INT).  BLKmode moves belong to emit_block_move.  */
2555 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2558 /* Never force constant_p_rtx to memory. */
2559 if (GET_CODE (y
) == CONSTANT_P_RTX
)
/* A constant the target cannot use directly is spilled to the
   constant pool instead.  */
2561 else if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
2562 y
= force_const_mem (mode
, y
);
2564 /* If X or Y are memory references, verify that their addresses are valid
/* Rewrite X's address through change_address when it is not directly
   usable (and X is not a push operand, whose address is special).  */
2566 if (GET_CODE (x
) == MEM
2567 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2568 && ! push_operand (x
, GET_MODE (x
)))
2570 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2571 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
/* Same address legitimization for the source operand Y.  */
2573 if (GET_CODE (y
) == MEM
2574 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2576 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2577 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
/* BLKmode should have been rejected above; presumably an abort()
   lived in the dropped lines here -- TODO confirm against upstream.  */
2579 if (mode
== BLKmode
)
/* Delegate the actual emission to the low-level worker.  */
2582 return emit_move_insn_1 (x
, y
);
2585 /* Low level part of emit_move_insn.
2586 Called just like emit_move_insn, but assumes X and Y
2587 are basically valid. */
/* NOTE(review): fragmented extraction -- many interior lines (braces,
   declarations, abort calls) are missing.  Structure below:
   1) direct mov<mode> pattern when available;
   2) complex modes split into real/imag part moves;
   3) multi-word modes moved word-by-word via operand_subword.  */
2590 emit_move_insn_1 (x
, y
)
2593 enum machine_mode mode
= GET_MODE (x
);
2594 enum machine_mode submode
;
2595 enum mode_class
class = GET_MODE_CLASS (mode
);
/* Sanity check on the mode value; the dropped body presumably aborts.  */
2598 if (mode
>= MAX_MACHINE_MODE
)
/* Case 1: the target defines a move pattern for this mode -- use it.  */
2601 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2603 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2605 /* Expand complex moves by moving real part and imag part, if possible. */
2606 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2607 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2609 (class == MODE_COMPLEX_INT
2610 ? MODE_INT
: MODE_FLOAT
),
2612 && (mov_optab
->handlers
[(int) submode
].insn_code
2613 != CODE_FOR_nothing
))
2615 /* Don't split destination if it is a stack push. */
2616 int stack
= push_operand (x
, GET_MODE (x
));
2618 /* If this is a stack, push the highpart first, so it
2619 will be in the argument order.
2621 In that case, change_address is used only to convert
2622 the mode, not to change the address. */
2625 /* Note that the real part always precedes the imag part in memory
2626 regardless of machine's endianness. */
2627 #ifdef STACK_GROWS_DOWNWARD
/* Stack grows down: push imag part first so real part ends up at the
   lower address.  */
2628 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2629 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2630 gen_imagpart (submode
, y
)));
2631 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2632 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2633 gen_realpart (submode
, y
)));
/* Opposite push order for an upward-growing stack (the #else of the
   conditional above; the #else line itself was dropped).  */
2635 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2636 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2637 gen_realpart (submode
, y
)));
2638 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2639 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2640 gen_imagpart (submode
, y
)));
/* Non-push case: move real and imag halves separately.  */
2645 rtx realpart_x
, realpart_y
;
2646 rtx imagpart_x
, imagpart_y
;
2648 /* If this is a complex value with each part being smaller than a
2649 word, the usual calling sequence will likely pack the pieces into
2650 a single register. Unfortunately, SUBREG of hard registers only
2651 deals in terms of words, so we have a problem converting input
2652 arguments to the CONCAT of two registers that is used elsewhere
2653 for complex values. If this is before reload, we can copy it into
2654 memory and reload. FIXME, we should see about using extract and
2655 insert on integer registers, but complex short and complex char
2656 variables should be rarely used. */
2657 if (GET_MODE_BITSIZE (mode
) < 2*BITS_PER_WORD
2658 && (reload_in_progress
| reload_completed
) == 0)
/* A hard register on either side means the value may be packed.  */
2660 int packed_dest_p
= (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2661 int packed_src_p
= (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2663 if (packed_dest_p
|| packed_src_p
)
/* Find a scalar mode covering the whole complex value so we can
   view the packed register through a SUBREG of that mode.  */
2665 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2666 ? MODE_FLOAT
: MODE_INT
);
2668 enum machine_mode reg_mode
=
2669 mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2671 if (reg_mode
!= BLKmode
)
/* Bounce the value through a stack temporary; MEM is viewed both
   in REG_MODE and in the complex MODE.  */
2673 rtx mem
= assign_stack_temp (reg_mode
,
2674 GET_MODE_SIZE (mode
), 0);
2676 rtx cmem
= change_address (mem
, mode
, NULL_RTX
);
/* The stack temp makes this function uninlinable.  */
2678 cfun
->cannot_inline
= N_("function using short complex types cannot be inline");
/* Packed destination: store via CMEM, reload through a SUBREG of X.  */
2682 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2683 emit_move_insn_1 (cmem
, y
);
2684 return emit_move_insn_1 (sreg
, mem
);
/* Packed source: spill Y's SUBREG view, reload as complex.  */
2688 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2689 emit_move_insn_1 (mem
, sreg
);
2690 return emit_move_insn_1 (x
, cmem
);
/* General path: extract both halves of source and destination.  */
2696 realpart_x
= gen_realpart (submode
, x
);
2697 realpart_y
= gen_realpart (submode
, y
);
2698 imagpart_x
= gen_imagpart (submode
, x
);
2699 imagpart_y
= gen_imagpart (submode
, y
);
2701 /* Show the output dies here. This is necessary for SUBREGs
2702 of pseudos since we cannot track their lifetimes correctly;
2703 hard regs shouldn't appear here except as return values.
2704 We never want to emit such a clobber after reload. */
2706 && ! (reload_in_progress
|| reload_completed
)
2707 && (GET_CODE (realpart_x
) == SUBREG
2708 || GET_CODE (imagpart_x
) == SUBREG
))
2710 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* Move each half with the submode's move pattern.  */
2713 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2714 (realpart_x
, realpart_y
));
2715 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2716 (imagpart_x
, imagpart_y
));
2719 return get_last_insn ();
2722 /* This will handle any multi-word mode that lacks a move_insn pattern.
2723 However, you will get better code if you define such patterns,
2724 even if they must turn into multiple assembler instructions. */
2725 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2731 #ifdef PUSH_ROUNDING
2733 /* If X is a push on the stack, do the push now and replace
2734 X with a reference to the stack pointer. */
2735 if (push_operand (x
, GET_MODE (x
)))
2737 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2738 x
= change_address (x
, VOIDmode
, stack_pointer_rtx
);
2742 /* If we are in reload, see if either operand is a MEM whose address
2743 is scheduled for replacement. */
2744 if (reload_in_progress
&& GET_CODE (x
) == MEM
2745 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
/* Build a fresh MEM with the replacement address, keeping X's
   attribute bits.  */
2747 rtx
new = gen_rtx_MEM (GET_MODE (x
), inner
);
2749 MEM_COPY_ATTRIBUTES (new, x
);
/* Same replacement check for the source MEM.  */
2752 if (reload_in_progress
&& GET_CODE (y
) == MEM
2753 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
2755 rtx
new = gen_rtx_MEM (GET_MODE (y
), inner
);
2757 MEM_COPY_ATTRIBUTES (new, y
);
/* Word-by-word copy loop; the for-header's init line was dropped.
   Iterates over ceil(size / UNITS_PER_WORD) words.  */
2765 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2768 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2769 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2771 /* If we can't get a part of Y, put Y into memory if it is a
2772 constant. Otherwise, force it into a register. If we still
2773 can't get a part of Y, abort. */
2774 if (ypart
== 0 && CONSTANT_P (y
))
2776 y
= force_const_mem (mode
, y
);
2777 ypart
= operand_subword (y
, i
, 1, mode
);
2779 else if (ypart
== 0)
2780 ypart
= operand_subword_force (y
, i
, mode
);
2782 if (xpart
== 0 || ypart
== 0)
/* Record whether any destination word was a SUBREG, to decide on
   the lifetime CLOBBER emitted below.  */
2785 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
2787 last_insn
= emit_move_insn (xpart
, ypart
);
/* Collect the emitted insns as one sequence.  */
2790 seq
= gen_sequence ();
2793 /* Show the output dies here. This is necessary for SUBREGs
2794 of pseudos since we cannot track their lifetimes correctly;
2795 hard regs shouldn't appear here except as return values.
2796 We never want to emit such a clobber after reload. */
2798 && ! (reload_in_progress
|| reload_completed
)
2799 && need_clobber
!= 0)
2801 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2812 /* Pushing data onto the stack. */
2814 /* Push a block of length SIZE (perhaps variable)
2815 and return an rtx to address the beginning of the block.
2816 Note that it is not possible for the value returned to be a QUEUED.
2817 The value may be virtual_outgoing_args_rtx.
2819 EXTRA is the number of bytes of padding to push in addition to SIZE.
2820 BELOW nonzero means this padding comes at low addresses;
2821 otherwise, the padding comes at high addresses. */
/* NOTE(review): parameter declarations and several interior lines were
   dropped by the extraction; code kept verbatim.  */
2824 push_block (size
, extra
, below
)
/* Normalize SIZE to Pmode so the stack arithmetic below is uniform.  */
2830 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
/* Cheap cases first: constant size, or plain REG with no padding.  */
2831 if (CONSTANT_P (size
))
2832 anti_adjust_stack (plus_constant (size
, extra
));
2833 else if (GET_CODE (size
) == REG
&& extra
== 0)
2834 anti_adjust_stack (size
);
/* General case: compute SIZE + EXTRA into a register, then adjust.  */
2837 temp
= copy_to_mode_reg (Pmode
, size
);
2839 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
2840 temp
, 0, OPTAB_LIB_WIDEN
);
2841 anti_adjust_stack (temp
);
2844 #ifndef STACK_GROWS_DOWNWARD
2845 #ifdef ARGS_GROW_DOWNWARD
2846 if (!ACCUMULATE_OUTGOING_ARGS
)
2854 /* Return the lowest stack address when STACK or ARGS grow downward and
2855 we are not aaccumulating outgoing arguments (the c4x port uses such
2857 temp
= virtual_outgoing_args_rtx
;
2858 if (extra
!= 0 && below
)
2859 temp
= plus_constant (temp
, extra
);
/* Downward-growing case: compute the block's start below the
   outgoing-args pointer.  */
2863 if (GET_CODE (size
) == CONST_INT
)
2864 temp
= plus_constant (virtual_outgoing_args_rtx
,
2865 - INTVAL (size
) - (below
? 0 : extra
));
2866 else if (extra
!= 0 && !below
)
2867 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
2868 negate_rtx (Pmode
, plus_constant (size
, extra
)));
2870 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
2871 negate_rtx (Pmode
, size
));
/* Legitimize the computed address before handing it back.  */
2874 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
2880 return gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
2883 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2884 block of SIZE bytes. */
/* NOTE(review): the return type and the K&R parameter declaration for
   SIZE were dropped by the extraction; code kept verbatim.  */
2887 get_push_address (size
)
/* For post-modify push codes the stack pointer has already moved past
   the block, so compensate by SIZE in the opposite direction.  */
2892 if (STACK_PUSH_CODE
== POST_DEC
)
2893 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
2894 else if (STACK_PUSH_CODE
== POST_INC
)
2895 temp
= gen_rtx_MINUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
/* Pre-modify push codes: the stack pointer already addresses the block.  */
2897 temp
= stack_pointer_rtx
;
/* Copy into a fresh register so the result survives later sp changes.  */
2899 return copy_to_reg (temp
);
2902 /* Generate code to push X onto the stack, assuming it has mode MODE and
2904 MODE is redundant except when X is a CONST_INT (since they don't
2906 SIZE is an rtx for the size of data to be copied (in bytes),
2907 needed only if X is BLKmode.
2909 ALIGN is maximum alignment we can assume.
2911 If PARTIAL and REG are both nonzero, then copy that many of the first
2912 words of X into registers starting with REG, and push the rest of X.
2913 The amount of space pushed is decreased by PARTIAL words,
2914 rounded *down* to a multiple of PARM_BOUNDARY.
2915 REG must be a hard register in this case.
2916 If REG is zero but PARTIAL is not, take any all others actions for an
2917 argument partially in registers, but do not actually load any
2920 EXTRA is the amount in bytes of extra space to leave next to this arg.
2921 This is ignored if an argument block has already been allocated.
2923 On a machine that lacks real push insns, ARGS_ADDR is the address of
2924 the bottom of the argument block for this call. We use indexing off there
2925 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2926 argument block has not been preallocated.
2928 ARGS_SO_FAR is the size of args previously pushed for this call.
2930 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2931 for arguments passed in registers. If nonzero, it will be the number
2932 of bytes required. */
/* NOTE(review): heavily fragmented extraction -- most K&R parameter
   declarations, braces, #else branches and several statements are
   missing.  Three main paths: BLKmode block push, scalar partly in
   registers, and plain scalar push.  Code kept verbatim.  */
2935 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
2936 args_addr
, args_so_far
, reg_parm_stack_space
,
2939 enum machine_mode mode
;
2948 int reg_parm_stack_space
;
2952 enum direction stack_direction
2953 #ifdef STACK_GROWS_DOWNWARD
2959 /* Decide where to pad the argument: `downward' for below,
2960 `upward' for above, or `none' for don't pad it.
2961 Default is below for small data on big-endian machines; else above. */
2962 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
2964 /* Invert direction if stack is post-update. */
2965 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
2966 if (where_pad
!= none
)
2967 where_pad
= (where_pad
== downward
? upward
: downward
);
/* Flush queued increments; keep both the original X and XINNER.  */
2969 xinner
= x
= protect_from_queue (x
, 0);
2971 if (mode
== BLKmode
)
2973 /* Copy a block into the stack, entirely or partially. */
2976 int used
= partial
* UNITS_PER_WORD
;
2977 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
2985 /* USED is now the # of bytes we need not copy to the stack
2986 because registers will take care of them. */
2989 xinner
= change_address (xinner
, BLKmode
,
2990 plus_constant (XEXP (xinner
, 0), used
));
2992 /* If the partial register-part of the arg counts in its stack size,
2993 skip the part of stack space corresponding to the registers.
2994 Otherwise, start copying to the beginning of the stack space,
2995 by setting SKIP to 0. */
2996 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
2998 #ifdef PUSH_ROUNDING
2999 /* Do it with several push insns if that doesn't take lots of insns
3000 and if there is no difficulty with push insns that skip bytes
3001 on the stack for alignment purposes. */
3004 && GET_CODE (size
) == CONST_INT
3006 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3007 /* Here we avoid the case of a structure whose weak alignment
3008 forces many pushes of a small amount of data,
3009 and such small pushes do rounding that causes trouble. */
3010 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3011 || align
>= BIGGEST_ALIGNMENT
3012 || PUSH_ROUNDING (align
) == align
)
3013 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3015 /* Push padding now if padding above and stack grows down,
3016 or if padding below and stack grows up.
3017 But if space already allocated, this has already been done. */
3018 if (extra
&& args_addr
== 0
3019 && where_pad
!= none
&& where_pad
!= stack_direction
)
3020 anti_adjust_stack (GEN_INT (extra
));
/* Account for the pushed bytes and emit the piecewise pushes.  */
3022 stack_pointer_delta
+= INTVAL (size
) - used
;
3023 move_by_pieces (gen_rtx_MEM (BLKmode
, gen_push_operand ()), xinner
,
3024 INTVAL (size
) - used
, align
);
/* Optional -fcheck-memory-usage instrumentation (Checker).  */
3026 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3030 in_check_memory_usage
= 1;
3031 temp
= get_push_address (INTVAL(size
) - used
);
3032 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3033 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3035 XEXP (xinner
, 0), Pmode
,
3036 GEN_INT (INTVAL(size
) - used
),
3037 TYPE_MODE (sizetype
));
3039 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3041 GEN_INT (INTVAL(size
) - used
),
3042 TYPE_MODE (sizetype
),
3043 GEN_INT (MEMORY_USE_RW
),
3044 TYPE_MODE (integer_type_node
));
3045 in_check_memory_usage
= 0;
3049 #endif /* PUSH_ROUNDING */
3051 /* Otherwise make space on the stack and copy the data
3052 to the address of that space. */
3054 /* Deduct words put into registers from the size we must copy. */
3057 if (GET_CODE (size
) == CONST_INT
)
3058 size
= GEN_INT (INTVAL (size
) - used
);
3060 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3061 GEN_INT (used
), NULL_RTX
, 0,
3065 /* Get the address of the stack space.
3066 In this case, we do not deal with EXTRA separately.
3067 A single stack adjust will do. */
3070 temp
= push_block (size
, extra
, where_pad
== downward
);
3073 else if (GET_CODE (args_so_far
) == CONST_INT
)
3074 temp
= memory_address (BLKmode
,
3075 plus_constant (args_addr
,
3076 skip
+ INTVAL (args_so_far
)));
3078 temp
= memory_address (BLKmode
,
3079 plus_constant (gen_rtx_PLUS (Pmode
,
3083 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3087 in_check_memory_usage
= 1;
3088 target
= copy_to_reg (temp
);
3089 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3090 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3092 XEXP (xinner
, 0), Pmode
,
3093 size
, TYPE_MODE (sizetype
));
3095 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3097 size
, TYPE_MODE (sizetype
),
3098 GEN_INT (MEMORY_USE_RW
),
3099 TYPE_MODE (integer_type_node
));
3100 in_check_memory_usage
= 0;
3103 /* TEMP is the address of the block. Copy the data there. */
3104 if (GET_CODE (size
) == CONST_INT
3105 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size
), align
))
3107 move_by_pieces (gen_rtx_MEM (BLKmode
, temp
), xinner
,
3108 INTVAL (size
), align
);
/* Try the target's movstr<mode> patterns, narrowest mode first.  */
3113 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
3114 enum machine_mode mode
;
3115 rtx target
= gen_rtx_MEM (BLKmode
, temp
);
3117 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3119 mode
= GET_MODE_WIDER_MODE (mode
))
3121 enum insn_code code
= movstr_optab
[(int) mode
];
3122 insn_operand_predicate_fn pred
;
/* A pattern is usable when SIZE fits the mode and every operand
   predicate (or its absence) accepts the operands.  */
3124 if (code
!= CODE_FOR_nothing
3125 && ((GET_CODE (size
) == CONST_INT
3126 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3127 <= (GET_MODE_MASK (mode
) >> 1)))
3128 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
3129 && (!(pred
= insn_data
[(int) code
].operand
[0].predicate
)
3130 || ((*pred
) (target
, BLKmode
)))
3131 && (!(pred
= insn_data
[(int) code
].operand
[1].predicate
)
3132 || ((*pred
) (xinner
, BLKmode
)))
3133 && (!(pred
= insn_data
[(int) code
].operand
[3].predicate
)
3134 || ((*pred
) (opalign
, VOIDmode
))))
3136 rtx op2
= convert_to_mode (mode
, size
, 1);
3137 rtx last
= get_last_insn ();
3140 pred
= insn_data
[(int) code
].operand
[2].predicate
;
3141 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
3142 op2
= copy_to_mode_reg (mode
, op2
);
3144 pat
= GEN_FCN ((int) code
) (target
, xinner
,
/* Pattern expansion failed: roll back any partial emission.  */
3152 delete_insns_since (last
);
/* Library-call fallback.  When not accumulating outgoing args, the
   source address must not depend on the (moving) stack pointer.  */
3157 if (!ACCUMULATE_OUTGOING_ARGS
)
3159 /* If the source is referenced relative to the stack pointer,
3160 copy it to another register to stabilize it. We do not need
3161 to do this if we know that we won't be changing sp. */
3163 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3164 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3165 temp
= copy_to_reg (temp
);
3168 /* Make inhibit_defer_pop nonzero around the library call
3169 to force it to pop the bcopy-arguments right away. */
3171 #ifdef TARGET_MEM_FUNCTIONS
3172 emit_library_call (memcpy_libfunc
, 0,
3173 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
3174 convert_to_mode (TYPE_MODE (sizetype
),
3175 size
, TREE_UNSIGNED (sizetype
)),
3176 TYPE_MODE (sizetype
));
/* Non-TARGET_MEM_FUNCTIONS alternative: bcopy has (src, dst) order.  */
3178 emit_library_call (bcopy_libfunc
, 0,
3179 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
3180 convert_to_mode (TYPE_MODE (integer_type_node
),
3182 TREE_UNSIGNED (integer_type_node
)),
3183 TYPE_MODE (integer_type_node
));
3188 else if (partial
> 0)
3190 /* Scalar partly in registers. */
3192 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3195 /* # words of start of argument
3196 that we must make space for but need not store. */
3197 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3198 int args_offset
= INTVAL (args_so_far
);
3201 /* Push padding now if padding above and stack grows down,
3202 or if padding below and stack grows up.
3203 But if space already allocated, this has already been done. */
3204 if (extra
&& args_addr
== 0
3205 && where_pad
!= none
&& where_pad
!= stack_direction
)
3206 anti_adjust_stack (GEN_INT (extra
));
3208 /* If we make space by pushing it, we might as well push
3209 the real data. Otherwise, we can leave OFFSET nonzero
3210 and leave the space uninitialized. */
3214 /* Now NOT_STACK gets the number of words that we don't need to
3215 allocate on the stack. */
3216 not_stack
= partial
- offset
;
3218 /* If the partial register-part of the arg counts in its stack size,
3219 skip the part of stack space corresponding to the registers.
3220 Otherwise, start copying to the beginning of the stack space,
3221 by setting SKIP to 0. */
3222 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3224 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3225 x
= validize_mem (force_const_mem (mode
, x
));
3227 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3228 SUBREGs of such registers are not allowed. */
3229 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3230 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3231 x
= copy_to_reg (x
);
3233 /* Loop over all the words allocated on the stack for this arg. */
3234 /* We can do it by words, because any scalar bigger than a word
3235 has a size a multiple of a word. */
3236 #ifndef PUSH_ARGS_REVERSED
3237 for (i
= not_stack
; i
< size
; i
++)
3239 for (i
= size
- 1; i
>= not_stack
; i
--)
3241 if (i
>= not_stack
+ offset
)
/* Recursive push of each word beyond the register part.  */
3242 emit_push_insn (operand_subword_force (x
, i
, mode
),
3243 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3245 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3247 reg_parm_stack_space
, alignment_pad
);
/* Plain scalar push (the trailing else branch).  */
3252 rtx target
= NULL_RTX
;
3254 /* Push padding now if padding above and stack grows down,
3255 or if padding below and stack grows up.
3256 But if space already allocated, this has already been done. */
3257 if (extra
&& args_addr
== 0
3258 && where_pad
!= none
&& where_pad
!= stack_direction
)
3259 anti_adjust_stack (GEN_INT (extra
));
3261 #ifdef PUSH_ROUNDING
3262 if (args_addr
== 0 && PUSH_ARGS
)
3264 addr
= gen_push_operand ();
3265 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
/* No push insn: index into the preallocated argument block.  */
3270 if (GET_CODE (args_so_far
) == CONST_INT
)
3272 = memory_address (mode
,
3273 plus_constant (args_addr
,
3274 INTVAL (args_so_far
)));
3276 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3281 emit_move_insn (gen_rtx_MEM (mode
, addr
), x
);
3283 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3285 in_check_memory_usage
= 1;
3287 target
= get_push_address (GET_MODE_SIZE (mode
));
3289 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3290 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3293 GEN_INT (GET_MODE_SIZE (mode
)),
3294 TYPE_MODE (sizetype
));
3296 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3298 GEN_INT (GET_MODE_SIZE (mode
)),
3299 TYPE_MODE (sizetype
),
3300 GEN_INT (MEMORY_USE_RW
),
3301 TYPE_MODE (integer_type_node
));
3302 in_check_memory_usage
= 0;
3307 /* If part should go in registers, copy that part
3308 into the appropriate registers. Do this now, at the end,
3309 since mem-to-mem copies above may do function calls. */
3310 if (partial
> 0 && reg
!= 0)
3312 /* Handle calls that pass values in multiple non-contiguous locations.
3313 The Irix 6 ABI has examples of this. */
3314 if (GET_CODE (reg
) == PARALLEL
)
3315 emit_group_load (reg
, x
, -1, align
); /* ??? size? */
3317 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
/* Padding on the same side the stack grows: adjust after the push.  */
3320 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3321 anti_adjust_stack (GEN_INT (extra
));
3324 anti_adjust_stack (alignment_pad
);
3327 /* Expand an assignment that stores the value of FROM into TO.
3328 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3329 (This may contain a QUEUED rtx;
3330 if the value is constant, this rtx is a constant.)
3331 Otherwise, the returned value is NULL_RTX.
3333 SUGGEST_REG is no longer actually used.
3334 It used to mean, copy the value through a register
3335 and return that register, if that is possible.
3336 We now use WANT_VALUE to decide whether to do this. */
3339 expand_assignment (to
, from
, want_value
, suggest_reg
)
3342 int suggest_reg ATTRIBUTE_UNUSED
;
3344 register rtx to_rtx
= 0;
3347 /* Don't crash if the lhs of the assignment was erroneous. */
3349 if (TREE_CODE (to
) == ERROR_MARK
)
3351 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3352 return want_value
? result
: NULL_RTX
;
3355 /* Assignment of a structure component needs special treatment
3356 if the structure component's rtx is not simply a MEM.
3357 Assignment of an array element at a constant index, and assignment of
3358 an array element in an unaligned packed structure field, has the same
3361 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3362 || TREE_CODE (to
) == ARRAY_REF
)
3364 enum machine_mode mode1
;
3365 HOST_WIDE_INT bitsize
, bitpos
;
3370 unsigned int alignment
;
3373 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3374 &unsignedp
, &volatilep
, &alignment
);
3376 /* If we are going to use store_bit_field and extract_bit_field,
3377 make sure to_rtx will be safe for multiple use. */
3379 if (mode1
== VOIDmode
&& want_value
)
3380 tem
= stabilize_reference (tem
);
3382 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_DONT
);
3385 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3387 if (GET_CODE (to_rtx
) != MEM
)
3390 if (GET_MODE (offset_rtx
) != ptr_mode
)
3392 #ifdef POINTERS_EXTEND_UNSIGNED
3393 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
3395 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3399 /* A constant address in TO_RTX can have VOIDmode, we must not try
3400 to call force_reg for that case. Avoid that case. */
3401 if (GET_CODE (to_rtx
) == MEM
3402 && GET_MODE (to_rtx
) == BLKmode
3403 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3405 && (bitpos
% bitsize
) == 0
3406 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3407 && alignment
== GET_MODE_ALIGNMENT (mode1
))
3409 rtx temp
= change_address (to_rtx
, mode1
,
3410 plus_constant (XEXP (to_rtx
, 0),
3413 if (GET_CODE (XEXP (temp
, 0)) == REG
)
3416 to_rtx
= change_address (to_rtx
, mode1
,
3417 force_reg (GET_MODE (XEXP (temp
, 0)),
3422 to_rtx
= change_address (to_rtx
, VOIDmode
,
3423 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
3424 force_reg (ptr_mode
,
3430 if (GET_CODE (to_rtx
) == MEM
)
3432 /* When the offset is zero, to_rtx is the address of the
3433 structure we are storing into, and hence may be shared.
3434 We must make a new MEM before setting the volatile bit. */
3436 to_rtx
= copy_rtx (to_rtx
);
3438 MEM_VOLATILE_P (to_rtx
) = 1;
3440 #if 0 /* This was turned off because, when a field is volatile
3441 in an object which is not volatile, the object may be in a register,
3442 and then we would abort over here. */
3448 if (TREE_CODE (to
) == COMPONENT_REF
3449 && TREE_READONLY (TREE_OPERAND (to
, 1)))
3452 to_rtx
= copy_rtx (to_rtx
);
3454 RTX_UNCHANGING_P (to_rtx
) = 1;
3457 /* Check the access. */
3458 if (current_function_check_memory_usage
&& GET_CODE (to_rtx
) == MEM
)
3463 enum machine_mode best_mode
;
3465 best_mode
= get_best_mode (bitsize
, bitpos
,
3466 TYPE_ALIGN (TREE_TYPE (tem
)),
3468 if (best_mode
== VOIDmode
)
3471 best_mode_size
= GET_MODE_BITSIZE (best_mode
);
3472 to_addr
= plus_constant (XEXP (to_rtx
, 0), (bitpos
/ BITS_PER_UNIT
));
3473 size
= CEIL ((bitpos
% best_mode_size
) + bitsize
, best_mode_size
);
3474 size
*= GET_MODE_SIZE (best_mode
);
3476 /* Check the access right of the pointer. */
3478 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3480 GEN_INT (size
), TYPE_MODE (sizetype
),
3481 GEN_INT (MEMORY_USE_WO
),
3482 TYPE_MODE (integer_type_node
));
3485 /* If this is a varying-length object, we must get the address of
3486 the source and do an explicit block move. */
3489 unsigned int from_align
;
3490 rtx from_rtx
= expand_expr_unaligned (from
, &from_align
);
3492 = change_address (to_rtx
, VOIDmode
,
3493 plus_constant (XEXP (to_rtx
, 0),
3494 bitpos
/ BITS_PER_UNIT
));
3496 emit_block_move (inner_to_rtx
, from_rtx
, expr_size (from
),
3497 MIN (alignment
, from_align
));
3504 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3506 /* Spurious cast for HPUX compiler. */
3507 ? ((enum machine_mode
)
3508 TYPE_MODE (TREE_TYPE (to
)))
3512 int_size_in_bytes (TREE_TYPE (tem
)),
3513 get_alias_set (to
));
3515 preserve_temp_slots (result
);
3519 /* If the value is meaningful, convert RESULT to the proper mode.
3520 Otherwise, return nothing. */
3521 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3522 TYPE_MODE (TREE_TYPE (from
)),
3524 TREE_UNSIGNED (TREE_TYPE (to
)))
3529 /* If the rhs is a function call and its value is not an aggregate,
3530 call the function before we start to compute the lhs.
3531 This is needed for correct code for cases such as
3532 val = setjmp (buf) on machines where reference to val
3533 requires loading up part of an address in a separate insn.
3535 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3536 since it might be a promoted variable where the zero- or sign- extension
3537 needs to be done. Handling this in the normal way is safe because no
3538 computation is done before the call. */
3539 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
3540 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3541 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3542 && GET_CODE (DECL_RTL (to
)) == REG
))
3547 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3549 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3551 /* Handle calls that return values in multiple non-contiguous locations.
3552 The Irix 6 ABI has examples of this. */
3553 if (GET_CODE (to_rtx
) == PARALLEL
)
3554 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)),
3555 TYPE_ALIGN (TREE_TYPE (from
)));
3556 else if (GET_MODE (to_rtx
) == BLKmode
)
3557 emit_block_move (to_rtx
, value
, expr_size (from
),
3558 TYPE_ALIGN (TREE_TYPE (from
)));
3561 #ifdef POINTERS_EXTEND_UNSIGNED
3562 if (TREE_CODE (TREE_TYPE (to
)) == REFERENCE_TYPE
3563 || TREE_CODE (TREE_TYPE (to
)) == POINTER_TYPE
)
3564 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3566 emit_move_insn (to_rtx
, value
);
3568 preserve_temp_slots (to_rtx
);
3571 return want_value
? to_rtx
: NULL_RTX
;
3574 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3575 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3579 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3580 if (GET_CODE (to_rtx
) == MEM
)
3581 MEM_ALIAS_SET (to_rtx
) = get_alias_set (to
);
3584 /* Don't move directly into a return register. */
3585 if (TREE_CODE (to
) == RESULT_DECL
3586 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3591 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3593 if (GET_CODE (to_rtx
) == PARALLEL
)
3594 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)),
3595 TYPE_ALIGN (TREE_TYPE (from
)));
3597 emit_move_insn (to_rtx
, temp
);
3599 preserve_temp_slots (to_rtx
);
3602 return want_value
? to_rtx
: NULL_RTX
;
3605 /* In case we are returning the contents of an object which overlaps
3606 the place the value is being stored, use a safe function when copying
3607 a value through a pointer into a structure value return block. */
3608 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3609 && current_function_returns_struct
3610 && !current_function_returns_pcc_struct
)
3615 size
= expr_size (from
);
3616 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
,
3617 EXPAND_MEMORY_USE_DONT
);
3619 /* Copy the rights of the bitmap. */
3620 if (current_function_check_memory_usage
)
3621 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3622 XEXP (to_rtx
, 0), Pmode
,
3623 XEXP (from_rtx
, 0), Pmode
,
3624 convert_to_mode (TYPE_MODE (sizetype
),
3625 size
, TREE_UNSIGNED (sizetype
)),
3626 TYPE_MODE (sizetype
));
3628 #ifdef TARGET_MEM_FUNCTIONS
3629 emit_library_call (memcpy_libfunc
, 0,
3630 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3631 XEXP (from_rtx
, 0), Pmode
,
3632 convert_to_mode (TYPE_MODE (sizetype
),
3633 size
, TREE_UNSIGNED (sizetype
)),
3634 TYPE_MODE (sizetype
));
3636 emit_library_call (bcopy_libfunc
, 0,
3637 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3638 XEXP (to_rtx
, 0), Pmode
,
3639 convert_to_mode (TYPE_MODE (integer_type_node
),
3640 size
, TREE_UNSIGNED (integer_type_node
)),
3641 TYPE_MODE (integer_type_node
));
3644 preserve_temp_slots (to_rtx
);
3647 return want_value
? to_rtx
: NULL_RTX
;
3650 /* Compute FROM and store the value in the rtx we got. */
3653 result
= store_expr (from
, to_rtx
, want_value
);
3654 preserve_temp_slots (result
);
3657 return want_value
? result
: NULL_RTX
;
3660 /* Generate code for computing expression EXP,
3661 and storing the value into TARGET.
3662 TARGET may contain a QUEUED rtx.
3664 If WANT_VALUE is nonzero, return a copy of the value
3665 not in TARGET, so that we can be sure to use the proper
3666 value in a containing expression even if TARGET has something
3667 else stored in it. If possible, we copy the value through a pseudo
3668 and return that pseudo. Or, if the value is constant, we try to
3669 return the constant. In some cases, we return a pseudo
3670 copied *from* TARGET.
3672 If the mode is BLKmode then we may return TARGET itself.
3673 It turns out that in BLKmode it doesn't cause a problem.
3674 because C has no operators that could combine two different
3675 assignments into the same BLKmode object with different values
3676 with no sequence point. Will other languages need this to
3679 If WANT_VALUE is 0, we return NULL, to make sure
3680 to catch quickly any cases where the caller uses the value
3681 and fails to set WANT_VALUE. */
3684 store_expr (exp
, target
, want_value
)
3686 register rtx target
;
3690 int dont_return_target
= 0;
3692 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
3694 /* Perform first part of compound expression, then assign from second
3696 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
3698 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3700 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3702 /* For conditional expression, get safe form of the target. Then
3703 test the condition, doing the appropriate assignment on either
3704 side. This avoids the creation of unnecessary temporaries.
3705 For non-BLKmode, it is more efficient not to do this. */
3707 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3710 target
= protect_from_queue (target
, 1);
3712 do_pending_stack_adjust ();
3714 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3715 start_cleanup_deferral ();
3716 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3717 end_cleanup_deferral ();
3719 emit_jump_insn (gen_jump (lab2
));
3722 start_cleanup_deferral ();
3723 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3724 end_cleanup_deferral ();
3729 return want_value
? target
: NULL_RTX
;
3731 else if (queued_subexp_p (target
))
3732 /* If target contains a postincrement, let's not risk
3733 using it as the place to generate the rhs. */
3735 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3737 /* Expand EXP into a new pseudo. */
3738 temp
= gen_reg_rtx (GET_MODE (target
));
3739 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3742 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3744 /* If target is volatile, ANSI requires accessing the value
3745 *from* the target, if it is accessed. So make that happen.
3746 In no case return the target itself. */
3747 if (! MEM_VOLATILE_P (target
) && want_value
)
3748 dont_return_target
= 1;
3750 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3751 && GET_MODE (target
) != BLKmode
)
3752 /* If target is in memory and caller wants value in a register instead,
3753 arrange that. Pass TARGET as target for expand_expr so that,
3754 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3755 We know expand_expr will not use the target in that case.
3756 Don't do this if TARGET is volatile because we are supposed
3757 to write it and then read it. */
3759 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3760 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3761 temp
= copy_to_reg (temp
);
3762 dont_return_target
= 1;
3764 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3765 /* If this is an scalar in a register that is stored in a wider mode
3766 than the declared mode, compute the result into its declared mode
3767 and then convert to the wider mode. Our value is the computed
3770 /* If we don't want a value, we can do the conversion inside EXP,
3771 which will often result in some optimizations. Do the conversion
3772 in two steps: first change the signedness, if needed, then
3773 the extend. But don't do this if the type of EXP is a subtype
3774 of something else since then the conversion might involve
3775 more than just converting modes. */
3776 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
3777 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
3779 if (TREE_UNSIGNED (TREE_TYPE (exp
))
3780 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3783 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
3787 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
3788 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3792 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3794 /* If TEMP is a volatile MEM and we want a result value, make
3795 the access now so it gets done only once. Likewise if
3796 it contains TARGET. */
3797 if (GET_CODE (temp
) == MEM
&& want_value
3798 && (MEM_VOLATILE_P (temp
)
3799 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
3800 temp
= copy_to_reg (temp
);
3802 /* If TEMP is a VOIDmode constant, use convert_modes to make
3803 sure that we properly convert it. */
3804 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3805 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3806 TYPE_MODE (TREE_TYPE (exp
)), temp
,
3807 SUBREG_PROMOTED_UNSIGNED_P (target
));
3809 convert_move (SUBREG_REG (target
), temp
,
3810 SUBREG_PROMOTED_UNSIGNED_P (target
));
3812 /* If we promoted a constant, change the mode back down to match
3813 target. Otherwise, the caller might get confused by a result whose
3814 mode is larger than expected. */
3816 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
)
3817 && GET_MODE (temp
) != VOIDmode
)
3819 temp
= gen_rtx_SUBREG (GET_MODE (target
), temp
, 0);
3820 SUBREG_PROMOTED_VAR_P (temp
) = 1;
3821 SUBREG_PROMOTED_UNSIGNED_P (temp
)
3822 = SUBREG_PROMOTED_UNSIGNED_P (target
);
3825 return want_value
? temp
: NULL_RTX
;
3829 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3830 /* Return TARGET if it's a specified hardware register.
3831 If TARGET is a volatile mem ref, either return TARGET
3832 or return a reg copied *from* TARGET; ANSI requires this.
3834 Otherwise, if TEMP is not TARGET, return TEMP
3835 if it is constant (for efficiency),
3836 or if we really want the correct value. */
3837 if (!(target
&& GET_CODE (target
) == REG
3838 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3839 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
3840 && ! rtx_equal_p (temp
, target
)
3841 && (CONSTANT_P (temp
) || want_value
))
3842 dont_return_target
= 1;
3845 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3846 the same as that of TARGET, adjust the constant. This is needed, for
3847 example, in case it is a CONST_DOUBLE and we want only a word-sized
3849 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3850 && TREE_CODE (exp
) != ERROR_MARK
3851 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3852 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3853 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
3855 if (current_function_check_memory_usage
3856 && GET_CODE (target
) == MEM
3857 && AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
3859 if (GET_CODE (temp
) == MEM
)
3860 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3861 XEXP (target
, 0), Pmode
,
3862 XEXP (temp
, 0), Pmode
,
3863 expr_size (exp
), TYPE_MODE (sizetype
));
3865 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3866 XEXP (target
, 0), Pmode
,
3867 expr_size (exp
), TYPE_MODE (sizetype
),
3868 GEN_INT (MEMORY_USE_WO
),
3869 TYPE_MODE (integer_type_node
));
3872 /* If value was not generated in the target, store it there.
3873 Convert the value to TARGET's type first if nec. */
3874 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3875 one or both of them are volatile memory refs, we have to distinguish
3877 - expand_expr has used TARGET. In this case, we must not generate
3878 another copy. This can be detected by TARGET being equal according
3880 - expand_expr has not used TARGET - that means that the source just
3881 happens to have the same RTX form. Since temp will have been created
3882 by expand_expr, it will compare unequal according to == .
3883 We must generate a copy in this case, to reach the correct number
3884 of volatile memory references. */
3886 if ((! rtx_equal_p (temp
, target
)
3887 || (temp
!= target
&& (side_effects_p (temp
)
3888 || side_effects_p (target
))))
3889 && TREE_CODE (exp
) != ERROR_MARK
)
3891 target
= protect_from_queue (target
, 1);
3892 if (GET_MODE (temp
) != GET_MODE (target
)
3893 && GET_MODE (temp
) != VOIDmode
)
3895 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
3896 if (dont_return_target
)
3898 /* In this case, we will return TEMP,
3899 so make sure it has the proper mode.
3900 But don't forget to store the value into TARGET. */
3901 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
3902 emit_move_insn (target
, temp
);
3905 convert_move (target
, temp
, unsignedp
);
3908 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
3910 /* Handle copying a string constant into an array.
3911 The string constant may be shorter than the array.
3912 So copy just the string's actual length, and clear the rest. */
3916 /* Get the size of the data type of the string,
3917 which is actually the size of the target. */
3918 size
= expr_size (exp
);
3919 if (GET_CODE (size
) == CONST_INT
3920 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
3921 emit_block_move (target
, temp
, size
, TYPE_ALIGN (TREE_TYPE (exp
)));
3924 /* Compute the size of the data to copy from the string. */
3926 = size_binop (MIN_EXPR
,
3927 make_tree (sizetype
, size
),
3928 size_int (TREE_STRING_LENGTH (exp
)));
3929 int align
= TYPE_ALIGN (TREE_TYPE (exp
));
3930 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
3934 /* Copy that much. */
3935 emit_block_move (target
, temp
, copy_size_rtx
,
3936 TYPE_ALIGN (TREE_TYPE (exp
)));
3938 /* Figure out how much is left in TARGET that we have to clear.
3939 Do all calculations in ptr_mode. */
3941 addr
= XEXP (target
, 0);
3942 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
3944 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
3946 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
3947 size
= plus_constant (size
, - TREE_STRING_LENGTH (exp
));
3948 align
= MIN (align
, (BITS_PER_UNIT
3949 * (INTVAL (copy_size_rtx
)
3950 & - INTVAL (copy_size_rtx
))));
3954 addr
= force_reg (ptr_mode
, addr
);
3955 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
3956 copy_size_rtx
, NULL_RTX
, 0,
3959 size
= expand_binop (ptr_mode
, sub_optab
, size
,
3960 copy_size_rtx
, NULL_RTX
, 0,
3963 align
= BITS_PER_UNIT
;
3964 label
= gen_label_rtx ();
3965 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
3966 GET_MODE (size
), 0, 0, label
);
3968 align
= MIN (align
, expr_align (copy_size
));
3970 if (size
!= const0_rtx
)
3972 /* Be sure we can write on ADDR. */
3973 if (current_function_check_memory_usage
)
3974 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3976 size
, TYPE_MODE (sizetype
),
3977 GEN_INT (MEMORY_USE_WO
),
3978 TYPE_MODE (integer_type_node
));
3979 clear_storage (gen_rtx_MEM (BLKmode
, addr
), size
, align
);
3986 /* Handle calls that return values in multiple non-contiguous locations.
3987 The Irix 6 ABI has examples of this. */
3988 else if (GET_CODE (target
) == PARALLEL
)
3989 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)),
3990 TYPE_ALIGN (TREE_TYPE (exp
)));
3991 else if (GET_MODE (temp
) == BLKmode
)
3992 emit_block_move (target
, temp
, expr_size (exp
),
3993 TYPE_ALIGN (TREE_TYPE (exp
)));
3995 emit_move_insn (target
, temp
);
3998 /* If we don't want a value, return NULL_RTX. */
4002 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4003 ??? The latter test doesn't seem to make sense. */
4004 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4007 /* Return TARGET itself if it is a hard register. */
4008 else if (want_value
&& GET_MODE (target
) != BLKmode
4009 && ! (GET_CODE (target
) == REG
4010 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4011 return copy_to_reg (target
);
4017 /* Return 1 if EXP just contains zeros. */
4025 switch (TREE_CODE (exp
))
4029 case NON_LVALUE_EXPR
:
4030 return is_zeros_p (TREE_OPERAND (exp
, 0));
4033 return integer_zerop (exp
);
4037 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4040 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
4043 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4044 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4045 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4046 if (! is_zeros_p (TREE_VALUE (elt
)))
4056 /* Return 1 if EXP contains mostly (3/4) zeros. */
4059 mostly_zeros_p (exp
)
4062 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4064 int elts
= 0, zeros
= 0;
4065 tree elt
= CONSTRUCTOR_ELTS (exp
);
4066 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4068 /* If there are no ranges of true bits, it is all zero. */
4069 return elt
== NULL_TREE
;
4071 for (; elt
; elt
= TREE_CHAIN (elt
))
4073 /* We do not handle the case where the index is a RANGE_EXPR,
4074 so the statistic will be somewhat inaccurate.
4075 We do make a more accurate count in store_constructor itself,
4076 so since this function is only used for nested array elements,
4077 this should be close enough. */
4078 if (mostly_zeros_p (TREE_VALUE (elt
)))
4083 return 4 * zeros
>= 3 * elts
;
4086 return is_zeros_p (exp
);
4089 /* Helper function for store_constructor.
4090 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4091 TYPE is the type of the CONSTRUCTOR, not the element type.
4092 ALIGN and CLEARED are as for store_constructor.
4094 This provides a recursive shortcut back to store_constructor when it isn't
4095 necessary to go through store_field. This is so that we can pass through
4096 the cleared field to let store_constructor know that we may not have to
4097 clear a substructure if the outer structure has already been cleared. */
4100 store_constructor_field (target
, bitsize
, bitpos
,
4101 mode
, exp
, type
, align
, cleared
)
4103 unsigned HOST_WIDE_INT bitsize
;
4104 HOST_WIDE_INT bitpos
;
4105 enum machine_mode mode
;
4110 if (TREE_CODE (exp
) == CONSTRUCTOR
4111 && bitpos
% BITS_PER_UNIT
== 0
4112 /* If we have a non-zero bitpos for a register target, then we just
4113 let store_field do the bitfield handling. This is unlikely to
4114 generate unnecessary clear instructions anyways. */
4115 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4119 = change_address (target
,
4120 GET_MODE (target
) == BLKmode
4122 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4123 ? BLKmode
: VOIDmode
,
4124 plus_constant (XEXP (target
, 0),
4125 bitpos
/ BITS_PER_UNIT
));
4126 store_constructor (exp
, target
, align
, cleared
, bitsize
/ BITS_PER_UNIT
);
4129 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, align
,
4130 int_size_in_bytes (type
), 0);
4133 /* Store the value of constructor EXP into the rtx TARGET.
4134 TARGET is either a REG or a MEM.
4135 ALIGN is the maximum known alignment for TARGET.
4136 CLEARED is true if TARGET is known to have been zero'd.
4137 SIZE is the number of bytes of TARGET we are allowed to modify: this
4138 may not be the same as the size of EXP if we are assigning to a field
4139 which has been packed to exclude padding bits. */
4142 store_constructor (exp
, target
, align
, cleared
, size
)
4149 tree type
= TREE_TYPE (exp
);
4150 #ifdef WORD_REGISTER_OPERATIONS
4151 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4154 /* We know our target cannot conflict, since safe_from_p has been called. */
4156 /* Don't try copying piece by piece into a hard register
4157 since that is vulnerable to being clobbered by EXP.
4158 Instead, construct in a pseudo register and then copy it all. */
4159 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4161 rtx temp
= gen_reg_rtx (GET_MODE (target
));
4162 store_constructor (exp
, temp
, align
, cleared
, size
);
4163 emit_move_insn (target
, temp
);
4168 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4169 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4173 /* Inform later passes that the whole union value is dead. */
4174 if ((TREE_CODE (type
) == UNION_TYPE
4175 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4178 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4180 /* If the constructor is empty, clear the union. */
4181 if (! CONSTRUCTOR_ELTS (exp
) && ! cleared
)
4182 clear_storage (target
, expr_size (exp
), TYPE_ALIGN (type
));
4185 /* If we are building a static constructor into a register,
4186 set the initial value as zero so we can fold the value into
4187 a constant. But if more than one register is involved,
4188 this probably loses. */
4189 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4190 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4193 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4198 /* If the constructor has fewer fields than the structure
4199 or if we are initializing the structure to mostly zeros,
4200 clear the whole structure first. */
4202 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4203 != fields_length (type
))
4204 || mostly_zeros_p (exp
)))
4207 clear_storage (target
, GEN_INT (size
), align
);
4212 /* Inform later passes that the old value is dead. */
4213 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4215 /* Store each element of the constructor into
4216 the corresponding field of TARGET. */
4218 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4220 register tree field
= TREE_PURPOSE (elt
);
4221 #ifdef WORD_REGISTER_OPERATIONS
4222 tree value
= TREE_VALUE (elt
);
4224 register enum machine_mode mode
;
4225 HOST_WIDE_INT bitsize
;
4226 HOST_WIDE_INT bitpos
= 0;
4229 rtx to_rtx
= target
;
4231 /* Just ignore missing fields.
4232 We cleared the whole structure, above,
4233 if any fields are missing. */
4237 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
4240 if (host_integerp (DECL_SIZE (field
), 1))
4241 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4245 unsignedp
= TREE_UNSIGNED (field
);
4246 mode
= DECL_MODE (field
);
4247 if (DECL_BIT_FIELD (field
))
4250 offset
= DECL_FIELD_OFFSET (field
);
4251 if (host_integerp (offset
, 0)
4252 && host_integerp (bit_position (field
), 0))
4254 bitpos
= int_bit_position (field
);
4258 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4264 if (contains_placeholder_p (offset
))
4265 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4266 offset
, make_tree (TREE_TYPE (exp
), target
));
4268 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4269 if (GET_CODE (to_rtx
) != MEM
)
4272 if (GET_MODE (offset_rtx
) != ptr_mode
)
4274 #ifdef POINTERS_EXTEND_UNSIGNED
4275 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
4277 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4282 = change_address (to_rtx
, VOIDmode
,
4283 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
4284 force_reg (ptr_mode
,
4286 align
= DECL_OFFSET_ALIGN (field
);
4289 if (TREE_READONLY (field
))
4291 if (GET_CODE (to_rtx
) == MEM
)
4292 to_rtx
= copy_rtx (to_rtx
);
4294 RTX_UNCHANGING_P (to_rtx
) = 1;
4297 #ifdef WORD_REGISTER_OPERATIONS
4298 /* If this initializes a field that is smaller than a word, at the
4299 start of a word, try to widen it to a full word.
4300 This special case allows us to output C++ member function
4301 initializations in a form that the optimizers can understand. */
4302 if (GET_CODE (target
) == REG
4303 && bitsize
< BITS_PER_WORD
4304 && bitpos
% BITS_PER_WORD
== 0
4305 && GET_MODE_CLASS (mode
) == MODE_INT
4306 && TREE_CODE (value
) == INTEGER_CST
4308 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4310 tree type
= TREE_TYPE (value
);
4311 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4313 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4314 value
= convert (type
, value
);
4316 if (BYTES_BIG_ENDIAN
)
4318 = fold (build (LSHIFT_EXPR
, type
, value
,
4319 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4320 bitsize
= BITS_PER_WORD
;
4324 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4325 TREE_VALUE (elt
), type
, align
, cleared
);
4328 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4333 tree domain
= TYPE_DOMAIN (type
);
4334 HOST_WIDE_INT minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
4335 HOST_WIDE_INT maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
4336 tree elttype
= TREE_TYPE (type
);
4338 /* If the constructor has fewer elements than the array,
4339 clear the whole array first. Similarly if this is
4340 static constructor of a non-BLKmode object. */
4341 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4345 HOST_WIDE_INT count
= 0, zero_count
= 0;
4347 /* This loop is a more accurate version of the loop in
4348 mostly_zeros_p (it handles RANGE_EXPR in an index).
4349 It is also needed to check for missing elements. */
4350 for (elt
= CONSTRUCTOR_ELTS (exp
);
4352 elt
= TREE_CHAIN (elt
))
4354 tree index
= TREE_PURPOSE (elt
);
4355 HOST_WIDE_INT this_node_count
;
4357 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4359 tree lo_index
= TREE_OPERAND (index
, 0);
4360 tree hi_index
= TREE_OPERAND (index
, 1);
4362 if (! host_integerp (lo_index
, 1)
4363 || ! host_integerp (hi_index
, 1))
4369 this_node_count
= (tree_low_cst (hi_index
, 1)
4370 - tree_low_cst (lo_index
, 1) + 1);
4373 this_node_count
= 1;
4374 count
+= this_node_count
;
4375 if (mostly_zeros_p (TREE_VALUE (elt
)))
4376 zero_count
+= this_node_count
;
4378 /* Clear the entire array first if there are any missing elements,
4379 or if the incidence of zero elements is >= 75%. */
4380 if (count
< maxelt
- minelt
+ 1
4381 || 4 * zero_count
>= 3 * count
)
4384 if (need_to_clear
&& size
> 0)
4387 clear_storage (target
, GEN_INT (size
), align
);
4391 /* Inform later passes that the old value is dead. */
4392 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4394 /* Store each element of the constructor into
4395 the corresponding element of TARGET, determined
4396 by counting the elements. */
4397 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4399 elt
= TREE_CHAIN (elt
), i
++)
4401 register enum machine_mode mode
;
4402 HOST_WIDE_INT bitsize
;
4403 HOST_WIDE_INT bitpos
;
4405 tree value
= TREE_VALUE (elt
);
4406 unsigned int align
= TYPE_ALIGN (TREE_TYPE (value
));
4407 tree index
= TREE_PURPOSE (elt
);
4408 rtx xtarget
= target
;
4410 if (cleared
&& is_zeros_p (value
))
4413 unsignedp
= TREE_UNSIGNED (elttype
);
4414 mode
= TYPE_MODE (elttype
);
4415 if (mode
== BLKmode
)
4416 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4417 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4420 bitsize
= GET_MODE_BITSIZE (mode
);
4422 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4424 tree lo_index
= TREE_OPERAND (index
, 0);
4425 tree hi_index
= TREE_OPERAND (index
, 1);
4426 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
4427 struct nesting
*loop
;
4428 HOST_WIDE_INT lo
, hi
, count
;
4431 /* If the range is constant and "small", unroll the loop. */
4432 if (host_integerp (lo_index
, 0)
4433 && host_integerp (hi_index
, 0)
4434 && (lo
= tree_low_cst (lo_index
, 0),
4435 hi
= tree_low_cst (hi_index
, 0),
4436 count
= hi
- lo
+ 1,
4437 (GET_CODE (target
) != MEM
4439 || (host_integerp (TYPE_SIZE (elttype
), 1)
4440 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4443 lo
-= minelt
; hi
-= minelt
;
4444 for (; lo
<= hi
; lo
++)
4446 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4447 store_constructor_field (target
, bitsize
, bitpos
, mode
,
4448 value
, type
, align
, cleared
);
4453 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4454 loop_top
= gen_label_rtx ();
4455 loop_end
= gen_label_rtx ();
4457 unsignedp
= TREE_UNSIGNED (domain
);
4459 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4461 DECL_RTL (index
) = index_r
4462 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4465 if (TREE_CODE (value
) == SAVE_EXPR
4466 && SAVE_EXPR_RTL (value
) == 0)
4468 /* Make sure value gets expanded once before the
4470 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4473 store_expr (lo_index
, index_r
, 0);
4474 loop
= expand_start_loop (0);
4476 /* Assign value to element index. */
4478 = convert (ssizetype
,
4479 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4480 index
, TYPE_MIN_VALUE (domain
))));
4481 position
= size_binop (MULT_EXPR
, position
,
4483 TYPE_SIZE_UNIT (elttype
)));
4485 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4486 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4487 xtarget
= change_address (target
, mode
, addr
);
4488 if (TREE_CODE (value
) == CONSTRUCTOR
)
4489 store_constructor (value
, xtarget
, align
, cleared
,
4490 bitsize
/ BITS_PER_UNIT
);
4492 store_expr (value
, xtarget
, 0);
4494 expand_exit_loop_if_false (loop
,
4495 build (LT_EXPR
, integer_type_node
,
4498 expand_increment (build (PREINCREMENT_EXPR
,
4500 index
, integer_one_node
), 0, 0);
4502 emit_label (loop_end
);
4505 else if ((index
!= 0 && ! host_integerp (index
, 0))
4506 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4512 index
= ssize_int (1);
4515 index
= convert (ssizetype
,
4516 fold (build (MINUS_EXPR
, index
,
4517 TYPE_MIN_VALUE (domain
))));
4519 position
= size_binop (MULT_EXPR
, index
,
4521 TYPE_SIZE_UNIT (elttype
)));
4522 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4523 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4524 xtarget
= change_address (target
, mode
, addr
);
4525 store_expr (value
, xtarget
, 0);
4530 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4531 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4533 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4535 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4536 type
, align
, cleared
);
4541 /* Set constructor assignments */
4542 else if (TREE_CODE (type
) == SET_TYPE
)
4544 tree elt
= CONSTRUCTOR_ELTS (exp
);
4545 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4546 tree domain
= TYPE_DOMAIN (type
);
4547 tree domain_min
, domain_max
, bitlength
;
4549 /* The default implementation strategy is to extract the constant
4550 parts of the constructor, use that to initialize the target,
4551 and then "or" in whatever non-constant ranges we need in addition.
4553 If a large set is all zero or all ones, it is
4554 probably better to set it using memset (if available) or bzero.
4555 Also, if a large set has just a single range, it may also be
4556 better to first clear all the first clear the set (using
4557 bzero/memset), and set the bits we want. */
4559 /* Check for all zeros. */
4560 if (elt
== NULL_TREE
&& size
> 0)
4563 clear_storage (target
, GEN_INT (size
), TYPE_ALIGN (type
));
4567 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4568 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4569 bitlength
= size_binop (PLUS_EXPR
,
4570 size_diffop (domain_max
, domain_min
),
4573 nbits
= tree_low_cst (bitlength
, 1);
4575 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4576 are "complicated" (more than one range), initialize (the
4577 constant parts) by copying from a constant. */
4578 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4579 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4581 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4582 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4583 char *bit_buffer
= (char *) alloca (nbits
);
4584 HOST_WIDE_INT word
= 0;
4585 unsigned int bit_pos
= 0;
4586 unsigned int ibit
= 0;
4587 unsigned int offset
= 0; /* In bytes from beginning of set. */
4589 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4592 if (bit_buffer
[ibit
])
4594 if (BYTES_BIG_ENDIAN
)
4595 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4597 word
|= 1 << bit_pos
;
4601 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4603 if (word
!= 0 || ! cleared
)
4605 rtx datum
= GEN_INT (word
);
4608 /* The assumption here is that it is safe to use
4609 XEXP if the set is multi-word, but not if
4610 it's single-word. */
4611 if (GET_CODE (target
) == MEM
)
4613 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
4614 to_rtx
= change_address (target
, mode
, to_rtx
);
4616 else if (offset
== 0)
4620 emit_move_insn (to_rtx
, datum
);
4627 offset
+= set_word_size
/ BITS_PER_UNIT
;
4632 /* Don't bother clearing storage if the set is all ones. */
4633 if (TREE_CHAIN (elt
) != NULL_TREE
4634 || (TREE_PURPOSE (elt
) == NULL_TREE
4636 : ( ! host_integerp (TREE_VALUE (elt
), 0)
4637 || ! host_integerp (TREE_PURPOSE (elt
), 0)
4638 || (tree_low_cst (TREE_VALUE (elt
), 0)
4639 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
4640 != (HOST_WIDE_INT
) nbits
))))
4641 clear_storage (target
, expr_size (exp
), TYPE_ALIGN (type
));
4643 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
4645 /* start of range of element or NULL */
4646 tree startbit
= TREE_PURPOSE (elt
);
4647 /* end of range of element, or element value */
4648 tree endbit
= TREE_VALUE (elt
);
4649 #ifdef TARGET_MEM_FUNCTIONS
4650 HOST_WIDE_INT startb
, endb
;
4652 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
4654 bitlength_rtx
= expand_expr (bitlength
,
4655 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
4657 /* handle non-range tuple element like [ expr ] */
4658 if (startbit
== NULL_TREE
)
4660 startbit
= save_expr (endbit
);
4664 startbit
= convert (sizetype
, startbit
);
4665 endbit
= convert (sizetype
, endbit
);
4666 if (! integer_zerop (domain_min
))
4668 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
4669 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
4671 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
4672 EXPAND_CONST_ADDRESS
);
4673 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
4674 EXPAND_CONST_ADDRESS
);
4678 targetx
= assign_stack_temp (GET_MODE (target
),
4679 GET_MODE_SIZE (GET_MODE (target
)),
4681 emit_move_insn (targetx
, target
);
4684 else if (GET_CODE (target
) == MEM
)
4689 #ifdef TARGET_MEM_FUNCTIONS
4690 /* Optimization: If startbit and endbit are
4691 constants divisible by BITS_PER_UNIT,
4692 call memset instead. */
4693 if (TREE_CODE (startbit
) == INTEGER_CST
4694 && TREE_CODE (endbit
) == INTEGER_CST
4695 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
4696 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
4698 emit_library_call (memset_libfunc
, 0,
4700 plus_constant (XEXP (targetx
, 0),
4701 startb
/ BITS_PER_UNIT
),
4703 constm1_rtx
, TYPE_MODE (integer_type_node
),
4704 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
4705 TYPE_MODE (sizetype
));
4709 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
4710 0, VOIDmode
, 4, XEXP (targetx
, 0), Pmode
,
4711 bitlength_rtx
, TYPE_MODE (sizetype
),
4712 startbit_rtx
, TYPE_MODE (sizetype
),
4713 endbit_rtx
, TYPE_MODE (sizetype
));
4716 emit_move_insn (target
, targetx
);
4724 /* Store the value of EXP (an expression tree)
4725 into a subfield of TARGET which has mode MODE and occupies
4726 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4727 If MODE is VOIDmode, it means that we are storing into a bit-field.
4729 If VALUE_MODE is VOIDmode, return nothing in particular.
4730 UNSIGNEDP is not used in this case.
4732 Otherwise, return an rtx for the value stored. This rtx
4733 has mode VALUE_MODE if that is convenient to do.
4734 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4736 ALIGN is the alignment that TARGET is known to have.
4737 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4739 ALIAS_SET is the alias set for the destination. This value will
4740 (in general) be different from that for TARGET, since TARGET is a
4741 reference to the containing structure. */
4744 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
4745 unsignedp
, align
, total_size
, alias_set
)
4747 HOST_WIDE_INT bitsize
;
4748 HOST_WIDE_INT bitpos
;
4749 enum machine_mode mode
;
4751 enum machine_mode value_mode
;
4754 HOST_WIDE_INT total_size
;
4757 HOST_WIDE_INT width_mask
= 0;
4759 if (TREE_CODE (exp
) == ERROR_MARK
)
4762 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
4763 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
4765 /* If we are storing into an unaligned field of an aligned union that is
4766 in a register, we may have the mode of TARGET being an integer mode but
4767 MODE == BLKmode. In that case, get an aligned object whose size and
4768 alignment are the same as TARGET and store TARGET into it (we can avoid
4769 the store if the field being stored is the entire width of TARGET). Then
4770 call ourselves recursively to store the field into a BLKmode version of
4771 that object. Finally, load from the object into TARGET. This is not
4772 very efficient in general, but should only be slightly more expensive
4773 than the otherwise-required unaligned accesses. Perhaps this can be
4774 cleaned up later. */
4777 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
4779 rtx object
= assign_stack_temp (GET_MODE (target
),
4780 GET_MODE_SIZE (GET_MODE (target
)), 0);
4781 rtx blk_object
= copy_rtx (object
);
4783 MEM_SET_IN_STRUCT_P (object
, 1);
4784 MEM_SET_IN_STRUCT_P (blk_object
, 1);
4785 PUT_MODE (blk_object
, BLKmode
);
4787 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
4788 emit_move_insn (object
, target
);
4790 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4791 align
, total_size
, alias_set
);
4793 /* Even though we aren't returning target, we need to
4794 give it the updated value. */
4795 emit_move_insn (target
, object
);
4800 if (GET_CODE (target
) == CONCAT
)
4802 /* We're storing into a struct containing a single __complex. */
4806 return store_expr (exp
, target
, 0);
4809 /* If the structure is in a register or if the component
4810 is a bit field, we cannot use addressing to access it.
4811 Use bit-field techniques or SUBREG to store in it. */
4813 if (mode
== VOIDmode
4814 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
4815 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
4816 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
4817 || GET_CODE (target
) == REG
4818 || GET_CODE (target
) == SUBREG
4819 /* If the field isn't aligned enough to store as an ordinary memref,
4820 store it as a bit field. */
4821 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, align
)
4822 && (align
< GET_MODE_ALIGNMENT (mode
)
4823 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
4824 || (mode
== BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, align
)
4825 && (TYPE_ALIGN (TREE_TYPE (exp
)) > align
4826 || bitpos
% TYPE_ALIGN (TREE_TYPE (exp
)) != 0))
4827 /* If the RHS and field are a constant size and the size of the
4828 RHS isn't the same size as the bitfield, we must use bitfield
4831 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
4832 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
4834 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4836 /* If BITSIZE is narrower than the size of the type of EXP
4837 we will be narrowing TEMP. Normally, what's wanted are the
4838 low-order bits. However, if EXP's type is a record and this is
4839 big-endian machine, we want the upper BITSIZE bits. */
4840 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
4841 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
4842 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
4843 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
4844 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
4848 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4850 if (mode
!= VOIDmode
&& mode
!= BLKmode
4851 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
4852 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
4854 /* If the modes of TARGET and TEMP are both BLKmode, both
4855 must be in memory and BITPOS must be aligned on a byte
4856 boundary. If so, we simply do a block copy. */
4857 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
4859 unsigned int exp_align
= expr_align (exp
);
4861 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
4862 || bitpos
% BITS_PER_UNIT
!= 0)
4865 target
= change_address (target
, VOIDmode
,
4866 plus_constant (XEXP (target
, 0),
4867 bitpos
/ BITS_PER_UNIT
));
4869 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4870 align
= MIN (exp_align
, align
);
4872 /* Find an alignment that is consistent with the bit position. */
4873 while ((bitpos
% align
) != 0)
4876 emit_block_move (target
, temp
,
4877 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
4881 return value_mode
== VOIDmode
? const0_rtx
: target
;
4884 /* Store the value in the bitfield. */
4885 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
4886 if (value_mode
!= VOIDmode
)
4888 /* The caller wants an rtx for the value. */
4889 /* If possible, avoid refetching from the bitfield itself. */
4891 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
4894 enum machine_mode tmode
;
4897 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
4898 tmode
= GET_MODE (temp
);
4899 if (tmode
== VOIDmode
)
4901 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
4902 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4903 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4905 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
4906 NULL_RTX
, value_mode
, 0, align
,
4913 rtx addr
= XEXP (target
, 0);
4916 /* If a value is wanted, it must be the lhs;
4917 so make the address stable for multiple use. */
4919 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
4920 && ! CONSTANT_ADDRESS_P (addr
)
4921 /* A frame-pointer reference is already stable. */
4922 && ! (GET_CODE (addr
) == PLUS
4923 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
4924 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
4925 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
4926 addr
= copy_to_reg (addr
);
4928 /* Now build a reference to just the desired component. */
4930 to_rtx
= copy_rtx (change_address (target
, mode
,
4931 plus_constant (addr
,
4933 / BITS_PER_UNIT
))));
4934 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
4935 MEM_ALIAS_SET (to_rtx
) = alias_set
;
4937 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
4941 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4942 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4943 ARRAY_REFs and find the ultimate containing object, which we return.
4945 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4946 bit position, and *PUNSIGNEDP to the signedness of the field.
4947 If the position of the field is variable, we store a tree
4948 giving the variable offset (in units) in *POFFSET.
4949 This offset is in addition to the bit position.
4950 If the position is not variable, we store 0 in *POFFSET.
4951 We set *PALIGNMENT to the alignment of the address that will be
4952 computed. This is the alignment of the thing we return if *POFFSET
4953 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4955 If any of the extraction expressions is volatile,
4956 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4958 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4959 is a mode that can be used to access the field. In that case, *PBITSIZE
4962 If the field describes a variable-sized object, *PMODE is set to
4963 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4964 this case, but the address of the object can be found. */
4967 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
4968 punsignedp
, pvolatilep
, palignment
)
4970 HOST_WIDE_INT
*pbitsize
;
4971 HOST_WIDE_INT
*pbitpos
;
4973 enum machine_mode
*pmode
;
4976 unsigned int *palignment
;
4979 enum machine_mode mode
= VOIDmode
;
4980 tree offset
= size_zero_node
;
4981 tree bit_offset
= bitsize_zero_node
;
4982 unsigned int alignment
= BIGGEST_ALIGNMENT
;
4985 /* First get the mode, signedness, and size. We do this from just the
4986 outermost expression. */
4987 if (TREE_CODE (exp
) == COMPONENT_REF
)
4989 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
4990 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
4991 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
4993 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
4995 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
4997 size_tree
= TREE_OPERAND (exp
, 1);
4998 *punsignedp
= TREE_UNSIGNED (exp
);
5002 mode
= TYPE_MODE (TREE_TYPE (exp
));
5003 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5005 if (mode
== BLKmode
)
5006 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5008 *pbitsize
= GET_MODE_BITSIZE (mode
);
5013 if (! host_integerp (size_tree
, 1))
5014 mode
= BLKmode
, *pbitsize
= -1;
5016 *pbitsize
= tree_low_cst (size_tree
, 1);
5019 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5020 and find the ultimate containing object. */
5023 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5024 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5025 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5027 tree field
= TREE_OPERAND (exp
, 1);
5028 tree this_offset
= DECL_FIELD_OFFSET (field
);
5030 /* If this field hasn't been filled in yet, don't go
5031 past it. This should only happen when folding expressions
5032 made during type construction. */
5033 if (this_offset
== 0)
5035 else if (! TREE_CONSTANT (this_offset
)
5036 && contains_placeholder_p (this_offset
))
5037 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5039 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5040 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5041 DECL_FIELD_BIT_OFFSET (field
));
5043 if (! host_integerp (offset
, 0))
5044 alignment
= MIN (alignment
, DECL_OFFSET_ALIGN (field
));
5047 else if (TREE_CODE (exp
) == ARRAY_REF
)
5049 tree index
= TREE_OPERAND (exp
, 1);
5050 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5051 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5052 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (exp
));
5054 /* We assume all arrays have sizes that are a multiple of a byte.
5055 First subtract the lower bound, if any, in the type of the
5056 index, then convert to sizetype and multiply by the size of the
5058 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5059 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5062 /* If the index has a self-referential type, pass it to a
5063 WITH_RECORD_EXPR; if the component size is, pass our
5064 component to one. */
5065 if (! TREE_CONSTANT (index
)
5066 && contains_placeholder_p (index
))
5067 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5068 if (! TREE_CONSTANT (unit_size
)
5069 && contains_placeholder_p (unit_size
))
5070 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
,
5071 TREE_OPERAND (exp
, 0));
5073 offset
= size_binop (PLUS_EXPR
, offset
,
5074 size_binop (MULT_EXPR
,
5075 convert (sizetype
, index
),
5079 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5080 && ! ((TREE_CODE (exp
) == NOP_EXPR
5081 || TREE_CODE (exp
) == CONVERT_EXPR
)
5082 && (TYPE_MODE (TREE_TYPE (exp
))
5083 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5086 /* If any reference in the chain is volatile, the effect is volatile. */
5087 if (TREE_THIS_VOLATILE (exp
))
5090 /* If the offset is non-constant already, then we can't assume any
5091 alignment more than the alignment here. */
5092 if (! TREE_CONSTANT (offset
))
5093 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5095 exp
= TREE_OPERAND (exp
, 0);
5099 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
5100 else if (TREE_TYPE (exp
) != 0)
5101 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5103 /* If OFFSET is constant, see if we can return the whole thing as a
5104 constant bit position. Otherwise, split it up. */
5105 if (host_integerp (offset
, 0)
5106 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5108 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5109 && host_integerp (tem
, 0))
5110 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5112 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5115 *palignment
= alignment
;
5119 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5121 static enum memory_use_mode
5122 get_memory_usage_from_modifier (modifier
)
5123 enum expand_modifier modifier
;
5129 return MEMORY_USE_RO
;
5131 case EXPAND_MEMORY_USE_WO
:
5132 return MEMORY_USE_WO
;
5134 case EXPAND_MEMORY_USE_RW
:
5135 return MEMORY_USE_RW
;
5137 case EXPAND_MEMORY_USE_DONT
:
5138 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5139 MEMORY_USE_DONT, because they are modifiers to a call of
5140 expand_expr in the ADDR_EXPR case of expand_expr. */
5141 case EXPAND_CONST_ADDRESS
:
5142 case EXPAND_INITIALIZER
:
5143 return MEMORY_USE_DONT
;
5144 case EXPAND_MEMORY_USE_BAD
:
5150 /* Given an rtx VALUE that may contain additions and multiplications,
5151 return an equivalent value that just refers to a register or memory.
5152 This is done by generating instructions to perform the arithmetic
5153 and returning a pseudo-register containing the value.
5155 The returned value may be a REG, SUBREG, MEM or constant. */
5158 force_operand (value
, target
)
5161 register optab binoptab
= 0;
5162 /* Use a temporary to force order of execution of calls to
5166 /* Use subtarget as the target for operand 0 of a binary operation. */
5167 register rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
5169 /* Check for a PIC address load. */
5171 && (GET_CODE (value
) == PLUS
|| GET_CODE (value
) == MINUS
)
5172 && XEXP (value
, 0) == pic_offset_table_rtx
5173 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5174 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5175 || GET_CODE (XEXP (value
, 1)) == CONST
))
5178 subtarget
= gen_reg_rtx (GET_MODE (value
));
5179 emit_move_insn (subtarget
, value
);
5183 if (GET_CODE (value
) == PLUS
)
5184 binoptab
= add_optab
;
5185 else if (GET_CODE (value
) == MINUS
)
5186 binoptab
= sub_optab
;
5187 else if (GET_CODE (value
) == MULT
)
5189 op2
= XEXP (value
, 1);
5190 if (!CONSTANT_P (op2
)
5191 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5193 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5194 return expand_mult (GET_MODE (value
), tmp
,
5195 force_operand (op2
, NULL_RTX
),
5201 op2
= XEXP (value
, 1);
5202 if (!CONSTANT_P (op2
)
5203 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5205 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
5207 binoptab
= add_optab
;
5208 op2
= negate_rtx (GET_MODE (value
), op2
);
5211 /* Check for an addition with OP2 a constant integer and our first
5212 operand a PLUS of a virtual register and something else. In that
5213 case, we want to emit the sum of the virtual register and the
5214 constant first and then add the other value. This allows virtual
5215 register instantiation to simply modify the constant rather than
5216 creating another one around this addition. */
5217 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
5218 && GET_CODE (XEXP (value
, 0)) == PLUS
5219 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5220 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5221 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5223 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
5224 XEXP (XEXP (value
, 0), 0), op2
,
5225 subtarget
, 0, OPTAB_LIB_WIDEN
);
5226 return expand_binop (GET_MODE (value
), binoptab
, temp
,
5227 force_operand (XEXP (XEXP (value
, 0), 1), 0),
5228 target
, 0, OPTAB_LIB_WIDEN
);
5231 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5232 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
5233 force_operand (op2
, NULL_RTX
),
5234 target
, 0, OPTAB_LIB_WIDEN
);
5235 /* We give UNSIGNEDP = 0 to expand_binop
5236 because the only operations we are expanding here are signed ones. */
5241 /* Subroutine of expand_expr:
5242 save the non-copied parts (LIST) of an expr (LHS), and return a list
5243 which can restore these values to their previous values,
5244 should something modify their storage. */
5247 save_noncopied_parts (lhs
, list
)
5254 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
5255 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
5256 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
5259 tree part
= TREE_VALUE (tail
);
5260 tree part_type
= TREE_TYPE (part
);
5261 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
5262 rtx target
= assign_temp (part_type
, 0, 1, 1);
5263 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
5264 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
5265 parts
= tree_cons (to_be_saved
,
5266 build (RTL_EXPR
, part_type
, NULL_TREE
,
5269 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
5274 /* Subroutine of expand_expr:
5275 record the non-copied parts (LIST) of an expr (LHS), and return a list
5276 which specifies the initial values of these parts. */
5279 init_noncopied_parts (lhs
, list
)
5286 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
5287 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
5288 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
5289 else if (TREE_PURPOSE (tail
))
5291 tree part
= TREE_VALUE (tail
);
5292 tree part_type
= TREE_TYPE (part
);
5293 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
5294 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
5299 /* Subroutine of expand_expr: return nonzero iff there is no way that
5300 EXP can reference X, which is being modified. TOP_P is nonzero if this
5301 call is going to be used to determine whether we need a temporary
5302 for EXP, as opposed to a recursive call to this function.
5304 It is always safe for this routine to return zero since it merely
5305 searches for optimization opportunities. */
5308 safe_from_p (x
, exp
, top_p
)
5315 static int save_expr_count
;
5316 static int save_expr_size
= 0;
5317 static tree
*save_expr_rewritten
;
5318 static tree save_expr_trees
[256];
5321 /* If EXP has varying size, we MUST use a target since we currently
5322 have no way of allocating temporaries of variable size
5323 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5324 So we assume here that something at a higher level has prevented a
5325 clash. This is somewhat bogus, but the best we can do. Only
5326 do this when X is BLKmode and when we are at the top level. */
5327 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5328 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5329 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5330 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5331 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5333 && GET_MODE (x
) == BLKmode
))
5336 if (top_p
&& save_expr_size
== 0)
5340 save_expr_count
= 0;
5341 save_expr_size
= sizeof (save_expr_trees
) / sizeof (save_expr_trees
[0]);
5342 save_expr_rewritten
= &save_expr_trees
[0];
5344 rtn
= safe_from_p (x
, exp
, 1);
5346 for (i
= 0; i
< save_expr_count
; ++i
)
5348 if (TREE_CODE (save_expr_trees
[i
]) != ERROR_MARK
)
5350 TREE_SET_CODE (save_expr_trees
[i
], SAVE_EXPR
);
5358 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5359 find the underlying pseudo. */
5360 if (GET_CODE (x
) == SUBREG
)
5363 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5367 /* If X is a location in the outgoing argument area, it is always safe. */
5368 if (GET_CODE (x
) == MEM
5369 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5370 || (GET_CODE (XEXP (x
, 0)) == PLUS
5371 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
5374 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5377 exp_rtl
= DECL_RTL (exp
);
5384 if (TREE_CODE (exp
) == TREE_LIST
)
5385 return ((TREE_VALUE (exp
) == 0
5386 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5387 && (TREE_CHAIN (exp
) == 0
5388 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5389 else if (TREE_CODE (exp
) == ERROR_MARK
)
5390 return 1; /* An already-visited SAVE_EXPR? */
5395 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5399 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5400 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5404 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5405 the expression. If it is set, we conflict iff we are that rtx or
5406 both are in memory. Otherwise, we check all operands of the
5407 expression recursively. */
5409 switch (TREE_CODE (exp
))
5412 return (staticp (TREE_OPERAND (exp
, 0))
5413 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5414 || TREE_STATIC (exp
));
5417 if (GET_CODE (x
) == MEM
)
5422 exp_rtl
= CALL_EXPR_RTL (exp
);
5425 /* Assume that the call will clobber all hard registers and
5427 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5428 || GET_CODE (x
) == MEM
)
5435 /* If a sequence exists, we would have to scan every instruction
5436 in the sequence to see if it was safe. This is probably not
5438 if (RTL_EXPR_SEQUENCE (exp
))
5441 exp_rtl
= RTL_EXPR_RTL (exp
);
5444 case WITH_CLEANUP_EXPR
:
5445 exp_rtl
= RTL_EXPR_RTL (exp
);
5448 case CLEANUP_POINT_EXPR
:
5449 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5452 exp_rtl
= SAVE_EXPR_RTL (exp
);
5456 /* This SAVE_EXPR might appear many times in the top-level
5457 safe_from_p() expression, and if it has a complex
5458 subexpression, examining it multiple times could result
5459 in a combinatorial explosion. E.g. on an Alpha
5460 running at least 200MHz, a Fortran test case compiled with
5461 optimization took about 28 minutes to compile -- even though
5462 it was only a few lines long, and the complicated line causing
5463 so much time to be spent in the earlier version of safe_from_p()
5464 had only 293 or so unique nodes.
5466 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5467 where it is so we can turn it back in the top-level safe_from_p()
5470 /* For now, don't bother re-sizing the array. */
5471 if (save_expr_count
>= save_expr_size
)
5473 save_expr_rewritten
[save_expr_count
++] = exp
;
5475 nops
= tree_code_length
[(int) SAVE_EXPR
];
5476 for (i
= 0; i
< nops
; i
++)
5478 tree operand
= TREE_OPERAND (exp
, i
);
5479 if (operand
== NULL_TREE
)
5481 TREE_SET_CODE (exp
, ERROR_MARK
);
5482 if (!safe_from_p (x
, operand
, 0))
5484 TREE_SET_CODE (exp
, SAVE_EXPR
);
5486 TREE_SET_CODE (exp
, ERROR_MARK
);
5490 /* The only operand we look at is operand 1. The rest aren't
5491 part of the expression. */
5492 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5494 case METHOD_CALL_EXPR
:
5495 /* This takes a rtx argument, but shouldn't appear here. */
5502 /* If we have an rtx, we do not need to scan our operands. */
5506 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
5507 for (i
= 0; i
< nops
; i
++)
5508 if (TREE_OPERAND (exp
, i
) != 0
5509 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5513 /* If we have an rtl, find any enclosed object. Then see if we conflict
5517 if (GET_CODE (exp_rtl
) == SUBREG
)
5519 exp_rtl
= SUBREG_REG (exp_rtl
);
5520 if (GET_CODE (exp_rtl
) == REG
5521 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5525 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5526 are memory and EXP is not readonly. */
5527 return ! (rtx_equal_p (x
, exp_rtl
)
5528 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5529 && ! TREE_READONLY (exp
)));
5532 /* If we reach here, it is safe. */
5536 /* Subroutine of expand_expr: return nonzero iff EXP is an
5537 expression whose type is statically determinable. */
5543 if (TREE_CODE (exp
) == PARM_DECL
5544 || TREE_CODE (exp
) == VAR_DECL
5545 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
5546 || TREE_CODE (exp
) == COMPONENT_REF
5547 || TREE_CODE (exp
) == ARRAY_REF
)
5552 /* Subroutine of expand_expr: return rtx if EXP is a
5553 variable or parameter; else return 0. */
5560 switch (TREE_CODE (exp
))
5564 return DECL_RTL (exp
);
5570 #ifdef MAX_INTEGER_COMPUTATION_MODE
5572 check_max_integer_computation_mode (exp
)
5575 enum tree_code code
;
5576 enum machine_mode mode
;
5578 /* Strip any NOPs that don't change the mode. */
5580 code
= TREE_CODE (exp
);
5582 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5583 if (code
== NOP_EXPR
5584 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5587 /* First check the type of the overall operation. We need only look at
5588 unary, binary and relational operations. */
5589 if (TREE_CODE_CLASS (code
) == '1'
5590 || TREE_CODE_CLASS (code
) == '2'
5591 || TREE_CODE_CLASS (code
) == '<')
5593 mode
= TYPE_MODE (TREE_TYPE (exp
));
5594 if (GET_MODE_CLASS (mode
) == MODE_INT
5595 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5596 fatal ("unsupported wide integer operation");
5599 /* Check operand of a unary op. */
5600 if (TREE_CODE_CLASS (code
) == '1')
5602 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5603 if (GET_MODE_CLASS (mode
) == MODE_INT
5604 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5605 fatal ("unsupported wide integer operation");
5608 /* Check operands of a binary/comparison op. */
5609 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
5611 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5612 if (GET_MODE_CLASS (mode
) == MODE_INT
5613 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5614 fatal ("unsupported wide integer operation");
5616 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
5617 if (GET_MODE_CLASS (mode
) == MODE_INT
5618 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5619 fatal ("unsupported wide integer operation");
5625 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5626 has any readonly fields. If any of the fields have types that
5627 contain readonly fields, return true as well. */
5630 readonly_fields_p (type
)
5635 for (field
= TYPE_FIELDS (type
); field
!= 0; field
= TREE_CHAIN (field
))
5636 if (TREE_CODE (field
) == FIELD_DECL
5637 && (TREE_READONLY (field
)
5638 || (TREE_CODE (TREE_TYPE (field
)) == RECORD_TYPE
5639 && readonly_fields_p (TREE_TYPE (field
)))))
5645 /* expand_expr: generate code for computing expression EXP.
5646 An rtx for the computed value is returned. The value is never null.
5647 In the case of a void EXP, const0_rtx is returned.
5649 The value may be stored in TARGET if TARGET is nonzero.
5650 TARGET is just a suggestion; callers must assume that
5651 the rtx returned may not be the same as TARGET.
5653 If TARGET is CONST0_RTX, it means that the value will be ignored.
5655 If TMODE is not VOIDmode, it suggests generating the
5656 result in mode TMODE. But this is done only when convenient.
5657 Otherwise, TMODE is ignored and the value generated in its natural mode.
5658 TMODE is just a suggestion; callers must assume that
5659 the rtx returned may not have mode TMODE.
5661 Note that TARGET may have neither TMODE nor MODE. In that case, it
5662 probably will not be used.
5664 If MODIFIER is EXPAND_SUM then when EXP is an addition
5665 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5666 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5667 products as above, or REG or MEM, or constant.
5668 Ordinarily in such cases we would output mul or add instructions
5669 and then return a pseudo reg containing the sum.
5671 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5672 it also marks a label as absolutely required (it can't be dead).
5673 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5674 This is used for outputting expressions used in initializers.
5676 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5677 with a constant address even if that address is not normally legitimate.
5678 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5681 expand_expr (exp
, target
, tmode
, modifier
)
5684 enum machine_mode tmode
;
5685 enum expand_modifier modifier
;
5687 register rtx op0
, op1
, temp
;
5688 tree type
= TREE_TYPE (exp
);
5689 int unsignedp
= TREE_UNSIGNED (type
);
5690 register enum machine_mode mode
;
5691 register enum tree_code code
= TREE_CODE (exp
);
5693 rtx subtarget
, original_target
;
5696 /* Used by check-memory-usage to make modifier read only. */
5697 enum expand_modifier ro_modifier
;
5699 /* Handle ERROR_MARK before anybody tries to access its type. */
5700 if (TREE_CODE (exp
) == ERROR_MARK
)
5702 op0
= CONST0_RTX (tmode
);
5708 mode
= TYPE_MODE (type
);
5709 /* Use subtarget as the target for operand 0 of a binary operation. */
5710 subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
5711 original_target
= target
;
5712 ignore
= (target
== const0_rtx
5713 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
5714 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
5715 || code
== COND_EXPR
)
5716 && TREE_CODE (type
) == VOID_TYPE
));
5718 /* Make a read-only version of the modifier. */
5719 if (modifier
== EXPAND_NORMAL
|| modifier
== EXPAND_SUM
5720 || modifier
== EXPAND_CONST_ADDRESS
|| modifier
== EXPAND_INITIALIZER
)
5721 ro_modifier
= modifier
;
5723 ro_modifier
= EXPAND_NORMAL
;
5725 /* Don't use hard regs as subtargets, because the combiner
5726 can only handle pseudo regs. */
5727 if (subtarget
&& REGNO (subtarget
) < FIRST_PSEUDO_REGISTER
)
5729 /* Avoid subtargets inside loops,
5730 since they hide some invariant expressions. */
5731 if (preserve_subexpressions_p ())
5734 /* If we are going to ignore this result, we need only do something
5735 if there is a side-effect somewhere in the expression. If there
5736 is, short-circuit the most common cases here. Note that we must
5737 not call expand_expr with anything but const0_rtx in case this
5738 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5742 if (! TREE_SIDE_EFFECTS (exp
))
5745 /* Ensure we reference a volatile object even if value is ignored, but
5746 don't do this if all we are doing is taking its address. */
5747 if (TREE_THIS_VOLATILE (exp
)
5748 && TREE_CODE (exp
) != FUNCTION_DECL
5749 && mode
!= VOIDmode
&& mode
!= BLKmode
5750 && modifier
!= EXPAND_CONST_ADDRESS
)
5752 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, ro_modifier
);
5753 if (GET_CODE (temp
) == MEM
)
5754 temp
= copy_to_reg (temp
);
5758 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
5759 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
5760 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
5761 VOIDmode
, ro_modifier
);
5762 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
5763 || code
== ARRAY_REF
)
5765 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
5766 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
5769 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
5770 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
5771 /* If the second operand has no side effects, just evaluate
5773 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
5774 VOIDmode
, ro_modifier
);
5775 else if (code
== BIT_FIELD_REF
)
5777 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
5778 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
5779 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, ro_modifier
);
5786 #ifdef MAX_INTEGER_COMPUTATION_MODE
5787 /* Only check stuff here if the mode we want is different from the mode
5788 of the expression; if it's the same, check_max_integer_computation_mode
5789 will handle it. Do we really need to check this stuff at all? */
5792 && GET_MODE (target
) != mode
5793 && TREE_CODE (exp
) != INTEGER_CST
5794 && TREE_CODE (exp
) != PARM_DECL
5795 && TREE_CODE (exp
) != ARRAY_REF
5796 && TREE_CODE (exp
) != COMPONENT_REF
5797 && TREE_CODE (exp
) != BIT_FIELD_REF
5798 && TREE_CODE (exp
) != INDIRECT_REF
5799 && TREE_CODE (exp
) != CALL_EXPR
5800 && TREE_CODE (exp
) != VAR_DECL
5801 && TREE_CODE (exp
) != RTL_EXPR
)
5803 enum machine_mode mode
= GET_MODE (target
);
5805 if (GET_MODE_CLASS (mode
) == MODE_INT
5806 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5807 fatal ("unsupported wide integer operation");
5811 && TREE_CODE (exp
) != INTEGER_CST
5812 && TREE_CODE (exp
) != PARM_DECL
5813 && TREE_CODE (exp
) != ARRAY_REF
5814 && TREE_CODE (exp
) != COMPONENT_REF
5815 && TREE_CODE (exp
) != BIT_FIELD_REF
5816 && TREE_CODE (exp
) != INDIRECT_REF
5817 && TREE_CODE (exp
) != VAR_DECL
5818 && TREE_CODE (exp
) != CALL_EXPR
5819 && TREE_CODE (exp
) != RTL_EXPR
5820 && GET_MODE_CLASS (tmode
) == MODE_INT
5821 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
5822 fatal ("unsupported wide integer operation");
5824 check_max_integer_computation_mode (exp
);
5827 /* If will do cse, generate all results into pseudo registers
5828 since 1) that allows cse to find more things
5829 and 2) otherwise cse could produce an insn the machine
5832 if (! cse_not_expected
&& mode
!= BLKmode
&& target
5833 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
5840 tree function
= decl_function_context (exp
);
5841 /* Handle using a label in a containing function. */
5842 if (function
!= current_function_decl
5843 && function
!= inline_function_decl
&& function
!= 0)
5845 struct function
*p
= find_function_data (function
);
5846 /* Allocate in the memory associated with the function
5847 that the label is in. */
5848 push_obstacks (p
->function_obstack
,
5849 p
->function_maybepermanent_obstack
);
5851 p
->expr
->x_forced_labels
5852 = gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (exp
),
5853 p
->expr
->x_forced_labels
);
5858 if (modifier
== EXPAND_INITIALIZER
)
5859 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
,
5864 temp
= gen_rtx_MEM (FUNCTION_MODE
,
5865 gen_rtx_LABEL_REF (Pmode
, label_rtx (exp
)));
5866 if (function
!= current_function_decl
5867 && function
!= inline_function_decl
&& function
!= 0)
5868 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
5873 if (DECL_RTL (exp
) == 0)
5875 error_with_decl (exp
, "prior parameter's size depends on `%s'");
5876 return CONST0_RTX (mode
);
5879 /* ... fall through ... */
5882 /* If a static var's type was incomplete when the decl was written,
5883 but the type is complete now, lay out the decl now. */
5884 if (DECL_SIZE (exp
) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5885 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
5887 push_obstacks_nochange ();
5888 end_temporary_allocation ();
5889 layout_decl (exp
, 0);
5890 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
5894 /* Although static-storage variables start off initialized, according to
5895 ANSI C, a memcpy could overwrite them with uninitialized values. So
5896 we check them too. This also lets us check for read-only variables
5897 accessed via a non-const declaration, in case it won't be detected
5898 any other way (e.g., in an embedded system or OS kernel without
5901 Aggregates are not checked here; they're handled elsewhere. */
5902 if (cfun
&& current_function_check_memory_usage
5904 && GET_CODE (DECL_RTL (exp
)) == MEM
5905 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
5907 enum memory_use_mode memory_usage
;
5908 memory_usage
= get_memory_usage_from_modifier (modifier
);
5910 if (memory_usage
!= MEMORY_USE_DONT
)
5911 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
5912 XEXP (DECL_RTL (exp
), 0), Pmode
,
5913 GEN_INT (int_size_in_bytes (type
)),
5914 TYPE_MODE (sizetype
),
5915 GEN_INT (memory_usage
),
5916 TYPE_MODE (integer_type_node
));
5919 /* ... fall through ... */
5923 if (DECL_RTL (exp
) == 0)
5926 /* Ensure variable marked as used even if it doesn't go through
5927 a parser. If it hasn't been used yet, write out an external
5929 if (! TREE_USED (exp
))
5931 assemble_external (exp
);
5932 TREE_USED (exp
) = 1;
5935 /* Show we haven't gotten RTL for this yet. */
5938 /* Handle variables inherited from containing functions. */
5939 context
= decl_function_context (exp
);
5941 /* We treat inline_function_decl as an alias for the current function
5942 because that is the inline function whose vars, types, etc.
5943 are being merged into the current function.
5944 See expand_inline_function. */
5946 if (context
!= 0 && context
!= current_function_decl
5947 && context
!= inline_function_decl
5948 /* If var is static, we don't need a static chain to access it. */
5949 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
5950 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
5954 /* Mark as non-local and addressable. */
5955 DECL_NONLOCAL (exp
) = 1;
5956 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
5958 mark_addressable (exp
);
5959 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
5961 addr
= XEXP (DECL_RTL (exp
), 0);
5962 if (GET_CODE (addr
) == MEM
)
5963 addr
= gen_rtx_MEM (Pmode
,
5964 fix_lexical_addr (XEXP (addr
, 0), exp
));
5966 addr
= fix_lexical_addr (addr
, exp
);
5967 temp
= change_address (DECL_RTL (exp
), mode
, addr
);
5970 /* This is the case of an array whose size is to be determined
5971 from its initializer, while the initializer is still being parsed.
5974 else if (GET_CODE (DECL_RTL (exp
)) == MEM
5975 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
5976 temp
= change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
5977 XEXP (DECL_RTL (exp
), 0));
5979 /* If DECL_RTL is memory, we are in the normal case and either
5980 the address is not valid or it is not a register and -fforce-addr
5981 is specified, get the address into a register. */
5983 else if (GET_CODE (DECL_RTL (exp
)) == MEM
5984 && modifier
!= EXPAND_CONST_ADDRESS
5985 && modifier
!= EXPAND_SUM
5986 && modifier
!= EXPAND_INITIALIZER
5987 && (! memory_address_p (DECL_MODE (exp
),
5988 XEXP (DECL_RTL (exp
), 0))
5990 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
5991 temp
= change_address (DECL_RTL (exp
), VOIDmode
,
5992 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
5994 /* If we got something, return it. But first, set the alignment
5995 if the address is a register. */
5998 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
5999 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6004 /* If the mode of DECL_RTL does not match that of the decl, it
6005 must be a promoted value. We return a SUBREG of the wanted mode,
6006 but mark it so that we know that it was already extended. */
6008 if (GET_CODE (DECL_RTL (exp
)) == REG
6009 && GET_MODE (DECL_RTL (exp
)) != mode
)
6011 /* Get the signedness used for this variable. Ensure we get the
6012 same mode we got when the variable was declared. */
6013 if (GET_MODE (DECL_RTL (exp
))
6014 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
6017 temp
= gen_rtx_SUBREG (mode
, DECL_RTL (exp
), 0);
6018 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6019 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6023 return DECL_RTL (exp
);
6026 return immed_double_const (TREE_INT_CST_LOW (exp
),
6027 TREE_INT_CST_HIGH (exp
), mode
);
6030 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
,
6031 EXPAND_MEMORY_USE_BAD
);
6034 /* If optimized, generate immediate CONST_DOUBLE
6035 which will be turned into memory by reload if necessary.
6037 We used to force a register so that loop.c could see it. But
6038 this does not allow gen_* patterns to perform optimizations with
6039 the constants. It also produces two insns in cases like "x = 1.0;".
6040 On most machines, floating-point constants are not permitted in
6041 many insns, so we'd end up copying it to a register in any case.
6043 Now, we do the copying in expand_binop, if appropriate. */
6044 return immed_real_const (exp
);
6048 if (! TREE_CST_RTL (exp
))
6049 output_constant_def (exp
);
6051 /* TREE_CST_RTL probably contains a constant address.
6052 On RISC machines where a constant address isn't valid,
6053 make some insns to get that address into a register. */
6054 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
6055 && modifier
!= EXPAND_CONST_ADDRESS
6056 && modifier
!= EXPAND_INITIALIZER
6057 && modifier
!= EXPAND_SUM
6058 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
6060 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
6061 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
6062 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
6063 return TREE_CST_RTL (exp
);
6065 case EXPR_WITH_FILE_LOCATION
:
6068 char *saved_input_filename
= input_filename
;
6069 int saved_lineno
= lineno
;
6070 input_filename
= EXPR_WFL_FILENAME (exp
);
6071 lineno
= EXPR_WFL_LINENO (exp
);
6072 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6073 emit_line_note (input_filename
, lineno
);
6074 /* Possibly avoid switching back and forth here.  */
6075 to_return
= expand_expr (EXPR_WFL_NODE (exp
), target
, tmode
, modifier
);
6076 input_filename
= saved_input_filename
;
6077 lineno
= saved_lineno
;
6082 context
= decl_function_context (exp
);
6084 /* If this SAVE_EXPR was at global context, assume we are an
6085 initialization function and move it into our context. */
6087 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6089 /* We treat inline_function_decl as an alias for the current function
6090 because that is the inline function whose vars, types, etc.
6091 are being merged into the current function.
6092 See expand_inline_function. */
6093 if (context
== current_function_decl
|| context
== inline_function_decl
)
6096 /* If this is non-local, handle it. */
6099 /* The following call just exists to abort if the context is
6100 not of a containing function. */
6101 find_function_data (context
);
6103 temp
= SAVE_EXPR_RTL (exp
);
6104 if (temp
&& GET_CODE (temp
) == REG
)
6106 put_var_into_stack (exp
);
6107 temp
= SAVE_EXPR_RTL (exp
);
6109 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6111 return change_address (temp
, mode
,
6112 fix_lexical_addr (XEXP (temp
, 0), exp
));
6114 if (SAVE_EXPR_RTL (exp
) == 0)
6116 if (mode
== VOIDmode
)
6119 temp
= assign_temp (type
, 3, 0, 0);
6121 SAVE_EXPR_RTL (exp
) = temp
;
6122 if (!optimize
&& GET_CODE (temp
) == REG
)
6123 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6126 /* If the mode of TEMP does not match that of the expression, it
6127 must be a promoted value. We pass store_expr a SUBREG of the
6128 wanted mode but mark it so that we know that it was already
6129 extended. Note that `unsignedp' was modified above in
6132 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6134 temp
= gen_rtx_SUBREG (mode
, SAVE_EXPR_RTL (exp
), 0);
6135 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6136 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6139 if (temp
== const0_rtx
)
6140 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6141 EXPAND_MEMORY_USE_BAD
);
6143 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
6145 TREE_USED (exp
) = 1;
6148 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6149 must be a promoted value. We return a SUBREG of the wanted mode,
6150 but mark it so that we know that it was already extended. */
6152 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6153 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6155 /* Compute the signedness and make the proper SUBREG. */
6156 promote_mode (type
, mode
, &unsignedp
, 0);
6157 temp
= gen_rtx_SUBREG (mode
, SAVE_EXPR_RTL (exp
), 0);
6158 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6159 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6163 return SAVE_EXPR_RTL (exp
);
6168 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6169 TREE_OPERAND (exp
, 0) = unsave_expr_now (TREE_OPERAND (exp
, 0));
6173 case PLACEHOLDER_EXPR
:
6175 tree placeholder_expr
;
6177 /* If there is an object on the head of the placeholder list,
6178 see if some object in it of type TYPE or a pointer to it. For
6179 further information, see tree.def. */
6180 for (placeholder_expr
= placeholder_list
;
6181 placeholder_expr
!= 0;
6182 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
6184 tree need_type
= TYPE_MAIN_VARIANT (type
);
6186 tree old_list
= placeholder_list
;
6189 /* Find the outermost reference that is of the type we want.
6190 If none, see if any object has a type that is a pointer to
6191 the type we want. */
6192 for (elt
= TREE_PURPOSE (placeholder_expr
);
6193 elt
!= 0 && object
== 0;
6195 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6196 || TREE_CODE (elt
) == COND_EXPR
)
6197 ? TREE_OPERAND (elt
, 1)
6198 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6199 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6200 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6201 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6202 ? TREE_OPERAND (elt
, 0) : 0))
6203 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
6206 for (elt
= TREE_PURPOSE (placeholder_expr
);
6207 elt
!= 0 && object
== 0;
6209 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6210 || TREE_CODE (elt
) == COND_EXPR
)
6211 ? TREE_OPERAND (elt
, 1)
6212 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6213 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6214 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6215 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6216 ? TREE_OPERAND (elt
, 0) : 0))
6217 if (POINTER_TYPE_P (TREE_TYPE (elt
))
6218 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
6220 object
= build1 (INDIRECT_REF
, need_type
, elt
);
6224 /* Expand this object skipping the list entries before
6225 it was found in case it is also a PLACEHOLDER_EXPR.
6226 In that case, we want to translate it using subsequent
6228 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6229 temp
= expand_expr (object
, original_target
, tmode
,
6231 placeholder_list
= old_list
;
6237 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6240 case WITH_RECORD_EXPR
:
6241 /* Put the object on the placeholder list, expand our first operand,
6242 and pop the list. */
6243 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6245 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
6246 tmode
, ro_modifier
);
6247 placeholder_list
= TREE_CHAIN (placeholder_list
);
6251 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6252 expand_goto (TREE_OPERAND (exp
, 0));
6254 expand_computed_goto (TREE_OPERAND (exp
, 0));
6258 expand_exit_loop_if_false (NULL_PTR
,
6259 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6262 case LABELED_BLOCK_EXPR
:
6263 if (LABELED_BLOCK_BODY (exp
))
6264 expand_expr_stmt (LABELED_BLOCK_BODY (exp
));
6265 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6268 case EXIT_BLOCK_EXPR
:
6269 if (EXIT_BLOCK_RETURN (exp
))
6270 sorry ("returned value in block_exit_expr");
6271 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6276 expand_start_loop (1);
6277 expand_expr_stmt (TREE_OPERAND (exp
, 0));
6285 tree vars
= TREE_OPERAND (exp
, 0);
6286 int vars_need_expansion
= 0;
6288 /* Need to open a binding contour here because
6289 if there are any cleanups they must be contained here. */
6290 expand_start_bindings (2);
6292 /* Mark the corresponding BLOCK for output in its proper place. */
6293 if (TREE_OPERAND (exp
, 2) != 0
6294 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6295 insert_block (TREE_OPERAND (exp
, 2));
6297 /* If VARS have not yet been expanded, expand them now. */
6300 if (DECL_RTL (vars
) == 0)
6302 vars_need_expansion
= 1;
6305 expand_decl_init (vars
);
6306 vars
= TREE_CHAIN (vars
);
6309 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, ro_modifier
);
6311 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6317 if (RTL_EXPR_SEQUENCE (exp
))
6319 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6321 emit_insns (RTL_EXPR_SEQUENCE (exp
));
6322 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6324 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6325 free_temps_for_rtl_expr (exp
);
6326 return RTL_EXPR_RTL (exp
);
6329 /* If we don't need the result, just ensure we evaluate any
6334 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6335 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
,
6336 EXPAND_MEMORY_USE_BAD
);
6340 /* All elts simple constants => refer to a constant in memory. But
6341 if this is a non-BLKmode mode, let it store a field at a time
6342 since that should make a CONST_INT or CONST_DOUBLE when we
6343 fold. Likewise, if we have a target we can use, it is best to
6344 store directly into the target unless the type is large enough
6345 that memcpy will be used. If we are making an initializer and
6346 all operands are constant, put it in memory as well. */
6347 else if ((TREE_STATIC (exp
)
6348 && ((mode
== BLKmode
6349 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6350 || TREE_ADDRESSABLE (exp
)
6351 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6352 && (! MOVE_BY_PIECES_P
6353 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6355 && ! mostly_zeros_p (exp
))))
6356 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
6358 rtx constructor
= output_constant_def (exp
);
6360 if (modifier
!= EXPAND_CONST_ADDRESS
6361 && modifier
!= EXPAND_INITIALIZER
6362 && modifier
!= EXPAND_SUM
6363 && (! memory_address_p (GET_MODE (constructor
),
6364 XEXP (constructor
, 0))
6366 && GET_CODE (XEXP (constructor
, 0)) != REG
)))
6367 constructor
= change_address (constructor
, VOIDmode
,
6368 XEXP (constructor
, 0));
6374 /* Handle calls that pass values in multiple non-contiguous
6375 locations. The Irix 6 ABI has examples of this. */
6376 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6377 || GET_CODE (target
) == PARALLEL
)
6379 if (mode
!= BLKmode
&& ! TREE_ADDRESSABLE (exp
))
6380 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6382 target
= assign_temp (type
, 0, 1, 1);
6385 if (TREE_READONLY (exp
))
6387 if (GET_CODE (target
) == MEM
)
6388 target
= copy_rtx (target
);
6390 RTX_UNCHANGING_P (target
) = 1;
6393 store_constructor (exp
, target
, TYPE_ALIGN (TREE_TYPE (exp
)), 0,
6394 int_size_in_bytes (TREE_TYPE (exp
)));
6400 tree exp1
= TREE_OPERAND (exp
, 0);
6403 tree string
= string_constant (exp1
, &index
);
6405 /* Try to optimize reads from const strings. */
6407 && TREE_CODE (string
) == STRING_CST
6408 && TREE_CODE (index
) == INTEGER_CST
6409 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6410 && GET_MODE_CLASS (mode
) == MODE_INT
6411 && GET_MODE_SIZE (mode
) == 1
6412 && modifier
!= EXPAND_MEMORY_USE_WO
)
6414 GEN_INT (TREE_STRING_POINTER (string
)[TREE_INT_CST_LOW (index
)]);
6416 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6417 op0
= memory_address (mode
, op0
);
6419 if (cfun
&& current_function_check_memory_usage
6420 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
6422 enum memory_use_mode memory_usage
;
6423 memory_usage
= get_memory_usage_from_modifier (modifier
);
6425 if (memory_usage
!= MEMORY_USE_DONT
)
6427 in_check_memory_usage
= 1;
6428 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
6430 GEN_INT (int_size_in_bytes (type
)),
6431 TYPE_MODE (sizetype
),
6432 GEN_INT (memory_usage
),
6433 TYPE_MODE (integer_type_node
));
6434 in_check_memory_usage
= 0;
6438 temp
= gen_rtx_MEM (mode
, op0
);
6439 /* If address was computed by addition,
6440 mark this as an element of an aggregate. */
6441 if (TREE_CODE (exp1
) == PLUS_EXPR
6442 || (TREE_CODE (exp1
) == SAVE_EXPR
6443 && TREE_CODE (TREE_OPERAND (exp1
, 0)) == PLUS_EXPR
)
6444 || AGGREGATE_TYPE_P (TREE_TYPE (exp
))
6445 || (TREE_CODE (exp1
) == ADDR_EXPR
6446 && (exp2
= TREE_OPERAND (exp1
, 0))
6447 && AGGREGATE_TYPE_P (TREE_TYPE (exp2
))))
6448 MEM_SET_IN_STRUCT_P (temp
, 1);
6450 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
) | flag_volatile
;
6451 MEM_ALIAS_SET (temp
) = get_alias_set (exp
);
6453 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6454 here, because, in C and C++, the fact that a location is accessed
6455 through a pointer to const does not mean that the value there can
6456 never change. Languages where it can never change should
6457 also set TREE_STATIC. */
6458 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
) & TREE_STATIC (exp
);
6460 /* If we are writing to this object and its type is a record with
6461 readonly fields, we must mark it as readonly so it will
6462 conflict with readonly references to those fields. */
6463 if (modifier
== EXPAND_MEMORY_USE_WO
6464 && TREE_CODE (type
) == RECORD_TYPE
&& readonly_fields_p (type
))
6465 RTX_UNCHANGING_P (temp
) = 1;
6471 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6475 tree array
= TREE_OPERAND (exp
, 0);
6476 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6477 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6478 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6481 /* Optimize the special-case of a zero lower bound.
6483 We convert the low_bound to sizetype to avoid some problems
6484 with constant folding. (E.g. suppose the lower bound is 1,
6485 and its mode is QI. Without the conversion, (ARRAY
6486 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6487 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6489 if (! integer_zerop (low_bound
))
6490 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6492 /* Fold an expression like: "foo"[2].
6493 This is not done in fold so it won't happen inside &.
6494 Don't fold if this is for wide characters since it's too
6495 difficult to do correctly and this is a very rare case. */
6497 if (TREE_CODE (array
) == STRING_CST
6498 && TREE_CODE (index
) == INTEGER_CST
6499 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6500 && GET_MODE_CLASS (mode
) == MODE_INT
6501 && GET_MODE_SIZE (mode
) == 1)
6503 GEN_INT (TREE_STRING_POINTER (array
)[TREE_INT_CST_LOW (index
)]);
6505 /* If this is a constant index into a constant array,
6506 just get the value from the array. Handle both the cases when
6507 we have an explicit constructor and when our operand is a variable
6508 that was declared const. */
6510 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
6511 && TREE_CODE (index
) == INTEGER_CST
6512 && 0 > compare_tree_int (index
,
6513 list_length (CONSTRUCTOR_ELTS
6514 (TREE_OPERAND (exp
, 0)))))
6518 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6519 i
= TREE_INT_CST_LOW (index
);
6520 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6524 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6525 tmode
, ro_modifier
);
6528 else if (optimize
>= 1
6529 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6530 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6531 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
6533 if (TREE_CODE (index
) == INTEGER_CST
)
6535 tree init
= DECL_INITIAL (array
);
6537 if (TREE_CODE (init
) == CONSTRUCTOR
)
6541 for (elem
= CONSTRUCTOR_ELTS (init
);
6543 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6544 elem
= TREE_CHAIN (elem
))
6548 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6549 tmode
, ro_modifier
);
6551 else if (TREE_CODE (init
) == STRING_CST
6552 && 0 > compare_tree_int (index
,
6553 TREE_STRING_LENGTH (init
)))
6555 (TREE_STRING_POINTER
6556 (init
)[TREE_INT_CST_LOW (index
)]));
6561 /* ... fall through ... */
6565 /* If the operand is a CONSTRUCTOR, we can just extract the
6566 appropriate field if it is present. Don't do this if we have
6567 already written the data since we want to refer to that copy
6568 and varasm.c assumes that's what we'll do. */
6569 if (code
!= ARRAY_REF
6570 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
6571 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
6575 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6576 elt
= TREE_CHAIN (elt
))
6577 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6578 /* We can normally use the value of the field in the
6579 CONSTRUCTOR. However, if this is a bitfield in
6580 an integral mode that we can fit in a HOST_WIDE_INT,
6581 we must mask only the number of bits in the bitfield,
6582 since this is done implicitly by the constructor. If
6583 the bitfield does not meet either of those conditions,
6584 we can't do this optimization. */
6585 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6586 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6588 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6589 <= HOST_BITS_PER_WIDE_INT
))))
6591 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6592 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6594 HOST_WIDE_INT bitsize
6595 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6597 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6599 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6600 op0
= expand_and (op0
, op1
, target
);
6604 enum machine_mode imode
6605 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6607 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6610 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6612 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6622 enum machine_mode mode1
;
6623 HOST_WIDE_INT bitsize
, bitpos
;
6626 unsigned int alignment
;
6627 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6628 &mode1
, &unsignedp
, &volatilep
,
6631 /* If we got back the original object, something is wrong. Perhaps
6632 we are evaluating an expression too early. In any event, don't
6633 infinitely recurse. */
6637 /* If TEM's type is a union of variable size, pass TARGET to the inner
6638 computation, since it will need a temporary and TARGET is known
6639 to have to do. This occurs in unchecked conversion in Ada. */
6641 op0
= expand_expr (tem
,
6642 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
6643 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
6645 ? target
: NULL_RTX
),
6647 (modifier
== EXPAND_INITIALIZER
6648 || modifier
== EXPAND_CONST_ADDRESS
)
6649 ? modifier
: EXPAND_NORMAL
);
6651 /* If this is a constant, put it into a register if it is a
6652 legitimate constant and OFFSET is 0 and memory if it isn't. */
6653 if (CONSTANT_P (op0
))
6655 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
6656 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
6658 op0
= force_reg (mode
, op0
);
6660 op0
= validize_mem (force_const_mem (mode
, op0
));
6665 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
6667 /* If this object is in memory, put it into a register.
6668 This case can't occur in C, but can in Ada if we have
6669 unchecked conversion of an expression from a scalar type to
6670 an array or record type. */
6671 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6672 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
6674 rtx memloc
= assign_temp (TREE_TYPE (tem
), 1, 1, 1);
6676 mark_temp_addr_taken (memloc
);
6677 emit_move_insn (memloc
, op0
);
6681 if (GET_CODE (op0
) != MEM
)
6684 if (GET_MODE (offset_rtx
) != ptr_mode
)
6686 #ifdef POINTERS_EXTEND_UNSIGNED
6687 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
6689 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
6693 /* A constant address in OP0 can have VOIDmode, we must not try
6694 to call force_reg for that case. Avoid that case. */
6695 if (GET_CODE (op0
) == MEM
6696 && GET_MODE (op0
) == BLKmode
6697 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
6699 && (bitpos
% bitsize
) == 0
6700 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
6701 && alignment
== GET_MODE_ALIGNMENT (mode1
))
6703 rtx temp
= change_address (op0
, mode1
,
6704 plus_constant (XEXP (op0
, 0),
6707 if (GET_CODE (XEXP (temp
, 0)) == REG
)
6710 op0
= change_address (op0
, mode1
,
6711 force_reg (GET_MODE (XEXP (temp
, 0)),
6717 op0
= change_address (op0
, VOIDmode
,
6718 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
6719 force_reg (ptr_mode
,
6723 /* Don't forget about volatility even if this is a bitfield. */
6724 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
6726 op0
= copy_rtx (op0
);
6727 MEM_VOLATILE_P (op0
) = 1;
6730 /* Check the access. */
6731 if (cfun
!= 0 && current_function_check_memory_usage
6732 && GET_CODE (op0
) == MEM
)
6734 enum memory_use_mode memory_usage
;
6735 memory_usage
= get_memory_usage_from_modifier (modifier
);
6737 if (memory_usage
!= MEMORY_USE_DONT
)
6742 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
6743 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
6745 /* Check the access right of the pointer. */
6746 if (size
> BITS_PER_UNIT
)
6747 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
6749 GEN_INT (size
/ BITS_PER_UNIT
),
6750 TYPE_MODE (sizetype
),
6751 GEN_INT (memory_usage
),
6752 TYPE_MODE (integer_type_node
));
6756 /* In cases where an aligned union has an unaligned object
6757 as a field, we might be extracting a BLKmode value from
6758 an integer-mode (e.g., SImode) object. Handle this case
6759 by doing the extract into an object as wide as the field
6760 (which we know to be the width of a basic mode), then
6761 storing into memory, and changing the mode to BLKmode.
6762 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6763 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6764 if (mode1
== VOIDmode
6765 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6766 || (modifier
!= EXPAND_CONST_ADDRESS
6767 && modifier
!= EXPAND_INITIALIZER
6768 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
6769 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6770 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6771 /* If the field isn't aligned enough to fetch as a memref,
6772 fetch it as a bit field. */
6773 || (mode1
!= BLKmode
6774 && SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
6775 && ((TYPE_ALIGN (TREE_TYPE (tem
))
6776 < GET_MODE_ALIGNMENT (mode
))
6777 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
6778 /* If the type and the field are a constant size and the
6779 size of the type isn't the same size as the bitfield,
6780 we must use bitfield operations. */
6782 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
6784 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
6786 || (modifier
!= EXPAND_CONST_ADDRESS
6787 && modifier
!= EXPAND_INITIALIZER
6789 && SLOW_UNALIGNED_ACCESS (mode
, alignment
)
6790 && (TYPE_ALIGN (type
) > alignment
6791 || bitpos
% TYPE_ALIGN (type
) != 0)))
6793 enum machine_mode ext_mode
= mode
;
6795 if (ext_mode
== BLKmode
6796 && ! (target
!= 0 && GET_CODE (op0
) == MEM
6797 && GET_CODE (target
) == MEM
6798 && bitpos
% BITS_PER_UNIT
== 0))
6799 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
6801 if (ext_mode
== BLKmode
)
6803 /* In this case, BITPOS must start at a byte boundary and
6804 TARGET, if specified, must be a MEM. */
6805 if (GET_CODE (op0
) != MEM
6806 || (target
!= 0 && GET_CODE (target
) != MEM
)
6807 || bitpos
% BITS_PER_UNIT
!= 0)
6810 op0
= change_address (op0
, VOIDmode
,
6811 plus_constant (XEXP (op0
, 0),
6812 bitpos
/ BITS_PER_UNIT
));
6814 target
= assign_temp (type
, 0, 1, 1);
6816 emit_block_move (target
, op0
,
6817 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6824 op0
= validize_mem (op0
);
6826 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
6827 mark_reg_pointer (XEXP (op0
, 0), alignment
);
6829 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
6830 unsignedp
, target
, ext_mode
, ext_mode
,
6832 int_size_in_bytes (TREE_TYPE (tem
)));
6834 /* If the result is a record type and BITSIZE is narrower than
6835 the mode of OP0, an integral mode, and this is a big endian
6836 machine, we must put the field into the high-order bits. */
6837 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
6838 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
6839 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
6840 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
6841 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
6845 if (mode
== BLKmode
)
6847 rtx
new = assign_stack_temp (ext_mode
,
6848 bitsize
/ BITS_PER_UNIT
, 0);
6850 emit_move_insn (new, op0
);
6851 op0
= copy_rtx (new);
6852 PUT_MODE (op0
, BLKmode
);
6853 MEM_SET_IN_STRUCT_P (op0
, 1);
6859 /* If the result is BLKmode, use that to access the object
6861 if (mode
== BLKmode
)
6864 /* Get a reference to just this component. */
6865 if (modifier
== EXPAND_CONST_ADDRESS
6866 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6867 op0
= gen_rtx_MEM (mode1
, plus_constant (XEXP (op0
, 0),
6868 (bitpos
/ BITS_PER_UNIT
)));
6870 op0
= change_address (op0
, mode1
,
6871 plus_constant (XEXP (op0
, 0),
6872 (bitpos
/ BITS_PER_UNIT
)));
6874 if (GET_CODE (op0
) == MEM
)
6875 MEM_ALIAS_SET (op0
) = get_alias_set (exp
);
6877 if (GET_CODE (XEXP (op0
, 0)) == REG
)
6878 mark_reg_pointer (XEXP (op0
, 0), alignment
);
6880 MEM_SET_IN_STRUCT_P (op0
, 1);
6881 MEM_VOLATILE_P (op0
) |= volatilep
;
6882 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
6883 || modifier
== EXPAND_CONST_ADDRESS
6884 || modifier
== EXPAND_INITIALIZER
)
6886 else if (target
== 0)
6887 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6889 convert_move (target
, op0
, unsignedp
);
6893 /* Intended for a reference to a buffer of a file-object in Pascal.
6894 But it's not certain that a special tree code will really be
6895 necessary for these. INDIRECT_REF might work for them. */
6901 /* Pascal set IN expression.
6904 rlo = set_low - (set_low%bits_per_word);
6905 the_word = set [ (index - rlo)/bits_per_word ];
6906 bit_index = index % bits_per_word;
6907 bitmask = 1 << bit_index;
6908 return !!(the_word & bitmask); */
6910 tree set
= TREE_OPERAND (exp
, 0);
6911 tree index
= TREE_OPERAND (exp
, 1);
6912 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
6913 tree set_type
= TREE_TYPE (set
);
6914 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
6915 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
6916 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
6917 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
6918 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
6919 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
6920 rtx setaddr
= XEXP (setval
, 0);
6921 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
6923 rtx diff
, quo
, rem
, addr
, bit
, result
;
6925 preexpand_calls (exp
);
6927 /* If domain is empty, answer is no. Likewise if index is constant
6928 and out of bounds. */
6929 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
6930 && TREE_CODE (set_low_bound
) == INTEGER_CST
6931 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
6932 || (TREE_CODE (index
) == INTEGER_CST
6933 && TREE_CODE (set_low_bound
) == INTEGER_CST
6934 && tree_int_cst_lt (index
, set_low_bound
))
6935 || (TREE_CODE (set_high_bound
) == INTEGER_CST
6936 && TREE_CODE (index
) == INTEGER_CST
6937 && tree_int_cst_lt (set_high_bound
, index
))))
6941 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6943 /* If we get here, we have to generate the code for both cases
6944 (in range and out of range). */
6946 op0
= gen_label_rtx ();
6947 op1
= gen_label_rtx ();
6949 if (! (GET_CODE (index_val
) == CONST_INT
6950 && GET_CODE (lo_r
) == CONST_INT
))
6952 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
6953 GET_MODE (index_val
), iunsignedp
, 0, op1
);
6956 if (! (GET_CODE (index_val
) == CONST_INT
6957 && GET_CODE (hi_r
) == CONST_INT
))
6959 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
6960 GET_MODE (index_val
), iunsignedp
, 0, op1
);
6963 /* Calculate the element number of bit zero in the first word
6965 if (GET_CODE (lo_r
) == CONST_INT
)
6966 rlow
= GEN_INT (INTVAL (lo_r
)
6967 & ~ ((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
6969 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
6970 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
6971 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
6973 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
6974 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
6976 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
6977 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
6978 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
6979 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
6981 addr
= memory_address (byte_mode
,
6982 expand_binop (index_mode
, add_optab
, diff
,
6983 setaddr
, NULL_RTX
, iunsignedp
,
6986 /* Extract the bit we want to examine */
6987 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
6988 gen_rtx_MEM (byte_mode
, addr
),
6989 make_tree (TREE_TYPE (index
), rem
),
6991 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
6992 GET_MODE (target
) == byte_mode
? target
: 0,
6993 1, OPTAB_LIB_WIDEN
);
6995 if (result
!= target
)
6996 convert_move (target
, result
, 1);
6998 /* Output the code to handle the out-of-range case. */
7001 emit_move_insn (target
, const0_rtx
);
7006 case WITH_CLEANUP_EXPR
:
7007 if (RTL_EXPR_RTL (exp
) == 0)
7010 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
7011 expand_decl_cleanup (NULL_TREE
, TREE_OPERAND (exp
, 2));
7013 /* That's it for this cleanup. */
7014 TREE_OPERAND (exp
, 2) = 0;
7016 return RTL_EXPR_RTL (exp
);
7018 case CLEANUP_POINT_EXPR
:
7020 /* Start a new binding layer that will keep track of all cleanup
7021 actions to be performed. */
7022 expand_start_bindings (2);
7024 target_temp_slot_level
= temp_slot_level
;
7026 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
7027 /* If we're going to use this value, load it up now. */
7029 op0
= force_not_mem (op0
);
7030 preserve_temp_slots (op0
);
7031 expand_end_bindings (NULL_TREE
, 0, 0);
7036 /* Check for a built-in function. */
7037 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7038 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7040 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7041 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7043 /* If this call was expanded already by preexpand_calls,
7044 just return the result we got. */
7045 if (CALL_EXPR_RTL (exp
) != 0)
7046 return CALL_EXPR_RTL (exp
);
7048 return expand_call (exp
, target
, ignore
);
7050 case NON_LVALUE_EXPR
:
7053 case REFERENCE_EXPR
:
7054 if (TREE_CODE (type
) == UNION_TYPE
)
7056 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7058 /* If both input and output are BLKmode, this conversion
7059 isn't actually doing anything unless we need to make the
7060 alignment stricter. */
7061 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
7062 && (TYPE_ALIGN (type
) <= TYPE_ALIGN (valtype
)
7063 || TYPE_ALIGN (type
) >= BIGGEST_ALIGNMENT
))
7064 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7069 if (mode
!= BLKmode
)
7070 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7072 target
= assign_temp (type
, 0, 1, 1);
7075 if (GET_CODE (target
) == MEM
)
7076 /* Store data into beginning of memory target. */
7077 store_expr (TREE_OPERAND (exp
, 0),
7078 change_address (target
, TYPE_MODE (valtype
), 0), 0);
7080 else if (GET_CODE (target
) == REG
)
7081 /* Store this field into a union of the proper type. */
7082 store_field (target
,
7083 MIN ((int_size_in_bytes (TREE_TYPE
7084 (TREE_OPERAND (exp
, 0)))
7086 GET_MODE_BITSIZE (mode
)),
7087 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7088 VOIDmode
, 0, BITS_PER_UNIT
,
7089 int_size_in_bytes (type
), 0);
7093 /* Return the entire union. */
7097 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7099 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7102 /* If the signedness of the conversion differs and OP0 is
7103 a promoted SUBREG, clear that indication since we now
7104 have to do the proper extension. */
7105 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7106 && GET_CODE (op0
) == SUBREG
)
7107 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7112 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
7113 if (GET_MODE (op0
) == mode
)
7116 /* If OP0 is a constant, just convert it into the proper mode. */
7117 if (CONSTANT_P (op0
))
7119 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7120 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7122 if (modifier
== EXPAND_INITIALIZER
)
7123 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7127 convert_to_mode (mode
, op0
,
7128 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7130 convert_move (target
, op0
,
7131 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7135 /* We come here from MINUS_EXPR when the second operand is a
7138 this_optab
= add_optab
;
7140 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7141 something else, make sure we add the register to the constant and
7142 then to the other thing. This case can occur during strength
7143 reduction and doing it this way will produce better code if the
7144 frame pointer or argument pointer is eliminated.
7146 fold-const.c will ensure that the constant is always in the inner
7147 PLUS_EXPR, so the only case we need to do anything about is if
7148 sp, ap, or fp is our second argument, in which case we must swap
7149 the innermost first argument and our second argument. */
7151 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7152 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7153 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7154 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7155 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7156 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7158 tree t
= TREE_OPERAND (exp
, 1);
7160 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7161 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7164 /* If the result is to be ptr_mode and we are adding an integer to
7165 something, we might be forming a constant. So try to use
7166 plus_constant. If it produces a sum and we can't accept it,
7167 use force_operand. This allows P = &ARR[const] to generate
7168 efficient code on machines where a SYMBOL_REF is not a valid
7171 If this is an EXPAND_SUM call, always return the sum. */
7172 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7173 || mode
== ptr_mode
)
7175 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7176 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7177 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7181 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7183 /* Use immed_double_const to ensure that the constant is
7184 truncated according to the mode of OP1, then sign extended
7185 to a HOST_WIDE_INT. Using the constant directly can result
7186 in non-canonical RTL in a 64x32 cross compile. */
7188 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7190 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7191 op1
= plus_constant (op1
, INTVAL (constant_part
));
7192 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7193 op1
= force_operand (op1
, target
);
7197 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7198 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7199 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7203 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7205 if (! CONSTANT_P (op0
))
7207 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7208 VOIDmode
, modifier
);
7209 /* Don't go to both_summands if modifier
7210 says it's not right to return a PLUS. */
7211 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7215 /* Use immed_double_const to ensure that the constant is
7216 truncated according to the mode of OP1, then sign extended
7217 to a HOST_WIDE_INT. Using the constant directly can result
7218 in non-canonical RTL in a 64x32 cross compile. */
7220 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7222 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7223 op0
= plus_constant (op0
, INTVAL (constant_part
));
7224 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7225 op0
= force_operand (op0
, target
);
7230 /* No sense saving up arithmetic to be done
7231 if it's all in the wrong mode to form part of an address.
7232 And force_operand won't know whether to sign-extend or
7234 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7235 || mode
!= ptr_mode
)
7238 preexpand_calls (exp
);
7239 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7242 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, ro_modifier
);
7243 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, ro_modifier
);
7246 /* Make sure any term that's a sum with a constant comes last. */
7247 if (GET_CODE (op0
) == PLUS
7248 && CONSTANT_P (XEXP (op0
, 1)))
7254 /* If adding to a sum including a constant,
7255 associate it to put the constant outside. */
7256 if (GET_CODE (op1
) == PLUS
7257 && CONSTANT_P (XEXP (op1
, 1)))
7259 rtx constant_term
= const0_rtx
;
7261 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7264 /* Ensure that MULT comes first if there is one. */
7265 else if (GET_CODE (op0
) == MULT
)
7266 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7268 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7270 /* Let's also eliminate constants from op0 if possible. */
7271 op0
= eliminate_constant_term (op0
, &constant_term
);
7273 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7274 their sum should be a constant. Form it into OP1, since the
7275 result we want will then be OP0 + OP1. */
7277 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7282 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7285 /* Put a constant term last and put a multiplication first. */
7286 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7287 temp
= op1
, op1
= op0
, op0
= temp
;
7289 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7290 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
7293 /* For initializers, we are allowed to return a MINUS of two
7294 symbolic constants. Here we handle all cases when both operands
7296 /* Handle difference of two symbolic constants,
7297 for the sake of an initializer. */
7298 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7299 && really_constant_p (TREE_OPERAND (exp
, 0))
7300 && really_constant_p (TREE_OPERAND (exp
, 1)))
7302 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
7303 VOIDmode
, ro_modifier
);
7304 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7305 VOIDmode
, ro_modifier
);
7307 /* If the last operand is a CONST_INT, use plus_constant of
7308 the negated constant. Else make the MINUS. */
7309 if (GET_CODE (op1
) == CONST_INT
)
7310 return plus_constant (op0
, - INTVAL (op1
));
7312 return gen_rtx_MINUS (mode
, op0
, op1
);
7314 /* Convert A - const to A + (-const). */
7315 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7317 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
7318 TREE_OPERAND (exp
, 1)));
7320 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
7321 /* If we can't negate the constant in TYPE, leave it alone and
7322 expand_binop will negate it for us. We used to try to do it
7323 here in the signed version of TYPE, but that doesn't work
7324 on POINTER_TYPEs. */;
7327 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
7331 this_optab
= sub_optab
;
7335 preexpand_calls (exp
);
7336 /* If first operand is constant, swap them.
7337 Thus the following special case checks need only
7338 check the second operand. */
7339 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7341 register tree t1
= TREE_OPERAND (exp
, 0);
7342 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7343 TREE_OPERAND (exp
, 1) = t1
;
7346 /* Attempt to return something suitable for generating an
7347 indexed address, for machines that support that. */
7349 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7350 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7351 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
7353 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7356 /* Apply distributive law if OP0 is x+c. */
7357 if (GET_CODE (op0
) == PLUS
7358 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
7363 (mode
, XEXP (op0
, 0),
7364 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
7365 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
7366 * INTVAL (XEXP (op0
, 1))));
7368 if (GET_CODE (op0
) != REG
)
7369 op0
= force_operand (op0
, NULL_RTX
);
7370 if (GET_CODE (op0
) != REG
)
7371 op0
= copy_to_mode_reg (mode
, op0
);
7374 gen_rtx_MULT (mode
, op0
,
7375 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
7378 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7381 /* Check for multiplying things that have been extended
7382 from a narrower type. If this machine supports multiplying
7383 in that narrower type with a result in the desired type,
7384 do it that way, and avoid the explicit type-conversion. */
7385 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7386 && TREE_CODE (type
) == INTEGER_TYPE
7387 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7388 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7389 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7390 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7391 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7392 /* Don't use a widening multiply if a shift will do. */
7393 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7394 > HOST_BITS_PER_WIDE_INT
)
7395 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7397 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7398 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7400 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7401 /* If both operands are extended, they must either both
7402 be zero-extended or both be sign-extended. */
7403 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7405 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7407 enum machine_mode innermode
7408 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7409 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7410 ? smul_widen_optab
: umul_widen_optab
);
7411 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7412 ? umul_widen_optab
: smul_widen_optab
);
7413 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7415 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7417 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7418 NULL_RTX
, VOIDmode
, 0);
7419 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7420 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7423 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7424 NULL_RTX
, VOIDmode
, 0);
7427 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7428 && innermode
== word_mode
)
7431 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7432 NULL_RTX
, VOIDmode
, 0);
7433 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7434 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7437 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7438 NULL_RTX
, VOIDmode
, 0);
7439 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7440 unsignedp
, OPTAB_LIB_WIDEN
);
7441 htem
= expand_mult_highpart_adjust (innermode
,
7442 gen_highpart (innermode
, temp
),
7444 gen_highpart (innermode
, temp
),
7446 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7451 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7452 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7453 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7455 case TRUNC_DIV_EXPR
:
7456 case FLOOR_DIV_EXPR
:
7458 case ROUND_DIV_EXPR
:
7459 case EXACT_DIV_EXPR
:
7460 preexpand_calls (exp
);
7461 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7463 /* Possible optimization: compute the dividend with EXPAND_SUM
7464 then if the divisor is constant can optimize the case
7465 where some terms of the dividend have coeffs divisible by it. */
7466 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7467 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7468 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7471 this_optab
= flodiv_optab
;
7474 case TRUNC_MOD_EXPR
:
7475 case FLOOR_MOD_EXPR
:
7477 case ROUND_MOD_EXPR
:
7478 preexpand_calls (exp
);
7479 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7481 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7482 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7483 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7485 case FIX_ROUND_EXPR
:
7486 case FIX_FLOOR_EXPR
:
7488 abort (); /* Not used for C. */
7490 case FIX_TRUNC_EXPR
:
7491 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7493 target
= gen_reg_rtx (mode
);
7494 expand_fix (target
, op0
, unsignedp
);
7498 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7500 target
= gen_reg_rtx (mode
);
7501 /* expand_float can't figure out what to do if FROM has VOIDmode.
7502 So give it the correct mode. With -O, cse will optimize this. */
7503 if (GET_MODE (op0
) == VOIDmode
)
7504 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7506 expand_float (target
, op0
,
7507 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7511 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7512 temp
= expand_unop (mode
, neg_optab
, op0
, target
, 0);
7518 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7520 /* Handle complex values specially. */
7521 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
7522 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
7523 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
7525 /* Unsigned abs is simply the operand. Testing here means we don't
7526 risk generating incorrect code below. */
7527 if (TREE_UNSIGNED (type
))
7530 return expand_abs (mode
, op0
, target
,
7531 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
7535 target
= original_target
;
7536 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
7537 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
7538 || GET_MODE (target
) != mode
7539 || (GET_CODE (target
) == REG
7540 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
7541 target
= gen_reg_rtx (mode
);
7542 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7543 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7545 /* First try to do it with a special MIN or MAX instruction.
7546 If that does not win, use a conditional jump to select the proper
7548 this_optab
= (TREE_UNSIGNED (type
)
7549 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
7550 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
7552 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
7557 /* At this point, a MEM target is no longer useful; we will get better
7560 if (GET_CODE (target
) == MEM
)
7561 target
= gen_reg_rtx (mode
);
7564 emit_move_insn (target
, op0
);
7566 op0
= gen_label_rtx ();
7568 /* If this mode is an integer too wide to compare properly,
7569 compare word by word. Rely on cse to optimize constant cases. */
7570 if (GET_MODE_CLASS (mode
) == MODE_INT
7571 && ! can_compare_p (GE
, mode
, ccp_jump
))
7573 if (code
== MAX_EXPR
)
7574 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7575 target
, op1
, NULL_RTX
, op0
);
7577 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7578 op1
, target
, NULL_RTX
, op0
);
7582 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7583 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
7584 unsignedp
, mode
, NULL_RTX
, 0, NULL_RTX
,
7587 emit_move_insn (target
, op1
);
7592 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7593 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
7599 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7600 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
7605 /* ??? Can optimize bitwise operations with one arg constant.
7606 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7607 and (a bitwise1 b) bitwise2 b (etc)
7608 but that is probably not worth while. */
7610 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7611 boolean values when we want in all cases to compute both of them. In
7612 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7613 as actual zero-or-1 values and then bitwise anding. In cases where
7614 there cannot be any side effects, better code would be made by
7615 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7616 how to recognize those cases. */
7618 case TRUTH_AND_EXPR
:
7620 this_optab
= and_optab
;
7625 this_optab
= ior_optab
;
7628 case TRUTH_XOR_EXPR
:
7630 this_optab
= xor_optab
;
7637 preexpand_calls (exp
);
7638 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7640 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7641 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
7644 /* Could determine the answer when only additive constants differ. Also,
7645 the addition of one can be handled by changing the condition. */
7652 case UNORDERED_EXPR
:
7659 preexpand_calls (exp
);
7660 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
7664 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7665 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
7667 && GET_CODE (original_target
) == REG
7668 && (GET_MODE (original_target
)
7669 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7671 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
7674 if (temp
!= original_target
)
7675 temp
= copy_to_reg (temp
);
7677 op1
= gen_label_rtx ();
7678 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
7679 GET_MODE (temp
), unsignedp
, 0, op1
);
7680 emit_move_insn (temp
, const1_rtx
);
7685 /* If no set-flag instruction, must generate a conditional
7686 store into a temporary variable. Drop through
7687 and handle this like && and ||. */
7689 case TRUTH_ANDIF_EXPR
:
7690 case TRUTH_ORIF_EXPR
:
7692 && (target
== 0 || ! safe_from_p (target
, exp
, 1)
7693 /* Make sure we don't have a hard reg (such as function's return
7694 value) live across basic blocks, if not optimizing. */
7695 || (!optimize
&& GET_CODE (target
) == REG
7696 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
7697 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7700 emit_clr_insn (target
);
7702 op1
= gen_label_rtx ();
7703 jumpifnot (exp
, op1
);
7706 emit_0_to_1_insn (target
);
7709 return ignore
? const0_rtx
: target
;
7711 case TRUTH_NOT_EXPR
:
7712 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7713 /* The parser is careful to generate TRUTH_NOT_EXPR
7714 only with operands that are always zero or one. */
7715 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
7716 target
, 1, OPTAB_LIB_WIDEN
);
7722 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
7724 return expand_expr (TREE_OPERAND (exp
, 1),
7725 (ignore
? const0_rtx
: target
),
7729 /* If we would have a "singleton" (see below) were it not for a
7730 conversion in each arm, bring that conversion back out. */
7731 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7732 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
7733 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
7734 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
7736 tree
true = TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
7737 tree
false = TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
7739 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7740 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7741 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7742 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7743 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7744 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7745 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7746 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7747 return expand_expr (build1 (NOP_EXPR
, type
,
7748 build (COND_EXPR
, TREE_TYPE (true),
7749 TREE_OPERAND (exp
, 0),
7751 target
, tmode
, modifier
);
7755 /* Note that COND_EXPRs whose type is a structure or union
7756 are required to be constructed to contain assignments of
7757 a temporary variable, so that we can evaluate them here
7758 for side effect only. If type is void, we must do likewise. */
7760 /* If an arm of the branch requires a cleanup,
7761 only that cleanup is performed. */
7764 tree binary_op
= 0, unary_op
= 0;
7766 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7767 convert it to our mode, if necessary. */
7768 if (integer_onep (TREE_OPERAND (exp
, 1))
7769 && integer_zerop (TREE_OPERAND (exp
, 2))
7770 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
7774 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
7779 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, ro_modifier
);
7780 if (GET_MODE (op0
) == mode
)
7784 target
= gen_reg_rtx (mode
);
7785 convert_move (target
, op0
, unsignedp
);
7789 /* Check for X ? A + B : A. If we have this, we can copy A to the
7790 output and conditionally add B. Similarly for unary operations.
7791 Don't do this if X has side-effects because those side effects
7792 might affect A or B and the "?" operation is a sequence point in
7793 ANSI. (operand_equal_p tests for side effects.) */
7795 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
7796 && operand_equal_p (TREE_OPERAND (exp
, 2),
7797 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
7798 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
7799 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
7800 && operand_equal_p (TREE_OPERAND (exp
, 1),
7801 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
7802 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
7803 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
7804 && operand_equal_p (TREE_OPERAND (exp
, 2),
7805 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
7806 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
7807 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
7808 && operand_equal_p (TREE_OPERAND (exp
, 1),
7809 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
7810 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
7812 /* If we are not to produce a result, we have no target. Otherwise,
7813 if a target was specified use it; it will not be used as an
7814 intermediate target unless it is safe. If no target, use a
7819 else if (original_target
7820 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
7821 || (singleton
&& GET_CODE (original_target
) == REG
7822 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
7823 && original_target
== var_rtx (singleton
)))
7824 && GET_MODE (original_target
) == mode
7825 #ifdef HAVE_conditional_move
7826 && (! can_conditionally_move_p (mode
)
7827 || GET_CODE (original_target
) == REG
7828 || TREE_ADDRESSABLE (type
))
7830 && ! (GET_CODE (original_target
) == MEM
7831 && MEM_VOLATILE_P (original_target
)))
7832 temp
= original_target
;
7833 else if (TREE_ADDRESSABLE (type
))
7836 temp
= assign_temp (type
, 0, 0, 1);
7838 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7839 do the test of X as a store-flag operation, do this as
7840 A + ((X != 0) << log C). Similarly for other simple binary
7841 operators. Only do for C == 1 if BRANCH_COST is low. */
7842 if (temp
&& singleton
&& binary_op
7843 && (TREE_CODE (binary_op
) == PLUS_EXPR
7844 || TREE_CODE (binary_op
) == MINUS_EXPR
7845 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
7846 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
7847 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
7848 : integer_onep (TREE_OPERAND (binary_op
, 1)))
7849 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
7852 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
7853 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
7854 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
7857 /* If we had X ? A : A + 1, do this as A + (X == 0).
7859 We have to invert the truth value here and then put it
7860 back later if do_store_flag fails. We cannot simply copy
7861 TREE_OPERAND (exp, 0) to another variable and modify that
7862 because invert_truthvalue can modify the tree pointed to
7864 if (singleton
== TREE_OPERAND (exp
, 1))
7865 TREE_OPERAND (exp
, 0)
7866 = invert_truthvalue (TREE_OPERAND (exp
, 0));
7868 result
= do_store_flag (TREE_OPERAND (exp
, 0),
7869 (safe_from_p (temp
, singleton
, 1)
7871 mode
, BRANCH_COST
<= 1);
7873 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
7874 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
7875 build_int_2 (tree_log2
7879 (safe_from_p (temp
, singleton
, 1)
7880 ? temp
: NULL_RTX
), 0);
7884 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
7885 return expand_binop (mode
, boptab
, op1
, result
, temp
,
7886 unsignedp
, OPTAB_LIB_WIDEN
);
7888 else if (singleton
== TREE_OPERAND (exp
, 1))
7889 TREE_OPERAND (exp
, 0)
7890 = invert_truthvalue (TREE_OPERAND (exp
, 0));
7893 do_pending_stack_adjust ();
7895 op0
= gen_label_rtx ();
7897 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
7901 /* If the target conflicts with the other operand of the
7902 binary op, we can't use it. Also, we can't use the target
7903 if it is a hard register, because evaluating the condition
7904 might clobber it. */
7906 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
7907 || (GET_CODE (temp
) == REG
7908 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
7909 temp
= gen_reg_rtx (mode
);
7910 store_expr (singleton
, temp
, 0);
7913 expand_expr (singleton
,
7914 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7915 if (singleton
== TREE_OPERAND (exp
, 1))
7916 jumpif (TREE_OPERAND (exp
, 0), op0
);
7918 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7920 start_cleanup_deferral ();
7921 if (binary_op
&& temp
== 0)
7922 /* Just touch the other operand. */
7923 expand_expr (TREE_OPERAND (binary_op
, 1),
7924 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7926 store_expr (build (TREE_CODE (binary_op
), type
,
7927 make_tree (type
, temp
),
7928 TREE_OPERAND (binary_op
, 1)),
7931 store_expr (build1 (TREE_CODE (unary_op
), type
,
7932 make_tree (type
, temp
)),
7936 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7937 comparison operator. If we have one of these cases, set the
7938 output to A, branch on A (cse will merge these two references),
7939 then set the output to FOO. */
7941 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
7942 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
7943 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7944 TREE_OPERAND (exp
, 1), 0)
7945 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
7946 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
7947 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
7949 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
7950 temp
= gen_reg_rtx (mode
);
7951 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
7952 jumpif (TREE_OPERAND (exp
, 0), op0
);
7954 start_cleanup_deferral ();
7955 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
7959 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
7960 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
7961 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7962 TREE_OPERAND (exp
, 2), 0)
7963 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
7964 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
7965 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
7967 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
7968 temp
= gen_reg_rtx (mode
);
7969 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
7970 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7972 start_cleanup_deferral ();
7973 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
7978 op1
= gen_label_rtx ();
7979 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7981 start_cleanup_deferral ();
7983 /* One branch of the cond can be void, if it never returns. For
7984 example A ? throw : E */
7986 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
7987 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
7989 expand_expr (TREE_OPERAND (exp
, 1),
7990 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7991 end_cleanup_deferral ();
7993 emit_jump_insn (gen_jump (op1
));
7996 start_cleanup_deferral ();
7998 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
7999 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8001 expand_expr (TREE_OPERAND (exp
, 2),
8002 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8005 end_cleanup_deferral ();
8016 /* Something needs to be initialized, but we didn't know
8017 where that thing was when building the tree. For example,
8018 it could be the return value of a function, or a parameter
8019 to a function which lays down in the stack, or a temporary
8020 variable which must be passed by reference.
8022 We guarantee that the expression will either be constructed
8023 or copied into our original target. */
8025 tree slot
= TREE_OPERAND (exp
, 0);
8026 tree cleanups
= NULL_TREE
;
8029 if (TREE_CODE (slot
) != VAR_DECL
)
8033 target
= original_target
;
8035 /* Set this here so that if we get a target that refers to a
8036 register variable that's already been used, put_reg_into_stack
8037 knows that it should fix up those uses. */
8038 TREE_USED (slot
) = 1;
8042 if (DECL_RTL (slot
) != 0)
8044 target
= DECL_RTL (slot
);
8045 /* If we have already expanded the slot, so don't do
8047 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8052 target
= assign_temp (type
, 2, 0, 1);
8053 /* All temp slots at this level must not conflict. */
8054 preserve_temp_slots (target
);
8055 DECL_RTL (slot
) = target
;
8056 if (TREE_ADDRESSABLE (slot
))
8058 TREE_ADDRESSABLE (slot
) = 0;
8059 mark_addressable (slot
);
8062 /* Since SLOT is not known to the called function
8063 to belong to its stack frame, we must build an explicit
8064 cleanup. This case occurs when we must build up a reference
8065 to pass the reference as an argument. In this case,
8066 it is very likely that such a reference need not be
8069 if (TREE_OPERAND (exp
, 2) == 0)
8070 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
8071 cleanups
= TREE_OPERAND (exp
, 2);
8076 /* This case does occur, when expanding a parameter which
8077 needs to be constructed on the stack. The target
8078 is the actual stack address that we want to initialize.
8079 The function we call will perform the cleanup in this case. */
8081 /* If we have already assigned it space, use that space,
8082 not target that we were passed in, as our target
8083 parameter is only a hint. */
8084 if (DECL_RTL (slot
) != 0)
8086 target
= DECL_RTL (slot
);
8087 /* If we have already expanded the slot, so don't do
8089 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8094 DECL_RTL (slot
) = target
;
8095 /* If we must have an addressable slot, then make sure that
8096 the RTL that we just stored in slot is OK. */
8097 if (TREE_ADDRESSABLE (slot
))
8099 TREE_ADDRESSABLE (slot
) = 0;
8100 mark_addressable (slot
);
8105 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8106 /* Mark it as expanded. */
8107 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8109 store_expr (exp1
, target
, 0);
8111 expand_decl_cleanup (NULL_TREE
, cleanups
);
8118 tree lhs
= TREE_OPERAND (exp
, 0);
8119 tree rhs
= TREE_OPERAND (exp
, 1);
8120 tree noncopied_parts
= 0;
8121 tree lhs_type
= TREE_TYPE (lhs
);
8123 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8124 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
8125 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
8126 TYPE_NONCOPIED_PARTS (lhs_type
));
8127 while (noncopied_parts
!= 0)
8129 expand_assignment (TREE_VALUE (noncopied_parts
),
8130 TREE_PURPOSE (noncopied_parts
), 0, 0);
8131 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8138 /* If lhs is complex, expand calls in rhs before computing it.
8139 That's so we don't compute a pointer and save it over a call.
8140 If lhs is simple, compute it first so we can give it as a
8141 target if the rhs is just a call. This avoids an extra temp and copy
8142 and that prevents a partial-subsumption which makes bad code.
8143 Actually we could treat component_ref's of vars like vars. */
8145 tree lhs
= TREE_OPERAND (exp
, 0);
8146 tree rhs
= TREE_OPERAND (exp
, 1);
8147 tree noncopied_parts
= 0;
8148 tree lhs_type
= TREE_TYPE (lhs
);
8152 if (TREE_CODE (lhs
) != VAR_DECL
8153 && TREE_CODE (lhs
) != RESULT_DECL
8154 && TREE_CODE (lhs
) != PARM_DECL
8155 && ! (TREE_CODE (lhs
) == INDIRECT_REF
8156 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs
, 0)))))
8157 preexpand_calls (exp
);
8159 /* Check for |= or &= of a bitfield of size one into another bitfield
8160 of size 1. In this case, (unless we need the result of the
8161 assignment) we can do this more efficiently with a
8162 test followed by an assignment, if necessary.
8164 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8165 things change so we do, this code should be enhanced to
8168 && TREE_CODE (lhs
) == COMPONENT_REF
8169 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8170 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8171 && TREE_OPERAND (rhs
, 0) == lhs
8172 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8173 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8174 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8176 rtx label
= gen_label_rtx ();
8178 do_jump (TREE_OPERAND (rhs
, 1),
8179 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8180 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8181 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8182 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8184 : integer_zero_node
)),
8186 do_pending_stack_adjust ();
8191 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
8192 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
8193 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
8194 TYPE_NONCOPIED_PARTS (lhs_type
));
8196 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8197 while (noncopied_parts
!= 0)
8199 expand_assignment (TREE_PURPOSE (noncopied_parts
),
8200 TREE_VALUE (noncopied_parts
), 0, 0);
8201 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8207 if (!TREE_OPERAND (exp
, 0))
8208 expand_null_return ();
8210 expand_return (TREE_OPERAND (exp
, 0));
8213 case PREINCREMENT_EXPR
:
8214 case PREDECREMENT_EXPR
:
8215 return expand_increment (exp
, 0, ignore
);
8217 case POSTINCREMENT_EXPR
:
8218 case POSTDECREMENT_EXPR
:
8219 /* Faster to treat as pre-increment if result is not used. */
8220 return expand_increment (exp
, ! ignore
, ignore
);
8223 /* If nonzero, TEMP will be set to the address of something that might
8224 be a MEM corresponding to a stack slot. */
8227 /* Are we taking the address of a nested function? */
8228 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8229 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8230 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8231 && ! TREE_STATIC (exp
))
8233 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8234 op0
= force_operand (op0
, target
);
8236 /* If we are taking the address of something erroneous, just
8238 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8242 /* We make sure to pass const0_rtx down if we came in with
8243 ignore set, to avoid doing the cleanups twice for something. */
8244 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8245 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8246 (modifier
== EXPAND_INITIALIZER
8247 ? modifier
: EXPAND_CONST_ADDRESS
));
8249 /* If we are going to ignore the result, OP0 will have been set
8250 to const0_rtx, so just return it. Don't get confused and
8251 think we are taking the address of the constant. */
8255 op0
= protect_from_queue (op0
, 0);
8257 /* We would like the object in memory. If it is a constant, we can
8258 have it be statically allocated into memory. For a non-constant,
8259 we need to allocate some memory and store the value into it. */
8261 if (CONSTANT_P (op0
))
8262 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8264 else if (GET_CODE (op0
) == MEM
)
8266 mark_temp_addr_taken (op0
);
8267 temp
= XEXP (op0
, 0);
8270 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8271 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
8273 /* If this object is in a register, it must be not
8275 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8276 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8278 mark_temp_addr_taken (memloc
);
8279 emit_move_insn (memloc
, op0
);
8283 if (GET_CODE (op0
) != MEM
)
8286 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8288 temp
= XEXP (op0
, 0);
8289 #ifdef POINTERS_EXTEND_UNSIGNED
8290 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
8291 && mode
== ptr_mode
)
8292 temp
= convert_memory_address (ptr_mode
, temp
);
8297 op0
= force_operand (XEXP (op0
, 0), target
);
8300 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
8301 op0
= force_reg (Pmode
, op0
);
8303 if (GET_CODE (op0
) == REG
8304 && ! REG_USERVAR_P (op0
))
8305 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8307 /* If we might have had a temp slot, add an equivalent address
8310 update_temp_slot_address (temp
, op0
);
8312 #ifdef POINTERS_EXTEND_UNSIGNED
8313 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8314 && mode
== ptr_mode
)
8315 op0
= convert_memory_address (ptr_mode
, op0
);
8320 case ENTRY_VALUE_EXPR
:
8323 /* COMPLEX type for Extended Pascal & Fortran */
8326 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8329 /* Get the rtx code of the operands. */
8330 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8331 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8334 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8338 /* Move the real (op0) and imaginary (op1) parts to their location. */
8339 emit_move_insn (gen_realpart (mode
, target
), op0
);
8340 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8342 insns
= get_insns ();
8345 /* Complex construction should appear as a single unit. */
8346 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8347 each with a separate pseudo as destination.
8348 It's not correct for flow to treat them as a unit. */
8349 if (GET_CODE (target
) != CONCAT
)
8350 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8358 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8359 return gen_realpart (mode
, op0
);
8362 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8363 return gen_imagpart (mode
, op0
);
8367 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8371 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8374 target
= gen_reg_rtx (mode
);
8378 /* Store the realpart and the negated imagpart to target. */
8379 emit_move_insn (gen_realpart (partmode
, target
),
8380 gen_realpart (partmode
, op0
));
8382 imag_t
= gen_imagpart (partmode
, target
);
8383 temp
= expand_unop (partmode
, neg_optab
,
8384 gen_imagpart (partmode
, op0
), imag_t
, 0);
8386 emit_move_insn (imag_t
, temp
);
8388 insns
= get_insns ();
8391 /* Conjugate should appear as a single unit
8392 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8393 each with a separate pseudo as destination.
8394 It's not correct for flow to treat them as a unit. */
8395 if (GET_CODE (target
) != CONCAT
)
8396 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8403 case TRY_CATCH_EXPR
:
8405 tree handler
= TREE_OPERAND (exp
, 1);
8407 expand_eh_region_start ();
8409 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8411 expand_eh_region_end (handler
);
8416 case TRY_FINALLY_EXPR
:
8418 tree try_block
= TREE_OPERAND (exp
, 0);
8419 tree finally_block
= TREE_OPERAND (exp
, 1);
8420 rtx finally_label
= gen_label_rtx ();
8421 rtx done_label
= gen_label_rtx ();
8422 rtx return_link
= gen_reg_rtx (Pmode
);
8423 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8424 (tree
) finally_label
, (tree
) return_link
);
8425 TREE_SIDE_EFFECTS (cleanup
) = 1;
8427 /* Start a new binding layer that will keep track of all cleanup
8428 actions to be performed. */
8429 expand_start_bindings (2);
8431 target_temp_slot_level
= temp_slot_level
;
8433 expand_decl_cleanup (NULL_TREE
, cleanup
);
8434 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8436 preserve_temp_slots (op0
);
8437 expand_end_bindings (NULL_TREE
, 0, 0);
8438 emit_jump (done_label
);
8439 emit_label (finally_label
);
8440 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8441 emit_indirect_jump (return_link
);
8442 emit_label (done_label
);
8446 case GOTO_SUBROUTINE_EXPR
:
8448 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8449 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8450 rtx return_address
= gen_label_rtx ();
8451 emit_move_insn (return_link
, gen_rtx_LABEL_REF (Pmode
, return_address
));
8453 emit_label (return_address
);
8459 rtx dcc
= get_dynamic_cleanup_chain ();
8460 emit_move_insn (dcc
, validize_mem (gen_rtx_MEM (Pmode
, dcc
)));
8466 rtx dhc
= get_dynamic_handler_chain ();
8467 emit_move_insn (dhc
, validize_mem (gen_rtx_MEM (Pmode
, dhc
)));
8472 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8475 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
8478 /* Here to do an ordinary binary operator, generating an instruction
8479 from the optab already placed in `this_optab'. */
8481 preexpand_calls (exp
);
8482 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8484 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8485 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8487 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
8488 unsignedp
, OPTAB_LIB_WIDEN
);
8494 /* Similar to expand_expr, except that we don't specify a target, target
8495 mode, or modifier and we return the alignment of the inner type. This is
8496 used in cases where it is not necessary to align the result to the
8497 alignment of its type as long as we know the alignment of the result, for
8498 example for comparisons of BLKmode values. */
8501 expand_expr_unaligned (exp
, palign
)
8503 unsigned int *palign
;
8506 tree type
= TREE_TYPE (exp
);
8507 register enum machine_mode mode
= TYPE_MODE (type
);
8509 /* Default the alignment we return to that of the type. */
8510 *palign
= TYPE_ALIGN (type
);
8512 /* The only cases in which we do anything special is if the resulting mode
8514 if (mode
!= BLKmode
)
8515 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8517 switch (TREE_CODE (exp
))
8521 case NON_LVALUE_EXPR
:
8522 /* Conversions between BLKmode values don't change the underlying
8523 alignment or value. */
8524 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == BLKmode
)
8525 return expand_expr_unaligned (TREE_OPERAND (exp
, 0), palign
);
8529 /* Much of the code for this case is copied directly from expand_expr.
8530 We need to duplicate it here because we will do something different
8531 in the fall-through case, so we need to handle the same exceptions
8534 tree array
= TREE_OPERAND (exp
, 0);
8535 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
8536 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
8537 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
8540 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
8543 /* Optimize the special-case of a zero lower bound.
8545 We convert the low_bound to sizetype to avoid some problems
8546 with constant folding. (E.g. suppose the lower bound is 1,
8547 and its mode is QI. Without the conversion, (ARRAY
8548 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8549 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8551 if (! integer_zerop (low_bound
))
8552 index
= size_diffop (index
, convert (sizetype
, low_bound
));
8554 /* If this is a constant index into a constant array,
8555 just get the value from the array. Handle both the cases when
8556 we have an explicit constructor and when our operand is a variable
8557 that was declared const. */
8559 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
8560 && 0 > compare_tree_int (index
,
8561 list_length (CONSTRUCTOR_ELTS
8562 (TREE_OPERAND (exp
, 0)))))
8566 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
8567 i
= TREE_INT_CST_LOW (index
);
8568 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
8572 return expand_expr_unaligned (fold (TREE_VALUE (elem
)), palign
);
8575 else if (optimize
>= 1
8576 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
8577 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
8578 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
8580 if (TREE_CODE (index
) == INTEGER_CST
)
8582 tree init
= DECL_INITIAL (array
);
8584 if (TREE_CODE (init
) == CONSTRUCTOR
)
8588 for (elem
= CONSTRUCTOR_ELTS (init
);
8589 ! tree_int_cst_equal (TREE_PURPOSE (elem
), index
);
8590 elem
= TREE_CHAIN (elem
))
8594 return expand_expr_unaligned (fold (TREE_VALUE (elem
)),
8601 /* ... fall through ... */
8605 /* If the operand is a CONSTRUCTOR, we can just extract the
8606 appropriate field if it is present. Don't do this if we have
8607 already written the data since we want to refer to that copy
8608 and varasm.c assumes that's what we'll do. */
8609 if (TREE_CODE (exp
) != ARRAY_REF
8610 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8611 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
8615 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
8616 elt
= TREE_CHAIN (elt
))
8617 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
8618 /* Note that unlike the case in expand_expr, we know this is
8619 BLKmode and hence not an integer. */
8620 return expand_expr_unaligned (TREE_VALUE (elt
), palign
);
8624 enum machine_mode mode1
;
8625 HOST_WIDE_INT bitsize
, bitpos
;
8628 unsigned int alignment
;
8630 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
8631 &mode1
, &unsignedp
, &volatilep
,
8634 /* If we got back the original object, something is wrong. Perhaps
8635 we are evaluating an expression too early. In any event, don't
8636 infinitely recurse. */
8640 op0
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8642 /* If this is a constant, put it into a register if it is a
8643 legitimate constant and OFFSET is 0 and memory if it isn't. */
8644 if (CONSTANT_P (op0
))
8646 enum machine_mode inner_mode
= TYPE_MODE (TREE_TYPE (tem
));
8648 if (inner_mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
8650 op0
= force_reg (inner_mode
, op0
);
8652 op0
= validize_mem (force_const_mem (inner_mode
, op0
));
8657 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
8659 /* If this object is in a register, put it into memory.
8660 This case can't occur in C, but can in Ada if we have
8661 unchecked conversion of an expression from a scalar type to
8662 an array or record type. */
8663 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8664 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
8666 rtx memloc
= assign_temp (TREE_TYPE (tem
), 1, 1, 1);
8668 mark_temp_addr_taken (memloc
);
8669 emit_move_insn (memloc
, op0
);
8673 if (GET_CODE (op0
) != MEM
)
8676 if (GET_MODE (offset_rtx
) != ptr_mode
)
8678 #ifdef POINTERS_EXTEND_UNSIGNED
8679 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
8681 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
8685 op0
= change_address (op0
, VOIDmode
,
8686 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
8687 force_reg (ptr_mode
,
8691 /* Don't forget about volatility even if this is a bitfield. */
8692 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
8694 op0
= copy_rtx (op0
);
8695 MEM_VOLATILE_P (op0
) = 1;
8698 /* Check the access. */
8699 if (current_function_check_memory_usage
&& GET_CODE (op0
) == MEM
)
8704 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
8705 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
8707 /* Check the access right of the pointer. */
8708 if (size
> BITS_PER_UNIT
)
8709 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
8710 to
, ptr_mode
, GEN_INT (size
/ BITS_PER_UNIT
),
8711 TYPE_MODE (sizetype
),
8712 GEN_INT (MEMORY_USE_RO
),
8713 TYPE_MODE (integer_type_node
));
8716 /* In cases where an aligned union has an unaligned object
8717 as a field, we might be extracting a BLKmode value from
8718 an integer-mode (e.g., SImode) object. Handle this case
8719 by doing the extract into an object as wide as the field
8720 (which we know to be the width of a basic mode), then
8721 storing into memory, and changing the mode to BLKmode.
8722 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8723 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8724 if (mode1
== VOIDmode
8725 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8726 || (SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
8727 && (TYPE_ALIGN (type
) > alignment
8728 || bitpos
% TYPE_ALIGN (type
) != 0)))
8730 enum machine_mode ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
8732 if (ext_mode
== BLKmode
)
8734 /* In this case, BITPOS must start at a byte boundary. */
8735 if (GET_CODE (op0
) != MEM
8736 || bitpos
% BITS_PER_UNIT
!= 0)
8739 op0
= change_address (op0
, VOIDmode
,
8740 plus_constant (XEXP (op0
, 0),
8741 bitpos
/ BITS_PER_UNIT
));
8745 rtx
new = assign_stack_temp (ext_mode
,
8746 bitsize
/ BITS_PER_UNIT
, 0);
8748 op0
= extract_bit_field (validize_mem (op0
), bitsize
, bitpos
,
8749 unsignedp
, NULL_RTX
, ext_mode
,
8750 ext_mode
, alignment
,
8751 int_size_in_bytes (TREE_TYPE (tem
)));
8753 /* If the result is a record type and BITSIZE is narrower than
8754 the mode of OP0, an integral mode, and this is a big endian
8755 machine, we must put the field into the high-order bits. */
8756 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
8757 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
8758 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
8759 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
8760 size_int (GET_MODE_BITSIZE
8766 emit_move_insn (new, op0
);
8767 op0
= copy_rtx (new);
8768 PUT_MODE (op0
, BLKmode
);
8772 /* Get a reference to just this component. */
8773 op0
= change_address (op0
, mode1
,
8774 plus_constant (XEXP (op0
, 0),
8775 (bitpos
/ BITS_PER_UNIT
)));
8777 MEM_ALIAS_SET (op0
) = get_alias_set (exp
);
8779 /* Adjust the alignment in case the bit position is not
8780 a multiple of the alignment of the inner object. */
8781 while (bitpos
% alignment
!= 0)
8784 if (GET_CODE (XEXP (op0
, 0)) == REG
)
8785 mark_reg_pointer (XEXP (op0
, 0), alignment
);
8787 MEM_IN_STRUCT_P (op0
) = 1;
8788 MEM_VOLATILE_P (op0
) |= volatilep
;
8790 *palign
= alignment
;
8799 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
/* NOTE(review): damaged extraction — statements are split across lines
   and several original lines are missing (the K&R parameter
   declarations, the lines between orig. 8823 and 8828 — presumably
   STRIP_NOPS calls — and the trailing `return 0;`).  Comments below
   describe only the visible fragments.  */
8802 /* Return the tree node if a ARG corresponds to a string constant or zero
8803 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8804 in bytes within the string that ARG is accessing. The type of the
8805 offset will be `sizetype'. */
8808 string_constant (arg
, ptr_offset
)
/* Case 1: ARG is directly &"literal" — offset is zero.  */
8814 if (TREE_CODE (arg
) == ADDR_EXPR
8815 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
8817 *ptr_offset
= size_zero_node
;
8818 return TREE_OPERAND (arg
, 0);
/* Case 2: ARG is a PLUS_EXPR; if either addend is &"literal", the other
   addend (converted to sizetype) is the byte offset into the string.  */
8820 else if (TREE_CODE (arg
) == PLUS_EXPR
)
8822 tree arg0
= TREE_OPERAND (arg
, 0);
8823 tree arg1
= TREE_OPERAND (arg
, 1);
8828 if (TREE_CODE (arg0
) == ADDR_EXPR
8829 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
8831 *ptr_offset
= convert (sizetype
, arg1
);
8832 return TREE_OPERAND (arg0
, 0);
8834 else if (TREE_CODE (arg1
) == ADDR_EXPR
8835 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
8837 *ptr_offset
= convert (sizetype
, arg0
);
8838 return TREE_OPERAND (arg1
, 0);
8845 /* Expand code for a post- or pre- increment or decrement
8846 and return the RTX for the result.
8847 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8850 expand_increment (exp
, post
, ignore
)
8854 register rtx op0
, op1
;
8855 register rtx temp
, value
;
8856 register tree incremented
= TREE_OPERAND (exp
, 0);
8857 optab this_optab
= add_optab
;
8859 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
8860 int op0_is_copy
= 0;
8861 int single_insn
= 0;
8862 /* 1 means we can't store into OP0 directly,
8863 because it is a subreg narrower than a word,
8864 and we don't dare clobber the rest of the word. */
8867 /* Stabilize any component ref that might need to be
8868 evaluated more than once below. */
8870 || TREE_CODE (incremented
) == BIT_FIELD_REF
8871 || (TREE_CODE (incremented
) == COMPONENT_REF
8872 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
8873 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
8874 incremented
= stabilize_reference (incremented
);
8875 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8876 ones into save exprs so that they don't accidentally get evaluated
8877 more than once by the code below. */
8878 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
8879 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
8880 incremented
= save_expr (incremented
);
8882 /* Compute the operands as RTX.
8883 Note whether OP0 is the actual lvalue or a copy of it:
8884 I believe it is a copy iff it is a register or subreg
8885 and insns were generated in computing it. */
8887 temp
= get_last_insn ();
8888 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_RW
);
8890 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8891 in place but instead must do sign- or zero-extension during assignment,
8892 so we copy it into a new register and let the code below use it as
8895 Note that we can safely modify this SUBREG since it is know not to be
8896 shared (it was made by the expand_expr call above). */
8898 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
8901 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
8905 else if (GET_CODE (op0
) == SUBREG
8906 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
8908 /* We cannot increment this SUBREG in place. If we are
8909 post-incrementing, get a copy of the old value. Otherwise,
8910 just mark that we cannot increment in place. */
8912 op0
= copy_to_reg (op0
);
8917 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
8918 && temp
!= get_last_insn ());
8919 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
8920 EXPAND_MEMORY_USE_BAD
);
8922 /* Decide whether incrementing or decrementing. */
8923 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
8924 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
8925 this_optab
= sub_optab
;
8927 /* Convert decrement by a constant into a negative increment. */
8928 if (this_optab
== sub_optab
8929 && GET_CODE (op1
) == CONST_INT
)
8931 op1
= GEN_INT (- INTVAL (op1
));
8932 this_optab
= add_optab
;
8935 /* For a preincrement, see if we can do this with a single instruction. */
8938 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
8939 if (icode
!= (int) CODE_FOR_nothing
8940 /* Make sure that OP0 is valid for operands 0 and 1
8941 of the insn we want to queue. */
8942 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
8943 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
8944 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
8948 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8949 then we cannot just increment OP0. We must therefore contrive to
8950 increment the original value. Then, for postincrement, we can return
8951 OP0 since it is a copy of the old value. For preincrement, expand here
8952 unless we can do it with a single insn.
8954 Likewise if storing directly into OP0 would clobber high bits
8955 we need to preserve (bad_subreg). */
8956 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
8958 /* This is the easiest way to increment the value wherever it is.
8959 Problems with multiple evaluation of INCREMENTED are prevented
8960 because either (1) it is a component_ref or preincrement,
8961 in which case it was stabilized above, or (2) it is an array_ref
8962 with constant index in an array in a register, which is
8963 safe to reevaluate. */
8964 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
8965 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
8966 ? MINUS_EXPR
: PLUS_EXPR
),
8969 TREE_OPERAND (exp
, 1));
8971 while (TREE_CODE (incremented
) == NOP_EXPR
8972 || TREE_CODE (incremented
) == CONVERT_EXPR
)
8974 newexp
= convert (TREE_TYPE (incremented
), newexp
);
8975 incremented
= TREE_OPERAND (incremented
, 0);
8978 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
8979 return post
? op0
: temp
;
8984 /* We have a true reference to the value in OP0.
8985 If there is an insn to add or subtract in this mode, queue it.
8986 Queueing the increment insn avoids the register shuffling
8987 that often results if we must increment now and first save
8988 the old value for subsequent use. */
8990 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8991 op0
= stabilize (op0
);
8994 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
8995 if (icode
!= (int) CODE_FOR_nothing
8996 /* Make sure that OP0 is valid for operands 0 and 1
8997 of the insn we want to queue. */
8998 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
8999 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9001 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9002 op1
= force_reg (mode
, op1
);
9004 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9006 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9008 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9009 ? force_reg (Pmode
, XEXP (op0
, 0))
9010 : copy_to_reg (XEXP (op0
, 0)));
9013 op0
= change_address (op0
, VOIDmode
, addr
);
9014 temp
= force_reg (GET_MODE (op0
), op0
);
9015 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9016 op1
= force_reg (mode
, op1
);
9018 /* The increment queue is LIFO, thus we have to `queue'
9019 the instructions in reverse order. */
9020 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9021 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9026 /* Preincrement, or we can't increment with one simple insn. */
9028 /* Save a copy of the value before inc or dec, to return it later. */
9029 temp
= value
= copy_to_reg (op0
);
9031 /* Arrange to return the incremented value. */
9032 /* Copy the rtx because expand_binop will protect from the queue,
9033 and the results of that would be invalid for us to return
9034 if our caller does emit_queue before using our result. */
9035 temp
= copy_rtx (value
= op0
);
9037 /* Increment however we can. */
9038 op1
= expand_binop (mode
, this_optab
, value
, op1
,
9039 current_function_check_memory_usage
? NULL_RTX
: op0
,
9040 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9041 /* Make sure the value is stored into OP0. */
9043 emit_move_insn (op0
, op1
);
9048 /* Expand all function calls contained within EXP, innermost ones first.
9049 But don't look within expressions that have sequence points.
9050 For each CALL_EXPR, record the rtx for its value
9051 in the CALL_EXPR_RTL field. */
/* NOTE(review): the extraction mangled this block — each statement is split
   across lines, and lines holding only braces, `return;', case labels
   (e.g. the CALL_EXPR / SAVE_EXPR / default labels of the switch below)
   and the function's storage class were dropped.  Tokens are preserved
   byte-for-byte; only comments are added.  */
9054 preexpand_calls (exp
)
9057 register int nops
, i
;
/* TYPE holds the tree-code class character of EXP ('e', '<', '1', '2', 'r'
   are the only classes that can contain calls — see the early-out below).  */
9058 int type
= TREE_CODE_CLASS (TREE_CODE (exp
))
;
9060 if (! do_preexpand_calls
)
9063 /* Only expressions and references can contain calls. */
9065 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r')
/* Dispatch on the tree code; the case labels were lost in extraction.  */
9068 switch (TREE_CODE (exp
))
9071 /* Do nothing if already expanded. */
9072 if (CALL_EXPR_RTL (exp
) != 0
9073 /* Do nothing if the call returns a variable-sized object. */
9074 || (TREE_CODE (TREE_TYPE (exp
)) != VOID_TYPE
9075 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
9076 /* Do nothing to built-in functions. */
9077 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
9078 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
9080 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
/* Expand the call now and remember its result rtx on the CALL_EXPR.  */
9083 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
9088 case TRUTH_ANDIF_EXPR
:
9089 case TRUTH_ORIF_EXPR
:
9090 /* If we find one of these, then we can be sure
9091 the adjust will be done for it (since it makes jumps).
9092 Do it now, so that if this is inside an argument
9093 of a function, we don't get the stack adjustment
9094 after some other args have already been pushed. */
9095 do_pending_stack_adjust ();
9100 case WITH_CLEANUP_EXPR
:
9101 case CLEANUP_POINT_EXPR
:
9102 case TRY_CATCH_EXPR
:
9106 if (SAVE_EXPR_RTL (exp
) != 0)
/* Default: recurse into each operand that may itself contain calls.  */
9113 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
9114 for (i
= 0; i
< nops
; i
++)
9115 if (TREE_OPERAND (exp
, i
) != 0)
9117 if (TREE_CODE (exp
) == TARGET_EXPR
&& i
== 2)
9118 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9119 It doesn't happen before the call is made. */
9123 type
= TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
9124 if (type
== 'e' || type
== '<' || type
== '1' || type
== '2'
9126 preexpand_calls (TREE_OPERAND (exp
, i
));
9131 /* At the start of a function, record that we have no previously-pushed
9132 arguments waiting to be popped. */
9135 init_pending_stack_adjust ()
9137 pending_stack_adjust
= 0;
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  /* Only safe when optimizing, the epilogue ignores the stack pointer,
     and no inlining concern forces us to keep the adjustment.  */
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}
9162 /* Pop any previously-pushed arguments that have not been popped yet. */
9165 do_pending_stack_adjust ()
9167 if (inhibit_defer_pop
== 0)
9169 if (pending_stack_adjust
!= 0)
9170 adjust_stack (GEN_INT (pending_stack_adjust
));
9171 pending_stack_adjust
= 0;
9175 /* Expand conditional expressions. */
9177 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9178 LABEL is an rtx of code CODE_LABEL, in this function and all the
9182 jumpifnot (exp
, label
)
9186 do_jump (exp
, label
, NULL_RTX
);
9189 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9196 do_jump (exp
, NULL_RTX
, label
);
9199 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9200 the result is zero, or IF_TRUE_LABEL if the result is one.
9201 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9202 meaning fall through in that case.
9204 do_jump always does any pending stack adjust except when it does not
9205 actually perform a jump. An example where there is no jump
9206 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9208 This function is responsible for optimizing cases such as
9209 &&, || and comparison operators in EXP. */
/* NOTE(review): extraction mangled this function — statements are split
   across lines, and lines holding only braces, `break;'/`return;', the
   switch head, and most case labels (INTEGER_CST, COND_EXPR, EQ_EXPR,
   the relational codes, etc.) were dropped.  Tokens below are preserved
   byte-for-byte; only comments are added.  */
9212 do_jump (exp
, if_false_label
, if_true_label
)
9214 rtx if_false_label
, if_true_label
;
9216 register enum tree_code code
= TREE_CODE (exp
);
9217 /* Some cases need to create a label to jump to
9218 in order to properly fall through.
9219 These cases set DROP_THROUGH_LABEL nonzero. */
9220 rtx drop_through_label
= 0;
9224 enum machine_mode mode
;
9226 #ifdef MAX_INTEGER_COMPUTATION_MODE
9227 check_max_integer_computation_mode (exp
);
/* Constant-operand case: jump straight to whichever label matches.  */
9238 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9244 /* This is not true with #pragma weak */
9246 /* The address of something can never be zero. */
9248 emit_jump (if_true_label
);
9253 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9254 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9255 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
9258 /* If we are narrowing the operand, we have to do the compare in the
9260 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9261 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9263 case NON_LVALUE_EXPR
:
9264 case REFERENCE_EXPR
:
9269 /* These cannot change zero->non-zero or vice versa. */
9270 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9273 case WITH_RECORD_EXPR
:
9274 /* Put the object on the placeholder list, recurse through our first
9275 operand, and pop the list. */
9276 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9278 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9279 placeholder_list
= TREE_CHAIN (placeholder_list
);
9283 /* This is never less insns than evaluating the PLUS_EXPR followed by
9284 a test and can be longer if the test is eliminated. */
9286 /* Reduce to minus. */
9287 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9288 TREE_OPERAND (exp
, 0),
9289 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9290 TREE_OPERAND (exp
, 1))));
9291 /* Process as MINUS. */
9295 /* Non-zero iff operands of minus differ. */
9296 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9297 TREE_OPERAND (exp
, 0),
9298 TREE_OPERAND (exp
, 1)),
9299 NE
, NE
, if_false_label
, if_true_label
);
9303 /* If we are AND'ing with a small constant, do this comparison in the
9304 smallest type that fits. If the machine doesn't have comparisons
9305 that small, it will be converted back to the wider comparison.
9306 This helps if we are testing the sign bit of a narrower object.
9307 combine can't do this for us because it can't know whether a
9308 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9310 if (! SLOW_BYTE_ACCESS
9311 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9312 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9313 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9314 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9315 && (type
= type_for_mode (mode
, 1)) != 0
9316 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9317 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9318 != CODE_FOR_nothing
))
9320 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9325 case TRUTH_NOT_EXPR
:
9326 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9329 case TRUTH_ANDIF_EXPR
:
9330 if (if_false_label
== 0)
9331 if_false_label
= drop_through_label
= gen_label_rtx ();
9332 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9333 start_cleanup_deferral ();
9334 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9335 end_cleanup_deferral ();
9338 case TRUTH_ORIF_EXPR
:
9339 if (if_true_label
== 0)
9340 if_true_label
= drop_through_label
= gen_label_rtx ();
9341 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9342 start_cleanup_deferral ();
9343 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9344 end_cleanup_deferral ();
/* COMPOUND_EXPR: evaluate the first operand for side effects only,
   then jump on the second.  */
9349 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9350 preserve_temp_slots (NULL_RTX
);
9354 do_pending_stack_adjust ();
9355 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9362 HOST_WIDE_INT bitsize
, bitpos
;
9364 enum machine_mode mode
;
9368 unsigned int alignment
;
9370 /* Get description of this reference. We don't actually care
9371 about the underlying object here. */
9372 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9373 &unsignedp
, &volatilep
, &alignment
);
9375 type
= type_for_size (bitsize
, unsignedp
);
9376 if (! SLOW_BYTE_ACCESS
9377 && type
!= 0 && bitsize
>= 0
9378 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9379 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9380 != CODE_FOR_nothing
))
9382 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9389 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9390 if (integer_onep (TREE_OPERAND (exp
, 1))
9391 && integer_zerop (TREE_OPERAND (exp
, 2)))
9392 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9394 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9395 && integer_onep (TREE_OPERAND (exp
, 2)))
9396 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
/* General conditional: jump around the THEN arm into the ELSE arm.  */
9400 register rtx label1
= gen_label_rtx ();
9401 drop_through_label
= gen_label_rtx ();
9403 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9405 start_cleanup_deferral ();
9406 /* Now the THEN-expression. */
9407 do_jump (TREE_OPERAND (exp
, 1),
9408 if_false_label
? if_false_label
: drop_through_label
,
9409 if_true_label
? if_true_label
: drop_through_label
);
9410 /* In case the do_jump just above never jumps. */
9411 do_pending_stack_adjust ();
9412 emit_label (label1
);
9414 /* Now the ELSE-expression. */
9415 do_jump (TREE_OPERAND (exp
, 2),
9416 if_false_label
? if_false_label
: drop_through_label
,
9417 if_true_label
? if_true_label
: drop_through_label
);
9418 end_cleanup_deferral ();
/* EQ_EXPR: complex values compare equal iff both parts compare equal.  */
9424 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9426 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9427 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9429 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9430 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9433 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9434 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9435 fold (build1 (REALPART_EXPR
,
9436 TREE_TYPE (inner_type
),
9438 fold (build1 (REALPART_EXPR
,
9439 TREE_TYPE (inner_type
),
9441 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9442 fold (build1 (IMAGPART_EXPR
,
9443 TREE_TYPE (inner_type
),
9445 fold (build1 (IMAGPART_EXPR
,
9446 TREE_TYPE (inner_type
),
9448 if_false_label
, if_true_label
);
9451 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9452 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9454 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9455 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9456 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9458 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
/* NE_EXPR: mirror image of the EQ_EXPR case above.  */
9464 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9466 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9467 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9469 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9470 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9473 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9474 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9475 fold (build1 (REALPART_EXPR
,
9476 TREE_TYPE (inner_type
),
9478 fold (build1 (REALPART_EXPR
,
9479 TREE_TYPE (inner_type
),
9481 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9482 fold (build1 (IMAGPART_EXPR
,
9483 TREE_TYPE (inner_type
),
9485 fold (build1 (IMAGPART_EXPR
,
9486 TREE_TYPE (inner_type
),
9488 if_false_label
, if_true_label
);
9491 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9492 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9494 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9495 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9496 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9498 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
/* Relational cases: wide integers without a direct compare go word-by-word.  */
9503 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9504 if (GET_MODE_CLASS (mode
) == MODE_INT
9505 && ! can_compare_p (LT
, mode
, ccp_jump
))
9506 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9508 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
9512 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9513 if (GET_MODE_CLASS (mode
) == MODE_INT
9514 && ! can_compare_p (LE
, mode
, ccp_jump
))
9515 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9517 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9521 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9522 if (GET_MODE_CLASS (mode
) == MODE_INT
9523 && ! can_compare_p (GT
, mode
, ccp_jump
))
9524 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9526 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9530 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9531 if (GET_MODE_CLASS (mode
) == MODE_INT
9532 && ! can_compare_p (GE
, mode
, ccp_jump
))
9533 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9535 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9538 case UNORDERED_EXPR
:
9541 enum rtx_code cmp
, rcmp
;
9544 if (code
== UNORDERED_EXPR
)
9545 cmp
= UNORDERED
, rcmp
= ORDERED
;
9547 cmp
= ORDERED
, rcmp
= UNORDERED
;
9548 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9551 if (! can_compare_p (cmp
, mode
, ccp_jump
)
9552 && (can_compare_p (rcmp
, mode
, ccp_jump
)
9553 /* If the target doesn't provide either UNORDERED or ORDERED
9554 comparisons, canonicalize on UNORDERED for the library. */
9555 || rcmp
== UNORDERED
))
9559 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
9561 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
/* Unordered-relational codes map to an rtx code RCODE1 or decompose.  */
9566 enum rtx_code rcode1
;
9567 enum tree_code tcode2
;
9591 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9592 if (can_compare_p (rcode1
, mode
, ccp_jump
))
9593 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
9597 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
9598 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
9601 /* If the target doesn't support combined unordered
9602 compares, decompose into UNORDERED + comparison. */
9603 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
9604 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
9605 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
9606 do_jump (exp
, if_false_label
, if_true_label
);
/* Default: evaluate EXP and compare the result against zero.  */
9613 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
9615 /* This is not needed any more and causes poor code since it causes
9616 comparisons and tests from non-SI objects to have different code
9618 /* Copy to register to avoid generating bad insns by cse
9619 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9620 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9621 temp
= copy_to_reg (temp
);
9623 do_pending_stack_adjust ();
9624 /* Do any postincrements in the expression that was tested. */
9627 if (GET_CODE (temp
) == CONST_INT
|| GET_CODE (temp
) == LABEL_REF
)
9629 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
9633 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
9634 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
9635 /* Note swapping the labels gives us not-equal. */
9636 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
9637 else if (GET_MODE (temp
) != VOIDmode
)
9638 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
9639 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9640 GET_MODE (temp
), NULL_RTX
, 0,
9641 if_false_label
, if_true_label
);
9646 if (drop_through_label
)
9648 /* If do_jump produces code that might be jumped around,
9649 do any stack adjusts from that code, before the place
9650 where control merges in. */
9651 do_pending_stack_adjust ();
9652 emit_label (drop_through_label
);
9656 /* Given a comparison expression EXP for values too wide to be compared
9657 with one insn, test the comparison and jump to the appropriate label.
9658 The code of EXP is ignored; we always test GT if SWAP is 0,
9659 and LT if SWAP is 1. */
9662 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
9665 rtx if_false_label
, if_true_label
;
9667 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
9668 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
9669 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9670 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9672 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
);
9675 /* Compare OP0 with OP1, word at a time, in mode MODE.
9676 UNSIGNEDP says to do unsigned comparison.
9677 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9680 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
)
9681 enum machine_mode mode
;
9684 rtx if_false_label
, if_true_label
;
9686 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
9687 rtx drop_through_label
= 0;
9690 if (! if_true_label
|| ! if_false_label
)
9691 drop_through_label
= gen_label_rtx ();
9692 if (! if_true_label
)
9693 if_true_label
= drop_through_label
;
9694 if (! if_false_label
)
9695 if_false_label
= drop_through_label
;
9697 /* Compare a word at a time, high order first. */
9698 for (i
= 0; i
< nwords
; i
++)
9700 rtx op0_word
, op1_word
;
9702 if (WORDS_BIG_ENDIAN
)
9704 op0_word
= operand_subword_force (op0
, i
, mode
);
9705 op1_word
= operand_subword_force (op1
, i
, mode
);
9709 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
9710 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
9713 /* All but high-order word must be compared as unsigned. */
9714 do_compare_rtx_and_jump (op0_word
, op1_word
, GT
,
9715 (unsignedp
|| i
> 0), word_mode
, NULL_RTX
, 0,
9716 NULL_RTX
, if_true_label
);
9718 /* Consider lower words only if these are equal. */
9719 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
9720 NULL_RTX
, 0, NULL_RTX
, if_false_label
);
9724 emit_jump (if_false_label
);
9725 if (drop_through_label
)
9726 emit_label (drop_through_label
);
9729 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9730 with one insn, test the comparison and jump to the appropriate label. */
9733 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
)
9735 rtx if_false_label
, if_true_label
;
9737 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
9738 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9739 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9740 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
9742 rtx drop_through_label
= 0;
9744 if (! if_false_label
)
9745 drop_through_label
= if_false_label
= gen_label_rtx ();
9747 for (i
= 0; i
< nwords
; i
++)
9748 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
9749 operand_subword_force (op1
, i
, mode
),
9750 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9751 word_mode
, NULL_RTX
, 0, if_false_label
,
9755 emit_jump (if_true_label
);
9756 if (drop_through_label
)
9757 emit_label (drop_through_label
);
9760 /* Jump according to whether OP0 is 0.
9761 We assume that OP0 has an integer mode that is too wide
9762 for the available compare insns. */
9765 do_jump_by_parts_equality_rtx (op0
, if_false_label
, if_true_label
)
9767 rtx if_false_label
, if_true_label
;
9769 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
9772 rtx drop_through_label
= 0;
9774 /* The fastest way of doing this comparison on almost any machine is to
9775 "or" all the words and compare the result. If all have to be loaded
9776 from memory and this is a very wide item, it's possible this may
9777 be slower, but that's highly unlikely. */
9779 part
= gen_reg_rtx (word_mode
);
9780 emit_move_insn (part
, operand_subword_force (op0
, 0, GET_MODE (op0
)));
9781 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
9782 part
= expand_binop (word_mode
, ior_optab
, part
,
9783 operand_subword_force (op0
, i
, GET_MODE (op0
)),
9784 part
, 1, OPTAB_WIDEN
);
9788 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
9789 NULL_RTX
, 0, if_false_label
, if_true_label
);
9794 /* If we couldn't do the "or" simply, do this with a series of compares. */
9795 if (! if_false_label
)
9796 drop_through_label
= if_false_label
= gen_label_rtx ();
9798 for (i
= 0; i
< nwords
; i
++)
9799 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, GET_MODE (op0
)),
9800 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
, 0,
9801 if_false_label
, NULL_RTX
);
9804 emit_jump (if_true_label
);
9806 if (drop_through_label
)
9807 emit_label (drop_through_label
);
9810 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9811 (including code to compute the values to be compared)
9812 and set (CC0) according to the result.
9813 The decision as to signed or unsigned comparison must be made by the caller.
9815 We force a stack adjustment unless there are currently
9816 things pushed on the stack that aren't yet used.
9818 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9821 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9822 size of MODE should be used. */
9825 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
, align
)
9826 register rtx op0
, op1
;
9829 enum machine_mode mode
;
9835 /* If one operand is constant, make it the second one. Only do this
9836 if the other operand is not constant as well. */
9838 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
9839 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
9844 code
= swap_condition (code
);
9849 op0
= force_not_mem (op0
);
9850 op1
= force_not_mem (op1
);
9853 do_pending_stack_adjust ();
9855 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
9856 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
9860 /* There's no need to do this now that combine.c can eliminate lots of
9861 sign extensions. This can be less efficient in certain cases on other
9864 /* If this is a signed equality comparison, we can do it as an
9865 unsigned comparison since zero-extension is cheaper than sign
9866 extension and comparisons with zero are done as unsigned. This is
9867 the case even on machines that can do fast sign extension, since
9868 zero-extension is easier to combine with other operations than
9869 sign-extension is. If we are comparing against a constant, we must
9870 convert it to what it would look like unsigned. */
9871 if ((code
== EQ
|| code
== NE
) && ! unsignedp
9872 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
9874 if (GET_CODE (op1
) == CONST_INT
9875 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
9876 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
9881 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
, align
);
9883 return gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
9886 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
/* NOTE(review): this chunk is a fragmentary extract -- original source
   lines (braces, else-arms, the declaration of TEM, the tail of the
   emit_cmp_and_jump_insns call) are missing, as the gaps in the embedded
   line numbers show.  Only comments are added; code text is untouched.  */
/* Emit RTL for a conditional jump comparing OP0 with OP1 under rtx
   comparison CODE.  Control transfers to IF_TRUE_LABEL when the
   comparison holds and to IF_FALSE_LABEL when it does not; either label
   may be 0, meaning "fall through" for that outcome.  */
9887 The decision as to signed or unsigned comparison must be made by the caller.
9889 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9892 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9893 size of MODE should be used. */
9896 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
, size
, align
,
9897 if_false_label
, if_true_label
)
/* Old-style (K&R) parameter declarations.  Declarations for CODE,
   UNSIGNEDP, SIZE and ALIGN are elided from this extract.  */
9898 register rtx op0
, op1
;
9901 enum machine_mode mode
;
9904 rtx if_false_label
, if_true_label
;
/* Non-zero when we had to invent IF_TRUE_LABEL ourselves; the label is
   then emitted right after the jump so the "true" path drops through.  */
9907 int dummy_true_label
= 0;
9909 /* Reverse the comparison if that is safe and we want to jump if it is
/* FLOAT_MODE_P guard: reverse_condition is not valid for IEEE
   floating-point comparisons (NaN ordering), so only reverse for
   non-float modes.  */
9911 if (! if_true_label
&& ! FLOAT_MODE_P (mode
))
9913 if_true_label
= if_false_label
;
/* The clearing of if_false_label between these statements is elided
   from this extract -- TODO confirm against the full source.  */
9915 code
= reverse_condition (code
);
9918 /* If one operand is constant, make it the second one. Only do this
9919 if the other operand is not constant as well. */
9921 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
9922 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
/* The swap of OP0/OP1 via a temporary is elided here; only the
   condition-code adjustment survives in this extract.  */
9927 code
= swap_condition (code
);
/* Pull both operands out of memory so the comparison works on
   registers/constants.  */
9932 op0
= force_not_mem (op0
);
9933 op1
= force_not_mem (op1
);
9936 do_pending_stack_adjust ();
/* Constant-fold the whole comparison when both operands are CONST_INT:
   if it simplifies, emit an unconditional jump (or nothing) instead of
   a compare-and-branch.  */
9938 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
9939 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
9941 if (tem
== const_true_rtx
)
9944 emit_jump (if_true_label
);
/* The else-arm bracketing and the early return between these two
   emit_jump calls are elided from this extract.  */
9949 emit_jump (if_false_label
);
9955 /* There's no need to do this now that combine.c can eliminate lots of
9956 sign extensions. This can be less efficient in certain cases on other
9959 /* If this is a signed equality comparison, we can do it as an
9960 unsigned comparison since zero-extension is cheaper than sign
9961 extension and comparisons with zero are done as unsigned. This is
9962 the case even on machines that can do fast sign extension, since
9963 zero-extension is easier to combine with other operations than
9964 sign-extension is. If we are comparing against a constant, we must
9965 convert it to what it would look like unsigned. */
9966 if ((code
== EQ
|| code
== NE
) && ! unsignedp
9967 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
/* Mask the constant operand to the operand mode so the unsigned
   interpretation matches the signed one for EQ/NE.  */
9969 if (GET_CODE (op1
) == CONST_INT
9970 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
9971 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
/* No true-label supplied: create a dummy one so emit_cmp_and_jump_insns
   always has a branch target; it is emitted just below so the true
   path falls through.  */
9976 if (! if_true_label
)
9978 dummy_true_label
= 1;
9979 if_true_label
= gen_label_rtx ();
/* The final argument (the true-label) of this call is elided from
   this extract.  */
9982 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
, align
,
9986 emit_jump (if_false_label
);
9987 if (dummy_true_label
)
9988 emit_label (if_true_label
);
/* NOTE(review): fragmentary extract -- the return after the ERROR_MARK
   check, the "== FUNCTION_TYPE" arms of the canonicalization tests, the
   op0/op1 = new_op0/new_op1 assignments and the emit_queue call are all
   elided, as the gaps in the embedded line numbers show.  Only comments
   are added; code text is untouched.  */
9991 /* Generate code for a comparison expression EXP (including code to compute
9992 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9993 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9994 generated code will drop through.
9995 SIGNED_CODE should be the rtx operation for this comparison for
9996 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9998 We force a stack adjustment unless there are currently
9999 things pushed on the stack that aren't yet used. */
10002 do_compare_and_jump (exp
, signed_code
, unsigned_code
, if_false_label
,
/* K&R parameter declarations; the declarations of EXP (tree) and
   IF_TRUE_LABEL's position in the parameter list are elided.  */
10005 enum rtx_code signed_code
, unsigned_code
;
10006 rtx if_false_label
, if_true_label
;
/* Locals: alignments of the two expanded operands, the operand rtxen,
   the comparison type/mode, and the chosen rtx comparison code.  */
10008 unsigned int align0
, align1
;
10009 register rtx op0
, op1
;
10010 register tree type
;
10011 register enum machine_mode mode
;
10013 enum rtx_code code
;
10015 /* Don't crash if the comparison was erroneous. */
10016 op0
= expand_expr_unaligned (TREE_OPERAND (exp
, 0), &align0
);
/* The early return taken when operand 0 is an ERROR_MARK is elided
   from this extract.  */
10017 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
10020 op1
= expand_expr_unaligned (TREE_OPERAND (exp
, 1), &align1
);
/* Pick signedness and therefore the rtx comparison code from the
   tree type of operand 0.  */
10021 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10022 mode
= TYPE_MODE (type
);
10023 unsignedp
= TREE_UNSIGNED (type
);
10024 code
= unsignedp
? unsigned_code
: signed_code
;
10026 #ifdef HAVE_canonicalize_funcptr_for_compare
10027 /* If function pointers need to be "canonicalized" before they can
10028 be reliably compared, then canonicalize them. */
/* Both operands are handled symmetrically: a pointer-to-function
   operand is copied through a fresh pseudo via the target's
   canonicalize_funcptr_for_compare pattern.  */
10029 if (HAVE_canonicalize_funcptr_for_compare
10030 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10031 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10034 rtx new_op0
= gen_reg_rtx (mode
);
10036 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
10040 if (HAVE_canonicalize_funcptr_for_compare
10041 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10042 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10045 rtx new_op1
= gen_reg_rtx (mode
);
10047 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
10052 /* Do any postincrements in the expression that was tested. */
/* Delegate the actual compare-and-branch to the rtx-level routine;
   for BLKmode the expression size is passed, otherwise NULL_RTX
   (the condition selecting between the two is elided here).  */
10055 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
,
10057 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
10058 MIN (align0
, align1
),
10059 if_false_label
, if_true_label
);
/* NOTE(review): fragmentary extract -- several "return 0" statements,
   brace pairs, switch-case labels, and the declarations of INVERT, OP0,
   OP1, RESULT, LABEL and UNSIGNEDP are elided, as the gaps in the
   embedded line numbers show.  Only comments are added; code is
   untouched.  */
10062 /* Generate code to calculate EXP using a store-flag instruction
10063 and return an rtx for the result. EXP is either a comparison
10064 or a TRUTH_NOT_EXPR whose operand is a comparison.
10066 If TARGET is nonzero, store the result there if convenient.
10068 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10071 Return zero if there is no suitable set-flag instruction
10072 available on this machine.
10074 Once expand_expr has been called on the arguments of the comparison,
10075 we are committed to doing the store flag, since it is not safe to
10076 re-evaluate the expression. We emit the store-flag insn by calling
10077 emit_store_flag, but only expand the arguments if we have a reason
10078 to believe that emit_store_flag will be successful. If we think that
10079 it will, but it isn't, we have to simulate the store-flag with a
10080 set/jump/set sequence. */
10083 do_store_flag (exp
, target
, mode
, only_cheap
)
/* K&R declarations; EXP (tree), TARGET (rtx) and ONLY_CHEAP (int)
   declarations are elided from this extract.  */
10086 enum machine_mode mode
;
/* Locals for the comparison decomposition and the chosen insn.  */
10089 enum rtx_code code
;
10090 tree arg0
, arg1
, type
;
10092 enum machine_mode operand_mode
;
10096 enum insn_code icode
;
10097 rtx subtarget
= target
;
10100 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10101 result at the end. We can't simply invert the test since it would
10102 have already been inverted if it were valid. This case occurs for
10103 some floating-point comparisons. */
10105 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
10106 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
/* Decompose the comparison: operands, their tree type, machine mode
   and signedness.  */
10108 arg0
= TREE_OPERAND (exp
, 0);
10109 arg1
= TREE_OPERAND (exp
, 1);
10110 type
= TREE_TYPE (arg0
);
10111 operand_mode
= TYPE_MODE (type
);
10112 unsignedp
= TREE_UNSIGNED (type
);
10114 /* We won't bother with BLKmode store-flag operations because it would mean
10115 passing a lot of information to emit_store_flag. */
/* The "return 0" body of this bail-out is elided from the extract.  */
10116 if (operand_mode
== BLKmode
)
10119 /* We won't bother with store-flag operations involving function pointers
10120 when function pointers must be canonicalized before comparisons. */
10121 #ifdef HAVE_canonicalize_funcptr_for_compare
10122 if (HAVE_canonicalize_funcptr_for_compare
10123 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10124 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10126 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10127 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10128 == FUNCTION_TYPE
))))
10135 /* Get the rtx comparison code to use. We know that EXP is a comparison
10136 operation of some type. Some comparisons against 1 and -1 can be
10137 converted to comparisons with zero. Do so here so that the tests
10138 below will be aware that we have a comparison with zero. These
10139 tests will not catch constants in the first operand, but constants
10140 are rarely passed as the first operand. */
/* The case labels (LT_EXPR, LE_EXPR, GT_EXPR, GE_EXPR, EQ_EXPR,
   NE_EXPR, ...) and break statements of this switch are elided; only
   the constant-normalizing bodies survive in the extract.  Each pair
   rewrites "x < 1" style tests into the equivalent test against 0.  */
10142 switch (TREE_CODE (exp
))
10151 if (integer_onep (arg1
))
10152 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
10154 code
= unsignedp
? LTU
: LT
;
10157 if (! unsignedp
&& integer_all_onesp (arg1
))
10158 arg1
= integer_zero_node
, code
= LT
;
10160 code
= unsignedp
? LEU
: LE
;
10163 if (! unsignedp
&& integer_all_onesp (arg1
))
10164 arg1
= integer_zero_node
, code
= GE
;
10166 code
= unsignedp
? GTU
: GT
;
10169 if (integer_onep (arg1
))
10170 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
10172 code
= unsignedp
? GEU
: GE
;
10175 case UNORDERED_EXPR
:
10201 /* Put a constant second. */
10202 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
10204 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
10205 code
= swap_condition (code
);
10208 /* If this is an equality or inequality test of a single bit, we can
10209 do this by shifting the bit being tested to the low-order bit and
10210 masking the result with the constant 1. If the condition was EQ,
10211 we xor it with 1. This does not require an scc insn and is faster
10212 than an scc insn even if we have it. */
10214 if ((code
== NE
|| code
== EQ
)
10215 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
10216 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
/* INNER is the value whose bit BITNUM is being tested.  */
10218 tree inner
= TREE_OPERAND (arg0
, 0);
10219 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
10222 /* If INNER is a right shift of a constant and it plus BITNUM does
10223 not overflow, adjust BITNUM and INNER. */
10225 if (TREE_CODE (inner
) == RSHIFT_EXPR
10226 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
10227 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
10228 && bitnum
< TYPE_PRECISION (type
)
10229 && 0 > compare_tree_int (TREE_OPERAND (inner
, 1),
10230 bitnum
- TYPE_PRECISION (type
)))
/* Fold the shift into the bit number: testing bit B of (x >> C)
   is testing bit B+C of x.  */
10232 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
10233 inner
= TREE_OPERAND (inner
, 0);
10236 /* If we are going to be able to omit the AND below, we must do our
10237 operations as unsigned. If we must use the AND, we have a choice.
10238 Normally unsigned is faster, but for some machines signed is. */
10239 ops_unsignedp
= (bitnum
== TYPE_PRECISION (type
) - 1 ? 1
10240 #ifdef LOAD_EXTEND_OP
10241 : (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
? 0 : 1)
/* Reuse SUBTARGET only when it is a register of the right mode that
   does not overlap anything INNER might read.  */
10247 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
10248 || GET_MODE (subtarget
) != operand_mode
10249 || ! safe_from_p (subtarget
, inner
, 1))
10252 op0
= expand_expr (inner
, subtarget
, VOIDmode
, 0);
/* Shift the tested bit down to bit 0.  */
10255 op0
= expand_shift (RSHIFT_EXPR
, GET_MODE (op0
), op0
,
10256 size_int (bitnum
), subtarget
, ops_unsignedp
);
10258 if (GET_MODE (op0
) != mode
)
10259 op0
= convert_to_mode (mode
, op0
, ops_unsignedp
);
/* For EQ (or inverted NE) flip the low bit with XOR so the result is
   1 when the tested bit was clear.  */
10261 if ((code
== EQ
&& ! invert
) || (code
== NE
&& invert
))
10262 op0
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
, subtarget
,
10263 ops_unsignedp
, OPTAB_LIB_WIDEN
);
10265 /* Put the AND last so it can combine with more things. */
10266 if (bitnum
!= TYPE_PRECISION (type
) - 1)
10267 op0
= expand_and (op0
, const1_rtx
, subtarget
);
10272 /* Now see if we are likely to be able to do this. Return if not. */
10273 if (! can_compare_p (code
, operand_mode
, ccp_store_flag
))
10276 icode
= setcc_gen_code
[(int) code
];
10277 if (icode
== CODE_FOR_nothing
10278 || (only_cheap
&& insn_data
[(int) icode
].operand
[0].mode
!= mode
))
10280 /* We can only do this if it is one of the special cases that
10281 can be handled without an scc insn. */
10282 if ((code
== LT
&& integer_zerop (arg1
))
10283 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
/* Alternatively, EQ/NE of an integer can be simulated via abs or
   ffs when the machine has one of those; otherwise give up (the
   "return 0" arm is elided here).  */
10285 else if (BRANCH_COST
>= 0
10286 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
10287 && TREE_CODE (type
) != REAL_TYPE
10288 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
10289 != CODE_FOR_nothing
)
10290 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
10291 != CODE_FOR_nothing
)))
10297 preexpand_calls (exp
);
10298 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
10299 || GET_MODE (subtarget
) != operand_mode
10300 || ! safe_from_p (subtarget
, arg1
, 1))
/* Expand both operands; after this point we are committed (see the
   function comment).  */
10303 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
10304 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
10307 target
= gen_reg_rtx (mode
);
10309 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10310 because, if the emit_store_flag does anything it will succeed and
10311 OP0 and OP1 will not be used subsequently. */
10313 result
= emit_store_flag (target
, code
,
10314 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
10315 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
10316 operand_mode
, unsignedp
, 1);
/* On success, apply the TRUTH_NOT inversion with an XOR against 1
   (the surrounding "if (result)" test is elided here).  */
10321 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
10322 result
, 0, OPTAB_LIB_WIDEN
);
10326 /* If this failed, we have to do this with set/compare/jump/set code. */
10327 if (GET_CODE (target
) != REG
10328 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
10329 target
= gen_reg_rtx (GET_MODE (target
));
/* set/jump/set fallback: preload the "true" value, branch over the
   store of the "false" value when the condition holds.  */
10331 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
10332 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
10333 operand_mode
, NULL_RTX
, 0);
10334 if (GET_CODE (result
) == CONST_INT
)
10335 return (((result
== const0_rtx
&& ! invert
)
10336 || (result
!= const0_rtx
&& invert
))
10337 ? const0_rtx
: const1_rtx
);
10339 label
= gen_label_rtx ();
/* The abort() body for a missing branch generator is elided here.  */
10340 if (bcc_gen_fctn
[(int) code
] == 0)
10343 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
10344 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
10345 emit_label (label
);
/* NOTE(review): fragmentary extract -- the DEFAULT_LABEL argument tail
   of emit_cmp_and_jump_insns, some #endif lines and the emit_barrier
   body of the final "if" are elided, as the gaps in the embedded line
   numbers show.  Only comments are added; code text is untouched.  */
10350 /* Generate a tablejump instruction (used for switch statements). */
10352 #ifdef HAVE_tablejump
10354 /* INDEX is the value being switched on, with the lowest value
10355 in the table already subtracted.
10356 MODE is its expected mode (needed if INDEX is constant).
10357 RANGE is the length of the jump table.
10358 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10360 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10361 index value is out of range. */
10364 do_tablejump (index
, mode
, range
, table_label
, default_label
)
10365 rtx index
, range
, table_label
, default_label
;
10366 enum machine_mode mode
;
/* TEMP holds the fetched table entry; VECTOR is the MEM that reads it.  */
10368 register rtx temp
, vector
;
10370 /* Do an unsigned comparison (in the proper mode) between the index
10371 expression and the value which represents the length of the range.
10372 Since we just finished subtracting the lower bound of the range
10373 from the index expression, this comparison allows us to simultaneously
10374 check that the original index expression value is both greater than
10375 or equal to the minimum value of the range and less than or equal to
10376 the maximum value of the range. */
/* The trailing DEFAULT_LABEL argument of this call is elided.  */
10378 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, mode
, 1,
10381 /* If index is in range, it must fit in Pmode.
10382 Convert to Pmode so we can index with it. */
10384 index
= convert_to_mode (Pmode
, index
, 1);
10386 /* Don't let a MEM slip thru, because then INDEX that comes
10387 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10388 and break_out_memory_refs will go to work on it and mess it up. */
10389 #ifdef PIC_CASE_VECTOR_ADDRESS
10390 if (flag_pic
&& GET_CODE (index
) != REG
)
10391 index
= copy_to_mode_reg (Pmode
, index
);
10394 /* If flag_force_addr were to affect this address
10395 it could interfere with the tricky assumptions made
10396 about addresses that contain label-refs,
10397 which may be valid only very near the tablejump itself. */
10398 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10399 GET_MODE_SIZE, because this indicates how large insns are. The other
10400 uses should all be Pmode, because they are addresses. This code
10401 could fail if addresses and insns are not the same size. */
/* Compute &table[index]: scale INDEX by the table entry size and add
   the address of the table label.  */
10402 index
= gen_rtx_PLUS (Pmode
,
10403 gen_rtx_MULT (Pmode
, index
,
10404 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE
))),
10405 gen_rtx_LABEL_REF (Pmode
, table_label
));
10406 #ifdef PIC_CASE_VECTOR_ADDRESS
/* The "if (flag_pic)" guard and "else" of this PIC adjustment are
   elided from this extract.  */
10408 index
= PIC_CASE_VECTOR_ADDRESS (index
);
10411 index
= memory_address_noforce (CASE_VECTOR_MODE
, index
);
/* Load the table entry and jump through it; the entry is marked
   RTX_UNCHANGING_P since the jump table is read-only.  */
10412 temp
= gen_reg_rtx (CASE_VECTOR_MODE
);
10413 vector
= gen_rtx_MEM (CASE_VECTOR_MODE
, index
);
10414 RTX_UNCHANGING_P (vector
) = 1;
10415 convert_move (temp
, vector
, 0);
10417 emit_jump_insn (gen_tablejump (temp
, table_label
));
10419 /* If we are generating PIC code or if the table is PC-relative, the
10420 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
/* The emit_barrier () body of this condition is elided here.  */
10421 if (! CASE_VECTOR_PC_RELATIVE
&& ! flag_pic
)
10425 #endif /* HAVE_tablejump */