: (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
\f
-static rtvec initialize_for_inline PROTO((tree, int));
+static rtvec initialize_for_inline PROTO((tree));
static void adjust_copied_decl_tree PROTO((tree));
-static tree copy_decl_list PROTO((tree));
-static tree copy_decl_tree PROTO((tree));
-static void copy_decl_rtls PROTO((tree));
-static void save_constants PROTO((rtx *));
static void note_modified_parmregs PROTO((rtx, rtx));
-static rtx copy_for_inline PROTO((rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *,
rtvec));
static void integrate_decl_tree PROTO((tree, int,
struct inline_remap *));
-static void save_constants_in_decl_trees PROTO ((tree));
static void subst_constants PROTO((rtx *, rtx,
struct inline_remap *));
-static void restore_constants PROTO((rtx *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));
int inline_max_insns = 10000;
+/* Used by copy_rtx_and_substitute; this indicates whether the function is
+   called for the purpose of inlining or some other purpose (e.g. loop
+   unrolling).  This affects how constant pool references are handled.
+   This variable contains the struct function (DECL_SAVED_INSNS) of the
+   function being inlined, or zero when not inlining.  */
+static struct function *inlining = 0;
\f
/* Returns the Ith entry in the label_map contained in MAP. If the
Ith entry has not yet been set, return a fresh label. This function
return 0;
}
\f
-/* Variables used within save_for_inline. */
-
-/* Mapping from old pseudo-register to new pseudo-registers.
- The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
- It is allocated in `save_for_inline' and `expand_inline_function',
- and deallocated on exit from each of those routines. */
-static rtx *reg_map;
-
-/* Mapping from old code-labels to new code-labels.
- The first element of this map is label_map[min_labelno].
- It is allocated in `save_for_inline' and `expand_inline_function',
- and deallocated on exit from each of those routines. */
-static rtx *label_map;
-
-/* Mapping from old insn uid's to copied insns.
- It is allocated in `save_for_inline' and `expand_inline_function',
- and deallocated on exit from each of those routines. */
-static rtx *insn_map;
-
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
Zero for a reg that isn't a parm's home.
Only reg numbers less than max_parm_reg are mapped here. */
static tree *parmdecl_map;
-/* When an insn is being copied by copy_for_inline,
- this is nonzero if we have copied an ASM_OPERANDS.
- In that case, it is the original input-operand vector. */
-static rtvec orig_asm_operands_vector;
-
-/* When an insn is being copied by copy_for_inline,
- this is nonzero if we have copied an ASM_OPERANDS.
- In that case, it is the copied input-operand vector. */
-static rtvec copy_asm_operands_vector;
-
-/* Likewise, this is the copied constraints vector. */
-static rtvec copy_asm_constraints_vector;
-
/* In save_for_inline, nonzero if past the parm-initialization insns. */
static int in_nonparm_insns;
\f
-/* subroutines passed to duplicate_eh_handlers to map exception labels */
-
-static rtx
-save_for_inline_eh_labelmap (label)
- rtx label;
-{
- int index = CODE_LABEL_NUMBER (label);
- return label_map[index];
-}
-
-/* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
+/* Subroutine for `save_for_inline_nocopy'. Performs initialization
needed to save FNDECL's insns and info for future inline expansion. */
-
+
static rtvec
-initialize_for_inline (fndecl, copy)
+initialize_for_inline (fndecl)
tree fndecl;
- int copy;
{
int i;
rtvec arg_vector;
parms = TREE_CHAIN (parms), i++)
{
rtx p = DECL_RTL (parms);
- int copied_incoming = 0;
/* If we have (mem (addressof (mem ...))), use the inner MEM since
otherwise the copy_rtx call below will not unshare the MEM since
&& GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
p = XEXP (XEXP (p, 0), 0);
- if (GET_CODE (p) == MEM && copy)
- {
- /* Copy the rtl so that modifications of the addresses
- later in compilation won't affect this arg_vector.
- Virtual register instantiation can screw the address
- of the rtl. */
- rtx new = copy_rtx (p);
-
- /* Don't leave the old copy anywhere in this decl. */
- if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
- || (GET_CODE (DECL_RTL (parms)) == MEM
- && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
- && (XEXP (DECL_RTL (parms), 0)
- == XEXP (DECL_INCOMING_RTL (parms), 0))))
- DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;
-
- DECL_RTL (parms) = new;
- }
-
RTVEC_ELT (arg_vector, i) = p;
if (GET_CODE (p) == REG)
/* This flag is cleared later
if the function ever modifies the value of the parm. */
TREE_READONLY (parms) = 1;
-
- /* Copy DECL_INCOMING_RTL if not done already. This can
- happen if DECL_RTL is a reg. */
- if (copy && ! copied_incoming)
- {
- p = DECL_INCOMING_RTL (parms);
-
- /* If we have (mem (addressof (mem ...))), use the inner MEM since
- otherwise the copy_rtx call below will not unshare the MEM since
- it shares ADDRESSOF. */
- if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
- && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
- p = XEXP (XEXP (p, 0), 0);
-
- if (GET_CODE (p) == MEM)
- DECL_INCOMING_RTL (parms) = copy_rtx (p);
- }
}
return arg_vector;
adjust_copied_decl_tree (subblock);
}
-/* Make the insns and PARM_DECLs of the current function permanent
- and record other information in DECL_SAVED_INSNS to allow inlining
- of this function in subsequent calls.
-
- This function is called when we are going to immediately compile
- the insns for FNDECL. The insns in maybepermanent_obstack cannot be
- modified by the compilation process, so we copy all of them to
- new storage and consider the new insns to be the insn chain to be
- compiled. Our caller (rest_of_compilation) saves the original
- DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
-
-/* ??? The nonlocal_label list should be adjusted also. However, since
- a function that contains a nested function never gets inlined currently,
- the nonlocal_label list will always be empty, so we don't worry about
- it for now. */
-
-void
-save_for_inline_copying (fndecl)
- tree fndecl;
-{
- rtvec argvec;
- rtx new_first_insn, new_last_insn, insn;
- int max_labelno, min_labelno, i, len;
- int max_reg;
- int max_uid;
- rtx first_nonparm_insn;
- char *new, *new1;
- rtx *new_parm_reg_stack_loc;
- rtx *new2;
- struct emit_status *es
- = (struct emit_status *) xmalloc (sizeof (struct emit_status));
-
- /* Make and emit a return-label if we have not already done so.
- Do this before recording the bounds on label numbers. */
-
- if (return_label == 0)
- {
- return_label = gen_label_rtx ();
- emit_label (return_label);
- }
-
- *es = *current_function->emit;
-
- /* Get some bounds on the labels and registers used. */
-
- max_labelno = max_label_num ();
- min_labelno = get_first_label_num ();
- max_reg = max_reg_num ();
-
- /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
- Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
- Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
- for the parms, prior to elimination of virtual registers.
- These values are needed for substituting parms properly. */
-
- parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
-
- argvec = initialize_for_inline (fndecl, 1);
-
- if (current_function_uses_const_pool)
- {
- /* Replace any constant pool references with the actual constant. We
- will put the constants back in the copy made below. */
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
- {
- save_constants (&PATTERN (insn));
- if (REG_NOTES (insn))
- save_constants (&REG_NOTES (insn));
- }
-
- /* Also scan all decls, and replace any constant pool references with the
- actual constant. */
- save_constants_in_decl_trees (DECL_INITIAL (fndecl));
-
- /* Clear out the constant pool so that we can recreate it with the
- copied constants below. */
- init_const_rtx_hash_table ();
- clear_const_double_mem ();
- }
-
- max_uid = get_max_uid ();
-
- /* We have now allocated all that needs to be allocated permanently
- on the rtx obstack. Set our high-water mark, so that we
- can free the rest of this when the time comes. */
-
- preserve_data ();
-
- /* Copy the chain insns of this function.
- Install the copied chain as the insns of this function,
- for continued compilation;
- the original chain is recorded as the DECL_SAVED_INSNS
- for inlining future calls. */
-
- /* If there are insns that copy parms from the stack into pseudo registers,
- those insns are not copied. `expand_inline_function' must
- emit the correct code to handle such things. */
-
- insn = get_insns ();
- if (GET_CODE (insn) != NOTE)
- abort ();
- new_first_insn = rtx_alloc (NOTE);
- NOTE_SOURCE_FILE (new_first_insn) = NOTE_SOURCE_FILE (insn);
- NOTE_LINE_NUMBER (new_first_insn) = NOTE_LINE_NUMBER (insn);
- INSN_UID (new_first_insn) = INSN_UID (insn);
- PREV_INSN (new_first_insn) = NULL;
- NEXT_INSN (new_first_insn) = NULL;
- new_last_insn = new_first_insn;
-
- /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
- Make these new rtx's now, and install them in regno_reg_rtx, so they
- will be the official pseudo-reg rtx's for the rest of compilation. */
-
- reg_map = (rtx *) savealloc (es->regno_pointer_flag_length * sizeof (rtx));
-
- len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
- for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
- reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
- regno_reg_rtx[i], len);
-
- es->x_regno_reg_rtx = reg_map;
-
- /* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
- init_virtual_regs (es);
-
- /* Likewise each label rtx must have a unique rtx as its copy. */
-
- /* We used to use alloca here, but the size of what it would try to
- allocate would occasionally cause it to exceed the stack limit and
- cause unpredictable core dumps. Some examples were > 2Mb in size. */
- label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
-
- for (i = min_labelno; i < max_labelno; i++)
- label_map[i] = gen_label_rtx ();
-
- /* Likewise for parm_reg_stack_slot. */
- new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
- for (i = 0; i < max_parm_reg; i++)
- new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
-
- parm_reg_stack_loc = new_parm_reg_stack_loc;
-
- /* Record the mapping of old insns to copied insns. */
-
- insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
- bzero ((char *) insn_map, max_uid * sizeof (rtx));
-
- /* Get the insn which signals the end of parameter setup code. */
- first_nonparm_insn = get_first_nonparm_insn ();
-
- /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
- (the former occurs when a variable has its address taken)
- since these may be shared and can be changed by virtual
- register instantiation. DECL_RTL values for our arguments
- have already been copied by initialize_for_inline. */
- for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
- if (GET_CODE (regno_reg_rtx[i]) == MEM)
- XEXP (regno_reg_rtx[i], 0)
- = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
-
- /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
- contained in it. */
- new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
- bcopy ((char *) parm_reg_stack_loc, (char *) new2,
- max_parm_reg * sizeof (rtx));
- parm_reg_stack_loc = new2;
- for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
- if (parm_reg_stack_loc[i])
- parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
-
- /* Copy the tree of subblocks of the function, and the decls in them.
- We will use the copy for compiling this function, then restore the original
- subblocks and decls for use when inlining this function.
-
- Several parts of the compiler modify BLOCK trees. In particular,
- instantiate_virtual_regs will instantiate any virtual regs
- mentioned in the DECL_RTLs of the decls, and loop
- unrolling will replicate any BLOCK trees inside an unrolled loop.
-
- The modified subblocks or DECL_RTLs would be incorrect for the original rtl
- which we will use for inlining. The rtl might even contain pseudoregs
- whose space has been freed. */
-
- DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
- DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
-
- /* Now copy each DECL_RTL which is a MEM,
- so it is safe to modify their addresses. */
- copy_decl_rtls (DECL_INITIAL (fndecl));
-
- /* The fndecl node acts as its own progenitor, so mark it as such. */
- DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
-
- /* Now copy the chain of insns. Do this twice. The first copy the insn
- itself and its body. The second time copy of REG_NOTES. This is because
- a REG_NOTE may have a forward pointer to another insn. */
-
- for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
- {
- rtx copy;
- orig_asm_operands_vector = 0;
-
- if (insn == first_nonparm_insn)
- in_nonparm_insns = 1;
-
- switch (GET_CODE (insn))
- {
- case NOTE:
- /* No need to keep these. */
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
- continue;
-
- copy = rtx_alloc (NOTE);
- NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
- if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
- NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
- else
- {
- NOTE_SOURCE_FILE (insn) = (char *) copy;
- NOTE_SOURCE_FILE (copy) = 0;
- }
- if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
- || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
- {
- int new_region = CODE_LABEL_NUMBER
- (label_map[NOTE_BLOCK_NUMBER (copy)]);
-
- /* we have to duplicate the handlers for the original */
- if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
- duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy), new_region,
- save_for_inline_eh_labelmap);
-
- /* We have to forward these both to match the new exception
- region. */
- NOTE_BLOCK_NUMBER (copy) = new_region;
-
- }
- RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
- break;
-
- case INSN:
- case JUMP_INSN:
- case CALL_INSN:
- copy = rtx_alloc (GET_CODE (insn));
-
- if (GET_CODE (insn) == CALL_INSN)
- CALL_INSN_FUNCTION_USAGE (copy)
- = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
-
- PATTERN (copy) = copy_for_inline (PATTERN (insn));
- INSN_CODE (copy) = -1;
- LOG_LINKS (copy) = NULL_RTX;
- RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
- break;
-
- case CODE_LABEL:
- copy = label_map[CODE_LABEL_NUMBER (insn)];
- LABEL_NAME (copy) = LABEL_NAME (insn);
- break;
-
- case BARRIER:
- copy = rtx_alloc (BARRIER);
- break;
-
- default:
- abort ();
- }
- INSN_UID (copy) = INSN_UID (insn);
- insn_map[INSN_UID (insn)] = copy;
- NEXT_INSN (new_last_insn) = copy;
- PREV_INSN (copy) = new_last_insn;
- new_last_insn = copy;
- }
-
- adjust_copied_decl_tree (DECL_INITIAL (fndecl));
-
- /* Now copy the REG_NOTES. */
- for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
- if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
- && insn_map[INSN_UID(insn)])
- REG_NOTES (insn_map[INSN_UID (insn)])
- = copy_for_inline (REG_NOTES (insn));
-
- NEXT_INSN (new_last_insn) = NULL;
-
- /* Make new versions of the register tables. */
- new = (char *) savealloc (es->regno_pointer_flag_length);
- memcpy (new, es->regno_pointer_flag, es->regno_pointer_flag_length);
- new1 = (char *) savealloc (es->regno_pointer_flag_length);
- memcpy (new1, es->regno_pointer_align, es->regno_pointer_flag_length);
- es->regno_pointer_flag = new;
- es->regno_pointer_align = new1;
-
- free (label_map);
-
- current_function->inl_max_label_num = max_label_num ();
- current_function->inl_last_parm_insn = current_function->x_last_parm_insn;
- current_function->original_arg_vector = argvec;
- current_function->original_decl_initial = DECL_INITIAL (fndecl);
- /* Use the copy we made for compiling the function now, and
- use the original values for inlining. */
- current_function->inl_emit = current_function->emit;
- current_function->emit = es;
- set_new_first_and_last_insn (new_first_insn, new_last_insn);
- DECL_SAVED_INSNS (fndecl) = current_function;
-}
-
/* Copy NODE (as with copy_node). NODE must be a DECL. Set the
DECL_ABSTRACT_ORIGIN for the new accordinly. */
return copy;
}
-/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
- For example, this can copy a list made of TREE_LIST nodes. While copying,
- set DECL_ABSTRACT_ORIGIN appropriately. */
-
-static tree
-copy_decl_list (list)
- tree list;
-{
- tree head;
- register tree prev, next;
-
- if (list == 0)
- return 0;
-
- head = prev = copy_and_set_decl_abstract_origin (list);
- next = TREE_CHAIN (list);
- while (next)
- {
- register tree copy;
-
- copy = copy_and_set_decl_abstract_origin (next);
- TREE_CHAIN (prev) = copy;
- prev = copy;
- next = TREE_CHAIN (next);
- }
- return head;
-}
-
-/* Make a copy of the entire tree of blocks BLOCK, and return it. */
-
-static tree
-copy_decl_tree (block)
- tree block;
-{
- tree t, vars, subblocks;
-
- vars = copy_decl_list (BLOCK_VARS (block));
- subblocks = 0;
-
- /* Process all subblocks. */
- for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
- {
- tree copy = copy_decl_tree (t);
- TREE_CHAIN (copy) = subblocks;
- subblocks = copy;
- }
-
- t = copy_node (block);
- BLOCK_VARS (t) = vars;
- BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
- /* If the BLOCK being cloned is already marked as having been instantiated
- from something else, then leave that `origin' marking alone. Otherwise,
- mark the clone as having originated from the BLOCK we are cloning. */
- if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
- BLOCK_ABSTRACT_ORIGIN (t) = block;
- return t;
-}
-
-/* Copy DECL_RTLs in all decls in the given BLOCK node. */
-
-static void
-copy_decl_rtls (block)
- tree block;
-{
- tree t;
-
- for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
- if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
- DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
-
- /* Process all subblocks. */
- for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
- copy_decl_rtls (t);
-}
-
/* Make the insns and PARM_DECLs of the current function permanent
and record other information in DECL_SAVED_INSNS to allow inlining
of this function in subsequent calls.
emit_label (return_label);
}
- argvec = initialize_for_inline (fndecl, 0);
+ argvec = initialize_for_inline (fndecl);
/* If there are insns that copy parms from the stack into pseudo registers,
those insns are not copied. `expand_inline_function' must
in_nonparm_insns = 1;
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
- {
- if (current_function_uses_const_pool)
- {
- /* Replace any constant pool references with the actual constant.
- We will put the constant back if we need to write the
- function out after all. */
- save_constants (&PATTERN (insn));
- if (REG_NOTES (insn))
- save_constants (&REG_NOTES (insn));
- }
-
- /* Record what interesting things happen to our parameters. */
- note_stores (PATTERN (insn), note_modified_parmregs);
- }
+ /* Record what interesting things happen to our parameters. */
+ note_stores (PATTERN (insn), note_modified_parmregs);
}
- /* Also scan all decls, and replace any constant pool references with the
- actual constant. */
- save_constants_in_decl_trees (DECL_INITIAL (fndecl));
-
/* We have now allocated all that needs to be allocated permanently
on the rtx obstack. Set our high-water mark, so that we
can free the rest of this when the time comes. */
preserve_data ();
- current_function->inl_emit = current_function->emit;
current_function->inl_max_label_num = max_label_num ();
current_function->inl_last_parm_insn = current_function->x_last_parm_insn;
current_function->original_arg_vector = argvec;
DECL_SAVED_INSNS (fndecl) = current_function;
}
\f
-/* Given PX, a pointer into an insn, search for references to the constant
- pool. Replace each with a CONST that has the mode of the original
- constant, contains the constant, and has RTX_INTEGRATED_P set.
- Similarly, constant pool addresses not enclosed in a MEM are replaced
- with an ADDRESS and CONST rtx which also gives the constant, its
- mode, the mode of the address, and has RTX_INTEGRATED_P set. */
-
-static void
-save_constants (px)
- rtx *px;
-{
- rtx x;
- int i, j;
-
- again:
- x = *px;
-
- /* If this is a CONST_DOUBLE, don't try to fix things up in
- CONST_DOUBLE_MEM, because this is an infinite recursion. */
- if (GET_CODE (x) == CONST_DOUBLE)
- return;
- else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
- && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
- {
- enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
- rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
- RTX_INTEGRATED_P (new) = 1;
-
- /* If the MEM was in a different mode than the constant (perhaps we
- were only looking at the low-order part), surround it with a
- SUBREG so we can save both modes. */
-
- if (GET_MODE (x) != const_mode)
- {
- new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
- RTX_INTEGRATED_P (new) = 1;
- }
-
- *px = new;
- save_constants (&XEXP (*px, 0));
- }
- else if (GET_CODE (x) == SYMBOL_REF
- && CONSTANT_POOL_ADDRESS_P (x))
- {
- *px = gen_rtx_ADDRESS (GET_MODE (x),
- gen_rtx_CONST (get_pool_mode (x),
- get_pool_constant (x)));
- save_constants (&XEXP (*px, 0));
- RTX_INTEGRATED_P (*px) = 1;
- }
-
- else
- {
- const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
- int len = GET_RTX_LENGTH (GET_CODE (x));
-
- for (i = len-1; i >= 0; i--)
- {
- switch (fmt[i])
- {
- case 'E':
- for (j = 0; j < XVECLEN (x, i); j++)
- save_constants (&XVECEXP (x, i, j));
- break;
-
- case 'e':
- if (XEXP (x, i) == 0)
- continue;
- if (i == 0)
- {
- /* Hack tail-recursion here. */
- px = &XEXP (x, 0);
- goto again;
- }
- save_constants (&XEXP (x, i));
- break;
- }
- }
- }
-}
-\f
/* Note whether a parameter is modified or not. */
static void
TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
-/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
- according to `reg_map' and `label_map'. The original rtl insns
- will be saved for inlining; this is used to make a copy
- which is used to finish compiling the inline function itself.
-
- If we find a "saved" constant pool entry, one which was replaced with
- the value of the constant, convert it back to a constant pool entry.
- Since the pool wasn't touched, this should simply restore the old
- address.
-
- All other kinds of rtx are copied except those that can never be
- changed during compilation. */
-
-static rtx
-copy_for_inline (orig)
- rtx orig;
-{
- register rtx x = orig;
- register rtx new;
- register int i;
- register enum rtx_code code;
- register const char *format_ptr;
-
- if (x == 0)
- return x;
-
- code = GET_CODE (x);
-
- /* These types may be freely shared. */
-
- switch (code)
- {
- case QUEUED:
- case CONST_INT:
- case PC:
- case CC0:
- return x;
-
- case SYMBOL_REF:
- if (! SYMBOL_REF_NEED_ADJUST (x))
- return x;
- return rethrow_symbol_map (x, save_for_inline_eh_labelmap);
-
- case CONST_DOUBLE:
- /* We have to make a new CONST_DOUBLE to ensure that we account for
- it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
- if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
- {
- REAL_VALUE_TYPE d;
-
- REAL_VALUE_FROM_CONST_DOUBLE (d, x);
- return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
- }
- else
- return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
- VOIDmode);
-
- case CONST:
- /* Get constant pool entry for constant in the pool. */
- if (RTX_INTEGRATED_P (x))
- return validize_mem (force_const_mem (GET_MODE (x),
- copy_for_inline (XEXP (x, 0))));
- break;
-
- case SUBREG:
- /* Get constant pool entry, but access in different mode. */
- if (RTX_INTEGRATED_P (x))
- {
- new = force_const_mem (GET_MODE (SUBREG_REG (x)),
- copy_for_inline (XEXP (SUBREG_REG (x), 0)));
-
- PUT_MODE (new, GET_MODE (x));
- return validize_mem (new);
- }
- break;
-
- case ADDRESS:
- /* If not special for constant pool error. Else get constant pool
- address. */
- if (! RTX_INTEGRATED_P (x))
- abort ();
-
- new = force_const_mem (GET_MODE (XEXP (x, 0)),
- copy_for_inline (XEXP (XEXP (x, 0), 0)));
- new = XEXP (new, 0);
-
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (new) != GET_MODE (x))
- new = convert_memory_address (GET_MODE (x), new);
-#endif
-
- return new;
-
- case ASM_OPERANDS:
- /* If a single asm insn contains multiple output operands
- then it contains multiple ASM_OPERANDS rtx's that share operand 3.
- We must make sure that the copied insn continues to share it. */
- if (orig_asm_operands_vector == XVEC (orig, 3))
- {
- x = rtx_alloc (ASM_OPERANDS);
- x->volatil = orig->volatil;
- XSTR (x, 0) = XSTR (orig, 0);
- XSTR (x, 1) = XSTR (orig, 1);
- XINT (x, 2) = XINT (orig, 2);
- XVEC (x, 3) = copy_asm_operands_vector;
- XVEC (x, 4) = copy_asm_constraints_vector;
- XSTR (x, 5) = XSTR (orig, 5);
- XINT (x, 6) = XINT (orig, 6);
- return x;
- }
- break;
-
- case MEM:
- /* A MEM is usually allowed to be shared if its address is constant
- or is a constant plus one of the special registers.
-
- We do not allow sharing of addresses that are either a special
- register or the sum of a constant and a special register because
- it is possible for unshare_all_rtl to copy the address, into memory
- that won't be saved. Although the MEM can safely be shared, and
- won't be copied there, the address itself cannot be shared, and may
- need to be copied.
-
- There are also two exceptions with constants: The first is if the
- constant is a LABEL_REF or the sum of the LABEL_REF
- and an integer. This case can happen if we have an inline
- function that supplies a constant operand to the call of another
- inline function that uses it in a switch statement. In this case,
- we will be replacing the LABEL_REF, so we have to replace this MEM
- as well.
-
- The second case is if we have a (const (plus (address ..) ...)).
- In that case we need to put back the address of the constant pool
- entry. */
-
- if (CONSTANT_ADDRESS_P (XEXP (x, 0))
- && GET_CODE (XEXP (x, 0)) != LABEL_REF
- && ! (GET_CODE (XEXP (x, 0)) == CONST
- && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
- && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
- == LABEL_REF)
- || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
- == ADDRESS)))))
- return x;
- break;
-
- case LABEL_REF:
- /* If this is a non-local label, just make a new LABEL_REF.
- Otherwise, use the new label as well. */
- x = gen_rtx_LABEL_REF (GET_MODE (orig),
- LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
- : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
- LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
- LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
- return x;
-
- case REG:
- if (REGNO (x) > LAST_VIRTUAL_REGISTER)
- return reg_map [REGNO (x)];
- else
- return x;
-
- case SET:
- /* If a parm that gets modified lives in a pseudo-reg,
- clear its TREE_READONLY to prevent certain optimizations. */
- {
- rtx dest = SET_DEST (x);
-
- while (GET_CODE (dest) == STRICT_LOW_PART
- || GET_CODE (dest) == ZERO_EXTRACT
- || GET_CODE (dest) == SUBREG)
- dest = XEXP (dest, 0);
-
- if (GET_CODE (dest) == REG
- && REGNO (dest) < max_parm_reg
- && REGNO (dest) >= FIRST_PSEUDO_REGISTER
- && parmdecl_map[REGNO (dest)] != 0
- /* The insn to load an arg pseudo from a stack slot
- does not count as modifying it. */
- && in_nonparm_insns)
- TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
- }
- break;
-
-#if 0 /* This is a good idea, but here is the wrong place for it. */
- /* Arrange that CONST_INTs always appear as the second operand
- if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
- always appear as the first. */
- case PLUS:
- if (GET_CODE (XEXP (x, 0)) == CONST_INT
- || (XEXP (x, 1) == frame_pointer_rtx
- || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
- && XEXP (x, 1) == arg_pointer_rtx)))
- {
- rtx t = XEXP (x, 0);
- XEXP (x, 0) = XEXP (x, 1);
- XEXP (x, 1) = t;
- }
- break;
-#endif
- default:
- break;
- }
-
- /* Replace this rtx with a copy of itself. */
-
- x = rtx_alloc (code);
- bcopy ((char *) orig, (char *) x,
- (sizeof (*x) - sizeof (x->fld)
- + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
-
- /* Now scan the subexpressions recursively.
- We can store any replaced subexpressions directly into X
- since we know X is not shared! Any vectors in X
- must be copied if X was copied. */
-
- format_ptr = GET_RTX_FORMAT (code);
-
- for (i = 0; i < GET_RTX_LENGTH (code); i++)
- {
- switch (*format_ptr++)
- {
- case 'e':
- XEXP (x, i) = copy_for_inline (XEXP (x, i));
- break;
-
- case 'u':
- /* Change any references to old-insns to point to the
- corresponding copied insns. */
- XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
- break;
-
- case 'E':
- if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
- {
- register int j;
-
- XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), XVEC (x, i)->elem);
- for (j = 0; j < XVECLEN (x, i); j++)
- XVECEXP (x, i, j)
- = copy_for_inline (XVECEXP (x, i, j));
- }
- break;
- }
- }
-
- if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
- {
- orig_asm_operands_vector = XVEC (orig, 3);
- copy_asm_operands_vector = XVEC (x, 3);
- copy_asm_constraints_vector = XVEC (x, 4);
- }
-
- return x;
-}
-
/* Unfortunately, we need a global copy of const_equiv map for communication
with a function called from note_stores. Be *very* careful that this
is used properly in the presence of recursion. */
tree type;
rtx structure_value_addr;
{
+ struct function *inlining_previous;
struct function *inl_f = DECL_SAVED_INSNS (fndecl);
tree formal, actual, block;
- rtx parm_insns = inl_f->inl_emit->x_first_insn;
+ rtx parm_insns = inl_f->emit->x_first_insn;
rtx insns = (inl_f->inl_last_parm_insn
? NEXT_INSN (inl_f->inl_last_parm_insn)
: parm_insns);
rtx insn;
int max_regno;
register int i;
- int min_labelno = inl_f->inl_emit->x_first_label_num;
+ int min_labelno = inl_f->emit->x_first_label_num;
int max_labelno = inl_f->inl_max_label_num;
int nargs;
rtx local_return_label = 0;
rtx *real_label_map = 0;
/* Allow for equivalences of the pseudos we make for virtual fp and ap. */
- max_regno = inl_f->inl_emit->x_reg_rtx_no + 3;
+ max_regno = inl_f->emit->x_reg_rtx_no + 3;
if (max_regno < FIRST_PSEUDO_REGISTER)
abort ();
= (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
map->label_map = real_label_map;
- inl_max_uid = (inl_f->inl_emit->x_cur_insn_uid + 1);
+ inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
map->insn_map = (rtx *) alloca (inl_max_uid * sizeof (rtx));
bzero ((char *) map->insn_map, inl_max_uid * sizeof (rtx));
map->min_insnno = 0;
if (map->insns_at_start == 0)
map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
- map->regno_pointer_flag = inl_f->inl_emit->regno_pointer_flag;
- map->regno_pointer_align = inl_f->inl_emit->regno_pointer_align;
+ map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
+ map->regno_pointer_align = inl_f->emit->regno_pointer_align;
/* Update the outgoing argument size to allow for those in the inlined
function. */
abort ();
}
+ /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
+ specially. This function can be called recursively, so we need to
+ save the previous value. */
+ inlining_previous = inlining;
+ inlining = inl_f;
+
/* Now do the parameters that will be placed in memory. */
for (formal = DECL_ARGUMENTS (fndecl), i = 0;
free (real_label_map);
if (map)
VARRAY_FREE (map->const_equiv_varray);
+ inlining = inlining_previous;
return target;
}
}
}
}
-
-/* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
- through save_constants. */
-
-static void
-save_constants_in_decl_trees (let)
- tree let;
-{
- tree t;
-
- for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
- if (DECL_RTL (t) != 0)
- save_constants (&DECL_RTL (t));
-
- for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
- save_constants_in_decl_trees (t);
-}
\f
/* Create a new copy of an rtx.
Recursively copies the operands of the rtx,
remapped label. Otherwise, symbols are returned unchanged. */
if (CONSTANT_POOL_ADDRESS_P (orig))
{
- rtx constant = get_pool_constant (orig);
- if (GET_CODE (constant) == LABEL_REF)
+ struct function *f = inlining ? inlining : current_function;
+ rtx constant = get_pool_constant_for_function (f, orig);
+ enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
+ if (inlining)
+ {
+ rtx temp = force_const_mem (const_mode,
+ copy_rtx_and_substitute (constant, map));
+#if 0
+ /* Legitimizing the address here is incorrect.
+
+ Since we had a SYMBOL_REF before, we can assume it is valid
+ to have one in this position in the insn.
+
+ Also, change_address may create new registers. These
+ registers will not have valid reg_map entries. This can
+	 cause try_constants() to fail because it assumes that all
+ registers in the rtx have valid reg_map entries, and it may
+ end up replacing one of these new registers with junk. */
+
+ if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
+ temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
+#endif
+
+ temp = XEXP (temp, 0);
+
+#ifdef POINTERS_EXTEND_UNSIGNED
+ if (GET_MODE (temp) != GET_MODE (orig))
+ temp = convert_memory_address (GET_MODE (orig), temp);
+#endif
+ return temp;
+ }
+ else if (GET_CODE (constant) == LABEL_REF)
return XEXP (force_const_mem (GET_MODE (orig),
copy_rtx_and_substitute (constant,
map)),
/* Make new constant pool entry for a constant
that was in the pool of the inline function. */
if (RTX_INTEGRATED_P (orig))
- {
- /* If this was an address of a constant pool entry that itself
- had to be placed in the constant pool, it might not be a
- valid address. So the recursive call below might turn it
- into a register. In that case, it isn't a constant any
- more, so return it. This has the potential of changing a
- MEM into a REG, but we'll assume that it safe. */
- temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
- if (! CONSTANT_P (temp))
- return temp;
- return validize_mem (force_const_mem (GET_MODE (orig), temp));
- }
- break;
-
- case ADDRESS:
- /* If from constant pool address, make new constant pool entry and
- return its address. */
- if (! RTX_INTEGRATED_P (orig))
abort ();
-
- temp
- = force_const_mem (GET_MODE (XEXP (orig, 0)),
- copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
- map));
-
-#if 0
- /* Legitimizing the address here is incorrect.
-
- The only ADDRESS rtx's that can reach here are ones created by
- save_constants. Hence the operand of the ADDRESS is always valid
- in this position of the instruction, since the original rtx without
- the ADDRESS was valid.
-
- The reason we don't legitimize the address here is that on the
- Sparc, the caller may have a (high ...) surrounding this ADDRESS.
- This code forces the operand of the address to a register, which
- fails because we can not take the HIGH part of a register.
-
- Also, change_address may create new registers. These registers
- will not have valid reg_map entries. This can cause try_constants()
- to fail because assumes that all registers in the rtx have valid
- reg_map entries, and it may end up replacing one of these new
- registers with junk. */
-
- if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
- temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
-#endif
-
- temp = XEXP (temp, 0);
-
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (temp) != GET_MODE (orig))
- temp = convert_memory_address (GET_MODE (orig), temp);
-#endif
-
- return temp;
+ break;
case ASM_OPERANDS:
/* If a single asm insn contains multiple output operands
break;
case MEM:
+ if (inlining
+ && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
+ && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
+ {
+ enum machine_mode const_mode = get_pool_mode_for_function (inlining, XEXP (orig, 0));
+ rtx constant = get_pool_constant_for_function (inlining, XEXP (orig, 0));
+ constant = copy_rtx_and_substitute (constant, map);
+ /* If this was an address of a constant pool entry that itself
+ had to be placed in the constant pool, it might not be a
+ valid address. So the recursive call might have turned it
+ into a register. In that case, it isn't a constant any
+ more, so return it. This has the potential of changing a
+	 MEM into a REG, but we'll assume that it is safe.  */
+ if (! CONSTANT_P (constant))
+ return constant;
+ return validize_mem (force_const_mem (const_mode, constant));
+ }
copy = rtx_alloc (MEM);
PUT_MODE (copy, mode);
XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
}
}
\f
-/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
- pointed to by PX, they represent constants in the constant pool.
- Replace these with a new memory reference obtained from force_const_mem.
- Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
- address of a constant pool entry. Replace them with the address of
- a new constant pool entry obtained from force_const_mem. */
-
-static void
-restore_constants (px)
- rtx *px;
-{
- rtx x = *px;
- int i, j;
- const char *fmt;
-
- if (x == 0)
- return;
-
- if (GET_CODE (x) == CONST_DOUBLE)
- {
- /* We have to make a new CONST_DOUBLE to ensure that we account for
- it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
- if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
- {
- REAL_VALUE_TYPE d;
-
- REAL_VALUE_FROM_CONST_DOUBLE (d, x);
- *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
- }
- else
- *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
- VOIDmode);
- }
-
- else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
- {
- restore_constants (&XEXP (x, 0));
- *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
- }
- else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
- {
- /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
- rtx new = XEXP (SUBREG_REG (x), 0);
-
- restore_constants (&new);
- new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
- PUT_MODE (new, GET_MODE (x));
- *px = validize_mem (new);
- }
- else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
- {
- rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
- XEXP (XEXP (x, 0), 0)),
- 0);
-
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (new) != GET_MODE (x))
- new = convert_memory_address (GET_MODE (x), new);
-#endif
-
- *px = new;
- }
- else
- {
- fmt = GET_RTX_FORMAT (GET_CODE (x));
- for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
- {
- switch (*fmt++)
- {
- case 'E':
- for (j = 0; j < XVECLEN (x, i); j++)
- restore_constants (&XVECEXP (x, i, j));
- break;
-
- case 'e':
- restore_constants (&XEXP (x, i));
- break;
- }
- }
- }
-}
-\f
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
that it points to the node itself, thus indicating that the node is its
output_inline_function (fndecl)
tree fndecl;
{
+ struct function *curf = current_function;
struct function *f = DECL_SAVED_INSNS (fndecl);
- rtx last;
- /* Things we allocate from here on are part of this function, not
- permanent. */
- temporary_allocation ();
current_function = f;
current_function_decl = fndecl;
clear_emit_caches ();
- /* Find last insn and rebuild the constant pool. */
- init_const_rtx_hash_table ();
- for (last = get_insns (); NEXT_INSN (last); last = NEXT_INSN (last))
- {
- if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
- {
- restore_constants (&PATTERN (last));
- restore_constants (®_NOTES (last));
- }
- }
+ /* Things we allocate from here on are part of this function, not
+ permanent. */
+ temporary_allocation ();
set_new_last_label_num (f->inl_max_label_num);
/* Compile this function all the way down to assembly code. */
rest_of_compilation (fndecl);
- current_function = 0;
- current_function_decl = 0;
+ current_function = curf;
+ current_function_decl = curf ? curf->decl : 0;
}
extern struct obstack permanent_obstack;
#define obstack_chunk_alloc xmalloc
+struct addr_const;
+struct constant_descriptor;
+struct rtx_const;
+struct pool_constant;
+
+#define MAX_RTX_HASH_TABLE 61
+
+struct varasm_status
+{
+ /* Hash facility for making memory-constants
+ from constant rtl-expressions. It is used on RISC machines
+ where immediate integer arguments and constant addresses are restricted
+ so that such constants must be stored in memory.
+
+ This pool of constants is reinitialized for each function
+ so each function gets its own constants-pool that comes right before
+ it. */
+ struct constant_descriptor **x_const_rtx_hash_table;
+ struct pool_sym **x_const_rtx_sym_hash_table;
+
+ /* Pointers to first and last constant in pool. */
+ struct pool_constant *x_first_pool, *x_last_pool;
+
+ /* Current offset in constant pool (does not include any machine-specific
+     header).  */
+ int x_pool_offset;
+
+ /* Chain of all CONST_DOUBLE rtx's constructed for the current function.
+ They are chained through the CONST_DOUBLE_CHAIN.
+ A CONST_DOUBLE rtx has CONST_DOUBLE_MEM != cc0_rtx iff it is on this chain.
+ In that case, CONST_DOUBLE_MEM is either a MEM,
+ or const0_rtx if no MEM has been made for this CONST_DOUBLE yet. */
+ rtx x_const_double_chain;
+};
+
+#define const_rtx_hash_table (current_function->varasm->x_const_rtx_hash_table)
+#define const_rtx_sym_hash_table (current_function->varasm->x_const_rtx_sym_hash_table)
+#define first_pool (current_function->varasm->x_first_pool)
+#define last_pool (current_function->varasm->x_last_pool)
+#define pool_offset (current_function->varasm->x_pool_offset)
+#define const_double_chain (current_function->varasm->x_const_double_chain)
+
/* Number for making the label on the next
constant that is stored in memory. */
static int function_defined;
-struct addr_const;
-struct constant_descriptor;
-struct rtx_const;
-struct pool_constant;
-
static const char *strip_reg_name PROTO((const char *));
static int contains_pointers_p PROTO((tree));
static void decode_addr_const PROTO((tree, struct addr_const *));
struct constant_descriptor *));
static struct constant_descriptor *record_constant_rtx PROTO((enum machine_mode,
rtx));
-static struct pool_constant *find_pool_constant PROTO((rtx));
+static struct pool_constant *find_pool_constant PROTO((struct function *, rtx));
static void mark_constant_pool PROTO((void));
static void mark_constants PROTO((rtx));
static int output_addressed_constants PROTO((tree));
/* Here we combine duplicate floating constants to make
CONST_DOUBLE rtx's, and force those out to memory when necessary. */
-/* Chain of all CONST_DOUBLE rtx's constructed for the current function.
- They are chained through the CONST_DOUBLE_CHAIN.
- A CONST_DOUBLE rtx has CONST_DOUBLE_MEM != cc0_rtx iff it is on this chain.
- In that case, CONST_DOUBLE_MEM is either a MEM,
- or const0_rtx if no MEM has been made for this CONST_DOUBLE yet.
-
- (CONST_DOUBLE_MEM is used only for top-level functions.
- See force_const_mem for explanation.) */
-
-static rtx const_double_chain;
-
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair of ints.
For an integer, I0 is the low-order word and I1 is the high-order word.
For a real number, I0 is the word with the low address
/* Search the chain for an existing CONST_DOUBLE with the right value.
If one is found, return it. */
-
- for (r = const_double_chain; r; r = CONST_DOUBLE_CHAIN (r))
- if (CONST_DOUBLE_LOW (r) == i0 && CONST_DOUBLE_HIGH (r) == i1
- && GET_MODE (r) == mode)
- return r;
+ if (current_function != 0)
+ for (r = const_double_chain; r; r = CONST_DOUBLE_CHAIN (r))
+ if (CONST_DOUBLE_LOW (r) == i0 && CONST_DOUBLE_HIGH (r) == i1
+ && GET_MODE (r) == mode)
+ return r;
/* No; make a new one and add it to the chain.
r = gen_rtx_CONST_DOUBLE (mode, NULL_RTX, i0, i1);
pop_obstacks ();
- /* Don't touch const_double_chain in nested function; see force_const_mem.
- Also, don't touch it if not inside any function. */
- if (outer_function_chain == 0 && current_function_decl != 0)
+ /* Don't touch const_double_chain if not inside any function. */
+ if (current_function_decl != 0)
{
CONST_DOUBLE_CHAIN (r) = const_double_chain;
const_double_chain = r;
/* Search the chain for an existing CONST_DOUBLE with the right value.
If one is found, return it. */
-
- for (r = const_double_chain; r; r = CONST_DOUBLE_CHAIN (r))
- if (! bcmp ((char *) &CONST_DOUBLE_LOW (r), (char *) &u, sizeof u)
- && GET_MODE (r) == mode)
- return r;
+ if (current_function != 0)
+ for (r = const_double_chain; r; r = CONST_DOUBLE_CHAIN (r))
+ if (! bcmp ((char *) &CONST_DOUBLE_LOW (r), (char *) &u, sizeof u)
+ && GET_MODE (r) == mode)
+ return r;
/* No; make a new one and add it to the chain.
we will be leaving this constant on the chain, so we cannot tolerate
freed memory. So switch to saveable_obstack for this allocation
and then switch back if we were in current_obstack. */
-
push_obstacks_nochange ();
rtl_in_saveable_obstack ();
r = rtx_alloc (CONST_DOUBLE);
+ pop_obstacks ();
PUT_MODE (r, mode);
bcopy ((char *) &u, (char *) &CONST_DOUBLE_LOW (r), sizeof u);
- pop_obstacks ();
- /* Don't touch const_double_chain in nested function; see force_const_mem.
- Also, don't touch it if not inside any function. */
- if (outer_function_chain == 0 && current_function_decl != 0)
+ /* Don't touch const_double_chain if not inside any function. */
+ if (current_function_decl != 0)
{
CONST_DOUBLE_CHAIN (r) = const_double_chain;
const_double_chain = r;
{
register rtx r, next;
- /* Don't touch CONST_DOUBLE_MEM for nested functions.
- See force_const_mem for explanation. */
- if (outer_function_chain != 0)
- return;
-
for (r = const_double_chain; r; r = next)
{
next = CONST_DOUBLE_CHAIN (r);
}
\f
-/* Similar hash facility for making memory-constants
- from constant rtl-expressions. It is used on RISC machines
- where immediate integer arguments and constant addresses are restricted
- so that such constants must be stored in memory.
-
- This pool of constants is reinitialized for each function
- so each function gets its own constants-pool that comes right before it.
-
- All structures allocated here are discarded when functions are saved for
- inlining, so they do not need to be allocated permanently. */
-
-#define MAX_RTX_HASH_TABLE 61
-static struct constant_descriptor **const_rtx_hash_table;
-
/* Structure to represent sufficient information about a constant so that
it can be output when the constant pool is output, so that function
integration can be done, and to simplify handling on machines that reference
int mark;
};
-/* Pointers to first and last constant in pool. */
-
-static struct pool_constant *first_pool, *last_pool;
-
-/* Current offset in constant pool (does not include any machine-specific
- header. */
-
-static int pool_offset;
-
/* Structure used to maintain hash table mapping symbols used to their
corresponding constants. */
struct pool_sym *next;
};
-static struct pool_sym **const_rtx_sym_hash_table;
-
/* Hash code for a SYMBOL_REF with CONSTANT_POOL_ADDRESS_P true.
The argument is XSTR (... , 0) */
#define SYMHASH(LABEL) \
((((unsigned long) (LABEL)) & ((1 << HASHBITS) - 1)) % MAX_RTX_HASH_TABLE)
\f
-/* Initialize constant pool hashing for next function. */
+/* Initialize constant pool hashing for a new function. */
void
-init_const_rtx_hash_table ()
+init_varasm_status (f)
+ struct function *f;
{
- const_rtx_hash_table
+ struct varasm_status *p;
+ p = (struct varasm_status *) xmalloc (sizeof (struct varasm_status));
+ f->varasm = p;
+ p->x_const_rtx_hash_table
= ((struct constant_descriptor **)
- oballoc (MAX_RTX_HASH_TABLE * sizeof (struct constant_descriptor *)));
- const_rtx_sym_hash_table
+ xmalloc (MAX_RTX_HASH_TABLE * sizeof (struct constant_descriptor *)));
+ p->x_const_rtx_sym_hash_table
= ((struct pool_sym **)
- oballoc (MAX_RTX_HASH_TABLE * sizeof (struct pool_sym *)));
- bzero ((char *) const_rtx_hash_table,
+ xmalloc (MAX_RTX_HASH_TABLE * sizeof (struct pool_sym *)));
+ bzero ((char *) p->x_const_rtx_hash_table,
MAX_RTX_HASH_TABLE * sizeof (struct constant_descriptor *));
- bzero ((char *) const_rtx_sym_hash_table,
+ bzero ((char *) p->x_const_rtx_sym_hash_table,
MAX_RTX_HASH_TABLE * sizeof (struct pool_sym *));
- first_pool = last_pool = 0;
- pool_offset = 0;
-}
-
-/* Save and restore status for a nested function. */
-
-void
-save_varasm_status (p, context)
- struct function *p;
- tree context;
-{
- p->const_rtx_hash_table = const_rtx_hash_table;
- p->const_rtx_sym_hash_table = const_rtx_sym_hash_table;
- p->first_pool = first_pool;
- p->last_pool = last_pool;
- p->pool_offset = pool_offset;
- p->const_double_chain = const_double_chain;
-
- /* If we are pushing to toplevel, we can't reuse const_double_chain. */
- if (context == NULL_TREE)
- const_double_chain = 0;
-}
-
-void
-restore_varasm_status (p)
- struct function *p;
-{
- const_rtx_hash_table = p->const_rtx_hash_table;
- const_rtx_sym_hash_table = p->const_rtx_sym_hash_table;
- first_pool = p->first_pool;
- last_pool = p->last_pool;
- pool_offset = p->pool_offset;
- const_double_chain = p->const_double_chain;
+ p->x_first_pool = p->x_last_pool = 0;
+ p->x_pool_offset = 0;
+ p->x_const_double_chain = 0;
}
\f
enum kind { RTX_DOUBLE, RTX_INT };
modes in an alternating fashion, we will allocate a lot of different
memory locations, but this should be extremely rare. */
- /* Don't use CONST_DOUBLE_MEM in a nested function.
- Nested functions have their own constant pools,
- so they can't share the same values in CONST_DOUBLE_MEM
- with the containing function. */
- if (outer_function_chain == 0)
- if (GET_CODE (x) == CONST_DOUBLE
- && GET_CODE (CONST_DOUBLE_MEM (x)) == MEM
- && GET_MODE (CONST_DOUBLE_MEM (x)) == mode)
- return CONST_DOUBLE_MEM (x);
+ if (GET_CODE (x) == CONST_DOUBLE
+ && GET_CODE (CONST_DOUBLE_MEM (x)) == MEM
+ && GET_MODE (CONST_DOUBLE_MEM (x)) == mode)
+ return CONST_DOUBLE_MEM (x);
/* Compute hash code of X. Search the descriptors for that hash code
to see if any of them describes X. If yes, the descriptor records
CONSTANT_POOL_ADDRESS_P (XEXP (def, 0)) = 1;
current_function_uses_const_pool = 1;
- if (outer_function_chain == 0)
- if (GET_CODE (x) == CONST_DOUBLE)
- {
- if (CONST_DOUBLE_MEM (x) == cc0_rtx)
- {
- CONST_DOUBLE_CHAIN (x) = const_double_chain;
- const_double_chain = x;
- }
- CONST_DOUBLE_MEM (x) = def;
- }
+ if (GET_CODE (x) == CONST_DOUBLE)
+ {
+ if (CONST_DOUBLE_MEM (x) == cc0_rtx)
+ {
+ CONST_DOUBLE_CHAIN (x) = const_double_chain;
+ const_double_chain = x;
+ }
+ CONST_DOUBLE_MEM (x) = def;
+ }
return def;
}
the corresponding pool_constant structure. */
static struct pool_constant *
-find_pool_constant (addr)
+find_pool_constant (f, addr)
+ struct function *f;
rtx addr;
{
struct pool_sym *sym;
char *label = XSTR (addr, 0);
- for (sym = const_rtx_sym_hash_table[SYMHASH (label)]; sym; sym = sym->next)
+ for (sym = f->varasm->x_const_rtx_sym_hash_table[SYMHASH (label)]; sym; sym = sym->next)
if (sym->label == label)
return sym->pool;
get_pool_constant (addr)
rtx addr;
{
- return (find_pool_constant (addr))->constant;
+ return (find_pool_constant (current_function, addr))->constant;
+}
+
+/* Likewise, but for the constant pool of a specific function. */
+
+rtx
+get_pool_constant_for_function (f, addr)
+ struct function *f;
+ rtx addr;
+{
+ return (find_pool_constant (f, addr))->constant;
}
/* Similar, return the mode. */
get_pool_mode (addr)
rtx addr;
{
- return (find_pool_constant (addr))->mode;
+ return (find_pool_constant (current_function, addr))->mode;
+}
+
+enum machine_mode
+get_pool_mode_for_function (f, addr)
+ struct function *f;
+ rtx addr;
+{
+ return (find_pool_constant (f, addr))->mode;
}
/* Similar, return the offset in the constant pool. */
get_pool_offset (addr)
rtx addr;
{
- return (find_pool_constant (addr))->offset;
+ return (find_pool_constant (current_function, addr))->offset;
}
/* Return the size of the constant pool. */
if (GET_CODE (x) == SYMBOL_REF)
{
if (CONSTANT_POOL_ADDRESS_P (x))
- find_pool_constant (x)->mark = 1;
+ find_pool_constant (current_function, x)->mark = 1;
return;
}
/* Never search inside a CONST_DOUBLE, because CONST_DOUBLE_MEM may be
- a MEM, but does not constitute a use of that MEM. This is particularly
- important inside a nested function, because CONST_DOUBLE_MEM may be
- a reference to a MEM in the parent's constant pool. See the comment
- in force_const_mem. */
+ a MEM, but does not constitute a use of that MEM. */
else if (GET_CODE (x) == CONST_DOUBLE)
return;