/* Subroutines for manipulating rtx's in semantically interesting ways.
- Copyright (C) 1987-2014 Free Software Foundation, Inc.
+ Copyright (C) 1987-2016 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
-#include "diagnostic-core.h"
+#include "target.h"
+#include "function.h"
#include "rtl.h"
#include "tree.h"
-#include "stor-layout.h"
#include "tm_p.h"
-#include "flags.h"
-#include "except.h"
-#include "function.h"
-#include "expr.h"
+#include "expmed.h"
#include "optabs.h"
-#include "libfuncs.h"
-#include "hard-reg-set.h"
-#include "insn-config.h"
-#include "ggc.h"
+#include "emit-rtl.h"
#include "recog.h"
-#include "langhooks.h"
-#include "target.h"
+#include "diagnostic-core.h"
+#include "stor-layout.h"
+#include "except.h"
+#include "dojump.h"
+#include "explow.h"
+#include "expr.h"
#include "common/common-target.h"
#include "output.h"
/* Truncate and perhaps sign-extend C as appropriate for MODE. */
HOST_WIDE_INT
-trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
+trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
int width = GET_MODE_PRECISION (mode);
/* You want to truncate to a _what_? */
- gcc_assert (SCALAR_INT_MODE_P (mode));
+ gcc_assert (SCALAR_INT_MODE_P (mode)
+ || POINTER_BOUNDS_MODE_P (mode));
/* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
if (mode == BImode)
if it must be treated as immutable. */
rtx
-plus_constant (enum machine_mode mode, rtx x, HOST_WIDE_INT c,
+plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
bool inplace)
{
RTX_CODE code;
{
tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
tem = force_const_mem (GET_MODE (x), tem);
- if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
+ /* Targets may disallow some constants in the constant pool, thus
+ force_const_mem may return NULL_RTX. */
+ if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
return tem;
}
break;
return x;
}
-/* Returns a tree for the size of EXP in bytes. */
-
-static tree
-tree_expr_size (const_tree exp)
-{
- if (DECL_P (exp)
- && DECL_SIZE_UNIT (exp) != 0)
- return DECL_SIZE_UNIT (exp);
- else
- return size_in_bytes (TREE_TYPE (exp));
-}
-
-/* Return an rtx for the size in bytes of the value of EXP. */
-
-rtx
-expr_size (tree exp)
-{
- tree size;
-
- if (TREE_CODE (exp) == WITH_SIZE_EXPR)
- size = TREE_OPERAND (exp, 1);
- else
- {
- size = tree_expr_size (exp);
- gcc_assert (size);
- gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
- }
-
- return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
-}
-
-/* Return a wide integer for the size in bytes of the value of EXP, or -1
- if the size can vary or is larger than an integer. */
-
-HOST_WIDE_INT
-int_expr_size (tree exp)
-{
- tree size;
-
- if (TREE_CODE (exp) == WITH_SIZE_EXPR)
- size = TREE_OPERAND (exp, 1);
- else
- {
- size = tree_expr_size (exp);
- gcc_assert (size);
- }
-
- if (size == 0 || !tree_fits_shwi_p (size))
- return -1;
-
- return tree_to_shwi (size);
-}
\f
/* Return a copy of X in which all memory references
and all constants that involve symbol refs
an address in the address space's address mode, or vice versa (TO_MODE says
which way). We take advantage of the fact that pointers are not allowed to
overflow by commuting arithmetic operations over conversions so that address
- arithmetic insns can be used. */
+ arithmetic insns can be used. IN_CONST is true if this conversion is inside
+ a CONST. */
-rtx
-convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
- rtx x, addr_space_t as ATTRIBUTE_UNUSED)
+static rtx
+convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
+ rtx x, addr_space_t as ATTRIBUTE_UNUSED,
+ bool in_const ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
- enum machine_mode pointer_mode, address_mode, from_mode;
+ machine_mode pointer_mode, address_mode, from_mode;
rtx temp;
enum rtx_code code;
break;
case LABEL_REF:
- temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
+ temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
return temp;
break;
case CONST:
return gen_rtx_CONST (to_mode,
- convert_memory_address_addr_space
- (to_mode, XEXP (x, 0), as));
+ convert_memory_address_addr_space_1
+ (to_mode, XEXP (x, 0), as, true));
break;
case PLUS:
case MULT:
- /* FIXME: For addition, we used to permute the conversion and
- addition operation only if one operand is a constant and
- converting the constant does not change it or if one operand
- is a constant and we are using a ptr_extend instruction
- (POINTERS_EXTEND_UNSIGNED < 0) even if the resulting address
- may overflow/underflow. We relax the condition to include
- zero-extend (POINTERS_EXTEND_UNSIGNED > 0) since the other
- parts of the compiler depend on it. See PR 49721.
-
+ /* For addition we can safely permute the conversion and addition
+ operation if one operand is a constant and converting the constant
+ does not change it or if one operand is a constant and we are
+ using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
We can always safely permute them if we are making the address
- narrower. */
+ narrower. Inside a CONST RTL, this is safe for both pointers
+ zero or sign extended as pointers cannot wrap. */
if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
|| (GET_CODE (x) == PLUS
&& CONST_INT_P (XEXP (x, 1))
- && (POINTERS_EXTEND_UNSIGNED != 0
- || XEXP (x, 1) == convert_memory_address_addr_space
- (to_mode, XEXP (x, 1), as))))
+ && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
+ || XEXP (x, 1) == convert_memory_address_addr_space_1
+ (to_mode, XEXP (x, 1), as, in_const)
+ || POINTERS_EXTEND_UNSIGNED < 0)))
return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
- convert_memory_address_addr_space
- (to_mode, XEXP (x, 0), as),
+ convert_memory_address_addr_space_1
+ (to_mode, XEXP (x, 0), as, in_const),
XEXP (x, 1));
break;
x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
+
+/* Given X, a memory address in address space AS' pointer mode, convert it to
+ an address in the address space's address mode, or vice versa (TO_MODE says
+ which way). We take advantage of the fact that pointers are not allowed to
+ overflow by commuting arithmetic operations over conversions so that address
+ arithmetic insns can be used.
+
+ This is the public entry point; it wraps the worker with IN_CONST false,
+ since at the outermost call X is not known to be inside a CONST rtx (the
+ worker recurses with IN_CONST true when it descends into a CONST). */
+
+rtx
+convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
+{
+ return convert_memory_address_addr_space_1 (to_mode, x, as, false);
+}
\f
+
/* Return something equivalent to X but valid as a memory address for something
of mode MODE in the named address space AS. When X is not itself valid,
this works by copying X or subexpressions of it into registers. */
rtx
-memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
+memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
rtx oldx = x;
- enum machine_mode address_mode = targetm.addr_space.address_mode (as);
+ machine_mode address_mode = targetm.addr_space.address_mode (as);
x = convert_memory_address_addr_space (address_mode, x, as);
{
rtx base;
HOST_WIDE_INT offset;
- enum machine_mode mode;
+ machine_mode mode;
if (!flag_section_anchors)
return x;
in case X is a constant. */
rtx
-copy_to_mode_reg (enum machine_mode mode, rtx x)
+copy_to_mode_reg (machine_mode mode, rtx x)
{
rtx temp = gen_reg_rtx (mode);
since we mark it as a "constant" register. */
rtx
-force_reg (enum machine_mode mode, rtx x)
+force_reg (machine_mode mode, rtx x)
{
rtx temp, set;
rtx_insn *insn;
MODE is the mode to use for X in case it is a constant. */
rtx
-copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
+copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
rtx temp;
FOR_RETURN is nonzero if the caller is promoting the return value
of FNDECL, else it is for promoting args. */
-enum machine_mode
-promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
+machine_mode
+promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
const_tree funtype, int for_return)
{
/* Called without a type node for a libcall. */
PUNSIGNEDP points to the signedness of the type and may be adjusted
to show what signedness to use on extension operations. */
-enum machine_mode
-promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
+machine_mode
+promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness
of DECL after promotion. */
-enum machine_mode
+machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
tree type = TREE_TYPE (decl);
int unsignedp = TYPE_UNSIGNED (type);
- enum machine_mode mode = DECL_MODE (decl);
- enum machine_mode pmode;
+ machine_mode mode = DECL_MODE (decl);
+ machine_mode pmode;
- if (TREE_CODE (decl) == RESULT_DECL
- || TREE_CODE (decl) == PARM_DECL)
+ if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
+ pmode = promote_function_mode (type, mode, &unsignedp,
+ TREE_TYPE (current_function_decl), 1);
+ else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
pmode = promote_function_mode (type, mode, &unsignedp,
TREE_TYPE (current_function_decl), 2);
else
return pmode;
}
+/* Return the promoted mode for name. If it is a named SSA_NAME, it
+ is the same as promote_decl_mode. Otherwise, it is the promoted
+ mode of a temp decl of same type as the SSA_NAME, if we had created
+ one.
+
+ PUNSIGNEDP, if non-NULL, receives the signedness to use for any
+ extension (it is passed through promote_decl_mode/promote_mode). */
+
+machine_mode
+promote_ssa_mode (const_tree name, int *punsignedp)
+{
+ gcc_assert (TREE_CODE (name) == SSA_NAME);
+
+ /* Partitions holding parms and results must be promoted as expected
+ by function.c. */
+ if (SSA_NAME_VAR (name)
+ && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
+ {
+ machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
+ if (mode != BLKmode)
+ return mode;
+ }
+
+ /* Anonymous SSA_NAMEs (and parm/result partitions whose decl mode came
+ back BLKmode): promote as we would a temporary of the name's type. */
+ tree type = TREE_TYPE (name);
+ int unsignedp = TYPE_UNSIGNED (type);
+ machine_mode mode = TYPE_MODE (type);
+
+ /* Bypass TYPE_MODE when it maps vector modes to BLKmode. */
+ if (mode == BLKmode)
+ {
+ gcc_assert (VECTOR_TYPE_P (type));
+ /* NOTE(review): reads the raw mode field instead of TYPE_MODE,
+ presumably because TYPE_MODE returns BLKmode for vector types the
+ target does not directly support — confirm against tree.h. */
+ mode = type->type_common.mode;
+ }
+
+ machine_mode pmode = promote_mode (type, mode, &unsignedp);
+ if (punsignedp)
+ *punsignedp = unsignedp;
+
+ return pmode;
+}
+
+
\f
/* Controls the behaviour of {anti_,}adjust_stack. */
static bool suppress_reg_args_size;
rtx temp;
rtx_insn *insn;
-#ifndef STACK_GROWS_DOWNWARD
/* Hereafter anti_p means subtract_p. */
- anti_p = !anti_p;
-#endif
+ if (!STACK_GROWS_DOWNWARD)
+ anti_p = !anti_p;
temp = expand_binop (Pmode,
anti_p ? sub_optab : add_optab,
{
rtx sa = *psave;
/* The default is that we use a move insn and save in a Pmode object. */
- rtx (*fcn) (rtx, rtx) = gen_move_insn;
- enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
+ rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
+ machine_mode mode = STACK_SAVEAREA_MODE (save_level);
/* See if this machine has anything special to do for this kind of save. */
switch (save_level)
{
-#ifdef HAVE_save_stack_block
case SAVE_BLOCK:
- if (HAVE_save_stack_block)
- fcn = gen_save_stack_block;
+ if (targetm.have_save_stack_block ())
+ fcn = targetm.gen_save_stack_block;
break;
-#endif
-#ifdef HAVE_save_stack_function
case SAVE_FUNCTION:
- if (HAVE_save_stack_function)
- fcn = gen_save_stack_function;
+ if (targetm.have_save_stack_function ())
+ fcn = targetm.gen_save_stack_function;
break;
-#endif
-#ifdef HAVE_save_stack_nonlocal
case SAVE_NONLOCAL:
- if (HAVE_save_stack_nonlocal)
- fcn = gen_save_stack_nonlocal;
+ if (targetm.have_save_stack_nonlocal ())
+ fcn = targetm.gen_save_stack_nonlocal;
break;
-#endif
default:
break;
}
emit_stack_restore (enum save_level save_level, rtx sa)
{
/* The default is that we use a move insn. */
- rtx (*fcn) (rtx, rtx) = gen_move_insn;
+ rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
/* If stack_realign_drap, the x86 backend emits a prologue that aligns both
STACK_POINTER and HARD_FRAME_POINTER.
/* See if this machine has anything special to do for this kind of save. */
switch (save_level)
{
-#ifdef HAVE_restore_stack_block
case SAVE_BLOCK:
- if (HAVE_restore_stack_block)
- fcn = gen_restore_stack_block;
+ if (targetm.have_restore_stack_block ())
+ fcn = targetm.gen_restore_stack_block;
break;
-#endif
-#ifdef HAVE_restore_stack_function
case SAVE_FUNCTION:
- if (HAVE_restore_stack_function)
- fcn = gen_restore_stack_function;
+ if (targetm.have_restore_stack_function ())
+ fcn = targetm.gen_restore_stack_function;
break;
-#endif
-#ifdef HAVE_restore_stack_nonlocal
case SAVE_NONLOCAL:
- if (HAVE_restore_stack_nonlocal)
- fcn = gen_restore_stack_nonlocal;
+ if (targetm.have_restore_stack_nonlocal ())
+ fcn = targetm.gen_restore_stack_nonlocal;
break;
-#endif
default:
break;
}
}
/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
- function. This function should be called whenever we allocate or
- deallocate dynamic stack space. */
+ function. This should be called whenever we allocate or deallocate
+ dynamic stack space. */
void
update_nonlocal_goto_save_area (void)
emit_stack_save (SAVE_NONLOCAL, &r_save);
}
+
+/* Record a new stack level for the current function. This should be called
+ whenever we allocate or deallocate dynamic stack space, so that the saved
+ stack pointers used by the mechanisms below stay in sync with the actual
+ stack level. */
+
+void
+record_new_stack_level (void)
+{
+ /* Record the new stack level for nonlocal gotos. */
+ if (cfun->nonlocal_goto_save_area)
+ update_nonlocal_goto_save_area ();
+
+ /* Record the new stack level for SJLJ exceptions. */
+ if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
+ update_sjlj_context ();
+}
\f
/* Return an rtx representing the address of an area of memory dynamically
pushed on the stack.
available_label = NULL;
-#ifdef HAVE_split_stack_space_check
- if (HAVE_split_stack_space_check)
+ if (targetm.have_split_stack_space_check ())
{
available_label = gen_label_rtx ();
/* This instruction will branch to AVAILABLE_LABEL if there
are SIZE bytes available on the stack. */
- emit_insn (gen_split_stack_space_check (size, available_label));
+ emit_insn (targetm.gen_split_stack_space_check
+ (size, available_label));
}
-#endif
/* The __morestack_allocate_stack_space function will allocate
memory using malloc. If the alignment of the memory returned
/* Perform the required allocation from the stack. Some systems do
this differently than simply incrementing/decrementing from the
stack pointer, such as acquiring the space by calling malloc(). */
-#ifdef HAVE_allocate_stack
- if (HAVE_allocate_stack)
+ if (targetm.have_allocate_stack ())
{
struct expand_operand ops[2];
/* We don't have to check against the predicate for operand 0 since
be valid for the operand. */
create_fixed_operand (&ops[0], target);
create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
- expand_insn (CODE_FOR_allocate_stack, 2, ops);
+ expand_insn (targetm.code_for_allocate_stack, 2, ops);
}
else
-#endif
{
int saved_stack_pointer_delta;
-#ifndef STACK_GROWS_DOWNWARD
- emit_move_insn (target, virtual_stack_dynamic_rtx);
-#endif
+ if (!STACK_GROWS_DOWNWARD)
+ emit_move_insn (target, virtual_stack_dynamic_rtx);
/* Check stack bounds if necessary. */
if (crtl->limit_stack)
{
rtx available;
rtx_code_label *space_available = gen_label_rtx ();
-#ifdef STACK_GROWS_DOWNWARD
- available = expand_binop (Pmode, sub_optab,
- stack_pointer_rtx, stack_limit_rtx,
- NULL_RTX, 1, OPTAB_WIDEN);
-#else
- available = expand_binop (Pmode, sub_optab,
- stack_limit_rtx, stack_pointer_rtx,
- NULL_RTX, 1, OPTAB_WIDEN);
-#endif
+ if (STACK_GROWS_DOWNWARD)
+ available = expand_binop (Pmode, sub_optab,
+ stack_pointer_rtx, stack_limit_rtx,
+ NULL_RTX, 1, OPTAB_WIDEN);
+ else
+ available = expand_binop (Pmode, sub_optab,
+ stack_limit_rtx, stack_pointer_rtx,
+ NULL_RTX, 1, OPTAB_WIDEN);
+
emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
space_available);
-#ifdef HAVE_trap
- if (HAVE_trap)
- emit_insn (gen_trap ());
+ if (targetm.have_trap ())
+ emit_insn (targetm.gen_trap ());
else
-#endif
error ("stack limits not supported on this target");
emit_barrier ();
emit_label (space_available);
crtl->preferred_stack_boundary alignment. */
stack_pointer_delta = saved_stack_pointer_delta;
-#ifdef STACK_GROWS_DOWNWARD
- emit_move_insn (target, virtual_stack_dynamic_rtx);
-#endif
+ if (STACK_GROWS_DOWNWARD)
+ emit_move_insn (target, virtual_stack_dynamic_rtx);
}
suppress_reg_args_size = false;
/* Now that we've committed to a return value, mark its alignment. */
mark_reg_pointer (target, required_align);
- /* Record the new stack level for nonlocal gotos. */
- if (cfun->nonlocal_goto_save_area != 0)
- update_nonlocal_goto_save_area ();
+ /* Record the new stack level. */
+ record_new_stack_level ();
return target;
}
void
emit_stack_probe (rtx address)
{
-#ifdef HAVE_probe_stack_address
- if (HAVE_probe_stack_address)
- emit_insn (gen_probe_stack_address (address));
+ if (targetm.have_probe_stack_address ())
+ emit_insn (targetm.gen_probe_stack_address (address));
else
-#endif
{
rtx memref = gen_rtx_MEM (word_mode, address);
MEM_VOLATILE_P (memref) = 1;
/* See if we have an insn to probe the stack. */
-#ifdef HAVE_probe_stack
- if (HAVE_probe_stack)
- emit_insn (gen_probe_stack (memref));
+ if (targetm.have_probe_stack ())
+ emit_insn (targetm.gen_probe_stack (memref));
else
-#endif
emit_move_insn (memref, const0_rtx);
}
}
#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
-#ifdef STACK_GROWS_DOWNWARD
+#if STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
}
/* Next see if we have an insn to check the stack. */
-#ifdef HAVE_check_stack
- else if (HAVE_check_stack)
+ else if (targetm.have_check_stack ())
{
struct expand_operand ops[1];
rtx addr = memory_address (Pmode,
size, first)));
bool success;
create_input_operand (&ops[0], addr, Pmode);
- success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
+ success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
gcc_assert (success);
}
-#endif
/* Otherwise we have to generate explicit probes. If we have a constant
small number of them to generate, that's the easy case. */
&& GET_MODE (val) == BLKmode)
{
unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
- enum machine_mode tmpmode;
+ machine_mode tmpmode;
/* int_size_in_bytes can return -1. We don't need a check here
since the value of bytes will then be large enough that no
in which a scalar value of mode MODE was returned by a library call. */
rtx
-hard_libcall_value (enum machine_mode mode, rtx fun)
+hard_libcall_value (machine_mode mode, rtx fun)
{
return targetm.calls.libcall_value (mode, fun);
}
/* Look up the tree code for a given rtx code
- to provide the arithmetic operation for REAL_ARITHMETIC.
+ to provide the arithmetic operation for real_arithmetic.
The function returns an int because the caller may not know
what `enum tree_code' means. */