/* Write the GIMPLE representation to a file stream.
- Copyright (C) 2009-2015 Free Software Foundation, Inc.
+ Copyright (C) 2009-2019 Free Software Foundation, Inc.
Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
Re-implemented by Diego Novillo <dnovillo@google.com>
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
-#include "alias.h"
-#include "symtab.h"
-#include "tree.h"
-#include "fold-const.h"
-#include "stor-layout.h"
-#include "stringpool.h"
-#include "hard-reg-set.h"
-#include "function.h"
+#include "backend.h"
+#include "target.h"
#include "rtl.h"
-#include "flags.h"
-#include "insn-config.h"
-#include "expmed.h"
-#include "dojump.h"
-#include "explow.h"
-#include "calls.h"
-#include "emit-rtl.h"
-#include "varasm.h"
-#include "stmt.h"
-#include "expr.h"
-#include "params.h"
-#include "predict.h"
-#include "dominance.h"
-#include "cfg.h"
-#include "basic-block.h"
-#include "tree-ssa-alias.h"
-#include "internal-fn.h"
-#include "gimple-expr.h"
+#include "tree.h"
#include "gimple.h"
-#include "gimple-iterator.h"
-#include "gimple-ssa.h"
-#include "tree-ssanames.h"
#include "tree-pass.h"
-#include "diagnostic-core.h"
+#include "ssa.h"
+#include "gimple-streamer.h"
+#include "alias.h"
+#include "stor-layout.h"
+#include "gimple-iterator.h"
#include "except.h"
#include "lto-symtab.h"
-#include "plugin-api.h"
-#include "ipa-ref.h"
#include "cgraph.h"
-#include "lto-streamer.h"
-#include "data-streamer.h"
-#include "gimple-streamer.h"
-#include "tree-streamer.h"
-#include "streamer-hooks.h"
#include "cfgloop.h"
#include "builtins.h"
#include "gomp-constants.h"
+#include "debug.h"
+#include "omp-offload.h"
+#include "print-tree.h"
static void lto_write_tree (struct output_block*, tree, bool);
ob->current_file = NULL;
ob->current_line = 0;
ob->current_col = 0;
+ ob->current_sysp = false;
}
create_output_block (enum lto_section_type section_type)
{
struct output_block *ob = XCNEW (struct output_block);
+ if (streamer_dump_file)
+ fprintf (streamer_dump_file, "Creating output block for %s\n",
+ lto_section_name [section_type]);
ob->section_type = section_type;
ob->decl_state = lto_get_out_decl_state ();
if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
&& DECL_CONTEXT (t))
return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
- /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
+ /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.
+ We should no longer need to stream it. */
else if (TREE_CODE (t) == IMPORTED_DECL)
- return false;
- else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
+ gcc_unreachable ();
+ else if (TREE_CODE (t) == LABEL_DECL)
+ return FORCED_LABEL (t) || DECL_NONLOCAL (t);
+ else if (((VAR_P (t) && !TREE_STATIC (t))
|| TREE_CODE (t) == TYPE_DECL
|| TREE_CODE (t) == CONST_DECL
|| TREE_CODE (t) == NAMELIST_DECL)
bp_pack_value (bp, ob->current_col != xloc.column, 1);
if (ob->current_file != xloc.file)
- bp_pack_string (ob, bp, xloc.file, true);
+ {
+ bp_pack_string (ob, bp, xloc.file, true);
+ bp_pack_value (bp, xloc.sysp, 1);
+ }
ob->current_file = xloc.file;
+ ob->current_sysp = xloc.sysp;
if (ob->current_line != xloc.line)
bp_pack_var_len_unsigned (bp, xloc.line);
case VAR_DECL:
case DEBUG_EXPR_DECL:
gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
+ /* FALLTHRU */
case PARM_DECL:
streamer_write_record_start (ob, LTO_global_decl_ref);
lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
name version in lto_output_tree_ref (see output_ssa_names). */
return !is_lang_specific (expr)
&& code != SSA_NAME
- && code != CALL_EXPR
&& code != LANG_TYPE
&& code != MODIFY_EXPR
&& code != INIT_EXPR
|| TREE_CODE_CLASS (code) != tcc_statement);
}
+/* Very rough estimate of streaming size of the initializer. If we ignored
+ presence of strings, we could simply just count number of non-indexable
+ tree nodes and number of references to indexable nodes. Strings however
+ may be very large and we do not want to dump them into the global stream.
+
+ Count the size of the initializer only while the budget in DATA stays positive.
+
+static tree
+subtract_estimated_size (tree *tp, int *ws, void *data)
+{
+ /* walk_tree callback.  DATA points to the remaining byte budget;
+    *WS (walk_subtrees) is cleared for indexable trees so their
+    bodies are not visited, since they stream as a mere reference.  */
+ long *sum = (long *)data;
+ if (tree_is_indexable (*tp))
+ {
+ /* Indexable tree is one reference to global stream.
+ Guess it may be about 4 bytes. */
+ *sum -= 4;
+ *ws = 0;
+ }
+ /* String table entry + base of tree node needs to be streamed. */
+ if (TREE_CODE (*tp) == STRING_CST)
+ *sum -= TREE_STRING_LENGTH (*tp) + 8;
+ else
+ {
+ /* Identifiers are also variable length but should not appear
+ naked in constructor. */
+ gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
+ /* We do not really make attempt to work out size of pickled tree, as
+ it is very variable. Make it bigger than the reference. */
+ *sum -= 16;
+ }
+ /* A non-NULL return stops walk_tree: signal the caller as soon as
+    the budget is exhausted.  */
+ if (*sum < 0)
+ return *tp;
+ return NULL_TREE;
+}
+
/* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
/* Handle DECL_INITIAL for symbols. */
tree initial = DECL_INITIAL (expr);
- if (TREE_CODE (expr) == VAR_DECL
+ if (VAR_P (expr)
&& (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
&& !DECL_IN_CONSTANT_POOL (expr)
&& initial)
varpool_node *vnode;
/* Extra section needs about 30 bytes; do not produce it for simple
scalar values. */
- if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
- || !(vnode = varpool_node::get (expr))
+ if (!(vnode = varpool_node::get (expr))
|| !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
initial = error_mark_node;
+ if (initial != error_mark_node)
+ {
+ long max_size = 30;
+ if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
+ NULL))
+ initial = error_mark_node;
+ }
}
return initial;
(ob->decl_state->symtab_node_encoder, expr);
stream_write_tree (ob, initial, ref_p);
}
+
+ /* Stream references to early generated DIEs. Keep in sync with the
+ trees handled in dwarf2out_die_ref_for_decl. */
+ if ((DECL_P (expr)
+ && TREE_CODE (expr) != FIELD_DECL
+ && TREE_CODE (expr) != DEBUG_EXPR_DECL
+ && TREE_CODE (expr) != TYPE_DECL)
+ || TREE_CODE (expr) == BLOCK)
+ {
+ const char *sym;
+ unsigned HOST_WIDE_INT off;
+ if (debug_info_level > DINFO_LEVEL_NONE
+ && debug_hooks->die_ref_for_decl (expr, &sym, &off))
+ {
+ streamer_write_string (ob, ob->main_stream, sym, true);
+ streamer_write_uhwi (ob, off);
+ }
+ else
+ streamer_write_string (ob, ob->main_stream, NULL, true);
+ }
}
/* Write a physical representation of tree node EXPR to output block
streamer_write_zero (ob);
}
-/* Emit the physical representation of tree node EXPR to output block
- OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
- via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
+/* Emit the physical representation of tree node EXPR to output block OB,
+ If THIS_REF_P is true, the leaves of EXPR are emitted as references via
+ lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
expr, hash, &ix);
gcc_assert (!exists_p);
- if (streamer_handle_as_builtin_p (expr))
- {
- /* MD and NORMAL builtins do not need to be written out
- completely as they are always instantiated by the
- compiler on startup. The only builtins that need to
- be written out are BUILT_IN_FRONTEND. For all other
- builtins, we simply write the class and code. */
- streamer_write_builtin (ob, expr);
- }
- else if (TREE_CODE (expr) == INTEGER_CST
- && !TREE_OVERFLOW (expr))
+ if (TREE_CODE (expr) == INTEGER_CST
+ && !TREE_OVERFLOW (expr))
{
/* Shared INTEGER_CST nodes are special because they need their
original type to be materialized by the reader (to implement
tree expr, bool ref_p, bool this_ref_p);
hashval_t
- hash_scc (struct output_block *ob, unsigned first, unsigned size);
+ hash_scc (struct output_block *ob, unsigned first, unsigned size,
+ bool ref_p, bool this_ref_p);
hash_map<tree, sccs *> sccstate;
vec<worklist> worklist_vec;
struct obstack sccstate_obstack;
};
+/* Emit the physical representation of tree node EXPR to output block OB,
+ using depth-first search on the subgraph. If THIS_REF_P is true, the
+ leaves of EXPR are emitted as references via lto_output_tree_ref.
+ REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
+ this is for a rewalk of a single leaf SCC. */
+
DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
bool single_p)
{
cstate->low = cstate->dfsnum;
w.cstate = cstate;
- if (streamer_handle_as_builtin_p (expr))
- ;
- else if (TREE_CODE (expr) == INTEGER_CST
- && !TREE_OVERFLOW (expr))
+ if (TREE_CODE (expr) == INTEGER_CST
+ && !TREE_OVERFLOW (expr))
DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
else
{
unsigned first, size;
tree x;
- /* If we are re-walking a single leaf-SCC just pop it,
+ /* If we are re-walking a single leaf SCC just pop it,
let earlier worklist item access the sccstack. */
if (single_p)
{
unsigned scc_entry_len = 0;
if (!flag_wpa)
{
- scc_hash = hash_scc (ob, first, size);
+ scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
/* Put the entries with the least number of collisions first. */
unsigned entry_start = 0;
std::swap (sccstack[first + i],
sccstack[first + entry_start + i]);
- if (scc_entry_len == 1)
- ; /* We already sorted SCC deterministically in hash_scc. */
- else
- /* Check that we have only one SCC.
- Naturally we may have conflicts if hash function is not
- strong enough. Lets see how far this gets. */
- {
-#ifdef ENABLE_CHECKING
- gcc_unreachable ();
-#endif
- }
+ /* We already sorted SCC deterministically in hash_scc. */
+
+ /* Check that we have only one SCC.
+ Naturally we may have conflicts if hash function is not
+ strong enough. Lets see how far this gets. */
+ gcc_checking_assert (scc_entry_len == 1);
}
/* Write LTO_tree_scc. */
"in LTO streams",
get_tree_code_name (TREE_CODE (t)));
- gcc_checking_assert (!streamer_handle_as_builtin_p (t));
-
/* Write the header, containing everything needed to
materialize EXPR on the reading side. */
streamer_write_tree_header (ob, t);
enum tree_code code;
+ if (streamer_dump_file)
+ {
+ print_node_brief (streamer_dump_file, " Streaming ",
+ expr, 4);
+ fprintf (streamer_dump_file, " to %s\n",
+ lto_section_name [ob->section_type]);
+ }
+
code = TREE_CODE (expr);
if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
{
- for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
- DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
+ unsigned int count = vector_cst_encoded_nelts (expr);
+ for (unsigned int i = 0; i < count; ++i)
+ DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
}
+ if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
+
if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
{
DFS_follow_tree_edge (TREE_REALPART (expr));
;
else
DFS_follow_tree_edge (DECL_NAME (expr));
- DFS_follow_tree_edge (DECL_CONTEXT (expr));
+ if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
+ && ! DECL_CONTEXT (expr))
+ DFS_follow_tree_edge ((*all_translation_units)[0]);
+ else
+ DFS_follow_tree_edge (DECL_CONTEXT (expr));
}
if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
- /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
- for early inlining so drop it on the floor instead of ICEing in
- dwarf2out.c. */
+ /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
+ declarations which should be eliminated by decl merging. Be sure none
+ leaks to this point. */
+ gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
+ DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
- if ((TREE_CODE (expr) == VAR_DECL
+ if ((VAR_P (expr)
|| TREE_CODE (expr) == PARM_DECL)
&& DECL_HAS_VALUE_EXPR_P (expr))
DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
- if (TREE_CODE (expr) == VAR_DECL)
+ if (VAR_P (expr)
+ && DECL_HAS_DEBUG_EXPR_P (expr))
DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
}
- if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
- {
- if (TREE_CODE (expr) == TYPE_DECL)
- DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
- }
-
if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
{
/* Make sure we don't inadvertently set the assembler name. */
DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
- DFS_follow_tree_edge (DECL_FCONTEXT (expr));
+ gcc_checking_assert (!DECL_FCONTEXT (expr));
}
if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
{
- DFS_follow_tree_edge (DECL_VINDEX (expr));
+ gcc_checking_assert (DECL_VINDEX (expr) == NULL);
DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
DFS_follow_tree_edge (TYPE_CONTEXT (expr));
/* TYPE_CANONICAL is re-computed during type merging, so no need
to follow it here. */
- DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
+ /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
+ it cannot be freed by free_lang_data without triggering ICEs in
+ langhooks. */
}
if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
if (!POINTER_TYPE_P (expr))
- DFS_follow_tree_edge (TYPE_MINVAL (expr));
- DFS_follow_tree_edge (TYPE_MAXVAL (expr));
- if (RECORD_OR_UNION_TYPE_P (expr))
- DFS_follow_tree_edge (TYPE_BINFO (expr));
+ DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
+ DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
}
if (CODE_CONTAINS_STRUCT (code, TS_LIST))
if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
{
for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
- if (VAR_OR_FUNCTION_DECL_P (t)
- && DECL_EXTERNAL (t))
- /* We have to stream externals in the block chain as
- non-references. See also
- tree-streamer-out.c:streamer_write_chain. */
- DFS_write_tree (ob, expr_state, t, ref_p, false);
- else
+ {
+ /* We would have to stream externals in the block chain as
+ non-references but we should have dropped them in
+ free-lang-data. */
+ gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
DFS_follow_tree_edge (t);
+ }
DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
+ DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
- /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
- handle - those that represent inlined function scopes.
- For the drop rest them on the floor instead of ICEing
- in dwarf2out.c. */
- if (inlined_function_outer_scope_p (expr))
- {
- tree ultimate_origin = block_ultimate_origin (expr);
- DFS_follow_tree_edge (ultimate_origin);
- }
/* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
information for early inlined BLOCKs so drop it on the floor instead
of ICEing in dwarf2out.c. */
DFS_follow_tree_edge (t);
DFS_follow_tree_edge (BINFO_OFFSET (expr));
DFS_follow_tree_edge (BINFO_VTABLE (expr));
- DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
- /* The number of BINFO_BASE_ACCESSES has already been emitted in
- EXPR's bitfield section. */
- FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
- DFS_follow_tree_edge (t);
-
- /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
- and BINFO_VPTR_INDEX; these are used by C++ FE only. */
+ /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
+ BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
+ by C++ FE only. */
}
if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
hstate.add_flag (TREE_PRIVATE (t));
if (TYPE_P (t))
{
- hstate.add_flag (TYPE_SATURATING (t));
+ hstate.add_flag (AGGREGATE_TYPE_P (t)
+ ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
hstate.add_flag (TYPE_ADDR_SPACE (t));
}
else if (code == SSA_NAME)
hstate.commit_flag ();
if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
- {
- int i;
- hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
- hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
- for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
- hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
- }
+ hstate.add_wide_int (wi::to_widest (t));
if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
{
if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
{
- hstate.add_wide_int (DECL_MODE (t));
+ hstate.add_hwi (DECL_MODE (t));
hstate.add_flag (DECL_NONLOCAL (t));
hstate.add_flag (DECL_VIRTUAL_P (t));
hstate.add_flag (DECL_IGNORED_P (t));
{
hstate.add_flag (DECL_PACKED (t));
hstate.add_flag (DECL_NONADDRESSABLE_P (t));
+ hstate.add_flag (DECL_PADDING_P (t));
hstate.add_int (DECL_OFFSET_ALIGN (t));
}
else if (code == VAR_DECL)
if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
{
- hstate.add_wide_int (TYPE_MODE (t));
+ hstate.add_hwi (TYPE_MODE (t));
hstate.add_flag (TYPE_STRING_FLAG (t));
/* TYPE_NO_FORCE_BLK is private to stor-layout and need
no streaming. */
- hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
hstate.add_flag (TYPE_PACKED (t));
hstate.add_flag (TYPE_RESTRICT (t));
hstate.add_flag (TYPE_USER_ALIGN (t));
}
else if (code == ARRAY_TYPE)
hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
+ if (AGGREGATE_TYPE_P (t))
+ hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
hstate.commit_flag ();
hstate.add_int (TYPE_PRECISION (t));
hstate.add_int (TYPE_ALIGN (t));
- hstate.add_int ((TYPE_ALIAS_SET (t) == 0
- || (!in_lto_p
- && get_alias_set (t) == 0))
- ? 0 : -1);
+ hstate.add_int (TYPE_EMPTY_P (t));
}
if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
/* We don't stream these when passing things to a different target. */
&& !lto_stream_offload_p)
- hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
+ hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
- hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
+ hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
}
if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
- for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
- visit (VECTOR_CST_ELT (t, i));
+ {
+ unsigned int count = vector_cst_encoded_nelts (t);
+ for (unsigned int i = 0; i < count; ++i)
+ visit (VECTOR_CST_ENCODED_ELT (t, i));
+ }
+
+ if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ visit (POLY_INT_CST_COEFF (t, i));
if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
{
be able to call get_symbol_initial_value. */
}
- if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
- {
- if (code == TYPE_DECL)
- visit (DECL_ORIGINAL_TYPE (t));
- }
-
if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
{
if (DECL_ASSEMBLER_NAME_SET_P (t))
visit (DECL_BIT_FIELD_TYPE (t));
visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
visit (DECL_FIELD_BIT_OFFSET (t));
- visit (DECL_FCONTEXT (t));
}
if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
{
- visit (DECL_VINDEX (t));
visit (DECL_FUNCTION_PERSONALITY (t));
visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
;
else
visit (TYPE_CONTEXT (t));
- visit (TYPE_STUB_DECL (t));
}
if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
|| code == METHOD_TYPE)
visit (TYPE_ARG_TYPES (t));
if (!POINTER_TYPE_P (t))
- visit (TYPE_MINVAL (t));
- visit (TYPE_MAXVAL (t));
- if (RECORD_OR_UNION_TYPE_P (t))
- visit (TYPE_BINFO (t));
+ visit (TYPE_MIN_VALUE_RAW (t));
+ visit (TYPE_MAX_VALUE_RAW (t));
}
if (CODE_CONTAINS_STRUCT (code, TS_LIST))
if (CODE_CONTAINS_STRUCT (code, TS_EXP))
{
- hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
+ hstate.add_hwi (TREE_OPERAND_LENGTH (t));
for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
visit (TREE_OPERAND (t, i));
}
visit (b);
visit (BINFO_OFFSET (t));
visit (BINFO_VTABLE (t));
- visit (BINFO_VPTR_FIELD (t));
- FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
- visit (b);
/* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
- and BINFO_VPTR_INDEX; these are used by C++ FE only. */
+ BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
+ by C++ FE only. */
}
if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
{
unsigned i;
tree index, value;
- hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
+ hstate.add_hwi (CONSTRUCTOR_NELTS (t));
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
{
visit (index);
int i;
HOST_WIDE_INT val;
- hstate.add_wide_int (OMP_CLAUSE_CODE (t));
+ hstate.add_hwi (OMP_CLAUSE_CODE (t));
switch (OMP_CLAUSE_CODE (t))
{
case OMP_CLAUSE_DEFAULT:
val = OMP_CLAUSE_PROC_BIND_KIND (t);
break;
case OMP_CLAUSE_REDUCTION:
+ case OMP_CLAUSE_TASK_REDUCTION:
+ case OMP_CLAUSE_IN_REDUCTION:
val = OMP_CLAUSE_REDUCTION_CODE (t);
break;
default:
val = 0;
break;
}
- hstate.add_wide_int (val);
+ hstate.add_hwi (val);
for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
visit (OMP_CLAUSE_OPERAND (t, i));
visit (OMP_CLAUSE_CHAIN (t));
return 0;
}
-/* Return a hash value for the SCC on the SCC stack from FIRST with
- size SIZE. */
+/* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
+ THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
hashval_t
-DFS::hash_scc (struct output_block *ob,
- unsigned first, unsigned size)
+DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
+ bool ref_p, bool this_ref_p)
{
unsigned int last_classes = 0, iterations = 0;
/* Compute hash values for the SCC members. */
for (unsigned i = 0; i < size; ++i)
- sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
- sccstack[first+i].t);
+ sccstack[first+i].hash
+ = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
if (size == 1)
return sccstack[first].hash;
/* We aim to get unique hash for every tree within SCC and compute hash value
- of the whole SCC by combing all values together in an stable (entry point
+ of the whole SCC by combining all values together in a stable (entry-point
independent) order. This guarantees that the same SCC regions within
different translation units will get the same hash values and therefore
will be merged at WPA time.
- Often the hashes are already unique. In that case we compute scc hash
+ Often the hashes are already unique. In that case we compute the SCC hash
by combining individual hash values in an increasing order.
- If thre are duplicates we seek at least one tree with unique hash (and
- pick one with minimal hash and this property). Then we obtain stable
- order by DFS walk starting from this unique tree and then use index
+ If there are duplicates, we seek at least one tree with unique hash (and
+ pick one with minimal hash and this property). Then we obtain a stable
+ order by DFS walk starting from this unique tree and then use the index
within this order to make individual hash values unique.
If there is no tree with unique hash, we iteratively propagate the hash
values across the internal edges of SCC. This usually quickly leads
to unique hashes. Consider, for example, an SCC containing two pointers
- that are identical except for type they point and assume that these
- types are also part of the SCC.
- The propagation will add the points-to type information into their hash
- values. */
+ that are identical except for the types they point to and assume that
+ these types are also part of the SCC. The propagation will add the
+ points-to type information into their hash values. */
do
{
- /* Sort the SCC so we can easily see check for uniqueness. */
+ /* Sort the SCC so we can easily check for uniqueness. */
qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
unsigned int classes = 1;
int firstunique = -1;
- /* Find tree with lowest unique hash (if it exists) and compute
- number of equivalence classes. */
+ /* Find the tree with lowest unique hash (if it exists) and compute
+ the number of equivalence classes. */
if (sccstack[first].hash != sccstack[first+1].hash)
firstunique = 0;
for (unsigned i = 1; i < size; ++i)
firstunique = i;
}
- /* If we found tree with unique hash; stop the iteration. */
+ /* If we found a tree with unique hash, stop the iteration. */
if (firstunique != -1
/* Also terminate if we run out of iterations or if the number of
equivalence classes is no longer increasing.
hashval_t scc_hash;
/* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
- starting from FIRSTUNIQUE to obstain stable order. */
+ starting from FIRSTUNIQUE to obtain a stable order. */
if (classes != size && firstunique != -1)
{
hash_map <tree, hashval_t> map(size*2);
/* Store hash values into a map, so we can associate them with
- reordered SCC. */
+ the reordered SCC. */
for (unsigned i = 0; i < size; ++i)
map.put (sccstack[first+i].t, sccstack[first+i].hash);
- DFS again (ob, sccstack[first+firstunique].t, false, false, true);
+ DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
+ true);
gcc_assert (again.sccstack.length () == size);
memcpy (sccstack.address () + first,
/* Update hash values of individual members by hashing in the
index within the stable order. This ensures uniqueness.
- Also compute the scc_hash by mixing in all hash values in the
- stable order we obtained. */
+ Also compute the SCC hash by mixing in all hash values in
+ the stable order we obtained. */
sccstack[first].hash = *map.get (sccstack[first].t);
scc_hash = sccstack[first].hash;
for (unsigned i = 1; i < size; ++i)
sccstack[first+i].hash
= iterative_hash_hashval_t (i,
*map.get (sccstack[first+i].t));
- scc_hash = iterative_hash_hashval_t (scc_hash,
- sccstack[first+i].hash);
+ scc_hash
+ = iterative_hash_hashval_t (scc_hash,
+ sccstack[first+i].hash);
}
}
- /* If we got unique hash values for each tree, then sort already
- ensured entry point independent order. Only compute the final
- scc hash.
+ /* If we got a unique hash value for each tree, then sort already
+ ensured entry-point independent order. Only compute the final
+ SCC hash.
If we failed to find the unique entry point, we go by the same
- route. We will eventually introduce unwanted hash conflicts. */
+ route. We will eventually introduce unwanted hash conflicts. */
else
{
scc_hash = sccstack[first].hash;
for (unsigned i = 1; i < size; ++i)
- scc_hash = iterative_hash_hashval_t (scc_hash,
- sccstack[first+i].hash);
- /* We can not 100% guarantee that the hash will not conflict in
- in a way so the unique hash is not found. This however
- should be extremely rare situation. ICE for now so possible
- issues are found and evaulated. */
+ scc_hash
+ = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
+
+ /* We cannot 100% guarantee that the hash won't conflict so as
+ to make it impossible to find a unique hash. This however
+ should be an extremely rare case. ICE for now so possible
+ issues are found and evaluated. */
gcc_checking_assert (classes == size);
}
- /* To avoid conflicts across SCCs iteratively hash the whole SCC
- hash into the hash of each of the elements. */
+ /* To avoid conflicts across SCCs, iteratively hash the whole SCC
+ hash into the hash of each element. */
for (unsigned i = 0; i < size; ++i)
sccstack[first+i].hash
= iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
/* We failed to identify the entry point; propagate hash values across
the edges. */
- {
- hash_map <tree, hashval_t> map(size*2);
- for (unsigned i = 0; i < size; ++i)
- map.put (sccstack[first+i].t, sccstack[first+i].hash);
+ hash_map <tree, hashval_t> map(size*2);
- for (unsigned i = 0; i < size; i++)
- sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
- sccstack[first+i].t);
- }
+ for (unsigned i = 0; i < size; ++i)
+ map.put (sccstack[first+i].t, sccstack[first+i].hash);
+
+ for (unsigned i = 0; i < size; i++)
+ sccstack[first+i].hash
+ = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
}
while (true);
}
}
-/* Emit the physical representation of tree node EXPR to output block
- OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
- via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
+/* Emit the physical representation of tree node EXPR to output block OB.
+ If THIS_REF_P is true, the leaves of EXPR are emitted as references via
+ lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
void
lto_output_tree (struct output_block *ob, tree expr,
we stream out. */
gcc_assert (!in_dfs_walk);
+ if (streamer_dump_file)
+ {
+ print_node_brief (streamer_dump_file, " Streaming SCC of ",
+ expr, 4);
+ fprintf (streamer_dump_file, "\n");
+ }
+
/* Start the DFS walk. */
/* Save ob state ... */
/* let's see ... */
streamer_write_uhwi (ob, ix);
streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
lto_tree_code_to_tag (TREE_CODE (expr)));
+ if (streamer_dump_file)
+ {
+ print_node_brief (streamer_dump_file, " Finished SCC of ",
+ expr, 4);
+ fprintf (streamer_dump_file, "\n\n");
+ }
lto_stats.num_pickle_refs_output++;
}
}
if (ptr == NULL_TREE
|| SSA_NAME_IN_FREE_LIST (ptr)
- || virtual_operand_p (ptr))
+ || virtual_operand_p (ptr)
+ /* Simply skip unreleased SSA names. */
+ || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
+ && (! SSA_NAME_DEF_STMT (ptr)
+ || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
continue;
streamer_write_uhwi (ob, i);
}
-/* Output a wide-int. */
-
-static void
-streamer_write_wi (struct output_block *ob,
- const widest_int &w)
-{
- int len = w.get_len ();
-
- streamer_write_uhwi (ob, w.get_precision ());
- streamer_write_uhwi (ob, len);
- for (int i = 0; i < len; i++)
- streamer_write_hwi (ob, w.elt (i));
-}
-
/* Output the cfg. */
FOR_EACH_EDGE (e, ei, bb->succs)
{
streamer_write_uhwi (ob, e->dest->index);
- streamer_write_hwi (ob, e->probability);
- streamer_write_gcov_count (ob, e->count);
+ e->probability.stream_out (ob);
streamer_write_uhwi (ob, e->flags);
}
}
loop_estimation, EST_LAST, loop->estimate_state);
streamer_write_hwi (ob, loop->any_upper_bound);
if (loop->any_upper_bound)
- streamer_write_wi (ob, loop->nb_iterations_upper_bound);
+ streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
+ streamer_write_hwi (ob, loop->any_likely_upper_bound);
+ if (loop->any_likely_upper_bound)
+ streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
streamer_write_hwi (ob, loop->any_estimate);
if (loop->any_estimate)
- streamer_write_wi (ob, loop->nb_iterations_estimate);
+ streamer_write_widest_int (ob, loop->nb_iterations_estimate);
/* Write OMP SIMD related info. */
streamer_write_hwi (ob, loop->safelen);
+ streamer_write_hwi (ob, loop->unroll);
streamer_write_hwi (ob, loop->dont_vectorize);
streamer_write_hwi (ob, loop->force_vectorize);
stream_write_tree (ob, loop->simduid, true);
bp_pack_value (&bp, fn->after_inlining, 1);
bp_pack_value (&bp, fn->stdarg, 1);
bp_pack_value (&bp, fn->has_nonlocal_label, 1);
+ bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
bp_pack_value (&bp, fn->calls_alloca, 1);
bp_pack_value (&bp, fn->calls_setjmp, 1);
bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
stream_output_location (ob, &bp, fn->function_start_locus);
stream_output_location (ob, &bp, fn->function_end_locus);
+ /* Save the instance discriminator if present. */
+ int *instance_number_p = NULL;
+ if (decl_to_instance_map)
+ instance_number_p = decl_to_instance_map->get (fn->decl);
+ bp_pack_value (&bp, !!instance_number_p, 1);
+ if (instance_number_p)
+ bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);
+
streamer_write_bitpack (&bp);
}
+/* Collect all leaf BLOCKs beyond ROOT into LEAFS.  A leaf is a BLOCK
+   with no sub-blocks; the walk covers the whole sub-block tree.  */
+
+static void
+collect_block_tree_leafs (tree root, vec<tree> &leafs)
+{
+ for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
+ if (! BLOCK_SUBBLOCKS (root))
+ leafs.safe_push (root);
+ else
+ /* Recurse on ROOT itself, not BLOCK_SUBBLOCKS (ROOT): the callee
+ takes BLOCK_SUBBLOCKS of its argument, so passing the sub-block
+ chain here would skip one nesting level and lose the chained
+ siblings' subtrees (leaf BLOCKs below depth two).  */
+ collect_block_tree_leafs (root, leafs);
+}
+
+
/* Output the body of function NODE->DECL. */
static void
basic_block bb;
struct output_block *ob;
+ if (streamer_dump_file)
+ fprintf (streamer_dump_file, "\nStreaming body of %s\n",
+ node->name ());
+
function = node->decl;
fn = DECL_STRUCT_FUNCTION (function);
ob = create_output_block (LTO_section_function_body);
stream_write_tree (ob, DECL_RESULT (function), true);
streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
+ /* Output debug args if available. */
+ vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
+ if (! debugargs)
+ streamer_write_uhwi (ob, 0);
+ else
+ {
+ streamer_write_uhwi (ob, (*debugargs)->length ());
+ for (unsigned i = 0; i < (*debugargs)->length (); ++i)
+ stream_write_tree (ob, (**debugargs)[i], true);
+ }
+
/* Output DECL_INITIAL for the function, which contains the tree of
lexical scopes. */
stream_write_tree (ob, DECL_INITIAL (function), true);
+ /* As we do not recurse into BLOCK_SUBBLOCKS but only BLOCK_SUPERCONTEXT
+ collect block tree leafs and stream those. */
+ auto_vec<tree> block_tree_leafs;
+ if (DECL_INITIAL (function))
+ collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
+ streamer_write_uhwi (ob, block_tree_leafs.length ());
+ for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
+ stream_write_tree (ob, block_tree_leafs[i], true);
/* We also stream abstract functions where we stream only stuff needed for
debug info. */
if (gimple_has_body_p (function))
{
+ /* Fixup loops if required to match discovery done in the reader. */
+ loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
+
streamer_write_uhwi (ob, 1);
output_struct_function_base (ob, fn);
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
}
output_cfg (ob, fn);
+ loop_optimizer_finalize ();
pop_cfun ();
}
else
produce_asm (ob, function);
destroy_output_block (ob);
+ if (streamer_dump_file)
+ fprintf (streamer_dump_file, "Finished streaming %s\n",
+ node->name ());
}
/* Output the body of function NODE->DECL. */
tree var = node->decl;
struct output_block *ob;
+ if (streamer_dump_file)
+ fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
+ node->name ());
+
ob = create_output_block (LTO_section_function_body);
clear_line_info (ob);
produce_asm (ob, var);
destroy_output_block (ob);
+ if (streamer_dump_file)
+ fprintf (streamer_dump_file, "Finished streaming %s\n",
+ node->name ());
}
struct lto_in_decl_state *in_state;
struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
- lto_begin_section (section_name, !flag_wpa);
+ if (streamer_dump_file)
+ fprintf (streamer_dump_file, "Copying section for %s\n", name);
+ lto_begin_section (section_name, false);
free (section_name);
/* We may have renamed the declaration, e.g., a static function. */
name = lto_get_decl_name_mapping (file_data, name);
- data = lto_get_section_data (file_data, LTO_section_function_body,
- name, &len);
+ data = lto_get_raw_section_data (file_data, LTO_section_function_body,
+ name, &len);
gcc_assert (data);
/* Do a bit copy of the function body. */
- lto_write_data (data, len);
+ lto_write_raw_data (data, len);
/* Copy decls. */
in_state =
lto_get_function_in_decl_state (node->lto_file_data, function);
+ out_state->compressed = in_state->compressed;
gcc_assert (in_state);
for (i = 0; i < LTO_N_DECL_STREAMS; i++)
encoder->trees.safe_push ((*trees)[j]);
}
- lto_free_section_data (file_data, LTO_section_function_body, name,
- data, len);
+ lto_free_raw_section_data (file_data, LTO_section_function_body, name,
+ data, len);
lto_end_section ();
}
{
tree t = *tp;
if (handled_component_p (t)
- && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
+ && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
+ && TREE_PUBLIC (TREE_OPERAND (t, 0)))
{
tree decl = TREE_OPERAND (t, 0);
tree ptrtype = build_pointer_type (TREE_TYPE (decl));
return NULL_TREE;
}
+/* Remove functions that are no longer used from offload_funcs, and mark the
+ remaining ones with DECL_PRESERVE_P. */
+
+static void
+prune_offload_funcs (void)
+{
+ /* Nothing to do if no offload function table was ever created. */
+ if (!offload_funcs)
+ return;
+
+ unsigned ix, ix2;
+ tree *elem_ptr;
+ /* Drop entries whose cgraph node has been removed (the function was
+ eliminated, e.g. as unused); the macro compacts the vector in place
+ while preserving the order of the surviving elements. */
+ VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
+ cgraph_node::get (*elem_ptr) == NULL);
+
+ tree fn_decl;
+ /* Mark the survivors so later passes do not remove them; they must
+ stay around to be emitted into the offload tables. */
+ FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
+ DECL_PRESERVE_P (fn_decl) = 1;
+}
+
/* Main entry point from the pass manager. */
void
lto_output (void)
{
struct lto_out_decl_state *decl_state;
-#ifdef ENABLE_CHECKING
- bitmap output = lto_bitmap_alloc ();
-#endif
+ bitmap output = NULL;
int i, n_nodes;
lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
+ prune_offload_funcs ();
+
+ if (flag_checking)
+ output = lto_bitmap_alloc ();
+
/* Initialize the streamer. */
lto_streamer_init ();
if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
{
if (lto_symtab_encoder_encode_body_p (encoder, node)
- && !node->alias)
+ && !node->alias
+ && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
{
-#ifdef ENABLE_CHECKING
- gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
- bitmap_set_bit (output, DECL_UID (node->decl));
-#endif
+ if (flag_checking)
+ {
+ gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
+ bitmap_set_bit (output, DECL_UID (node->decl));
+ }
decl_state = lto_new_out_decl_state ();
lto_push_out_decl_state (decl_state);
- if (gimple_has_body_p (node->decl) || !flag_wpa
+ if (gimple_has_body_p (node->decl)
+ || (!flag_wpa
+ && flag_incremental_link != INCREMENTAL_LINK_LTO)
/* Thunks have no body but they may be synthetized
at WPA time. */
|| DECL_ARGUMENTS (node->decl))
&& !node->alias)
{
timevar_push (TV_IPA_LTO_CTORS_OUT);
-#ifdef ENABLE_CHECKING
- gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
- bitmap_set_bit (output, DECL_UID (node->decl));
-#endif
+ if (flag_checking)
+ {
+ gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
+ bitmap_set_bit (output, DECL_UID (node->decl));
+ }
decl_state = lto_new_out_decl_state ();
lto_push_out_decl_state (decl_state);
if (DECL_INITIAL (node->decl) != error_mark_node
- || !flag_wpa)
+ || (!flag_wpa
+ && flag_incremental_link != INCREMENTAL_LINK_LTO))
output_constructor (node);
else
copy_function_or_variable (node);
output_offload_tables ();
-#ifdef ENABLE_CHECKING
+#if CHECKING_P
lto_bitmap_free (output);
#endif
}
for (index = 0; index < size; index++)
{
t = lto_tree_ref_encoder_get_tree (encoder, index);
+ if (streamer_dump_file)
+ {
+ fprintf (streamer_dump_file, " %i:", (int)index);
+ print_node_brief (streamer_dump_file, "", t, 4);
+ fprintf (streamer_dump_file, "\n");
+ }
if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
stream_write_tree (ob, t, false);
}
for (index = 0; index < size; index++)
{
- uint32_t slot_num;
+ unsigned slot_num;
t = lto_tree_ref_encoder_get_tree (encoder, index);
streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
struct lto_out_decl_state *state)
{
unsigned i;
- uint32_t ref;
+ unsigned ref;
tree decl;
/* Write reference to FUNCTION_DECL. If there is not function,
decl = (state->fn_decl) ? state->fn_decl : void_type_node;
streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
gcc_assert (ref != (unsigned)-1);
+ ref = ref * 2 + (state->compressed ? 1 : 0);
lto_write_data (&ref, sizeof (uint32_t));
for (i = 0; i < LTO_N_DECL_STREAMS; i++)
const char *comdat;
unsigned char c;
- /* None of the following kinds of symbols are needed in the
- symbol table. */
- if (!TREE_PUBLIC (t)
- || is_builtin_fn (t)
- || DECL_ABSTRACT_P (t)
- || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
- return;
- gcc_assert (TREE_CODE (t) != RESULT_DECL);
+ gcc_checking_assert (TREE_PUBLIC (t)
+ && (TREE_CODE (t) != FUNCTION_DECL
+ || !fndecl_built_in_p (t))
+ && !DECL_ABSTRACT_P (t)
+ && (!VAR_P (t) || !DECL_HARD_REGISTER (t)));
- gcc_assert (TREE_CODE (t) == VAR_DECL
- || TREE_CODE (t) == FUNCTION_DECL);
+ gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
kind = GCCPK_DEF;
/* When something is defined, it should have node attached. */
- gcc_assert (alias || TREE_CODE (t) != VAR_DECL
- || varpool_node::get (t)->definition);
+ gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
|| (cgraph_node::get (t)
&& cgraph_node::get (t)->definition));
lto_write_data (&slot_num, 4);
}
-/* Return true if NODE should appear in the plugin symbol table. */
-
-bool
-output_symbol_p (symtab_node *node)
-{
- struct cgraph_node *cnode;
- if (!node->real_symbol_p ())
- return false;
- /* We keep external functions in symtab for sake of inlining
- and devirtualization. We do not want to see them in symbol table as
- references unless they are really used. */
- cnode = dyn_cast <cgraph_node *> (node);
- if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
- && cnode->callers)
- return true;
-
- /* Ignore all references from external vars initializers - they are not really
- part of the compilation unit until they are used by folding. Some symbols,
- like references to external construction vtables can not be referred to at all.
- We decide this at can_refer_decl_in_current_unit_p. */
- if (!node->definition || DECL_EXTERNAL (node->decl))
- {
- int i;
- struct ipa_ref *ref;
- for (i = 0; node->iterate_referring (i, ref); i++)
- {
- if (ref->use == IPA_REF_ALIAS)
- continue;
- if (is_a <cgraph_node *> (ref->referring))
- return true;
- if (!DECL_EXTERNAL (ref->referring->decl))
- return true;
- }
- return false;
- }
- return true;
-}
-
-
/* Write an IL symbol table to OB.
SET and VSET are cgraph/varpool node sets we are outputting. */
{
symtab_node *node = lsei_node (lsei);
- if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
continue;
write_symbol (cache, node->decl, &seen, false);
}
{
symtab_node *node = lsei_node (lsei);
- if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
+ if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
continue;
write_symbol (cache, node->decl, &seen, false);
}
ob = create_output_block (LTO_section_mode_table);
bitpack_d bp = bitpack_create (ob->main_stream);
- /* Ensure that for GET_MODE_INNER (m) != VOIDmode we have
+ /* Ensure that for GET_MODE_INNER (m) != m we have
also the inner mode marked. */
for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
if (streamer_mode_table[i])
{
machine_mode m = (machine_mode) i;
- if (GET_MODE_INNER (m) != VOIDmode)
- streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
+ machine_mode inner_m = GET_MODE_INNER (m);
+ if (inner_m != m)
+ streamer_mode_table[(int) inner_m] = 1;
}
- /* First stream modes that have GET_MODE_INNER (m) == VOIDmode,
+ /* First stream modes that have GET_MODE_INNER (m) == m,
so that we can refer to them afterwards. */
for (int pass = 0; pass < 2; pass++)
for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
{
machine_mode m = (machine_mode) i;
- if ((GET_MODE_INNER (m) == VOIDmode) ^ (pass == 0))
+ if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
continue;
bp_pack_value (&bp, m, 8);
bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
- bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
- bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
+ bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
+ bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
bp_pack_value (&bp, GET_MODE_INNER (m), 8);
- bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
+ bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
switch (GET_MODE_CLASS (m))
{
case MODE_FRACT:
}
/* Write the global symbols. */
+ if (streamer_dump_file)
+ fprintf (streamer_dump_file, "Outputting global stream\n");
lto_output_decl_state_streams (ob, out_state);
num_fns = lto_function_decl_states.length ();
for (idx = 0; idx < num_fns; idx++)
{
fn_out_state =
lto_function_decl_states[idx];
+ if (streamer_dump_file)
+ fprintf (streamer_dump_file, "Outputting stream for %s\n",
+ IDENTIFIER_POINTER
+ (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
lto_output_decl_state_streams (ob, fn_out_state);
}