Commit ff4c81cc by Trevor Saunders Committed by Jeff Law

df-scan.c (df_collection_rec): Adjust.

	* df-scan.c (df_collection_rec): Adjust.
	(copy_defs): New constant.
	(copy_uses): Likewise.
	(copy_eq_uses): Likewise.
	(copy_mw): Likewise.
	(copy_all): Likewise.
	(df_insn_rescan): Adjust.
	(df_notes_rescan): Likewise.
	(df_swap_refs): Likewise.
	(df_sort_and_compress_refs): Likewise.
	(df_sort_and_compress_mws): Likewise.
	(df_install_refs): Likewise.
	(df_install_mws): Likewise.
	(df_refs_add_to_chains): Add flags parameter controlling which vectors
	are copied.
	(df_bb_refs_record): Adjust.
	(df_record_entry_block_defs): Likewise.
	(df_record_exit_block_defs): Likewise.
	(df_refs_verify): Likewise.
	(df_mws_verify): Likewise.
	(df_insn_refs_verify): Likewise.
	(df_bb_verify): Likewise.
	* ipa-pure-const.c (finish_state): Remove.
	(propagate): Adjust.
	* tree-data-ref.c tree-ssa-alias.c tree-ssa-loop-ivcanon.c
	tree-ssa-threadedge.c tree-vect-loop-manip.c tree-vect-slp.c
	var-tracking.c: Adjust.
	* vec.c (stack_vecs): Remove.
	(register_stack_vec): Likewise.
	(stack_vec_register_index): Likewise.
	(unregister_stack_vec): Likewise.
	* vec.h (struct va_stack): Remove.
	(struct vec<T, A, vl_ptr>): Specialize as
	struct vec<T, va_heap, vl_ptr> instead since va_heap is the only
	allocation strategy compatible with the vl_ptr layout.
	(struct vec<T, va_gc, vl_ptr>): Remove because it now gets an empty
	specialization anyway.
	(class stack_vec): New class.
	(vec_stack_alloc): Remove.
	(vec<T, va_heap, vl_ptr>::using_auto_storage): New method.

	* gcc-interface/decl.c (components_to_record): Adjust.

From-SVN: r204137
parent b868b7ca
2013-10-28 Trevor Saunders <tsaunders@mozilla.com>
* df-scan.c (df_collection_rec): Adjust.
(copy_defs): New constant.
(copy_uses): Likewise.
(copy_eq_uses): Likewise.
(copy_mw): Likewise.
(copy_all): Likewise.
(df_insn_rescan): Adjust.
(df_notes_rescan): Likewise.
(df_swap_refs): Likewise.
(df_sort_and_compress_refs): Likewise.
(df_sort_and_compress_mws): Likewise.
(df_install_refs): Likewise.
(df_install_mws): Likewise.
(df_refs_add_to_chains): Add flags parameter controlling which vectors
are copied.
(df_bb_refs_record): Adjust.
(df_record_entry_block_defs): Likewise.
(df_record_exit_block_defs): Likewise.
(df_refs_verify): Likewise.
(df_mws_verify): Likewise.
(df_insn_refs_verify): Likewise.
(df_bb_verify): Likewise.
* ipa-pure-const.c (finish_state): Remove.
(propagate): Adjust.
* tree-data-ref.c tree-ssa-alias.c tree-ssa-loop-ivcanon.c
tree-ssa-threadedge.c tree-vect-loop-manip.c tree-vect-slp.c
var-tracking.c: Adjust.
* vec.c (stack_vecs): Remove.
(register_stack_vec): Likewise.
(stack_vec_register_index): Likewise.
(unregister_stack_vec): Likewise.
* vec.h (struct va_stack): Remove.
(struct vec<T, A, vl_ptr>): Specialize as
struct vec<T, va_heap, vl_ptr> instead since va_heap is the only
allocation strategy compatible with the vl_ptr layout.
(struct vec<T, va_gc, vl_ptr>): Remove because it now gets an empty
specialization anyway.
(class stack_vec): New class.
(vec_stack_alloc): Remove.
(vec<T, va_heap, vl_ptr>::using_auto_storage): New method.
2013-10-28 Alexander Ivchenko <alexander.ivchenko@intel.com>
Maxim Kuznetsov <maxim.kuznetsov@intel.com>
Sergey Lega <sergey.s.lega@intel.com>
2013-10-28 Trevor Saunders <tsaunders@mozilla.com>
* gcc-interface/decl.c (components_to_record): Adjust.
2013-10-24 Rainer Orth <ro@CeBiTec.Uni-Bielefeld.DE>
* gcc-interface/Make-lang.in (ADA_DEPS): Fix quoting.
......
......@@ -7003,13 +7003,11 @@ components_to_record (tree gnu_record_type, Node_Id gnat_component_list,
tree gnu_union_type, gnu_union_name;
tree this_first_free_pos, gnu_variant_list = NULL_TREE;
bool union_field_needs_strict_alignment = false;
vec <vinfo_t, va_stack> variant_types;
stack_vec <vinfo_t, 16> variant_types;
vinfo_t *gnu_variant;
unsigned int variants_align = 0;
unsigned int i;
vec_stack_alloc (vinfo_t, variant_types, 16);
if (TREE_CODE (gnu_name) == TYPE_DECL)
gnu_name = DECL_NAME (gnu_name);
......@@ -7205,9 +7203,6 @@ components_to_record (tree gnu_record_type, Node_Id gnat_component_list,
gnu_variant_list = gnu_field;
}
/* We are done with the variants. */
variant_types.release ();
/* Only make the QUAL_UNION_TYPE if there are non-empty variants. */
if (gnu_variant_list)
{
......
......@@ -190,15 +190,6 @@ warn_function_noreturn (tree decl)
true, warned_about, "noreturn");
}
/* Init the function state. */
/* Release the storage held by the global funct_state_vec, the
   per-function state vector used by the pure/const analysis.  */
static void
finish_state (void)
{
funct_state_vec.release ();
}
/* Return true if we have a function state for NODE. */
static inline bool
......@@ -1488,7 +1479,6 @@ propagate (void)
if (has_function_state (node))
free (get_function_state (node));
funct_state_vec.release ();
finish_state ();
return 0;
}
......
......@@ -4325,7 +4325,7 @@ typedef struct data_ref_loc_d
true if STMT clobbers memory, false otherwise. */
static bool
get_references_in_stmt (gimple stmt, vec<data_ref_loc, va_stack> *references)
get_references_in_stmt (gimple stmt, vec<data_ref_loc, va_heap> *references)
{
bool clobbers_memory = false;
data_ref_loc ref;
......@@ -4417,17 +4417,13 @@ find_data_references_in_stmt (struct loop *nest, gimple stmt,
vec<data_reference_p> *datarefs)
{
unsigned i;
vec<data_ref_loc, va_stack> references;
stack_vec<data_ref_loc, 2> references;
data_ref_loc *ref;
bool ret = true;
data_reference_p dr;
vec_stack_alloc (data_ref_loc, references, 2);
if (get_references_in_stmt (stmt, &references))
{
references.release ();
return false;
}
return false;
FOR_EACH_VEC_ELT (references, i, ref)
{
......@@ -4451,17 +4447,13 @@ graphite_find_data_references_in_stmt (loop_p nest, loop_p loop, gimple stmt,
vec<data_reference_p> *datarefs)
{
unsigned i;
vec<data_ref_loc, va_stack> references;
stack_vec<data_ref_loc, 2> references;
data_ref_loc *ref;
bool ret = true;
data_reference_p dr;
vec_stack_alloc (data_ref_loc, references, 2);
if (get_references_in_stmt (stmt, &references))
{
references.release ();
return false;
}
return false;
FOR_EACH_VEC_ELT (references, i, ref)
{
......
......@@ -736,11 +736,8 @@ aliasing_component_refs_p (tree ref1,
static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
vec<tree, va_stack> component_refs1;
vec<tree, va_stack> component_refs2;
vec_stack_alloc (tree, component_refs1, 16);
vec_stack_alloc (tree, component_refs2, 16);
stack_vec<tree, 16> component_refs1;
stack_vec<tree, 16> component_refs2;
/* Create the stack of handled components for REF1. */
while (handled_component_p (ref1))
......
......@@ -1100,7 +1100,7 @@ propagate_constants_for_unrolling (basic_block bb)
static bool
tree_unroll_loops_completely_1 (bool may_increase_size, bool unroll_outer,
vec<loop_p, va_stack>& father_stack,
vec<loop_p, va_heap>& father_stack,
struct loop *loop)
{
struct loop *loop_father;
......@@ -1164,12 +1164,11 @@ tree_unroll_loops_completely_1 (bool may_increase_size, bool unroll_outer,
unsigned int
tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
{
vec<loop_p, va_stack> father_stack;
stack_vec<loop_p, 16> father_stack;
bool changed;
int iteration = 0;
bool irred_invalidated = false;
vec_stack_alloc (loop_p, father_stack, 16);
do
{
changed = false;
......
......@@ -644,7 +644,7 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
i++;
}
vec<tree, va_stack> fewvars = vNULL;
stack_vec<tree, alloc_count> fewvars;
pointer_set_t *vars = NULL;
/* If we're already starting with 3/4 of alloc_count, go for a
......@@ -652,8 +652,6 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
VEC. */
if (i * 4 > alloc_count * 3)
vars = pointer_set_create ();
else if (alloc_count)
vec_stack_alloc (tree, fewvars, alloc_count);
/* Now go through the initial debug stmts in DEST again, this time
actually inserting in VARS or FEWVARS. Don't bother checking for
......
......@@ -115,7 +115,7 @@ typedef struct
with a PHI DEF that would soon become non-dominant, and when we got
to the suitable one, it wouldn't have anything to substitute any
more. */
static vec<adjust_info, va_stack> adjust_vec;
static vec<adjust_info, va_heap> adjust_vec;
/* Adjust any debug stmts that referenced AI->from values to use the
loop-closed AI->to, if the references are dominated by AI->bb and
......@@ -1133,7 +1133,7 @@ slpeel_tree_peel_loop_to_edge (struct loop *loop,
if (MAY_HAVE_DEBUG_STMTS)
{
gcc_assert (!adjust_vec.exists ());
vec_stack_alloc (adjust_info, adjust_vec, 32);
adjust_vec.create (32);
}
if (e == exit_e)
......
......@@ -1934,7 +1934,7 @@ vect_slp_analyze_operations (bb_vec_info bb_vinfo)
static unsigned
vect_bb_slp_scalar_cost (basic_block bb,
slp_tree node, vec<bool, va_stack> life)
slp_tree node, vec<bool, va_heap> *life)
{
unsigned scalar_cost = 0;
unsigned i;
......@@ -1948,7 +1948,7 @@ vect_bb_slp_scalar_cost (basic_block bb,
def_operand_p def_p;
stmt_vec_info stmt_info;
if (life[i])
if ((*life)[i])
continue;
/* If there is a non-vectorized use of the defs then the scalar
......@@ -1965,11 +1965,11 @@ vect_bb_slp_scalar_cost (basic_block bb,
|| gimple_bb (use_stmt) != bb
|| !STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (use_stmt)))
{
life[i] = true;
(*life)[i] = true;
BREAK_FROM_IMM_USE_STMT (use_iter);
}
}
if (life[i])
if ((*life)[i])
continue;
stmt_info = vinfo_for_stmt (stmt);
......@@ -2023,13 +2023,11 @@ vect_bb_vectorization_profitable_p (bb_vec_info bb_vinfo)
/* Calculate scalar cost. */
FOR_EACH_VEC_ELT (slp_instances, i, instance)
{
vec<bool, va_stack> life;
vec_stack_alloc (bool, life, SLP_INSTANCE_GROUP_SIZE (instance));
life.quick_grow_cleared (SLP_INSTANCE_GROUP_SIZE (instance));
stack_vec<bool, 20> life;
life.safe_grow_cleared (SLP_INSTANCE_GROUP_SIZE (instance));
scalar_cost += vect_bb_slp_scalar_cost (BB_VINFO_BB (bb_vinfo),
SLP_INSTANCE_TREE (instance),
life);
life.release ();
&life);
}
/* Complete the target-specific cost calculation. */
......
......@@ -7907,7 +7907,7 @@ struct expand_loc_callback_data
/* Stack of values and debug_exprs under expansion, and their
children. */
vec<rtx, va_stack> expanding;
stack_vec<rtx, 4> expanding;
/* Stack of values and debug_exprs whose expansion hit recursion
cycles. They will have VALUE_RECURSED_INTO marked when added to
......@@ -7915,7 +7915,7 @@ struct expand_loc_callback_data
resolves to a valid location. So, if the flag remains set at the
end of the search, we know no valid location for this one can
possibly exist. */
vec<rtx, va_stack> pending;
stack_vec<rtx, 4> pending;
/* The maximum depth among the sub-expressions under expansion.
Zero indicates no expansion so far. */
......@@ -8417,11 +8417,11 @@ vt_expand_loc_callback (rtx x, bitmap regs,
This function performs this finalization of NULL locations. */
static void
resolve_expansions_pending_recursion (vec<rtx, va_stack> pending)
resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
{
while (!pending.is_empty ())
while (!pending->is_empty ())
{
rtx x = pending.pop ();
rtx x = pending->pop ();
decl_or_value dv;
if (!VALUE_RECURSED_INTO (x))
......@@ -8441,8 +8441,6 @@ resolve_expansions_pending_recursion (vec<rtx, va_stack> pending)
do \
{ \
(d).vars = (v); \
vec_stack_alloc (rtx, (d).expanding, 4); \
vec_stack_alloc (rtx, (d).pending, 4); \
(d).depth.complexity = (d).depth.entryvals = 0; \
} \
while (0)
......@@ -8450,7 +8448,7 @@ resolve_expansions_pending_recursion (vec<rtx, va_stack> pending)
#define FINI_ELCD(d, l) \
do \
{ \
resolve_expansions_pending_recursion ((d).pending); \
resolve_expansions_pending_recursion (&(d).pending); \
(d).pending.release (); \
(d).expanding.release (); \
\
......@@ -8744,7 +8742,7 @@ emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
int
var_track_values_to_stack (variable_def **slot,
vec<rtx, va_stack> *changed_values_stack)
vec<rtx, va_heap> *changed_values_stack)
{
variable var = *slot;
......@@ -8779,7 +8777,7 @@ remove_value_from_changed_variables (rtx val)
static void
notify_dependents_of_changed_value (rtx val, variable_table_type htab,
vec<rtx, va_stack> *changed_values_stack)
vec<rtx, va_heap> *changed_values_stack)
{
variable_def **slot;
variable var;
......@@ -8864,13 +8862,11 @@ process_changed_values (variable_table_type htab)
{
int i, n;
rtx val;
vec<rtx, va_stack> changed_values_stack;
vec_stack_alloc (rtx, changed_values_stack, 20);
stack_vec<rtx, 20> changed_values_stack;
/* Move values from changed_variables to changed_values_stack. */
changed_variables
.traverse <vec<rtx, va_stack>*, var_track_values_to_stack>
.traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
(&changed_values_stack);
/* Back-propagate change notifications in values while popping
......@@ -8891,8 +8887,6 @@ process_changed_values (variable_table_type htab)
n--;
}
}
changed_values_stack.release ();
}
/* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
......
......@@ -217,49 +217,6 @@ vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve,
}
/* Stack vectors are a little different. VEC_alloc turns into a call
to vec<T, A>::stack_reserve and passes in space allocated via a
call to alloca. We record that pointer so that we know that we
shouldn't free it. If the vector is resized, we resize it on the
heap. We record the pointers in a vector and search it in LIFO
order--i.e., we look for the newest stack vectors first. We don't
expect too many stack vectors at any one level, and searching from
the end should normally be efficient even if they are used in a
recursive function. */
static vec<void *> stack_vecs;
/* Add a stack vector to STACK_VECS.  The recorded pointer lets
   stack_vec_register_index later recognize VEC as stack-allocated
   (searched in LIFO order).  */
void
register_stack_vec (void *vec)
{
stack_vecs.safe_push (vec);
}
/* If VEC is registered in STACK_VECS, return its slot index;
   otherwise return -1.  The scan runs newest-to-oldest so the most
   recently registered stack vectors are found first.  */
int
stack_vec_register_index (void *vec)
{
  unsigned ix = stack_vecs.length ();
  while (ix > 0)
    {
      --ix;
      if (stack_vecs[ix] == vec)
	return static_cast<int> (ix);
    }
  return -1;
}
/* Remove vector at slot IX from the list of registered stack vectors.
   unordered_remove swaps the last element into slot IX, so removal is
   O(1) but the relative order of later registrations is not kept.  */
void
unregister_stack_vec (unsigned ix)
{
stack_vecs.unordered_remove (ix);
}
/* Helper for qsort; sort descriptors by amount of memory consumed. */
static int
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment