Commit 526ceb68 by Trevor Saunders (committed by Trevor Saunders)

remove many typedefs

gcc/ChangeLog:

2015-10-01  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* cfganal.c, compare-elim.c, coverage.c, cprop.c, df-scan.c,
	function.c, read-rtl.c, statistics.c, trans-mem.c, tree-if-conv.c,
	tree-into-ssa.c, tree-loop-distribution.c, tree-ssa-coalesce.c,
	tree-ssa-loop-ivopts.c, tree-ssa-reassoc.c, tree-ssa-strlen.c,
	tree-ssa-tail-merge.c, tree-vrp.c, var-tracking.c: Remove
unneeded typedefs.

From-SVN: r228344
parent f6f69fb0
2015-10-01 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
* cfganal.c, compare-elim.c, coverage.c, cprop.c, df-scan.c,
function.c, read-rtl.c, statistics.c, trans-mem.c, tree-if-conv.c,
tree-into-ssa.c, tree-loop-distribution.c, tree-ssa-coalesce.c,
tree-ssa-loop-ivopts.c, tree-ssa-reassoc.c, tree-ssa-strlen.c,
tree-ssa-tail-merge.c, tree-vrp.c, var-tracking.c: Remove unneeded typedefs.
2015-10-01 Marek Polacek <polacek@redhat.com>
PR c/65345
......@@ -29,7 +29,7 @@ along with GCC; see the file COPYING3. If not see
#include "timevar.h"
/* Store the data structures necessary for depth-first search. */
struct depth_first_search_dsS {
struct depth_first_search_ds {
/* stack for backtracking during the algorithm */
basic_block *stack;
......@@ -40,14 +40,13 @@ struct depth_first_search_dsS {
/* record of basic blocks already seen by depth-first search */
sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
static void flow_dfs_compute_reverse_init (depth_first_search_ds);
static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds,
static void flow_dfs_compute_reverse_init (depth_first_search_ds *);
static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds *,
basic_block);
static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds,
static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds *,
basic_block);
static void flow_dfs_compute_reverse_finish (depth_first_search_ds);
static void flow_dfs_compute_reverse_finish (depth_first_search_ds *);
/* Mark the back edges in DFS traversal.
Return nonzero if a loop (natural or otherwise) is present.
......@@ -575,7 +574,7 @@ connect_infinite_loops_to_exit (void)
{
basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
basic_block deadend_block;
struct depth_first_search_dsS dfs_ds;
depth_first_search_ds dfs_ds;
/* Perform depth-first search in the reverse graph to find nodes
reachable from the exit block. */
......@@ -1055,7 +1054,7 @@ pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
element on the stack. */
static void
flow_dfs_compute_reverse_init (depth_first_search_ds data)
flow_dfs_compute_reverse_init (depth_first_search_ds *data)
{
/* Allocate stack for back-tracking up CFG. */
data->stack = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
......@@ -1075,7 +1074,7 @@ flow_dfs_compute_reverse_init (depth_first_search_ds data)
block. */
static void
flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
flow_dfs_compute_reverse_add_bb (depth_first_search_ds *data, basic_block bb)
{
data->stack[data->sp++] = bb;
bitmap_set_bit (data->visited_blocks, bb->index);
......@@ -1087,7 +1086,7 @@ flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
available. */
static basic_block
flow_dfs_compute_reverse_execute (depth_first_search_ds data,
flow_dfs_compute_reverse_execute (depth_first_search_ds *data,
basic_block last_unvisited)
{
basic_block bb;
......@@ -1116,7 +1115,7 @@ flow_dfs_compute_reverse_execute (depth_first_search_ds data,
reverse graph. */
static void
flow_dfs_compute_reverse_finish (depth_first_search_ds data)
flow_dfs_compute_reverse_finish (depth_first_search_ds *data)
{
free (data->stack);
sbitmap_free (data->visited_blocks);
......
......@@ -121,9 +121,7 @@ struct comparison
bool inputs_valid;
};
typedef struct comparison *comparison_struct_p;
static vec<comparison_struct_p> all_compares;
static vec<comparison *> all_compares;
/* Look for a "conforming" comparison, as defined above. If valid, return
the rtx for the COMPARE itself. */
......
......@@ -77,7 +77,7 @@ struct GTY((chain_next ("%h.next"))) coverage_data
};
/* Counts information for a function. */
typedef struct counts_entry : pointer_hash <counts_entry>
struct counts_entry : pointer_hash <counts_entry>
{
/* We hash by */
unsigned ident;
......@@ -93,7 +93,7 @@ typedef struct counts_entry : pointer_hash <counts_entry>
static inline hashval_t hash (const counts_entry *);
static int equal (const counts_entry *, const counts_entry *);
static void remove (counts_entry *);
} counts_entry_t;
};
static GTY(()) struct coverage_data *functions_head = 0;
static struct coverage_data **functions_tail = &functions_head;
......@@ -279,7 +279,7 @@ read_counts_file (void)
}
else if (GCOV_TAG_IS_COUNTER (tag) && fn_ident)
{
counts_entry_t **slot, *entry, elt;
counts_entry **slot, *entry, elt;
unsigned n_counts = GCOV_TAG_COUNTER_NUM (length);
unsigned ix;
......@@ -290,7 +290,7 @@ read_counts_file (void)
entry = *slot;
if (!entry)
{
*slot = entry = XCNEW (counts_entry_t);
*slot = entry = XCNEW (counts_entry);
entry->ident = fn_ident;
entry->ctr = elt.ctr;
entry->lineno_checksum = lineno_checksum;
......@@ -358,7 +358,7 @@ get_coverage_counts (unsigned counter, unsigned expected,
unsigned cfg_checksum, unsigned lineno_checksum,
const struct gcov_ctr_summary **summary)
{
counts_entry_t *entry, elt;
counts_entry *entry, elt;
/* No hash table, no counts. */
if (!counts_hash)
......
......@@ -71,8 +71,6 @@ struct cprop_occr
rtx_insn *insn;
};
typedef struct cprop_occr *occr_t;
/* Hash table entry for assignment expressions. */
struct cprop_expr
......
......@@ -40,9 +40,6 @@ along with GCC; see the file COPYING3. If not see
#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
/* The set of hard registers in eliminables[i].from. */
static HARD_REG_SET elim_reg_set;
......@@ -55,7 +52,7 @@ struct df_collection_rec
auto_vec<df_ref, 128> def_vec;
auto_vec<df_ref, 32> use_vec;
auto_vec<df_ref, 32> eq_use_vec;
auto_vec<df_mw_hardreg_ptr, 32> mw_vec;
auto_vec<df_mw_hardreg *, 32> mw_vec;
};
static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
......@@ -147,9 +144,6 @@ struct df_scan_problem_data
bitmap_obstack insn_bitmaps;
};
typedef struct df_scan_bb_info *df_scan_bb_info_t;
/* Internal function to shut down the scanning problem. */
static void
df_scan_free_internal (void)
......@@ -2241,7 +2235,7 @@ df_mw_ptr_compare (const void *m1, const void *m2)
/* Sort and compress a set of refs. */
static void
df_sort_and_compress_mws (vec<df_mw_hardreg_ptr, va_heap> *mw_vec)
df_sort_and_compress_mws (vec<df_mw_hardreg *, va_heap> *mw_vec)
{
unsigned int count;
struct df_scan_problem_data *problem_data
......@@ -2405,7 +2399,7 @@ df_install_refs (basic_block bb,
insn. */
static struct df_mw_hardreg *
df_install_mws (const vec<df_mw_hardreg_ptr, va_heap> *old_vec)
df_install_mws (const vec<df_mw_hardreg *, va_heap> *old_vec)
{
unsigned int count = old_vec->length ();
if (count)
......@@ -4059,7 +4053,7 @@ df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref old_rec,
/* Verify that NEW_REC and OLD_REC have exactly the same members. */
static bool
df_mws_verify (const vec<df_mw_hardreg_ptr, va_heap> *new_rec,
df_mws_verify (const vec<df_mw_hardreg *, va_heap> *new_rec,
struct df_mw_hardreg *old_rec,
bool abort_if_fail)
{
......
......@@ -156,9 +156,7 @@ static void do_use_return_reg (rtx, void *);
/* Stack of nested functions. */
/* Keep track of the cfun stack. */
typedef struct function *function_p;
static vec<function_p> function_context_stack;
static vec<function *> function_context_stack;
/* Save the current context for compilation of a nested function.
This is called from language-specific code. */
......@@ -4745,7 +4743,7 @@ set_cfun (struct function *new_cfun)
/* Initialized with NOGC, making this poisonous to the garbage collector. */
static vec<function_p> cfun_stack;
static vec<function *> cfun_stack;
/* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
current_function_decl accordingly. */
......
......@@ -54,9 +54,6 @@ struct mapping {
struct map_value *current_value;
};
/* Vector definitions for the above. */
typedef struct mapping *mapping_ptr;
/* A structure for abstracting the common parts of iterators. */
struct iterator_group {
/* Tables of "mapping" structures, one for attributes and one for
......@@ -117,7 +114,7 @@ static rtx read_rtx_variadic (rtx);
static struct iterator_group modes, codes, ints, substs;
/* All iterators used in the current rtx. */
static vec<mapping_ptr> current_iterators;
static vec<mapping *> current_iterators;
/* The list of all iterator uses in the current rtx. */
static vec<iterator_use> iterator_uses;
......
......@@ -36,28 +36,28 @@ static FILE *statistics_dump_file;
/* Statistics entry. A integer counter associated to a string ID
and value. */
typedef struct statistics_counter_s {
struct statistics_counter {
const char *id;
int val;
bool histogram_p;
unsigned HOST_WIDE_INT count;
unsigned HOST_WIDE_INT prev_dumped_count;
} statistics_counter_t;
};
/* Hashtable helpers. */
struct stats_counter_hasher : pointer_hash <statistics_counter_t>
struct stats_counter_hasher : pointer_hash <statistics_counter>
{
static inline hashval_t hash (const statistics_counter_t *);
static inline bool equal (const statistics_counter_t *,
const statistics_counter_t *);
static inline void remove (statistics_counter_t *);
static inline hashval_t hash (const statistics_counter *);
static inline bool equal (const statistics_counter *,
const statistics_counter *);
static inline void remove (statistics_counter *);
};
/* Hash a statistic counter by its string ID. */
inline hashval_t
stats_counter_hasher::hash (const statistics_counter_t *c)
stats_counter_hasher::hash (const statistics_counter *c)
{
return htab_hash_string (c->id) + c->val;
}
......@@ -65,8 +65,8 @@ stats_counter_hasher::hash (const statistics_counter_t *c)
/* Compare two statistic counters by their string IDs. */
inline bool
stats_counter_hasher::equal (const statistics_counter_t *c1,
const statistics_counter_t *c2)
stats_counter_hasher::equal (const statistics_counter *c1,
const statistics_counter *c2)
{
return c1->val == c2->val && strcmp (c1->id, c2->id) == 0;
}
......@@ -74,7 +74,7 @@ stats_counter_hasher::equal (const statistics_counter_t *c1,
/* Free a statistics entry. */
inline void
stats_counter_hasher::remove (statistics_counter_t *v)
stats_counter_hasher::remove (statistics_counter *v)
{
free (CONST_CAST (char *, v->id));
free (v);
......@@ -120,10 +120,10 @@ curr_statistics_hash (void)
since the last dump for the pass dump files. */
int
statistics_fini_pass_1 (statistics_counter_t **slot,
statistics_fini_pass_1 (statistics_counter **slot,
void *data ATTRIBUTE_UNUSED)
{
statistics_counter_t *counter = *slot;
statistics_counter *counter = *slot;
unsigned HOST_WIDE_INT count = counter->count - counter->prev_dumped_count;
if (count == 0)
return 1;
......@@ -141,10 +141,10 @@ statistics_fini_pass_1 (statistics_counter_t **slot,
since the last dump for the statistics dump. */
int
statistics_fini_pass_2 (statistics_counter_t **slot,
statistics_fini_pass_2 (statistics_counter **slot,
void *data ATTRIBUTE_UNUSED)
{
statistics_counter_t *counter = *slot;
statistics_counter *counter = *slot;
unsigned HOST_WIDE_INT count = counter->count - counter->prev_dumped_count;
if (count == 0)
return 1;
......@@ -172,10 +172,10 @@ statistics_fini_pass_2 (statistics_counter_t **slot,
/* Helper for statistics_fini_pass, reset the counters. */
int
statistics_fini_pass_3 (statistics_counter_t **slot,
statistics_fini_pass_3 (statistics_counter **slot,
void *data ATTRIBUTE_UNUSED)
{
statistics_counter_t *counter = *slot;
statistics_counter *counter = *slot;
counter->prev_dumped_count = counter->count;
return 1;
}
......@@ -210,9 +210,9 @@ statistics_fini_pass (void)
/* Helper for printing summary information. */
int
statistics_fini_1 (statistics_counter_t **slot, opt_pass *pass)
statistics_fini_1 (statistics_counter **slot, opt_pass *pass)
{
statistics_counter_t *counter = *slot;
statistics_counter *counter = *slot;
if (counter->count == 0)
return 1;
if (counter->histogram_p)
......@@ -280,18 +280,18 @@ statistics_init (void)
/* Lookup or add a statistics counter in the hashtable HASH with ID, VAL
and HISTOGRAM_P. */
static statistics_counter_t *
static statistics_counter *
lookup_or_add_counter (stats_counter_table_type *hash, const char *id, int val,
bool histogram_p)
{
statistics_counter_t **counter;
statistics_counter_t c;
statistics_counter **counter;
statistics_counter c;
c.id = id;
c.val = val;
counter = hash->find_slot (&c, INSERT);
if (!*counter)
{
*counter = XNEW (struct statistics_counter_s);
*counter = XNEW (statistics_counter);
(*counter)->id = xstrdup (id);
(*counter)->val = val;
(*counter)->histogram_p = histogram_p;
......@@ -308,7 +308,7 @@ lookup_or_add_counter (stats_counter_table_type *hash, const char *id, int val,
void
statistics_counter_event (struct function *fn, const char *id, int incr)
{
statistics_counter_t *counter;
statistics_counter *counter;
if ((!(dump_flags & TDF_STATS)
&& !statistics_dump_file)
......@@ -342,7 +342,7 @@ statistics_counter_event (struct function *fn, const char *id, int incr)
void
statistics_histogram_event (struct function *fn, const char *id, int val)
{
statistics_counter_t *counter;
statistics_counter *counter;
if (!(dump_flags & TDF_STATS)
&& !statistics_dump_file)
......
......@@ -927,7 +927,7 @@ make_pass_diagnose_tm_blocks (gcc::context *ctxt)
/* One individual log entry. We may have multiple statements for the
same location if neither dominate each other (on different
execution paths). */
typedef struct tm_log_entry
struct tm_log_entry
{
/* Address to save. */
tree addr;
......@@ -940,7 +940,7 @@ typedef struct tm_log_entry
save/restore sequence. Later, when generating the save sequence
we place the SSA temp generated here. */
tree save_var;
} *tm_log_entry_t;
};
/* Log entry hashtable helpers. */
......@@ -1009,29 +1009,29 @@ enum thread_memory_type
mem_max
};
typedef struct tm_new_mem_map
struct tm_new_mem_map
{
/* SSA_NAME being dereferenced. */
tree val;
enum thread_memory_type local_new_memory;
} tm_new_mem_map_t;
};
/* Hashtable helpers. */
struct tm_mem_map_hasher : free_ptr_hash <tm_new_mem_map_t>
struct tm_mem_map_hasher : free_ptr_hash <tm_new_mem_map>
{
static inline hashval_t hash (const tm_new_mem_map_t *);
static inline bool equal (const tm_new_mem_map_t *, const tm_new_mem_map_t *);
static inline hashval_t hash (const tm_new_mem_map *);
static inline bool equal (const tm_new_mem_map *, const tm_new_mem_map *);
};
inline hashval_t
tm_mem_map_hasher::hash (const tm_new_mem_map_t *v)
tm_mem_map_hasher::hash (const tm_new_mem_map *v)
{
return (intptr_t)v->val >> 4;
}
inline bool
tm_mem_map_hasher::equal (const tm_new_mem_map_t *v, const tm_new_mem_map_t *c)
tm_mem_map_hasher::equal (const tm_new_mem_map *v, const tm_new_mem_map *c)
{
return v->val == c->val;
}
......@@ -1362,8 +1362,8 @@ thread_private_new_memory (basic_block entry_block, tree x)
{
gimple *stmt = NULL;
enum tree_code code;
tm_new_mem_map_t **slot;
tm_new_mem_map_t elt, *elt_p;
tm_new_mem_map **slot;
tm_new_mem_map elt, *elt_p;
tree val = x;
enum thread_memory_type retval = mem_transaction_local;
......@@ -1383,7 +1383,7 @@ thread_private_new_memory (basic_block entry_block, tree x)
/* Optimistically assume the memory is transaction local during
processing. This catches recursion into this variable. */
*slot = elt_p = XNEW (tm_new_mem_map_t);
*slot = elt_p = XNEW (tm_new_mem_map);
elt_p->val = val;
elt_p->local_new_memory = mem_transaction_local;
......@@ -1864,8 +1864,6 @@ public:
bitmap irr_blocks;
};
typedef struct tm_region *tm_region_p;
/* True if there are pending edge statements to be committed for the
current function being scanned in the tmmark pass. */
bool pending_edge_inserts_p;
......@@ -1970,7 +1968,7 @@ tm_region_init (struct tm_region *region)
auto_vec<basic_block> queue;
bitmap visited_blocks = BITMAP_ALLOC (NULL);
struct tm_region *old_region;
auto_vec<tm_region_p> bb_regions;
auto_vec<tm_region *> bb_regions;
all_tm_regions = region;
bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
......@@ -2594,7 +2592,7 @@ get_tm_region_blocks (basic_block entry_block,
// Callback data for collect_bb2reg.
struct bb2reg_stuff
{
vec<tm_region_p> *bb2reg;
vec<tm_region *> *bb2reg;
bool include_uninstrumented_p;
};
......@@ -2603,7 +2601,7 @@ static void *
collect_bb2reg (struct tm_region *region, void *data)
{
struct bb2reg_stuff *stuff = (struct bb2reg_stuff *)data;
vec<tm_region_p> *bb2reg = stuff->bb2reg;
vec<tm_region *> *bb2reg = stuff->bb2reg;
vec<basic_block> queue;
unsigned int i;
basic_block bb;
......@@ -2647,13 +2645,13 @@ collect_bb2reg (struct tm_region *region, void *data)
// ??? There is currently a hack inside tree-ssa-pre.c to work around the
// only known instance of this block sharing.
static vec<tm_region_p>
static vec<tm_region *>
get_bb_regions_instrumented (bool traverse_clones,
bool include_uninstrumented_p)
{
unsigned n = last_basic_block_for_fn (cfun);
struct bb2reg_stuff stuff;
vec<tm_region_p> ret;
vec<tm_region *> ret;
ret.create (n);
ret.safe_grow_cleared (n);
......@@ -2986,7 +2984,7 @@ execute_tm_mark (void)
tm_log_init ();
vec<tm_region_p> bb_regions
vec<tm_region *> bb_regions
= get_bb_regions_instrumented (/*traverse_clones=*/true,
/*include_uninstrumented_p=*/false);
struct tm_region *r;
......@@ -3223,7 +3221,7 @@ public:
unsigned int
pass_tm_edges::execute (function *fun)
{
vec<tm_region_p> bb_regions
vec<tm_region *> bb_regions
= get_bb_regions_instrumented (/*traverse_clones=*/false,
/*include_uninstrumented_p=*/true);
struct tm_region *r;
......@@ -3307,13 +3305,13 @@ expand_regions (struct tm_region *region,
/* A unique TM memory operation. */
typedef struct tm_memop
struct tm_memop
{
/* Unique ID that all memory operations to the same location have. */
unsigned int value_id;
/* Address of load/store. */
tree addr;
} *tm_memop_t;
};
/* TM memory operation hashtable helpers. */
......
......@@ -131,7 +131,7 @@ static bool aggressive_if_conv;
/* Structure used to predicate basic blocks. This is attached to the
->aux field of the BBs in the loop to be if-converted. */
typedef struct bb_predicate_s {
struct bb_predicate {
/* The condition under which this basic block is executed. */
tree predicate;
......@@ -140,7 +140,7 @@ typedef struct bb_predicate_s {
recorded here, in order to avoid the duplication of computations
that occur in previous conditions. See PR44483. */
gimple_seq predicate_gimplified_stmts;
} *bb_predicate_p;
};
/* Returns true when the basic block BB has a predicate. */
......@@ -155,7 +155,7 @@ bb_has_predicate (basic_block bb)
static inline tree
bb_predicate (basic_block bb)
{
return ((bb_predicate_p) bb->aux)->predicate;
return ((struct bb_predicate *) bb->aux)->predicate;
}
/* Sets the gimplified predicate COND for basic block BB. */
......@@ -166,7 +166,7 @@ set_bb_predicate (basic_block bb, tree cond)
gcc_assert ((TREE_CODE (cond) == TRUTH_NOT_EXPR
&& is_gimple_condexpr (TREE_OPERAND (cond, 0)))
|| is_gimple_condexpr (cond));
((bb_predicate_p) bb->aux)->predicate = cond;
((struct bb_predicate *) bb->aux)->predicate = cond;
}
/* Returns the sequence of statements of the gimplification of the
......@@ -175,7 +175,7 @@ set_bb_predicate (basic_block bb, tree cond)
static inline gimple_seq
bb_predicate_gimplified_stmts (basic_block bb)
{
return ((bb_predicate_p) bb->aux)->predicate_gimplified_stmts;
return ((struct bb_predicate *) bb->aux)->predicate_gimplified_stmts;
}
/* Sets the sequence of statements STMTS of the gimplification of the
......@@ -184,7 +184,7 @@ bb_predicate_gimplified_stmts (basic_block bb)
static inline void
set_bb_predicate_gimplified_stmts (basic_block bb, gimple_seq stmts)
{
((bb_predicate_p) bb->aux)->predicate_gimplified_stmts = stmts;
((struct bb_predicate *) bb->aux)->predicate_gimplified_stmts = stmts;
}
/* Adds the sequence of statements STMTS to the sequence of statements
......@@ -194,7 +194,7 @@ static inline void
add_bb_predicate_gimplified_stmts (basic_block bb, gimple_seq stmts)
{
gimple_seq_add_seq
(&(((bb_predicate_p) bb->aux)->predicate_gimplified_stmts), stmts);
(&(((struct bb_predicate *) bb->aux)->predicate_gimplified_stmts), stmts);
}
/* Initializes to TRUE the predicate of basic block BB. */
......@@ -202,7 +202,7 @@ add_bb_predicate_gimplified_stmts (basic_block bb, gimple_seq stmts)
static inline void
init_bb_predicate (basic_block bb)
{
bb->aux = XNEW (struct bb_predicate_s);
bb->aux = XNEW (struct bb_predicate);
set_bb_predicate_gimplified_stmts (bb, NULL);
set_bb_predicate (bb, boolean_true_node);
}
......
......@@ -65,7 +65,7 @@ along with GCC; see the file COPYING3. If not see
/* Structure to map a variable VAR to the set of blocks that contain
definitions for VAR. */
struct def_blocks_d
struct def_blocks
{
/* Blocks that contain definitions of VAR. Bit I will be set if the
Ith block contains a definition of VAR. */
......@@ -79,9 +79,6 @@ struct def_blocks_d
bitmap livein_blocks;
};
typedef struct def_blocks_d *def_blocks_p;
/* Stack of trees used to restore the global currdefs to its original
state after completing rewriting of a block and its dominator
children. Its elements have the following properties:
......@@ -169,7 +166,7 @@ enum need_phi_state {
};
/* Information stored for both SSA names and decls. */
struct common_info_d
struct common_info
{
/* This field indicates whether or not the variable may need PHI nodes.
See the enum's definition for more detailed information about the
......@@ -180,29 +177,23 @@ struct common_info_d
tree current_def;
/* Definitions for this var. */
struct def_blocks_d def_blocks;
struct def_blocks def_blocks;
};
/* The information associated with decls and SSA names. */
typedef struct common_info_d *common_info_p;
/* Information stored for decls. */
struct var_info_d
struct var_info
{
/* The variable. */
tree var;
/* Information stored for both SSA names and decls. */
struct common_info_d info;
common_info info;
};
/* The information associated with decls. */
typedef struct var_info_d *var_info_p;
/* VAR_INFOS hashtable helpers. */
struct var_info_hasher : free_ptr_hash <var_info_d>
struct var_info_hasher : free_ptr_hash <var_info>
{
static inline hashval_t hash (const value_type &);
static inline bool equal (const value_type &, const compare_type &);
......@@ -238,13 +229,10 @@ struct ssa_name_info
bitmap repl_set;
/* Information stored for both SSA names and decls. */
struct common_info_d info;
common_info info;
};
/* The information associated with names. */
typedef struct ssa_name_info *ssa_name_info_p;
static vec<ssa_name_info_p> info_for_ssa_name;
static vec<ssa_name_info *> info_for_ssa_name;
static unsigned current_info_for_ssa_name_age;
static bitmap_obstack update_ssa_obstack;
......@@ -339,7 +327,7 @@ set_register_defs (gimple *stmt, bool register_defs_p)
/* Get the information associated with NAME. */
static inline ssa_name_info_p
static inline ssa_name_info *
get_ssa_name_ann (tree name)
{
unsigned ver = SSA_NAME_VERSION (name);
......@@ -376,16 +364,16 @@ get_ssa_name_ann (tree name)
/* Return and allocate the auxiliar information for DECL. */
static inline var_info_p
static inline var_info *
get_var_info (tree decl)
{
struct var_info_d vi;
var_info_d **slot;
var_info vi;
var_info **slot;
vi.var = decl;
slot = var_infos->find_slot_with_hash (&vi, DECL_UID (decl), INSERT);
if (*slot == NULL)
{
var_info_p v = XCNEW (struct var_info_d);
var_info *v = XCNEW (var_info);
v->var = decl;
*slot = v;
return v;
......@@ -409,7 +397,7 @@ clear_ssa_name_info (void)
/* Get access to the auxiliar information stored per SSA name or decl. */
static inline common_info_p
static inline common_info *
get_common_info (tree var)
{
if (TREE_CODE (var) == SSA_NAME)
......@@ -480,10 +468,10 @@ mark_block_for_update (basic_block bb)
where VAR is live on entry (livein). If no entry is found in
DEF_BLOCKS, a new one is created and returned. */
static inline struct def_blocks_d *
get_def_blocks_for (common_info_p info)
static inline def_blocks *
get_def_blocks_for (common_info *info)
{
struct def_blocks_d *db_p = &info->def_blocks;
def_blocks *db_p = &info->def_blocks;
if (!db_p->def_blocks)
{
db_p->def_blocks = BITMAP_ALLOC (&update_ssa_obstack);
......@@ -501,8 +489,8 @@ get_def_blocks_for (common_info_p info)
static void
set_def_block (tree var, basic_block bb, bool phi_p)
{
struct def_blocks_d *db_p;
common_info_p info;
def_blocks *db_p;
common_info *info;
info = get_common_info (var);
db_p = get_def_blocks_for (info);
......@@ -536,8 +524,8 @@ set_def_block (tree var, basic_block bb, bool phi_p)
static void
set_livein_block (tree var, basic_block bb)
{
common_info_p info;
struct def_blocks_d *db_p;
common_info *info;
def_blocks *db_p;
info = get_common_info (var);
db_p = get_def_blocks_for (info);
......@@ -935,10 +923,10 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
where VAR is live on entry (livein). Return NULL, if no entry is
found in DEF_BLOCKS. */
static inline struct def_blocks_d *
static inline def_blocks *
find_def_blocks_for (tree var)
{
def_blocks_p p = &get_common_info (var)->def_blocks;
def_blocks *p = &get_common_info (var)->def_blocks;
if (!p->def_blocks)
return NULL;
return p;
......@@ -992,7 +980,7 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
gphi *phi;
basic_block bb;
bitmap_iterator bi;
struct def_blocks_d *def_map = find_def_blocks_for (var);
def_blocks *def_map = find_def_blocks_for (var);
/* Remove the blocks where we already have PHI nodes for VAR. */
bitmap_and_compl_into (phi_insertion_points, def_map->phi_blocks);
......@@ -1068,8 +1056,8 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
static int
insert_phi_nodes_compare_var_infos (const void *a, const void *b)
{
const struct var_info_d *defa = *(struct var_info_d * const *)a;
const struct var_info_d *defb = *(struct var_info_d * const *)b;
const var_info *defa = *(var_info * const *)a;
const var_info *defb = *(var_info * const *)b;
if (DECL_UID (defa->var) < DECL_UID (defb->var))
return -1;
else
......@@ -1085,11 +1073,11 @@ insert_phi_nodes (bitmap_head *dfs)
{
hash_table<var_info_hasher>::iterator hi;
unsigned i;
var_info_p info;
var_info *info;
timevar_push (TV_TREE_INSERT_PHI_NODES);
auto_vec<var_info_p> vars (var_infos->elements ());
auto_vec<var_info *> vars (var_infos->elements ());
FOR_EACH_HASH_TABLE_ELEMENT (*var_infos, info, var_info_p, hi)
if (info->info.need_phi_state != NEED_PHI_STATE_NO)
vars.quick_push (info);
......@@ -1115,7 +1103,7 @@ insert_phi_nodes (bitmap_head *dfs)
static void
register_new_def (tree def, tree sym)
{
common_info_p info = get_common_info (sym);
common_info *info = get_common_info (sym);
tree currdef;
/* If this variable is set in a single basic block and all uses are
......@@ -1183,7 +1171,7 @@ register_new_def (tree def, tree sym)
static tree
get_reaching_def (tree var)
{
common_info_p info = get_common_info (var);
common_info *info = get_common_info (var);
tree currdef;
/* Lookup the current reaching definition for VAR. */
......@@ -1215,7 +1203,7 @@ rewrite_debug_stmt_uses (gimple *stmt)
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
tree var = USE_FROM_PTR (use_p), def;
common_info_p info = get_common_info (var);
common_info *info = get_common_info (var);
gcc_checking_assert (DECL_P (var));
def = info->current_def;
if (!def)
......@@ -1282,7 +1270,7 @@ rewrite_debug_stmt_uses (gimple *stmt)
;
else
{
struct def_blocks_d *db_p = get_def_blocks_for (info);
def_blocks *db_p = get_def_blocks_for (info);
/* If there are some non-debug uses in the current bb,
it is fine. */
......@@ -1602,7 +1590,7 @@ dump_currdefs (FILE *file)
fprintf (file, "\n\nCurrent reaching definitions\n\n");
FOR_EACH_VEC_ELT (symbols_to_rename, i, var)
{
common_info_p info = get_common_info (var);
common_info *info = get_common_info (var);
fprintf (file, "CURRDEF (");
print_generic_expr (file, var, 0);
fprintf (file, ") = ");
......@@ -1689,9 +1677,9 @@ debug_tree_ssa_stats (void)
/* Callback for htab_traverse to dump the VAR_INFOS hash table. */
int
debug_var_infos_r (var_info_d **slot, FILE *file)
debug_var_infos_r (var_info **slot, FILE *file)
{
struct var_info_d *info = *slot;
var_info *info = *slot;
fprintf (file, "VAR: ");
print_generic_expr (file, info->var, dump_flags);
......@@ -1731,7 +1719,7 @@ debug_var_infos (void)
static inline void
register_new_update_single (tree new_name, tree old_name)
{
common_info_p info = get_common_info (old_name);
common_info *info = get_common_info (old_name);
tree currdef = info->current_def;
/* Push the current reaching definition into BLOCK_DEFS_STACK.
......@@ -2487,7 +2475,7 @@ mark_use_interesting (tree var, gimple *stmt, basic_block bb,
replace it). */
if (insert_phi_p)
{
struct def_blocks_d *db_p = get_def_blocks_for (get_common_info (var));
def_blocks *db_p = get_def_blocks_for (get_common_info (var));
if (!bitmap_bit_p (db_p->def_blocks, bb->index))
set_livein_block (var, bb);
}
......@@ -3006,7 +2994,7 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
unsigned update_flags)
{
basic_block entry;
struct def_blocks_d *db;
def_blocks *db;
bitmap idf, pruned_idf;
bitmap_iterator bi;
unsigned i;
......
......@@ -73,7 +73,7 @@ along with GCC; see the file COPYING3. If not see
/* A Reduced Dependence Graph (RDG) vertex representing a statement. */
typedef struct rdg_vertex
struct rdg_vertex
{
/* The statement represented by this vertex. */
gimple *stmt;
......@@ -86,7 +86,7 @@ typedef struct rdg_vertex
/* True when the statement contains a read from memory. */
bool has_mem_reads;
} *rdg_vertex_p;
};
#define RDGV_STMT(V) ((struct rdg_vertex *) ((V)->data))->stmt
#define RDGV_DATAREFS(V) ((struct rdg_vertex *) ((V)->data))->datarefs
......@@ -110,11 +110,11 @@ enum rdg_dep_type
/* Dependence information attached to an edge of the RDG. */
typedef struct rdg_edge
struct rdg_edge
{
/* Type of the dependence. */
enum rdg_dep_type type;
} *rdg_edge_p;
};
#define RDGE_TYPE(E) ((struct rdg_edge *) ((E)->data))->type
......@@ -474,7 +474,7 @@ enum partition_kind {
PKIND_NORMAL, PKIND_MEMSET, PKIND_MEMCPY
};
typedef struct partition_s
struct partition
{
bitmap stmts;
bitmap loops;
......@@ -485,15 +485,15 @@ typedef struct partition_s
data_reference_p secondary_dr;
tree niter;
bool plus_one;
} *partition_t;
};
/* Allocate and initialize a partition from BITMAP. */
static partition_t
static partition *
partition_alloc (bitmap stmts, bitmap loops)
{
partition_t partition = XCNEW (struct partition_s);
partition *partition = XCNEW (struct partition);
partition->stmts = stmts ? stmts : BITMAP_ALLOC (NULL);
partition->loops = loops ? loops : BITMAP_ALLOC (NULL);
partition->reduction_p = false;
......@@ -504,7 +504,7 @@ partition_alloc (bitmap stmts, bitmap loops)
/* Free PARTITION. */
static void
partition_free (partition_t partition)
partition_free (partition *partition)
{
BITMAP_FREE (partition->stmts);
BITMAP_FREE (partition->loops);
......@@ -514,7 +514,7 @@ partition_free (partition_t partition)
/* Returns true if the partition can be generated as a builtin. */
static bool
partition_builtin_p (partition_t partition)
partition_builtin_p (partition *partition)
{
return partition->kind != PKIND_NORMAL;
}
......@@ -522,7 +522,7 @@ partition_builtin_p (partition_t partition)
/* Returns true if the partition contains a reduction. */
static bool
partition_reduction_p (partition_t partition)
partition_reduction_p (partition *partition)
{
return partition->reduction_p;
}
......@@ -530,7 +530,7 @@ partition_reduction_p (partition_t partition)
/* Merge PARTITION into the partition DEST. */
static void
partition_merge_into (partition_t dest, partition_t partition)
partition_merge_into (partition *dest, partition *partition)
{
dest->kind = PKIND_NORMAL;
bitmap_ior_into (dest->stmts, partition->stmts);
......@@ -615,7 +615,7 @@ create_bb_after_loop (struct loop *loop)
basic blocks of a loop are taken in dom order. */
static void
generate_loops_for_partition (struct loop *loop, partition_t partition,
generate_loops_for_partition (struct loop *loop, partition *partition,
bool copy_p)
{
unsigned i;
......@@ -776,7 +776,7 @@ const_with_all_bytes_same (tree val)
/* Generate a call to memset for PARTITION in LOOP. */
static void
generate_memset_builtin (struct loop *loop, partition_t partition)
generate_memset_builtin (struct loop *loop, partition *partition)
{
gimple_stmt_iterator gsi;
gimple *stmt, *fn_call;
......@@ -832,7 +832,7 @@ generate_memset_builtin (struct loop *loop, partition_t partition)
/* Generate a call to memcpy for PARTITION in LOOP. */
static void
generate_memcpy_builtin (struct loop *loop, partition_t partition)
generate_memcpy_builtin (struct loop *loop, partition *partition)
{
gimple_stmt_iterator gsi;
gimple *stmt, *fn_call;
......@@ -927,7 +927,7 @@ destroy_loop (struct loop *loop)
static void
generate_code_for_partition (struct loop *loop,
partition_t partition, bool copy_p)
partition *partition, bool copy_p)
{
switch (partition->kind)
{
......@@ -960,10 +960,10 @@ generate_code_for_partition (struct loop *loop,
/* Returns a partition with all the statements needed for computing
the vertex V of the RDG, also including the loop exit conditions. */
static partition_t
static partition *
build_rdg_partition_for_vertex (struct graph *rdg, int v)
{
partition_t partition = partition_alloc (NULL, NULL);
partition *partition = partition_alloc (NULL, NULL);
auto_vec<int, 3> nodes;
unsigned i;
int x;
......@@ -984,7 +984,7 @@ build_rdg_partition_for_vertex (struct graph *rdg, int v)
For the moment we detect only the memset zero pattern. */
static void
classify_partition (loop_p loop, struct graph *rdg, partition_t partition)
classify_partition (loop_p loop, struct graph *rdg, partition *partition)
{
bitmap_iterator bi;
unsigned i;
......@@ -1167,8 +1167,8 @@ ref_base_address (data_reference_p dr)
accesses in RDG. */
static bool
similar_memory_accesses (struct graph *rdg, partition_t partition1,
partition_t partition2)
similar_memory_accesses (struct graph *rdg, partition *partition1,
partition *partition2)
{
unsigned i, j, k, l;
bitmap_iterator bi, bj;
......@@ -1210,7 +1210,7 @@ similar_memory_accesses (struct graph *rdg, partition_t partition1,
static void
rdg_build_partitions (struct graph *rdg,
vec<gimple *> starting_stmts,
vec<partition_t> *partitions)
vec<partition *> *partitions)
{
bitmap processed = BITMAP_ALLOC (NULL);
int i;
......@@ -1229,7 +1229,7 @@ rdg_build_partitions (struct graph *rdg,
if (bitmap_bit_p (processed, v))
continue;
partition_t partition = build_rdg_partition_for_vertex (rdg, v);
partition *partition = build_rdg_partition_for_vertex (rdg, v);
bitmap_ior_into (processed, partition->stmts);
if (dump_file && (dump_flags & TDF_DETAILS))
......@@ -1250,20 +1250,20 @@ rdg_build_partitions (struct graph *rdg,
/* Dump to FILE the PARTITIONS. */
static void
dump_rdg_partitions (FILE *file, vec<partition_t> partitions)
dump_rdg_partitions (FILE *file, vec<partition *> partitions)
{
int i;
partition_t partition;
partition *partition;
FOR_EACH_VEC_ELT (partitions, i, partition)
debug_bitmap_file (file, partition->stmts);
}
/* Debug PARTITIONS on stderr, for use from the debugger.  */

extern void debug_rdg_partitions (vec<partition *> );

DEBUG_FUNCTION void
debug_rdg_partitions (vec<partition *> partitions)
{
  dump_rdg_partitions (stderr, partitions);
}
......@@ -1291,7 +1291,7 @@ number_of_rw_in_rdg (struct graph *rdg)
the RDG. */
static int
number_of_rw_in_partition (struct graph *rdg, partition_t partition)
number_of_rw_in_partition (struct graph *rdg, partition *partition)
{
int res = 0;
unsigned i;
......@@ -1314,10 +1314,10 @@ number_of_rw_in_partition (struct graph *rdg, partition_t partition)
static bool
partition_contains_all_rw (struct graph *rdg,
vec<partition_t> partitions)
vec<partition *> partitions)
{
int i;
partition_t partition;
partition *partition;
int nrw = number_of_rw_in_rdg (rdg);
FOR_EACH_VEC_ELT (partitions, i, partition)
......@@ -1410,7 +1410,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
control_dependences *cd, int *nb_calls)
{
struct graph *rdg;
partition_t partition;
partition *partition;
bool any_builtin;
int i, nbp;
graph *pg = NULL;
......@@ -1435,7 +1435,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
if (dump_file && (dump_flags & TDF_DETAILS))
dump_rdg (dump_file, rdg);
auto_vec<partition_t, 3> partitions;
auto_vec<struct partition *, 3> partitions;
rdg_build_partitions (rdg, stmts, &partitions);
any_builtin = false;
......@@ -1458,7 +1458,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
were not classified as builtins. This also avoids chopping
a loop into pieces, separated by builtin calls. That is, we
only want no or a single loop body remaining. */
partition_t into;
struct partition *into;
if (!flag_tree_loop_distribution)
{
for (i = 0; partitions.iterate (i, &into); ++i)
......@@ -1535,7 +1535,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
{
pg = new_graph (partitions.length ());
struct pgdata {
partition_t partition;
struct partition *partition;
vec<data_reference_p> writes;
vec<data_reference_p> reads;
};
......@@ -1559,7 +1559,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
else
data->writes.safe_push (dr);
}
partition_t partition1, partition2;
struct partition *partition1, *partition2;
for (i = 0; partitions.iterate (i, &partition1); ++i)
for (int j = i + 1; partitions.iterate (j, &partition2); ++j)
{
......@@ -1599,7 +1599,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
num_sccs = graphds_scc (pg, NULL);
for (i = 0; i < num_sccs; ++i)
{
partition_t first;
struct partition *first;
int j;
for (j = 0; partitions.iterate (j, &first); ++j)
if (pg->vertices[j].component == i)
......
......@@ -267,10 +267,6 @@ struct iv_inv_expr_ent
/* The data used by the induction variable optimizations. */
typedef struct iv_use *iv_use_p;
typedef struct iv_cand *iv_cand_p;
/* Hashtable helpers. */
struct iv_inv_expr_hasher : free_ptr_hash <iv_inv_expr_ent>
......@@ -326,10 +322,10 @@ struct ivopts_data
bitmap relevant;
/* The uses of induction variables. */
vec<iv_use_p> iv_uses;
vec<iv_use *> iv_uses;
/* The candidates. */
vec<iv_cand_p> iv_candidates;
vec<iv_cand *> iv_candidates;
/* A bitmap of important candidates. */
bitmap important_candidates;
......@@ -3747,12 +3743,12 @@ enum ainc_type
AINC_NONE /* Also the number of auto increment types. */
};
/* Cached address-computation costs for one address space / memory mode
   pair (see get_address_cost, which fills a table of these lazily).
   NOTE(review): the four boolean dimensions of COSTS appear to index
   the symbol_present / var_present / offset / ratio components of the
   address form, matching get_address_cost's parameters -- confirm
   against the initialization code.  */

struct address_cost_data
{
  /* Range of constant offsets valid in an address for this mode.  */
  HOST_WIDE_INT min_offset, max_offset;
  unsigned costs[2][2][2][2];
  /* Costs per auto-increment addressing type (indexed by ainc_type).  */
  unsigned ainc_costs[AINC_NONE];
};
static comp_cost
......@@ -3763,9 +3759,9 @@ get_address_cost (bool symbol_present, bool var_present,
bool stmt_after_inc, bool *may_autoinc)
{
machine_mode address_mode = targetm.addr_space.address_mode (as);
static vec<address_cost_data> address_cost_data_list;
static vec<address_cost_data *> address_cost_data_list;
unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mem_mode;
address_cost_data data;
address_cost_data *data;
static bool has_preinc[MAX_MACHINE_MODE], has_postinc[MAX_MACHINE_MODE];
static bool has_predec[MAX_MACHINE_MODE], has_postdec[MAX_MACHINE_MODE];
unsigned cost, acost, complexity;
......@@ -3789,7 +3785,7 @@ get_address_cost (bool symbol_present, bool var_present,
rtx addr, base;
rtx reg0, reg1;
data = (address_cost_data) xcalloc (1, sizeof (*data));
data = (address_cost_data *) xcalloc (1, sizeof (*data));
reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment