Commit 9bf7acfc by Zdenek Dvorak, committed by Zdenek Dvorak

Makefile.in (df.o): Remove fibheap dependency.

	* Makefile.in (df.o): Remove fibheap dependency.
	* df.h: Do not include sbitmap.h.
	(struct ref): New field "data".
	(DF_REF_DATA): New accessor macro.
	(struct df): Field "dom" removed.
	(df_analyze_subcfg): New function.
	(transfer_function_sbitmap, transfer_function_bitmap): Replaced by ...
	(transfer_function): ... new type.
	(iterative_dataflow_sbitmap, iterative_dataflow_bitmap): Replaced by ...
	(iterative_dataflow): ... new function.
	(enum set_representation, struct dataflow): New.
	* df.c: Do not include fibheap.h.

	(df_reg_def_chain_clean, df_reg_use_chain_clean,
	df_bb_table_realloc, df_analyze_subcfg, free_reg_ref_chain,
	prune_to_subcfg, df_bb_modify): New functions.
	(df_bitmaps_alloc, df_reg_def_chain_create, df_reg_use_chain_create,
	df_refs_update, df_reg_table_realloc, df_ref_create,
	df_bb_reg_def_chain_create, df_bb_reg_use_chain_create,
	df_bb_rd_local_compute, df_bb_ru_local_compute, df_bb_lr_local_compute,
	df_analyze_1, df_insn_modify): Support analyzing only a part of the cfg.

	(dataflow_set_a_op_b, dataflow_set_copy): New functions.
	(df_rd_transfer_function, df_ru_transfer_function,
	df_lr_transfer_function): Type of bitmaps changed to void *.
	(hybrid_search_bitmap, hybrid_search_sbitmap): Merge into ...
	(hybrid_search): ... new function.
	(iterative_dataflow_bitmap, iterative_dataflow_sbitmap): Merge into ...
	(iterative_dataflow): ... new function. Avoid use of fibheaps for
	a worklist.  Do not process basic blocks unnecessarily.

From-SVN: r82921
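To see the interface change in one place: where a caller previously invoked iterative_dataflow_bitmap directly, it now fills in a struct dataflow and calls iterative_dataflow. The sketch below is the live-register (LR) case exactly as df_analyze_1 sets it up in the diff further down; only the framing as a standalone snippet is added here, and the "before" call is the one the diff removes. One subtlety: the old order argument was a block-number-to-position map (df->inverse_rts_map), whereas the new order field is the list of block numbers itself (df->rts_order), with n_blocks giving its length.

    /* Before (removed):
         iterative_dataflow_bitmap (in, out, use, def, df->all_blocks,
                                    DF_BACKWARD, DF_UNION,
                                    df_lr_transfer_function,
                                    df->inverse_rts_map, NULL);
       After:  */
    struct dataflow dflow;
    basic_block bb;

    dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
    dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
    dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
    dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);

    FOR_EACH_BB (bb)
      {
        dflow.in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
        dflow.out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
        dflow.gen[bb->index] = DF_BB_INFO (df, bb)->lr_use;
        dflow.kill[bb->index] = DF_BB_INFO (df, bb)->lr_def;
      }

    dflow.repr = SR_BITMAP;           /* Sets are bitmaps, not sbitmaps.  */
    dflow.dir = DF_BACKWARD;
    dflow.conf_op = DF_UNION;
    dflow.transfun = df_lr_transfer_function;
    dflow.n_blocks = n_basic_blocks;  /* Length of the order array.  */
    dflow.order = df->rts_order;      /* Block numbers in processing order.  */
    dflow.data = NULL;

    iterative_dataflow (&dflow);

    free (dflow.in);
    free (dflow.out);
    free (dflow.gen);
    free (dflow.kill);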
gcc/Makefile.in
@@ -140,7 +140,7 @@ XCFLAGS =
TCFLAGS =
CFLAGS = -g
STAGE1_CFLAGS = -g @stage1_cflags@
BOOT_CFLAGS = -g -O2
BOOT_CFLAGS = -g -O2
# Flags to determine code coverage. When coverage is disabled, this will
# contain the optimization flags, as you normally want code coverage
@@ -1881,7 +1881,7 @@ tree-complex.o : tree-complex.c $(CONFIG_H) system.h $(TREE_H) \
flags.h
df.o : df.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
insn-config.h $(RECOG_H) function.h $(REGS_H) alloc-pool.h hard-reg-set.h \
$(BASIC_BLOCK_H) $(DF_H) $(FIBHEAP_H)
$(BASIC_BLOCK_H) $(DF_H)
var-tracking.o : var-tracking.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(TREE_H) hard-reg-set.h insn-config.h reload.h flags.h \
$(BASIC_BLOCK_H) output.h sbitmap.h alloc-pool.h $(FIBHEAP_H) $(HASHTAB_H)
gcc/df.c
@@ -187,7 +187,6 @@ and again mark them read/write.
#include "sbitmap.h"
#include "bitmap.h"
#include "df.h"
#include "fibheap.h"
#define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE) \
do \
@@ -204,12 +203,12 @@ static struct df *ddf;
static void df_reg_table_realloc (struct df *, int);
static void df_insn_table_realloc (struct df *, unsigned int);
static void df_bitmaps_alloc (struct df *, int);
static void df_bb_table_realloc (struct df *, unsigned int);
static void df_bitmaps_alloc (struct df *, bitmap, int);
static void df_bitmaps_free (struct df *, int);
static void df_free (struct df *);
static void df_alloc (struct df *, int);
static rtx df_reg_clobber_gen (unsigned int);
static rtx df_reg_use_gen (unsigned int);
static inline struct df_link *df_link_create (struct ref *, struct df_link *);
@@ -237,9 +236,9 @@ static void df_bb_refs_record (struct df *, basic_block);
static void df_refs_record (struct df *, bitmap);
static void df_bb_reg_def_chain_create (struct df *, basic_block);
static void df_reg_def_chain_create (struct df *, bitmap);
static void df_reg_def_chain_create (struct df *, bitmap, bool);
static void df_bb_reg_use_chain_create (struct df *, basic_block);
static void df_reg_use_chain_create (struct df *, bitmap);
static void df_reg_use_chain_create (struct df *, bitmap, bool);
static void df_bb_du_chain_create (struct df *, basic_block, bitmap);
static void df_du_chain_create (struct df *, bitmap);
static void df_bb_ud_chain_create (struct df *, basic_block);
@@ -260,7 +259,7 @@ static int df_modified_p (struct df *, bitmap);
static int df_refs_queue (struct df *);
static int df_refs_process (struct df *);
static int df_bb_refs_update (struct df *, basic_block);
static int df_refs_update (struct df *);
static int df_refs_update (struct df *, bitmap);
static void df_analyze_1 (struct df *, bitmap, int, int);
static void df_insns_modify (struct df *, basic_block, rtx, rtx);
@@ -279,22 +278,14 @@ static void df_chain_dump (struct df_link *, FILE *file);
static void df_chain_dump_regno (struct df_link *, FILE *file);
static void df_regno_debug (struct df *, unsigned int, FILE *);
static void df_ref_debug (struct df *, struct ref *, FILE *);
static void df_rd_transfer_function (int, int *, bitmap, bitmap, bitmap,
bitmap, void *);
static void df_ru_transfer_function (int, int *, bitmap, bitmap, bitmap,
bitmap, void *);
static void df_lr_transfer_function (int, int *, bitmap, bitmap, bitmap,
bitmap, void *);
static void hybrid_search_bitmap (basic_block, bitmap *, bitmap *,
bitmap *, bitmap *, enum df_flow_dir,
enum df_confluence_op,
transfer_function_bitmap,
sbitmap, sbitmap, void *);
static void hybrid_search_sbitmap (basic_block, sbitmap *, sbitmap *,
sbitmap *, sbitmap *, enum df_flow_dir,
enum df_confluence_op,
transfer_function_sbitmap,
sbitmap, sbitmap, void *);
static void df_rd_transfer_function (int, int *, void *, void *, void *,
void *, void *);
static void df_ru_transfer_function (int, int *, void *, void *, void *,
void *, void *);
static void df_lr_transfer_function (int, int *, void *, void *, void *,
void *, void *);
static void hybrid_search (basic_block, struct dataflow *,
sbitmap, sbitmap, sbitmap);
/* Local memory allocation/deallocation routines. */
@@ -327,6 +318,26 @@ df_insn_table_realloc (struct df *df, unsigned int size)
}
}
/* Increase the bb info table to have space for at least SIZE + 1
elements. */
static void
df_bb_table_realloc (struct df *df, unsigned int size)
{
size++;
if (size <= df->n_bbs)
return;
/* Make the table a little larger than requested, so we do not need
to enlarge it so often. */
size += df->n_bbs / 4;
df->bbs = xrealloc (df->bbs, size * sizeof (struct bb_info));
memset (df->bbs + df->n_bbs, 0, (size - df->n_bbs) * sizeof (struct bb_info));
df->n_bbs = size;
}
/* Increase the reg info table by SIZE more elements. */
static void
@@ -341,6 +352,8 @@ df_reg_table_realloc (struct df *df, int size)
size = max_reg_num ();
df->regs = xrealloc (df->regs, size * sizeof (struct reg_info));
df->reg_def_last = xrealloc (df->reg_def_last,
size * sizeof (struct ref *));
/* Zero the new entries. */
memset (df->regs + df->reg_size, 0,
@@ -351,67 +364,79 @@ df_reg_table_realloc (struct df *df, int size)
/* Allocate bitmaps for each basic block. */
static void
df_bitmaps_alloc (struct df *df, int flags)
df_bitmaps_alloc (struct df *df, bitmap blocks, int flags)
{
int dflags = 0;
basic_block bb;
/* Free the bitmaps if they need resizing. */
if ((flags & DF_LR) && df->n_regs < (unsigned int) max_reg_num ())
dflags |= DF_LR | DF_RU;
if ((flags & DF_RU) && df->n_uses < df->use_id)
dflags |= DF_RU;
if ((flags & DF_RD) && df->n_defs < df->def_id)
dflags |= DF_RD;
if (dflags)
df_bitmaps_free (df, dflags);
df->n_defs = df->def_id;
df->n_uses = df->use_id;
FOR_EACH_BB (bb)
if (!blocks)
blocks = df->all_blocks;
FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
{
struct bb_info *bb_info = DF_BB_INFO (df, bb);
if (flags & DF_RD && ! bb_info->rd_in)
if (flags & DF_RD)
{
/* Allocate bitmaps for reaching definitions. */
bb_info->rd_kill = BITMAP_XMALLOC ();
bitmap_zero (bb_info->rd_kill);
bb_info->rd_gen = BITMAP_XMALLOC ();
bitmap_zero (bb_info->rd_gen);
bb_info->rd_in = BITMAP_XMALLOC ();
bb_info->rd_out = BITMAP_XMALLOC ();
bb_info->rd_valid = 0;
if (!bb_info->rd_in)
{
/* Allocate bitmaps for reaching definitions. */
bb_info->rd_kill = BITMAP_XMALLOC ();
bb_info->rd_gen = BITMAP_XMALLOC ();
bb_info->rd_in = BITMAP_XMALLOC ();
bb_info->rd_out = BITMAP_XMALLOC ();
}
else
{
bitmap_clear (bb_info->rd_kill);
bitmap_clear (bb_info->rd_gen);
bitmap_clear (bb_info->rd_in);
bitmap_clear (bb_info->rd_out);
}
}
if (flags & DF_RU && ! bb_info->ru_in)
if (flags & DF_RU)
{
/* Allocate bitmaps for upward exposed uses. */
bb_info->ru_kill = BITMAP_XMALLOC ();
bitmap_zero (bb_info->ru_kill);
/* Note the lack of symmetry. */
bb_info->ru_gen = BITMAP_XMALLOC ();
bitmap_zero (bb_info->ru_gen);
bb_info->ru_in = BITMAP_XMALLOC ();
bb_info->ru_out = BITMAP_XMALLOC ();
bb_info->ru_valid = 0;
if (!bb_info->ru_in)
{
/* Allocate bitmaps for upward exposed uses. */
bb_info->ru_kill = BITMAP_XMALLOC ();
bb_info->ru_gen = BITMAP_XMALLOC ();
bb_info->ru_in = BITMAP_XMALLOC ();
bb_info->ru_out = BITMAP_XMALLOC ();
}
else
{
bitmap_clear (bb_info->ru_kill);
bitmap_clear (bb_info->ru_gen);
bitmap_clear (bb_info->ru_in);
bitmap_clear (bb_info->ru_out);
}
}
if (flags & DF_LR && ! bb_info->lr_in)
if (flags & DF_LR)
{
/* Allocate bitmaps for live variables. */
bb_info->lr_def = BITMAP_XMALLOC ();
bitmap_zero (bb_info->lr_def);
bb_info->lr_use = BITMAP_XMALLOC ();
bitmap_zero (bb_info->lr_use);
bb_info->lr_in = BITMAP_XMALLOC ();
bb_info->lr_out = BITMAP_XMALLOC ();
bb_info->lr_valid = 0;
if (!bb_info->lr_in)
{
/* Allocate bitmaps for live variables. */
bb_info->lr_def = BITMAP_XMALLOC ();
bb_info->lr_use = BITMAP_XMALLOC ();
bb_info->lr_in = BITMAP_XMALLOC ();
bb_info->lr_out = BITMAP_XMALLOC ();
}
else
{
bitmap_clear (bb_info->lr_def);
bitmap_clear (bb_info->lr_use);
bitmap_clear (bb_info->lr_in);
bitmap_clear (bb_info->lr_out);
}
}
}
});
}
@@ -502,8 +527,6 @@ df_alloc (struct df *df, int n_regs)
df->n_bbs = last_basic_block;
/* Allocate temporary working array used during local dataflow analysis. */
df->reg_def_last = xmalloc (df->n_regs * sizeof (struct ref *));
df_insn_table_realloc (df, n_insns);
df_reg_table_realloc (df, df->n_regs);
@@ -566,7 +589,6 @@ df_free (struct df *df)
free_alloc_pool (df_ref_pool);
free_alloc_pool (df_link_pool);
}
/* Local miscellaneous routines. */
@@ -610,6 +632,21 @@ df_link_create (struct ref *ref, struct df_link *next)
return link;
}
/* Releases members of the CHAIN. */
static void
free_reg_ref_chain (struct df_link **chain)
{
struct df_link *act, *next;
for (act = *chain; act; act = next)
{
next = act->next;
pool_free (df_link_pool, act);
}
*chain = NULL;
}
/* Add REF to chain head pointed to by PHEAD. */
static struct df_link *
@@ -736,6 +773,7 @@ df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
DF_REF_CHAIN (this_ref) = 0;
DF_REF_TYPE (this_ref) = ref_type;
DF_REF_FLAGS (this_ref) = ref_flags;
DF_REF_DATA (this_ref) = NULL;
if (ref_type == DF_REF_REG_DEF)
{
@@ -1212,15 +1250,13 @@ df_bb_refs_record (struct df *df, basic_block bb)
rtx insn;
/* Scan the block an insn at a time from beginning to end. */
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
FOR_BB_INSNS (bb, insn)
{
if (INSN_P (insn))
{
/* Record defs within INSN. */
df_insn_refs_record (df, bb, insn);
}
if (insn == BB_END (bb))
break;
}
}
@@ -1239,21 +1275,18 @@ df_refs_record (struct df *df, bitmap blocks)
/* Dataflow analysis routines. */
/* Create reg-def chains for basic block BB. These are a list of
definitions for each register. */
static void
df_bb_reg_def_chain_create (struct df *df, basic_block bb)
{
rtx insn;
/* Perhaps the defs should be sorted using a depth first search
of the CFG (or possibly a breadth first search). We currently
scan the basic blocks in reverse order so that the first defs
appear at the start of the chain. */
of the CFG (or possibly a breadth first search). */
for (insn = BB_END (bb); insn && insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
FOR_BB_INSNS_REVERSE (bb, insn)
{
struct df_link *link;
unsigned int uid = INSN_UID (insn);
@@ -1273,29 +1306,59 @@ df_bb_reg_def_chain_create (struct df *df, basic_block bb)
if (DF_REF_ID (def) < df->def_id_save)
continue;
df->regs[dregno].defs
= df_link_create (def, df->regs[dregno].defs);
df->regs[dregno].defs = df_link_create (def, df->regs[dregno].defs);
}
}
}
/* Create reg-def chains for each basic block within BLOCKS. These
are a list of definitions for each register. */
are a list of definitions for each register. If REDO is true, add
all defs, otherwise just add the new defs. */
static void
df_reg_def_chain_create (struct df *df, bitmap blocks)
df_reg_def_chain_create (struct df *df, bitmap blocks, bool redo)
{
basic_block bb;
#ifdef ENABLE_CHECKING
unsigned regno;
#endif
unsigned old_def_id_save = df->def_id_save;
FOR_EACH_BB_IN_BITMAP/*_REV*/ (blocks, 0, bb,
if (redo)
{
#ifdef ENABLE_CHECKING
for (regno = 0; regno < df->n_regs; regno++)
if (df->regs[regno].defs)
abort ();
#endif
/* Pretend that all defs are new. */
df->def_id_save = 0;
}
FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
{
df_bb_reg_def_chain_create (df, bb);
});
df->def_id_save = old_def_id_save;
}
/* Remove all reg-def chains stored in the dataflow object DF. */
static void
df_reg_def_chain_clean (struct df *df)
{
unsigned regno;
for (regno = 0; regno < df->n_regs; regno++)
free_reg_ref_chain (&df->regs[regno].defs);
}
/* Create reg-use chains for basic block BB. These are a list of uses
for each register. */
static void
df_bb_reg_use_chain_create (struct df *df, basic_block bb)
{
@@ -1304,8 +1367,7 @@ df_bb_reg_use_chain_create (struct df *df, basic_block bb)
/* Scan in forward order so that the last uses appear at the start
of the chain. */
for (insn = BB_HEAD (bb); insn && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
FOR_BB_INSNS (bb, insn)
{
struct df_link *link;
unsigned int uid = INSN_UID (insn);
@@ -1333,18 +1395,48 @@ df_bb_reg_use_chain_create (struct df *df, basic_block bb)
/* Create reg-use chains for each basic block within BLOCKS. These
are a list of uses for each register. */
are a list of uses for each register. If REDO is true, remove the
old reg-use chains first, otherwise just add new uses to them. */
static void
df_reg_use_chain_create (struct df *df, bitmap blocks)
df_reg_use_chain_create (struct df *df, bitmap blocks, bool redo)
{
basic_block bb;
#ifdef ENABLE_CHECKING
unsigned regno;
#endif
unsigned old_use_id_save = df->use_id_save;
if (redo)
{
#ifdef ENABLE_CHECKING
for (regno = 0; regno < df->n_regs; regno++)
if (df->regs[regno].uses)
abort ();
#endif
/* Pretend that all uses are new. */
df->use_id_save = 0;
}
FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
{
df_bb_reg_use_chain_create (df, bb);
});
df->use_id_save = old_use_id_save;
}
/* Remove all reg-use chains stored in the dataflow object DF. */
static void
df_reg_use_chain_clean (struct df *df)
{
unsigned regno;
for (regno = 0; regno < df->n_regs; regno++)
free_reg_ref_chain (&df->regs[regno].uses);
}
/* Create def-use chains from reaching use bitmaps for basic block BB. */
static void
@@ -1357,8 +1449,7 @@ df_bb_du_chain_create (struct df *df, basic_block bb, bitmap ru)
/* For each def in BB create a linked list (chain) of uses
reached from the def. */
for (insn = BB_END (bb); insn && insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
FOR_BB_INSNS_REVERSE (bb, insn)
{
struct df_link *def_link;
struct df_link *use_link;
@@ -1434,8 +1525,7 @@ df_bb_ud_chain_create (struct df *df, basic_block bb)
/* For each use in BB create a linked list (chain) of defs
that reach the use. */
for (insn = BB_HEAD (bb); insn && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
struct df_link *use_link;
@@ -1511,8 +1601,8 @@ df_ud_chain_create (struct df *df, bitmap blocks)
static void
df_rd_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, bitmap in,
bitmap out, bitmap gen, bitmap kill,
df_rd_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, void *in,
void *out, void *gen, void *kill,
void *data ATTRIBUTE_UNUSED)
{
*changed = bitmap_union_of_diff (out, gen, in, kill);
@@ -1520,8 +1610,8 @@ df_rd_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, bitmap in,
static void
df_ru_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, bitmap in,
bitmap out, bitmap gen, bitmap kill,
df_ru_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, void *in,
void *out, void *gen, void *kill,
void *data ATTRIBUTE_UNUSED)
{
*changed = bitmap_union_of_diff (in, gen, out, kill);
@@ -1529,8 +1619,8 @@ df_ru_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, bitmap in,
static void
df_lr_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, bitmap in,
bitmap out, bitmap use, bitmap def,
df_lr_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, void *in,
void *out, void *use, void *def,
void *data ATTRIBUTE_UNUSED)
{
*changed = bitmap_union_of_diff (in, use, out, def);
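Viewed as dataflow equations, these are the textbook transfer functions: reaching definitions solve the forward problem out[b] = gen[b] ∪ (in[b] − kill[b]), while reached uses and live registers solve the backward problems in[b] = gen[b] ∪ (out[b] − kill[b]) and in[b] = use[b] ∪ (out[b] − def[b]). In each case bitmap_union_of_diff evaluates the right-hand side into the destination set and returns whether that set changed, which is what *changed reports back to the solver to drive the worklist.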
@@ -1608,8 +1698,7 @@ df_bb_ru_local_compute (struct df *df, basic_block bb)
rtx insn;
for (insn = BB_END (bb); insn && insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
struct df_link *def_link;
@@ -1646,7 +1735,6 @@ df_bb_ru_local_compute (struct df *df, basic_block bb)
bitmap_set_bit (bb_info->ru_gen, DF_REF_ID (use));
}
}
bb_info->ru_valid = 1;
}
@@ -1671,8 +1759,7 @@ df_bb_lr_local_compute (struct df *df, basic_block bb)
struct bb_info *bb_info = DF_BB_INFO (df, bb);
rtx insn;
for (insn = BB_END (bb); insn && insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
struct df_link *link;
@@ -1698,7 +1785,6 @@ df_bb_lr_local_compute (struct df *df, basic_block bb)
bitmap_set_bit (bb_info->lr_use, DF_REF_REGNO (use));
}
}
bb_info->lr_valid = 1;
}
@@ -1726,8 +1812,7 @@ df_bb_reg_info_compute (struct df *df, basic_block bb, bitmap live)
bitmap_copy (live, bb_info->lr_out);
for (insn = BB_END (bb); insn && insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
unsigned int regno;
@@ -1792,14 +1877,11 @@ df_bb_luids_set (struct df *df, basic_block bb)
/* The LUIDs are monotonically increasing for each basic block. */
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
FOR_BB_INSNS (bb, insn)
{
if (INSN_P (insn))
DF_INSN_LUID (df, insn) = luid++;
DF_INSN_LUID (df, insn) = luid;
if (insn == BB_END (bb))
break;
}
return luid;
}
@@ -1829,6 +1911,7 @@ df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
int dflags;
int i;
basic_block bb;
struct dataflow dflow;
dflags = 0;
aflags = flags;
@@ -1850,7 +1933,7 @@ df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
df->flags = flags;
if (update)
{
df_refs_update (df);
df_refs_update (df, NULL);
/* More fine grained incremental dataflow analysis would be
nice. For now recompute the whole shebang for the
modified blocks. */
@@ -1874,7 +1957,7 @@ df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
/* Allocate the bitmaps now the total number of defs and uses are
known. If the number of defs or uses have changed, then
these bitmaps need to be reallocated. */
df_bitmaps_alloc (df, aflags);
df_bitmaps_alloc (df, NULL, aflags);
/* Set the LUIDs for each specified basic block. */
df_luids_set (df, blocks);
@@ -1885,12 +1968,12 @@ df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
regs local to a basic block as it speeds up searching. */
if (aflags & DF_RD_CHAIN)
{
df_reg_def_chain_create (df, blocks);
df_reg_def_chain_create (df, blocks, false);
}
if (aflags & DF_RU_CHAIN)
{
df_reg_use_chain_create (df, blocks);
df_reg_use_chain_create (df, blocks, false);
}
df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
@@ -1911,27 +1994,33 @@ df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
if (aflags & DF_RD)
{
/* Compute the sets of gens and kills for the defs of each bb. */
dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
{
bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
FOR_EACH_BB (bb)
{
in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
}
iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
DF_FORWARD, DF_UNION, df_rd_transfer_function,
df->inverse_rc_map, NULL);
free (in);
free (out);
free (gen);
free (kill);
}
FOR_EACH_BB (bb)
{
dflow.in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
dflow.out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
dflow.gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
dflow.kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
}
dflow.repr = SR_BITMAP;
dflow.dir = DF_FORWARD;
dflow.conf_op = DF_UNION;
dflow.transfun = df_rd_transfer_function;
dflow.n_blocks = n_basic_blocks;
dflow.order = df->rc_order;
dflow.data = NULL;
iterative_dataflow (&dflow);
free (dflow.in);
free (dflow.out);
free (dflow.gen);
free (dflow.kill);
}
if (aflags & DF_UD_CHAIN)
@@ -1947,27 +2036,34 @@ df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
{
/* Compute the sets of gens and kills for the upwards exposed
uses in each bb. */
dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
{
bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
FOR_EACH_BB (bb)
{
in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
}
iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
DF_BACKWARD, DF_UNION, df_ru_transfer_function,
df->inverse_rts_map, NULL);
free (in);
free (out);
free (gen);
free (kill);
}
FOR_EACH_BB (bb)
{
dflow.in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
dflow.out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
dflow.gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
dflow.kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
}
dflow.repr = SR_BITMAP;
dflow.dir = DF_BACKWARD;
dflow.conf_op = DF_UNION;
dflow.transfun = df_ru_transfer_function;
dflow.n_blocks = n_basic_blocks;
dflow.order = df->rts_order;
dflow.data = NULL;
iterative_dataflow (&dflow);
free (dflow.in);
free (dflow.out);
free (dflow.gen);
free (dflow.kill);
}
if (aflags & DF_DU_CHAIN)
@@ -1986,27 +2082,34 @@ df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
if (aflags & DF_LR)
{
/* Compute the sets of defs and uses of live variables. */
dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
{
bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *use = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *def = xmalloc (sizeof (bitmap) * last_basic_block);
FOR_EACH_BB (bb)
{
in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
use[bb->index] = DF_BB_INFO (df, bb)->lr_use;
def[bb->index] = DF_BB_INFO (df, bb)->lr_def;
}
iterative_dataflow_bitmap (in, out, use, def, df->all_blocks,
DF_BACKWARD, DF_UNION, df_lr_transfer_function,
df->inverse_rts_map, NULL);
free (in);
free (out);
free (use);
free (def);
}
FOR_EACH_BB (bb)
{
dflow.in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
dflow.out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
dflow.gen[bb->index] = DF_BB_INFO (df, bb)->lr_use;
dflow.kill[bb->index] = DF_BB_INFO (df, bb)->lr_def;
}
dflow.repr = SR_BITMAP;
dflow.dir = DF_BACKWARD;
dflow.conf_op = DF_UNION;
dflow.transfun = df_lr_transfer_function;
dflow.n_blocks = n_basic_blocks;
dflow.order = df->rts_order;
dflow.data = NULL;
iterative_dataflow (&dflow);
free (dflow.in);
free (dflow.out);
free (dflow.gen);
free (dflow.kill);
}
if (aflags & DF_REG_INFO)
@@ -2093,7 +2196,7 @@ df_bb_refs_update (struct df *df, basic_block bb)
a bitmap for insns_modified saves memory and avoids queuing
duplicates. */
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
FOR_BB_INSNS (bb, insn)
{
unsigned int uid;
@@ -2109,8 +2212,6 @@ df_bb_refs_update (struct df *df, basic_block bb)
count++;
}
if (insn == BB_END (bb))
break;
}
return count;
}
@@ -2118,20 +2219,31 @@ df_bb_refs_update (struct df *df, basic_block bb)
/* Process all the modified/deleted insns that were queued. */
static int
df_refs_update (struct df *df)
df_refs_update (struct df *df, bitmap blocks)
{
basic_block bb;
int count = 0;
int count = 0, bbno;
if ((unsigned int) max_reg_num () >= df->reg_size)
df->n_regs = max_reg_num ();
if (df->n_regs >= df->reg_size)
df_reg_table_realloc (df, 0);
df_refs_queue (df);
FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
if (!blocks)
{
count += df_bb_refs_update (df, bb);
});
FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
{
count += df_bb_refs_update (df, bb);
});
}
else
{
EXECUTE_IF_AND_IN_BITMAP (df->bbs_modified, blocks, 0, bbno,
{
count += df_bb_refs_update (df, BASIC_BLOCK (bbno));
});
}
df_refs_process (df);
return count;
@@ -2160,10 +2272,10 @@ df_modified_p (struct df *df, bitmap blocks)
return update;
}
/* Analyze dataflow info for the basic blocks specified by the bitmap
BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
modified blocks if BLOCKS is -1. */
int
df_analyze (struct df *df, bitmap blocks, int flags)
{
@@ -2205,6 +2317,220 @@ df_analyze (struct df *df, bitmap blocks, int flags)
return update;
}
/* Remove the entries not in BLOCKS from the LIST of length LEN, preserving
the order of the remaining entries. Returns the length of the resulting
list. */
static unsigned
prune_to_subcfg (int list[], unsigned len, bitmap blocks)
{
unsigned act, last;
for (act = 0, last = 0; act < len; act++)
if (bitmap_bit_p (blocks, list[act]))
list[last++] = list[act];
return last;
}
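As a worked example (values invented for illustration): with list = {0, 2, 1, 3}, len = 4 and BLOCKS containing blocks 1 and 2, the loop compacts the array in place to {2, 1, ...} and returns 2. Surviving entries keep their relative order, so the pruned dfs_order, rc_order and rts_order arrays below remain valid iteration orders for the sub-CFG.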
/* Alternative entry point to the analysis. Analyse just the part of the cfg
graph induced by BLOCKS.
TODO I am not quite sure how to avoid code duplication with df_analyze_1
here, and simultaneously not make even greater chaos in it. We behave
slightly differently in some details, especially in handling modified
insns. */
void
df_analyze_subcfg (struct df *df, bitmap blocks, int flags)
{
rtx insn;
basic_block bb;
struct dataflow dflow;
unsigned n_blocks;
if (flags & DF_UD_CHAIN)
flags |= DF_RD | DF_RD_CHAIN;
if (flags & DF_DU_CHAIN)
flags |= DF_RU;
if (flags & DF_RU)
flags |= DF_RU_CHAIN;
if (flags & DF_REG_INFO)
flags |= DF_LR;
if (!df->n_bbs)
{
df_alloc (df, max_reg_num ());
/* Mark all insns as modified. */
FOR_EACH_BB (bb)
{
FOR_BB_INSNS (bb, insn)
{
df_insn_modify (df, bb, insn);
}
}
}
df->flags = flags;
df_reg_def_chain_clean (df);
df_reg_use_chain_clean (df);
df_refs_update (df, blocks);
/* Clear the updated stuff from ``modified'' bitmaps. */
FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
{
if (bitmap_bit_p (df->bbs_modified, bb->index))
{
FOR_BB_INSNS (bb, insn)
{
bitmap_clear_bit (df->insns_modified, INSN_UID (insn));
}
bitmap_clear_bit (df->bbs_modified, bb->index);
}
});
/* Allocate the bitmaps now the total number of defs and uses are
known. If the number of defs or uses have changed, then
these bitmaps need to be reallocated. */
df_bitmaps_alloc (df, blocks, flags);
/* Set the LUIDs for each specified basic block. */
df_luids_set (df, blocks);
/* Recreate reg-def and reg-use chains from scratch so that first
def is at the head of the reg-def chain and the last use is at
the head of the reg-use chain. This is only important for
regs local to a basic block as it speeds up searching. */
if (flags & DF_RD_CHAIN)
{
df_reg_def_chain_create (df, blocks, true);
}
if (flags & DF_RU_CHAIN)
{
df_reg_use_chain_create (df, blocks, true);
}
df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
flow_depth_first_order_compute (df->dfs_order, df->rc_order);
flow_reverse_top_sort_order_compute (df->rts_order);
n_blocks = prune_to_subcfg (df->dfs_order, n_basic_blocks, blocks);
prune_to_subcfg (df->rc_order, n_basic_blocks, blocks);
prune_to_subcfg (df->rts_order, n_basic_blocks, blocks);
dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
if (flags & DF_RD)
{
/* Compute the sets of gens and kills for the defs of each bb. */
df_rd_local_compute (df, blocks);
FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
{
dflow.in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
dflow.out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
dflow.gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
dflow.kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
});
dflow.repr = SR_BITMAP;
dflow.dir = DF_FORWARD;
dflow.conf_op = DF_UNION;
dflow.transfun = df_rd_transfer_function;
dflow.n_blocks = n_blocks;
dflow.order = df->rc_order;
dflow.data = NULL;
iterative_dataflow (&dflow);
}
if (flags & DF_UD_CHAIN)
{
/* Create use-def chains. */
df_ud_chain_create (df, blocks);
}
if (flags & DF_RU)
{
/* Compute the sets of gens and kills for the upwards exposed
uses in each bb. */
df_ru_local_compute (df, blocks);
FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
{
dflow.in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
dflow.out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
dflow.gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
dflow.kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
});
dflow.repr = SR_BITMAP;
dflow.dir = DF_BACKWARD;
dflow.conf_op = DF_UNION;
dflow.transfun = df_ru_transfer_function;
dflow.n_blocks = n_blocks;
dflow.order = df->rts_order;
dflow.data = NULL;
iterative_dataflow (&dflow);
}
if (flags & DF_DU_CHAIN)
{
/* Create def-use chains. */
df_du_chain_create (df, blocks);
}
if (flags & DF_LR)
{
/* Compute the sets of defs and uses of live variables. */
df_lr_local_compute (df, blocks);
FOR_EACH_BB (bb)
{
dflow.in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
dflow.out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
dflow.gen[bb->index] = DF_BB_INFO (df, bb)->lr_use;
dflow.kill[bb->index] = DF_BB_INFO (df, bb)->lr_def;
}
dflow.repr = SR_BITMAP;
dflow.dir = DF_BACKWARD;
dflow.conf_op = DF_UNION;
dflow.transfun = df_lr_transfer_function;
dflow.n_blocks = n_blocks;
dflow.order = df->rts_order;
dflow.data = NULL;
iterative_dataflow (&dflow);
}
if (flags & DF_REG_INFO)
{
df_reg_info_compute (df, blocks);
}
free (dflow.in);
free (dflow.out);
free (dflow.gen);
free (dflow.kill);
free (df->dfs_order);
free (df->rc_order);
free (df->rts_order);
}
/* Free all the dataflow info and the DF structure. */
void
@@ -2214,7 +2540,6 @@ df_finish (struct df *df)
free (df);
}
/* Unlink INSN from its reference information. */
static void
df_insn_refs_unlink (struct df *df, basic_block bb ATTRIBUTE_UNUSED, rtx insn)
@@ -2302,6 +2627,16 @@ df_insn_delete (struct df *df, basic_block bb ATTRIBUTE_UNUSED, rtx insn)
return NEXT_INSN (insn);
}
/* Mark that basic block BB was modified. */
static void
df_bb_modify (struct df *df, basic_block bb)
{
if ((unsigned) bb->index >= df->n_bbs)
df_bb_table_realloc (df, df->n_bbs);
bitmap_set_bit (df->bbs_modified, bb->index);
}
/* Mark that INSN within BB may have changed (created/modified/deleted).
This may be called multiple times for the same insn. There is no
@@ -2316,7 +2651,7 @@ df_insn_modify (struct df *df, basic_block bb, rtx insn)
if (uid >= df->insn_size)
df_insn_table_realloc (df, uid);
bitmap_set_bit (df->bbs_modified, bb->index);
df_bb_modify (df, bb);
bitmap_set_bit (df->insns_modified, uid);
/* For incremental updating on the fly, perhaps we could make a copy
@@ -2326,7 +2661,6 @@ df_insn_modify (struct df *df, basic_block bb, rtx insn)
will just get ignored. */
}
typedef struct replace_args
{
rtx match;
@@ -3393,354 +3727,193 @@ debug_df_chain (struct df_link *link)
}
/* Hybrid search algorithm from "Implementation Techniques for
Efficient Data-Flow Analysis of Large Programs". */
static void
hybrid_search_bitmap (basic_block block, bitmap *in, bitmap *out, bitmap *gen,
bitmap *kill, enum df_flow_dir dir,
enum df_confluence_op conf_op,
transfer_function_bitmap transfun, sbitmap visited,
sbitmap pending, void *data)
dataflow_set_a_op_b (enum set_representation repr,
enum df_confluence_op op,
void *rslt, void *op1, void *op2)
{
int changed;
int i = block->index;
edge e;
basic_block bb = block;
SET_BIT (visited, block->index);
if (TEST_BIT (pending, block->index))
switch (repr)
{
if (dir == DF_FORWARD)
{
/* Calculate <conf_op> of predecessor_outs. */
bitmap_zero (in[i]);
for (e = bb->pred; e != 0; e = e->pred_next)
{
if (e->src == ENTRY_BLOCK_PTR)
continue;
switch (conf_op)
{
case DF_UNION:
bitmap_a_or_b (in[i], in[i], out[e->src->index]);
break;
case DF_INTERSECTION:
bitmap_a_and_b (in[i], in[i], out[e->src->index]);
break;
}
}
}
else
{
/* Calculate <conf_op> of successor ins. */
bitmap_zero (out[i]);
for (e = bb->succ; e != 0; e = e->succ_next)
{
if (e->dest == EXIT_BLOCK_PTR)
continue;
switch (conf_op)
{
case DF_UNION:
bitmap_a_or_b (out[i], out[i], in[e->dest->index]);
break;
case DF_INTERSECTION:
bitmap_a_and_b (out[i], out[i], in[e->dest->index]);
break;
}
}
}
/* Common part */
(*transfun)(i, &changed, in[i], out[i], gen[i], kill[i], data);
RESET_BIT (pending, i);
if (changed)
case SR_SBITMAP:
switch (op)
{
if (dir == DF_FORWARD)
{
for (e = bb->succ; e != 0; e = e->succ_next)
{
if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
continue;
SET_BIT (pending, e->dest->index);
}
}
else
{
for (e = bb->pred; e != 0; e = e->pred_next)
{
if (e->src == ENTRY_BLOCK_PTR || e->dest->index == i)
continue;
SET_BIT (pending, e->src->index);
}
}
case DF_UNION:
sbitmap_a_or_b (rslt, op1, op2);
break;
case DF_INTERSECTION:
sbitmap_a_and_b (rslt, op1, op2);
break;
default:
abort ();
}
}
if (dir == DF_FORWARD)
{
for (e = bb->succ; e != 0; e = e->succ_next)
break;
case SR_BITMAP:
switch (op)
{
if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
continue;
if (!TEST_BIT (visited, e->dest->index))
hybrid_search_bitmap (e->dest, in, out, gen, kill, dir,
conf_op, transfun, visited, pending,
data);
case DF_UNION:
bitmap_a_or_b (rslt, op1, op2);
break;
case DF_INTERSECTION:
bitmap_a_and_b (rslt, op1, op2);
break;
default:
abort ();
}
break;
default:
abort ();
}
else
}
static void
dataflow_set_copy (enum set_representation repr, void *dest, void *src)
{
switch (repr)
{
for (e = bb->pred; e != 0; e = e->pred_next)
{
if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
continue;
if (!TEST_BIT (visited, e->src->index))
hybrid_search_bitmap (e->src, in, out, gen, kill, dir,
conf_op, transfun, visited, pending,
data);
}
case SR_SBITMAP:
sbitmap_copy (dest, src);
break;
case SR_BITMAP:
bitmap_copy (dest, src);
break;
default:
abort ();
}
}
/* Hybrid search algorithm from "Implementation Techniques for
Efficient Data-Flow Analysis of Large Programs". */
/* Hybrid search for sbitmaps, rather than bitmaps. */
static void
hybrid_search_sbitmap (basic_block block, sbitmap *in, sbitmap *out,
sbitmap *gen, sbitmap *kill, enum df_flow_dir dir,
enum df_confluence_op conf_op,
transfer_function_sbitmap transfun, sbitmap visited,
sbitmap pending, void *data)
hybrid_search (basic_block bb, struct dataflow *dataflow,
sbitmap visited, sbitmap pending, sbitmap considered)
{
int changed;
int i = block->index;
int i = bb->index;
edge e;
basic_block bb = block;
SET_BIT (visited, block->index);
if (TEST_BIT (pending, block->index))
{
if (dir == DF_FORWARD)
{
/* Calculate <conf_op> of predecessor_outs. */
sbitmap_zero (in[i]);
for (e = bb->pred; e != 0; e = e->pred_next)
{
if (e->src == ENTRY_BLOCK_PTR)
continue;
switch (conf_op)
{
case DF_UNION:
sbitmap_a_or_b (in[i], in[i], out[e->src->index]);
break;
case DF_INTERSECTION:
sbitmap_a_and_b (in[i], in[i], out[e->src->index]);
break;
}
}
}
else
{
/* Calculate <conf_op> of successor ins. */
sbitmap_zero (out[i]);
for (e = bb->succ; e != 0; e = e->succ_next)
{
if (e->dest == EXIT_BLOCK_PTR)
continue;
switch (conf_op)
{
case DF_UNION:
sbitmap_a_or_b (out[i], out[i], in[e->dest->index]);
break;
case DF_INTERSECTION:
sbitmap_a_and_b (out[i], out[i], in[e->dest->index]);
break;
}
}
}
/* Common part. */
(*transfun)(i, &changed, in[i], out[i], gen[i], kill[i], data);
RESET_BIT (pending, i);
if (changed)
{
if (dir == DF_FORWARD)
{
for (e = bb->succ; e != 0; e = e->succ_next)
{
if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
continue;
SET_BIT (pending, e->dest->index);
}
}
else
{
for (e = bb->pred; e != 0; e = e->pred_next)
{
if (e->src == ENTRY_BLOCK_PTR || e->dest->index == i)
continue;
SET_BIT (pending, e->src->index);
}
}
}
}
if (dir == DF_FORWARD)
{
for (e = bb->succ; e != 0; e = e->succ_next)
{
if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
continue;
if (!TEST_BIT (visited, e->dest->index))
hybrid_search_sbitmap (e->dest, in, out, gen, kill, dir,
conf_op, transfun, visited, pending,
data);
}
}
SET_BIT (visited, bb->index);
if (!TEST_BIT (pending, bb->index))
abort ();
RESET_BIT (pending, i);
#define HS(E_ANTI, E_ANTI_NEXT, E_ANTI_BB, E_ANTI_START_BB, IN_SET, \
E, E_NEXT, E_BB, E_START_BB, OUT_SET) \
do \
{ \
/* Calculate <conf_op> of predecessor_outs. */ \
bitmap_zero (IN_SET[i]); \
for (e = bb->E_ANTI; e; e = e->E_ANTI_NEXT) \
{ \
if (e->E_ANTI_BB == E_ANTI_START_BB) \
continue; \
if (!TEST_BIT (considered, e->E_ANTI_BB->index)) \
continue; \
\
dataflow_set_a_op_b (dataflow->repr, dataflow->conf_op, \
IN_SET[i], IN_SET[i], \
OUT_SET[e->E_ANTI_BB->index]); \
} \
\
(*dataflow->transfun)(i, &changed, \
dataflow->in[i], dataflow->out[i], \
dataflow->gen[i], dataflow->kill[i], \
dataflow->data); \
\
if (!changed) \
break; \
\
for (e = bb->E; e; e = e->E_NEXT) \
{ \
if (e->E_BB == E_START_BB || e->E_BB->index == i) \
continue; \
\
if (!TEST_BIT (considered, e->E_BB->index)) \
continue; \
\
SET_BIT (pending, e->E_BB->index); \
} \
\
for (e = bb->E; e; e = e->E_NEXT) \
{ \
if (e->E_BB == E_START_BB || e->E_BB->index == i) \
continue; \
\
if (!TEST_BIT (considered, e->E_BB->index)) \
continue; \
\
if (!TEST_BIT (visited, e->E_BB->index)) \
hybrid_search (e->E_BB, dataflow, visited, pending, considered); \
} \
} while (0)
if (dataflow->dir == DF_FORWARD)
HS (pred, pred_next, src, ENTRY_BLOCK_PTR, dataflow->in,
succ, succ_next, dest, EXIT_BLOCK_PTR, dataflow->out);
else
{
for (e = bb->pred; e != 0; e = e->pred_next)
{
if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
continue;
if (!TEST_BIT (visited, e->src->index))
hybrid_search_sbitmap (e->src, in, out, gen, kill, dir,
conf_op, transfun, visited, pending,
data);
}
}
HS (succ, succ_next, dest, EXIT_BLOCK_PTR, dataflow->out,
pred, pred_next, src, ENTRY_BLOCK_PTR, dataflow->in);
}
/* gen = GEN set.
kill = KILL set.
in, out = Filled in by function.
blocks = Blocks to analyze.
dir = Dataflow direction.
conf_op = Confluence operation.
transfun = Transfer function.
order = Order to iterate in. (Should map block numbers -> order)
data = Whatever you want. It's passed to the transfer function.
This function will perform iterative bitvector dataflow, producing
the in and out sets. Even if you only want to perform it for a
small number of blocks, the vectors for in and out must be large
enough for *all* blocks, because changing one block might affect
others. However, it'll only put what you say to analyze on the
initial worklist.
/* This function will perform iterative bitvector dataflow described by
DATAFLOW, producing the in and out sets. Only the part of the cfg
induced by blocks in DATAFLOW->order is taken into account.
For forward problems, you probably want to pass in a mapping of
block number to rc_order (like df->inverse_rc_map).
*/
block number to rc_order (like df->inverse_rc_map). */
void
iterative_dataflow_sbitmap (sbitmap *in, sbitmap *out, sbitmap *gen,
sbitmap *kill, bitmap blocks,
enum df_flow_dir dir,
enum df_confluence_op conf_op,
transfer_function_sbitmap transfun, int *order,
void *data)
iterative_dataflow (struct dataflow *dataflow)
{
int i;
fibheap_t worklist;
basic_block bb;
sbitmap visited, pending;
unsigned i, idx;
sbitmap visited, pending, considered;
pending = sbitmap_alloc (last_basic_block);
visited = sbitmap_alloc (last_basic_block);
considered = sbitmap_alloc (last_basic_block);
sbitmap_zero (pending);
sbitmap_zero (visited);
worklist = fibheap_new ();
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
{
fibheap_insert (worklist, order[i], (void *) (size_t) i);
SET_BIT (pending, i);
if (dir == DF_FORWARD)
sbitmap_copy (out[i], gen[i]);
else
sbitmap_copy (in[i], gen[i]);
});
sbitmap_zero (considered);
while (sbitmap_first_set_bit (pending) != -1)
for (i = 0; i < dataflow->n_blocks; i++)
{
while (!fibheap_empty (worklist))
{
i = (size_t) fibheap_extract_min (worklist);
bb = BASIC_BLOCK (i);
if (!TEST_BIT (visited, bb->index))
hybrid_search_sbitmap (bb, in, out, gen, kill, dir,
conf_op, transfun, visited, pending, data);
}
if (sbitmap_first_set_bit (pending) != -1)
{
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
{
fibheap_insert (worklist, order[i], (void *) (size_t) i);
});
sbitmap_zero (visited);
}
idx = dataflow->order[i];
SET_BIT (pending, idx);
SET_BIT (considered, idx);
if (dataflow->dir == DF_FORWARD)
dataflow_set_copy (dataflow->repr,
dataflow->out[idx], dataflow->gen[idx]);
else
{
break;
}
}
sbitmap_free (pending);
sbitmap_free (visited);
fibheap_delete (worklist);
}
/* Exactly the same as iterative_dataflow_sbitmap, except it works on
bitmaps instead. */
void
iterative_dataflow_bitmap (bitmap *in, bitmap *out, bitmap *gen, bitmap *kill,
bitmap blocks, enum df_flow_dir dir,
enum df_confluence_op conf_op,
transfer_function_bitmap transfun, int *order,
void *data)
{
int i;
fibheap_t worklist;
basic_block bb;
sbitmap visited, pending;
pending = sbitmap_alloc (last_basic_block);
visited = sbitmap_alloc (last_basic_block);
sbitmap_zero (pending);
sbitmap_zero (visited);
worklist = fibheap_new ();
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
{
fibheap_insert (worklist, order[i], (void *) (size_t) i);
SET_BIT (pending, i);
if (dir == DF_FORWARD)
bitmap_copy (out[i], gen[i]);
else
bitmap_copy (in[i], gen[i]);
});
dataflow_set_copy (dataflow->repr,
dataflow->in[idx], dataflow->gen[idx]);
};
while (sbitmap_first_set_bit (pending) != -1)
while (1)
{
while (!fibheap_empty (worklist))
for (i = 0; i < dataflow->n_blocks; i++)
{
i = (size_t) fibheap_extract_min (worklist);
bb = BASIC_BLOCK (i);
if (!TEST_BIT (visited, bb->index))
hybrid_search_bitmap (bb, in, out, gen, kill, dir,
conf_op, transfun, visited, pending, data);
}
idx = dataflow->order[i];
if (sbitmap_first_set_bit (pending) != -1)
{
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
{
fibheap_insert (worklist, order[i], (void *) (size_t) i);
});
sbitmap_zero (visited);
}
else
{
break;
if (TEST_BIT (pending, idx) && !TEST_BIT (visited, idx))
hybrid_search (BASIC_BLOCK (idx), dataflow,
visited, pending, considered);
}
if (sbitmap_first_set_bit (pending) == -1)
break;
sbitmap_zero (visited);
}
sbitmap_free (pending);
sbitmap_free (visited);
fibheap_delete (worklist);
sbitmap_free (considered);
}
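Compared with the deleted drivers, the fibheap priority queue is gone: every block in dataflow->order starts out pending and considered, each outer iteration sweeps the order array and runs hybrid_search from any block still pending and unvisited, and the loop exits as soon as a sweep leaves pending empty. The considered bitmap is what confines propagation to the sub-CFG for df_analyze_subcfg, and a block that never gets re-queued is never reprocessed, which is the "do not process basic blocks unnecessarily" improvement named in the ChangeLog.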
gcc/df.h
@@ -24,7 +24,6 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#define GCC_DF_H
#include "bitmap.h"
#include "sbitmap.h"
#include "basic-block.h"
#define DF_RD 1 /* Reaching definitions. */
@@ -91,6 +90,7 @@ struct ref
unsigned int id; /* Ref index. */
enum df_ref_type type; /* Type of ref. */
enum df_ref_flags flags; /* Various flags. */
void *data; /* The data assigned to it by user. */
};
@@ -164,9 +164,6 @@ struct df
bitmap insns_modified; /* Insns that (may) have changed. */
bitmap bbs_modified; /* Blocks that (may) have changed. */
bitmap all_blocks; /* All blocks in CFG. */
/* The sbitmap vector of dominators or NULL if not computed.
Ideally, this should be a pointer to a CFG object. */
sbitmap *dom;
int *dfs_order; /* DFS order -> block number. */
int *rc_order; /* Reverse completion order -> block number. */
int *rts_order; /* Reverse top sort order -> block number. */
@@ -203,6 +200,7 @@ struct df_map
#define DF_REF_CHAIN(REF) ((REF)->chain)
#define DF_REF_ID(REF) ((REF)->id)
#define DF_REF_FLAGS(REF) ((REF)->flags)
#define DF_REF_DATA(REF) ((REF)->data)
/* Macros to determine the reference type. */
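The new data field and DF_REF_DATA accessor give client passes somewhere to hang their own per-reference information. A minimal hypothetical sketch (the pass-side names are invented; DF_INSN_DEFS is df.h's existing accessor for an insn's list of defs):

    /* Attach pass-local info to every def in INSN.  */
    struct my_def_info { int spill_cost; };

    static void
    annotate_defs (struct df *df, rtx insn, struct my_def_info *info)
    {
      struct df_link *link;

      for (link = DF_INSN_DEFS (df, insn); link; link = link->next)
        DF_REF_DATA (link->ref) = info;  /* Stored in ref->data.  */
    }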
@@ -241,6 +239,7 @@ struct df_map
extern struct df *df_init (void);
extern int df_analyze (struct df *, bitmap, int);
extern void df_analyze_subcfg (struct df *, bitmap, int);
extern void df_finish (struct df *);
@@ -308,7 +307,6 @@ extern struct ref *df_find_def (struct df *, rtx, rtx)
extern int df_reg_used (struct df *, rtx, rtx);
/* Functions for debugging from GDB. */
extern void debug_df_insn (rtx);
@@ -346,24 +344,39 @@ enum df_flow_dir
};
typedef void (*transfer_function_sbitmap) (int, int *, sbitmap, sbitmap,
sbitmap, sbitmap, void *);
typedef void (*transfer_function) (int, int *, void *, void *,
void *, void *, void *);
/* The description of a dataflow problem to solve. */
typedef void (*transfer_function_bitmap) (int, int *, bitmap, bitmap,
bitmap, bitmap, void *);
enum set_representation
{
SR_SBITMAP, /* Represent sets by sbitmaps. */
SR_BITMAP /* Represent sets by bitmaps. */
};
extern void iterative_dataflow_sbitmap (sbitmap *, sbitmap *, sbitmap *,
sbitmap *, bitmap, enum df_flow_dir,
enum df_confluence_op,
transfer_function_sbitmap,
int *, void *);
struct dataflow
{
enum set_representation repr; /* The way the sets are represented. */
/* The following arrays are indexed by block indices, so they must always
be large enough even if we restrict ourselves just to a subset of cfg. */
void **gen, **kill; /* Gen and kill sets. */
void **in, **out; /* Results. */
enum df_flow_dir dir; /* Dataflow direction. */
enum df_confluence_op conf_op; /* Confluence operator. */
unsigned n_blocks; /* Number of basic blocks in the
order. */
int *order; /* The list of basic blocks to work
with, in the order they should
be processed in. */
transfer_function transfun; /* The transfer function. */
void *data; /* Data used by the transfer
function. */
};
extern void iterative_dataflow_bitmap (bitmap *, bitmap *, bitmap *,
bitmap *, bitmap,
enum df_flow_dir,
enum df_confluence_op,
transfer_function_bitmap,
int *, void *);
extern void iterative_dataflow (struct dataflow *);
extern bool read_modify_subreg_p (rtx);
#endif /* GCC_DF_H */