Commit 6ce2bcb7 by Steven Bosscher (committed by Steven Bosscher)

basic-block.c (tail_recursion_label_list): Don't declare.

	* basic-block.c (tail_recursion_label_list): Don't declare.
	(CLEANUP_PRE_SIBCALL): Remove.  Renumber the other CLEANUP_*
	accordingly.
	* cfgbuild.c (find_label_refs): Remove.
	(find_basic_blocks_1): Don't handle CALL_PLACEHOLDER insns.
	* cfgcleanup.c (tail_recursion_label_p): Remove.
	(merge_blocks_move): Do not check for tail recursion.
	(try_optimize_cfg): Likewise.
	(cleanup_cfg): Never handle CLEANUP_PRE_SIBCALL.
	* cfgrtl.c (tail_recursion_label_list): Remove.
	* except.c (remove_unreachable_regions): Don't handle
	CALL_PLACEHOLDER insns.
	(convert_from_eh_region_ranges_1, can_throw_internal,
	can_throw_external): Likewise.
	* function.c (free_after_compilation): Don't clear
	x_tail_recursion_label.
	(fixup_var_refs_insns): Don't handle CALL_PLACEHOLDER insns.
	(identify_blocks_1): Don't recurse for CALL_PLACEHOLDER insns.
	(reorder_blocks_1): Likewise.
	* function.h (struct function): Remove x_tail_recursion_label
	member.  Don't define tail_recursion_label.
	* jump.c (mark_all_labels): Don't handle CALL_PLACEHOLDER insns.
	* print-rtl.c (print_rtx): Likewise.
	* rtl.def (CALL_PLACEHOLDER): Remove.
	* rtl.h (sibcall_use_t): Remove enum.
	(optimize_sibling_and_tail_recursive_calls,
	replace_call_placeholder): Remove function prototypes.
	* stmt.c (tail_recursion_args): Remove.
	(optimize_tail_recursion): Remove.
	(expand_return): Don't check for possible tail recursion.
	* tree.h (optimize_tail_recursion): Remove prototype.

From-SVN: r82597
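For background: the machinery removed in this commit let the compiler decide, after RTL expansion, whether each call should be emitted as a normal call, a sibling (tail) call, or a jump back to the function entry for self tail recursion. A minimal C example (illustrative only) of the kind of call this concerned, which the tree-level pass in tree-tailcall.c now handles instead of the deleted RTL code:

/* With optimization enabled, the self-recursive call in tail position
   can be replaced by a jump back to the top of the function, reusing
   the current stack frame instead of pushing a new one.  */
static unsigned long
gcd (unsigned long a, unsigned long b)
{
  if (b == 0)
    return a;
  return gcd (b, a % b);  /* tail call: candidate for tail-recursion elimination */
}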
parent 53ef271f
2004-06-03  Steven Bosscher  <stevenb@suse.de>
* basic-block.c (tail_recursion_label_list): Don't declare.
(CLEANUP_PRE_SIBCALL): Remove. Renumber the other CLEANUP_*
accordingly.
* cfgbuild.c (find_label_refs): Remove.
(find_basic_blocks_1): Don't handle CALL_PLACEHOLDER insns.
* cfgcleanup.c (tail_recursion_label_p): Remove.
(merge_blocks_move): Do not check for tail recursion.
(try_optimize_cfg): Likewise.
(cleanup_cfg): Never handle CLEANUP_PRE_SIBCALL.
* cfgrtl.c (tail_recursion_label_list): Remove.
* except.c (remove_unreachable_regions): Don't handle
CALL_PLACEHOLDER insns.
(convert_from_eh_region_ranges_1, can_throw_internal,
can_throw_external): Likewise.
* function.c (free_after_compilation): Don't clear
x_tail_recursion_label.
(fixup_var_refs_insns): Don't handle CALL_PLACEHOLDER insns.
(identify_blocks_1): Don't recurse for CALL_PLACEHOLDER insns.
(reorder_blocks_1): Likewise.
* function.h (struct function): Remove x_tail_recursion_label
member. Don't define tail_recursion_label.
* jump.c (mark_all_labels): Don't handle CALL_PLACEHOLDER insns.
* print-rtl.c (print_rtx): Likewise.
* rtl.def (CALL_PLACEHOLDER): Remove.
* rtl.h (sibcall_use_t): Remove enum.
(optimize_sibling_and_tail_recursive_calls,
replace_call_placeholder): Remove function prototypes.
* stmt.c (tail_recursion_args): Remove.
(optimize_tail_recursion): Remove.
(expand_return): Don't check for possible tail recursion.
* tree.h (optimize_tail_recursion): Remove prototype.
2004-06-02 Jan Hubicka <jh@suse.cz>
* tree-cfg.c (tree_find_edge_insert_loc): Allow inserting before
......
......@@ -367,7 +367,6 @@ extern regset regs_live_at_setjmp;
/* Special labels found during CFG build. */
extern GTY(()) rtx label_value_list;
extern GTY(()) rtx tail_recursion_label_list;
extern struct obstack flow_obstack;
......@@ -554,16 +553,15 @@ enum update_life_extent
#define CLEANUP_CROSSJUMP 2 /* Do crossjumping. */
#define CLEANUP_POST_REGSTACK 4 /* We run after reg-stack and need
to care REG_DEAD notes. */
#define CLEANUP_PRE_SIBCALL 8 /* Do not get confused by code hidden
inside call_placeholders.. */
#define CLEANUP_PRE_LOOP 16 /* Take care to preserve syntactic loop
#define CLEANUP_PRE_LOOP 8 /* Take care to preserve syntactic loop
notes. */
#define CLEANUP_UPDATE_LIFE 32 /* Keep life information up to date. */
#define CLEANUP_THREADING 64 /* Do jump threading. */
#define CLEANUP_NO_INSN_DEL 128 /* Do not try to delete trivially dead
#define CLEANUP_UPDATE_LIFE 16 /* Keep life information up to date. */
#define CLEANUP_THREADING 32 /* Do jump threading. */
#define CLEANUP_NO_INSN_DEL 64 /* Do not try to delete trivially dead
insns. */
#define CLEANUP_CFGLAYOUT 256 /* Do cleanup in cfglayout mode. */
#define CLEANUP_LOG_LINKS 512 /* Update log links. */
#define CLEANUP_CFGLAYOUT 128 /* Do cleanup in cfglayout mode. */
#define CLEANUP_LOG_LINKS 256 /* Update log links. */
extern void life_analysis (FILE *, int);
extern int update_life_info (sbitmap, enum update_life_extent, int);
extern int update_life_info_in_dirty_blocks (enum update_life_extent, int);
......
......@@ -50,7 +50,6 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
static int count_basic_blocks (rtx);
static void find_basic_blocks_1 (rtx);
static rtx find_label_refs (rtx, rtx);
static void make_edges (rtx, basic_block, basic_block, int);
static void make_label_edge (sbitmap *, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
......@@ -175,51 +174,6 @@ count_basic_blocks (rtx f)
return count;
}
/* Scan a list of insns for labels referred to other than by jumps.
This is used to scan the alternatives of a call placeholder. */
static rtx
find_label_refs (rtx f, rtx lvl)
{
rtx insn;
for (insn = f; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
{
rtx note;
/* Make a list of all labels referred to other than by jumps
(which just don't have the REG_LABEL notes).
Make a special exception for labels followed by an ADDR*VEC,
as this would be a part of the tablejump setup code.
Make a special exception to registers loaded with label
values just before jump insns that use them. */
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_LABEL)
{
rtx lab = XEXP (note, 0), next;
if ((next = next_nonnote_insn (lab)) != NULL
&& GET_CODE (next) == JUMP_INSN
&& (GET_CODE (PATTERN (next)) == ADDR_VEC
|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
;
else if (GET_CODE (lab) == NOTE)
;
else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
&& find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
;
else
lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
}
}
return lvl;
}
/* Create an edge between two basic blocks. FLAGS are auxiliary information
about the edge that is accumulated between calls. */
......@@ -464,7 +418,6 @@ find_basic_blocks_1 (rtx f)
rtx insn, next;
rtx bb_note = NULL_RTX;
rtx lvl = NULL_RTX;
rtx trll = NULL_RTX;
rtx head = NULL_RTX;
rtx end = NULL_RTX;
basic_block prev = ENTRY_BLOCK_PTR;
......@@ -525,23 +478,11 @@ find_basic_blocks_1 (rtx f)
case CODE_LABEL:
case JUMP_INSN:
case CALL_INSN:
case INSN:
case BARRIER:
break;
case CALL_INSN:
if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
/* Scan each of the alternatives for label refs. */
lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
/* Record its tail recursion label, if any. */
if (XEXP (PATTERN (insn), 3) != NULL_RTX)
trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
}
break;
default:
abort ();
}
......@@ -588,7 +529,6 @@ find_basic_blocks_1 (rtx f)
abort ();
label_value_list = lvl;
tail_recursion_label_list = trll;
clear_aux_for_blocks ();
}
......
......@@ -78,7 +78,6 @@ static bool outgoing_edges_match (int, basic_block, basic_block);
static int flow_find_cross_jump (int, basic_block, basic_block, rtx *, rtx *);
static bool insns_match_p (int, rtx, rtx);
static bool tail_recursion_label_p (rtx);
static void merge_blocks_move_predecessor_nojumps (basic_block, basic_block);
static void merge_blocks_move_successor_nojumps (basic_block, basic_block);
static bool try_optimize_cfg (int);
......@@ -670,19 +669,6 @@ try_forward_edges (int mode, basic_block b)
return changed;
}
/* Return true if LABEL is used for tail recursion. */
static bool
tail_recursion_label_p (rtx label)
{
rtx x;
for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
if (label == XEXP (x, 0))
return true;
return false;
}
/* Blocks A and B are to be merged into a single block. A has no incoming
fallthru edge, so it can be moved before B without adding or modifying
......@@ -809,14 +795,6 @@ static basic_block
merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
{
basic_block next;
/* If C has a tail recursion label, do not merge. There is no
edge recorded from the call_placeholder back to this label, as
that would make optimize_sibling_and_tail_recursive_calls more
complex for no gain. */
if ((mode & CLEANUP_PRE_SIBCALL)
&& GET_CODE (BB_HEAD (c)) == CODE_LABEL
&& tail_recursion_label_p (BB_HEAD (c)))
return NULL;
/* If we are partitioning hot/cold basic blocks, we don't want to
mess up unconditional or indirect jumps that cross between hot
......@@ -1852,15 +1830,11 @@ try_optimize_cfg (int mode)
b = c;
}
/* Remove code labels no longer used. Don't do this
before CALL_PLACEHOLDER is removed, as some branches
may be hidden within. */
/* Remove code labels no longer used. */
if (b->pred->pred_next == NULL
&& (b->pred->flags & EDGE_FALLTHRU)
&& !(b->pred->flags & EDGE_COMPLEX)
&& GET_CODE (BB_HEAD (b)) == CODE_LABEL
&& (!(mode & CLEANUP_PRE_SIBCALL)
|| !tail_recursion_label_p (BB_HEAD (b)))
/* If the previous block ends with a branch to this
block, we can't delete the label. Normally this
is a condjump that is yet to be simplified, but
......@@ -2077,8 +2051,7 @@ cleanup_cfg (int mode)
changed = true;
/* We've possibly created trivially dead code. Cleanup it right
now to introduce more opportunities for try_optimize_cfg. */
if (!(mode & (CLEANUP_NO_INSN_DEL
| CLEANUP_UPDATE_LIFE | CLEANUP_PRE_SIBCALL))
if (!(mode & (CLEANUP_NO_INSN_DEL | CLEANUP_UPDATE_LIFE))
&& !reload_completed)
delete_trivially_dead_insns (get_insns(), max_reg_num ());
}
......@@ -2101,7 +2074,7 @@ cleanup_cfg (int mode)
? PROP_LOG_LINKS : 0)))
break;
}
else if (!(mode & (CLEANUP_NO_INSN_DEL | CLEANUP_PRE_SIBCALL))
else if (!(mode & CLEANUP_NO_INSN_DEL)
&& (mode & CLEANUP_EXPENSIVE)
&& !reload_completed)
{
......
......@@ -62,7 +62,6 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
/* ??? Should probably be using LABEL_NUSES instead. It would take a
bit of surgery to be able to use or co-opt the routines in jump. */
rtx label_value_list;
rtx tail_recursion_label_list;
static int can_delete_note_p (rtx);
static int can_delete_label_p (rtx);
......
......@@ -1157,18 +1157,7 @@ remove_unreachable_regions (rtx insns)
}
for (insn = insns; insn; insn = NEXT_INSN (insn))
{
reachable[uid_region_num[INSN_UID (insn)]] = true;
if (GET_CODE (insn) == CALL_INSN
&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
for (i = 0; i < 3; i++)
{
rtx sub = XEXP (PATTERN (insn), i);
for (; sub ; sub = NEXT_INSN (sub))
reachable[uid_region_num[INSN_UID (sub)]] = true;
}
}
reachable[uid_region_num[INSN_UID (insn)]] = true;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
......@@ -1259,8 +1248,6 @@ convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
else
cur = *--sp;
/* Removing the first insn of a CALL_PLACEHOLDER sequence
requires extra care to adjust sequence start. */
if (insn == *pinsns)
*pinsns = next;
remove_insn (insn);
......@@ -1285,17 +1272,6 @@ convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
REG_NOTES (insn));
}
if (GET_CODE (insn) == CALL_INSN
&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
sp, cur);
convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
sp, cur);
convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
sp, cur);
}
}
}
......@@ -3131,20 +3107,6 @@ can_throw_internal (rtx insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
if (GET_CODE (insn) == CALL_INSN
&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
int i;
for (i = 0; i < 3; ++i)
{
rtx sub = XEXP (PATTERN (insn), i);
for (; sub ; sub = NEXT_INSN (sub))
if (can_throw_internal (sub))
return true;
}
return false;
}
/* Every insn that might throw has an EH_REGION note. */
note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
if (!note || INTVAL (XEXP (note, 0)) <= 0)
......@@ -3192,20 +3154,6 @@ can_throw_external (rtx insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
if (GET_CODE (insn) == CALL_INSN
&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
int i;
for (i = 0; i < 3; ++i)
{
rtx sub = XEXP (PATTERN (insn), i);
for (; sub ; sub = NEXT_INSN (sub))
if (can_throw_external (sub))
return true;
}
return false;
}
note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
if (!note)
{
......
......@@ -444,7 +444,6 @@ free_after_compilation (struct function *f)
f->x_save_expr_regs = NULL;
f->x_stack_slot_list = NULL;
f->x_rtl_expr_chain = NULL;
f->x_tail_recursion_label = NULL;
f->x_tail_recursion_reentry = NULL;
f->x_arg_pointer_save_area = NULL;
f->x_parm_birth_insn = NULL;
......@@ -1683,32 +1682,7 @@ fixup_var_refs_insns (rtx insn, rtx var, enum machine_mode promoted_mode,
pointer now. */
rtx next = NEXT_INSN (insn);
/* CALL_PLACEHOLDERs are special; we have to switch into each of
the three sequences they (potentially) contain, and process
them recursively. The CALL_INSN itself is not interesting. */
if (GET_CODE (insn) == CALL_INSN
&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
int i;
/* Look at the Normal call, sibling call and tail recursion
sequences attached to the CALL_PLACEHOLDER. */
for (i = 0; i < 3; i++)
{
rtx seq = XEXP (PATTERN (insn), i);
if (seq)
{
push_to_sequence (seq);
fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
may_share);
XEXP (PATTERN (insn), i) = get_insns ();
end_sequence ();
}
}
}
else if (INSN_P (insn))
if (INSN_P (insn))
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
may_share);
......@@ -1717,11 +1691,7 @@ fixup_var_refs_insns (rtx insn, rtx var, enum machine_mode promoted_mode,
}
/* Look up the insns which reference VAR in HT and fix them up. Other
arguments are the same as fixup_var_refs_insns.
N.B. No need for special processing of CALL_PLACEHOLDERs here,
because the hash table will point straight to the interesting insn
(inside the CALL_PLACEHOLDER). */
arguments are the same as fixup_var_refs_insns. */
static void
fixup_var_refs_insns_with_hash (htab_t ht, rtx var, enum machine_mode promoted_mode,
......@@ -5903,7 +5873,7 @@ identify_blocks (void)
}
/* Subroutine of identify_blocks. Do the block substitution on the
insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
insn chain beginning with INSNS.
BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
BLOCK_VECTOR is incremented for each block seen. */
......@@ -5942,20 +5912,6 @@ identify_blocks_1 (rtx insns, tree *block_vector, tree *end_block_vector,
NOTE_BLOCK (insn) = *--block_stack;
}
}
else if (GET_CODE (insn) == CALL_INSN
&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
rtx cp = PATTERN (insn);
block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
end_block_vector, block_stack);
if (XEXP (cp, 1))
block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
end_block_vector, block_stack);
if (XEXP (cp, 2))
block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
end_block_vector, block_stack);
}
}
/* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
......@@ -6066,16 +6022,6 @@ reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
current_block = BLOCK_SUPERCONTEXT (current_block);
}
}
else if (GET_CODE (insn) == CALL_INSN
&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
rtx cp = PATTERN (insn);
reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
if (XEXP (cp, 1))
reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
if (XEXP (cp, 2))
reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
}
}
}
......
......@@ -278,10 +278,6 @@ struct function GTY(())
/* Chain of all RTL_EXPRs that have insns in them. */
tree x_rtl_expr_chain;
/* Label to jump back to for tail recursion, or 0 if we have
not yet needed one for this function. */
rtx x_tail_recursion_label;
/* Place after which to insert the tail_recursion_label if we need one. */
rtx x_tail_recursion_reentry;
......@@ -560,7 +556,6 @@ extern int trampolines_created;
#define stack_slot_list (cfun->x_stack_slot_list)
#define parm_birth_insn (cfun->x_parm_birth_insn)
#define frame_offset (cfun->x_frame_offset)
#define tail_recursion_label (cfun->x_tail_recursion_label)
#define tail_recursion_reentry (cfun->x_tail_recursion_reentry)
#define arg_pointer_save_area (cfun->x_arg_pointer_save_area)
#define rtl_expr_chain (cfun->x_rtl_expr_chain)
......
......@@ -192,26 +192,6 @@ mark_all_labels (rtx f)
for (insn = f; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn))
{
if (GET_CODE (insn) == CALL_INSN
&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
mark_all_labels (XEXP (PATTERN (insn), 0));
mark_all_labels (XEXP (PATTERN (insn), 1));
mark_all_labels (XEXP (PATTERN (insn), 2));
/* Canonicalize the tail recursion label attached to the
CALL_PLACEHOLDER insn. */
if (XEXP (PATTERN (insn), 3))
{
rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
XEXP (PATTERN (insn), 3));
mark_jump_label (label_ref, insn, 0);
XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
}
continue;
}
mark_jump_label (PATTERN (insn), insn, 0);
if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
{
......
......@@ -60,9 +60,6 @@ int flag_simple = 0;
/* Nonzero if we are dumping graphical description. */
int dump_for_graph;
/* Nonzero to dump all call_placeholder alternatives. */
static int debug_call_placeholder_verbose;
void
print_mem_expr (FILE *outfile, tree expr)
{
......@@ -99,7 +96,6 @@ print_rtx (rtx in_rtx)
int j;
const char *format_ptr;
int is_insn;
rtx tem;
if (sawclose)
{
......@@ -573,49 +569,6 @@ print_rtx (rtx in_rtx)
}
break;
case CALL_PLACEHOLDER:
if (debug_call_placeholder_verbose)
{
fputs (" (cond [\n (const_string \"normal\") (sequence [", outfile);
for (tem = XEXP (in_rtx, 0); tem != 0; tem = NEXT_INSN (tem))
{
fputs ("\n ", outfile);
print_inline_rtx (outfile, tem, 4);
}
tem = XEXP (in_rtx, 1);
if (tem)
fputs ("\n ])\n (const_string \"tail_call\") (sequence [",
outfile);
for (; tem != 0; tem = NEXT_INSN (tem))
{
fputs ("\n ", outfile);
print_inline_rtx (outfile, tem, 4);
}
tem = XEXP (in_rtx, 2);
if (tem)
fputs ("\n ])\n (const_string \"tail_recursion\") (sequence [",
outfile);
for (; tem != 0; tem = NEXT_INSN (tem))
{
fputs ("\n ", outfile);
print_inline_rtx (outfile, tem, 4);
}
fputs ("\n ])\n ])", outfile);
break;
}
for (tem = XEXP (in_rtx, 0); tem != 0; tem = NEXT_INSN (tem))
if (GET_CODE (tem) == CALL_INSN)
{
fprintf (outfile, " ");
print_rtx (tem);
break;
}
break;
default:
break;
}
......
......@@ -1153,27 +1153,6 @@ DEF_RTL_EXPR(RANGE_VAR, "range_var", "eti", RTX_EXTRA)
0 is the live bitmap. Operand 1 is the original block number. */
DEF_RTL_EXPR(RANGE_LIVE, "range_live", "bi", RTX_EXTRA)
/* A placeholder for a CALL_INSN which may be turned into a normal call,
a sibling (tail) call or tail recursion.
Immediately after RTL generation, this placeholder will be replaced
by the insns to perform the call, sibcall or tail recursion.
This RTX has 4 operands. The first three are lists of instructions to
perform the call as a normal call, sibling call and tail recursion
respectively. The latter two lists may be NULL, the first may never
be NULL.
The last operand is the tail recursion CODE_LABEL, which may be NULL if no
potential tail recursive calls were found.
The tail recursion label is needed so that we can clear LABEL_PRESERVE_P
after we select a call method.
This method of tail-call elimination is intended to be replaced by
tree-based optimizations once front-end conversions are complete. */
DEF_RTL_EXPR(CALL_PLACEHOLDER, "call_placeholder", "uuuu", RTX_EXTRA)
/* Describes a merge operation between two vector values.
Operands 0 and 1 are the vectors to be merged, operand 2 is a bitmask
that specifies where the parts of the result are taken from. Set bits
......
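The CALL_PLACEHOLDER description above spells out its four operands; the removed except.c, function.c and jump.c code earlier in this diff all walk the first three operands the same way. A condensed sketch of that traversal, using only accessors that appear in the removed code (the helper name placeholder_any_insn_p is illustrative, not an actual GCC function):

/* Visit every insn in each of the three alternative sequences
   (normal call, sibling call, tail recursion) attached to a
   CALL_PLACEHOLDER and report whether any satisfies PRED.  */
static bool
placeholder_any_insn_p (rtx insn, bool (*pred) (rtx))
{
  int i;

  for (i = 0; i < 3; i++)
    {
      rtx sub = XEXP (PATTERN (insn), i);
      for (; sub; sub = NEXT_INSN (sub))
        if (pred (sub))
          return true;
    }
  return false;
}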
......@@ -2414,16 +2414,6 @@ extern rtx gen_hard_reg_clobber (enum machine_mode, unsigned int);
extern rtx get_reg_known_value (unsigned int);
extern bool get_reg_known_equiv_p (unsigned int);
/* In sibcall.c */
typedef enum {
sibcall_use_normal = 1,
sibcall_use_tail_recursion,
sibcall_use_sibcall
} sibcall_use_t;
extern void optimize_sibling_and_tail_recursive_calls (void);
extern void replace_call_placeholder (rtx, sibcall_use_t);
#ifdef STACK_REGS
extern int stack_regs_mentioned (rtx insn);
#endif
......
......@@ -382,7 +382,6 @@ static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static int tail_recursion_args (tree, tree);
static void expand_cleanups (tree, int, int);
static void check_seenlabel (void);
static void do_jump_if_equal (rtx, rtx, rtx, int);
......@@ -2668,39 +2667,6 @@ expand_return (tree retval)
last_insn = get_last_insn ();
/* Distribute return down conditional expr if either of the sides
may involve tail recursion (see test below). This enhances the number
of tail recursions we see. Don't do this always since it can produce
sub-optimal code in some cases and we distribute assignments into
conditional expressions when it would help. */
if (optimize && retval_rhs != 0
&& frame_offset == 0
&& TREE_CODE (retval_rhs) == COND_EXPR
&& (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
|| TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
{
rtx label = gen_label_rtx ();
tree expr;
do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
start_cleanup_deferral ();
expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
DECL_RESULT (current_function_decl),
TREE_OPERAND (retval_rhs, 1));
TREE_SIDE_EFFECTS (expr) = 1;
expand_return (expr);
emit_label (label);
expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
DECL_RESULT (current_function_decl),
TREE_OPERAND (retval_rhs, 2));
TREE_SIDE_EFFECTS (expr) = 1;
expand_return (expr);
end_cleanup_deferral ();
return;
}
result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
/* If the result is an aggregate that is being returned in one (or more)
......@@ -2850,114 +2816,6 @@ expand_return (tree retval)
}
}
/* Attempt to optimize a potential tail recursion call into a goto.
ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
where to place the jump to the tail recursion label.
Return TRUE if the call was optimized into a goto. */
int
optimize_tail_recursion (tree arguments, rtx last_insn)
{
/* Finish checking validity, and if valid emit code to set the
argument variables for the new call. */
if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
{
if (tail_recursion_label == 0)
{
tail_recursion_label = gen_label_rtx ();
emit_label_after (tail_recursion_label,
tail_recursion_reentry);
}
emit_queue ();
expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
emit_barrier ();
return 1;
}
return 0;
}
/* Emit code to alter this function's formal parms for a tail-recursive call.
ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
FORMALS is the chain of decls of formals.
Return 1 if this can be done;
otherwise return 0 and do not emit any code. */
static int
tail_recursion_args (tree actuals, tree formals)
{
tree a = actuals, f = formals;
int i;
rtx *argvec;
/* Check that number and types of actuals are compatible
with the formals. This is not always true in valid C code.
Also check that no formal needs to be addressable
and that all formals are scalars. */
/* Also count the args. */
for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
{
if (!lang_hooks.types_compatible_p (TREE_TYPE (TREE_VALUE (a)),
TREE_TYPE (f)))
return 0;
if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
return 0;
}
if (a != 0 || f != 0)
return 0;
/* Compute all the actuals. */
argvec = alloca (i * sizeof (rtx));
for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
/* Find which actual values refer to current values of previous formals.
Copy each of them now, before any formal is changed. */
for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
{
int copy = 0;
int j;
for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
{
copy = 1;
break;
}
if (copy)
argvec[i] = copy_to_reg (argvec[i]);
}
/* Store the values of the actuals into the formals. */
for (f = formals, a = actuals, i = 0; f;
f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
{
if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
emit_move_insn (DECL_RTL (f), argvec[i]);
else
{
rtx tmp = argvec[i];
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
promote_mode(TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
&unsignedp, 0);
if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
{
tmp = gen_reg_rtx (DECL_MODE (f));
convert_move (tmp, argvec[i], unsignedp);
}
convert_move (DECL_RTL (f), tmp, unsignedp);
}
}
free_temp_slots ();
return 1;
}
/* Generate the RTL code for entering a binding contour.
The variables are declared one by one, by calls to `expand_decl'.
......
......@@ -72,7 +72,7 @@ Boston, MA 02111-1307, USA. */
omit the accumulator.
There are three cases how the function may exit. The first one is
handled in adjust_return_value, the later two in adjust_accumulator_values
handled in adjust_return_value, the other two in adjust_accumulator_values
(the second case is actually a special case of the third one and we
present it separately just for clarity):
......
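The accumulator cases mentioned in the comment above can be pictured with a hand-written C sketch of what the tree-level pass produces conceptually (not literal pass output):

/* Not a tail call as written: the multiplication happens after the
   recursive call returns.  */
long
fact (long n)
{
  if (n <= 1)
    return 1;
  return n * fact (n - 1);
}

/* Accumulator form: the pending multiplications are carried in ACC,
   so the recursion becomes a loop over the same frame.  */
long
fact_acc (long n)
{
  long acc = 1;
  while (n > 1)
    {
      acc *= n;
      n--;
    }
  return acc;
}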
......@@ -3399,7 +3399,6 @@ extern void expand_stack_alloc (tree, tree);
extern rtx expand_stack_save (void);
extern void expand_stack_restore (tree);
extern void expand_return (tree);
extern int optimize_tail_recursion (tree, rtx);
extern void expand_start_bindings_and_block (int, tree);
#define expand_start_bindings(flags) \
expand_start_bindings_and_block(flags, NULL_TREE)
......