Commit 2b8a92de by Kazu Hirata Committed by Kazu Hirata

builtins.c, [...]: Fix comment typos.

	* builtins.c, cgraph.c, cgraphunit.c, final.c, fold-const.c:
	Fix comment typos.

From-SVN: r81291
parent 8fd58397
2004-04-29 Kazu Hirata <kazu@cs.umass.edu>
* builtins.c, cgraph.c, cgraphunit.c, final.c, fold-const.c:
Fix comment typos.
2004-04-29 Douglas B Rupp <rupp@gnat.com> 2004-04-29 Douglas B Rupp <rupp@gnat.com>
* gcc.c (DELETE_IF_ORDINARY): New macro default definition. * gcc.c (DELETE_IF_ORDINARY): New macro default definition.
......
...@@ -5134,7 +5134,7 @@ expand_builtin_fork_or_exec (tree fn, tree arglist, rtx target, int ignore) ...@@ -5134,7 +5134,7 @@ expand_builtin_fork_or_exec (tree fn, tree arglist, rtx target, int ignore)
/* Otherwise call the wrapper. This should be equivalent for the rest of /* Otherwise call the wrapper. This should be equivalent for the rest of
compiler, so the code does not diverge, and the wrapper may run the compiler, so the code does not diverge, and the wrapper may run the
code neccesary for keeping the profiling sane. */ code necessary for keeping the profiling sane. */
switch (DECL_FUNCTION_CODE (fn)) switch (DECL_FUNCTION_CODE (fn))
{ {
......
...@@ -68,8 +68,8 @@ The callgraph: ...@@ -68,8 +68,8 @@ The callgraph:
caller. caller.
Each edge has "inline_failed" field. When the field is set to NULL, Each edge has "inline_failed" field. When the field is set to NULL,
the call will be inlined. When it is non-NULL it contains an reason the call will be inlined. When it is non-NULL it contains a reason
why inlining wasn't performaned. why inlining wasn't performed.
The varpool data structure: The varpool data structure:
...@@ -99,8 +99,8 @@ The varpool data structure: ...@@ -99,8 +99,8 @@ The varpool data structure:
/* Hash table used to convert declarations into nodes. */ /* Hash table used to convert declarations into nodes. */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash; static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
/* We destructivly update callgraph during inlining and thus we need to /* We destructively update callgraph during inlining and thus we need to
keep information on whether inlining happent separately. */ keep information on whether inlining happened separately. */
htab_t cgraph_inline_hash; htab_t cgraph_inline_hash;
/* The linked list of cgraph nodes. */ /* The linked list of cgraph nodes. */
...@@ -209,7 +209,7 @@ cgraph_edge (struct cgraph_node *node, tree call_expr) ...@@ -209,7 +209,7 @@ cgraph_edge (struct cgraph_node *node, tree call_expr)
/* This loop may turn out to be performance problem. In such case adding /* This loop may turn out to be performance problem. In such case adding
hashtables into call nodes with very many edges is probably best hashtables into call nodes with very many edges is probably best
sollution. It is not good idea to add pointer into CALL_EXPR itself solution. It is not good idea to add pointer into CALL_EXPR itself
because we want to make possible having multiple cgraph nodes representing because we want to make possible having multiple cgraph nodes representing
different clones of the same body before the body is actually cloned. */ different clones of the same body before the body is actually cloned. */
for (e = node->callees; e; e= e->next_callee) for (e = node->callees; e; e= e->next_callee)
......
...@@ -923,7 +923,7 @@ cgraph_remove_unreachable_nodes (void) ...@@ -923,7 +923,7 @@ cgraph_remove_unreachable_nodes (void)
eliminated eliminated
Reachable extern inline functions we sometimes inlined will be turned into Reachable extern inline functions we sometimes inlined will be turned into
unanalyzed nodes so they look like for true extern functions to the rest unanalyzed nodes so they look like for true extern functions to the rest
of code. Body of such functions is relased via remove_node once the of code. Body of such functions is released via remove_node once the
inline clones are eliminated. */ inline clones are eliminated. */
for (node = cgraph_nodes; node; node = node->next) for (node = cgraph_nodes; node; node = node->next)
{ {
...@@ -1047,7 +1047,7 @@ cgraph_clone_inlined_nodes (struct cgraph_edge *e, bool duplicate) ...@@ -1047,7 +1047,7 @@ cgraph_clone_inlined_nodes (struct cgraph_edge *e, bool duplicate)
else else
e->callee->global.inlined_to = e->caller; e->callee->global.inlined_to = e->caller;
/* Recursivly clone all bodies. */ /* Recursively clone all bodies. */
for (e = e->callee->callees; e; e = e->next_callee) for (e = e->callee->callees; e; e = e->next_callee)
if (!e->inline_failed) if (!e->inline_failed)
cgraph_clone_inlined_nodes (e, duplicate); cgraph_clone_inlined_nodes (e, duplicate);
...@@ -1110,7 +1110,7 @@ cgraph_mark_inline (struct cgraph_edge *edge) ...@@ -1110,7 +1110,7 @@ cgraph_mark_inline (struct cgraph_edge *edge)
struct cgraph_edge *e, *next; struct cgraph_edge *e, *next;
int times = 0; int times = 0;
/* Look for all calls, mark them inline and clone recursivly /* Look for all calls, mark them inline and clone recursively
all inlined functions. */ all inlined functions. */
for (e = what->callers; e; e = next) for (e = what->callers; e; e = next)
{ {
...@@ -1182,7 +1182,7 @@ cgraph_default_inline_p (struct cgraph_node *n) ...@@ -1182,7 +1182,7 @@ cgraph_default_inline_p (struct cgraph_node *n)
/* Return true when inlining WHAT would create recursive inlining. /* Return true when inlining WHAT would create recursive inlining.
We call recursive inlining all cases where same function appears more than We call recursive inlining all cases where same function appears more than
once in the single recusion nest path in the inline graph. */ once in the single recursion nest path in the inline graph. */
static bool static bool
cgraph_recursive_inlining_p (struct cgraph_node *to, cgraph_recursive_inlining_p (struct cgraph_node *to,
......
...@@ -1713,7 +1713,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED, ...@@ -1713,7 +1713,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
case NOTE_INSN_BASIC_BLOCK: case NOTE_INSN_BASIC_BLOCK:
/* If we are performing the optimization that paritions /* If we are performing the optimization that partitions
basic blocks into hot & cold sections of the .o file, basic blocks into hot & cold sections of the .o file,
then at the start of each new basic block, before then at the start of each new basic block, before
beginning to write code for the basic block, we need to beginning to write code for the basic block, we need to
......
...@@ -4806,7 +4806,7 @@ count_cond (tree expr, int lim) ...@@ -4806,7 +4806,7 @@ count_cond (tree expr, int lim)
expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
COND is the first argument to CODE; otherwise (as in the example COND is the first argument to CODE; otherwise (as in the example
given here), it is the second argument. TYPE is the type of the given here), it is the second argument. TYPE is the type of the
original expression. Return NULL_TREE if no simplication is original expression. Return NULL_TREE if no simplification is
possible. */ possible. */
static tree static tree
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment