Commit e41d82f5 authored by Richard Henderson, committed by Richard Henderson

re PR tree-optimization/20610 (Real by complex multiplications perform unnecessary operations)

        PR tree-opt/20610
        * tree.h (DECL_COMPLEX_GIMPLE_REG_P): New.
        (struct tree_decl): Add gimple_reg_flag.
        * integrate.c (copy_decl_for_inlining): Copy it.
        * gimplify.c (internal_get_tmp_var): Set it.
        (gimplify_bind_expr): Likewise.
        (gimplify_function_tree): Likewise.
        (gimplify_modify_expr_complex_part): New.
        (gimplify_modify_expr): Use it.
        * tree-gimple.c (is_gimple_reg_type): Allow complex.
        (is_gimple_reg): Allow complex with DECL_COMPLEX_GIMPLE_REG_P set.

        * tree-complex.c (complex_lattice_t): New.
        (complex_lattice_values, complex_variable_components): New.
        (some_nonzerop, find_lattice_value, is_complex_reg,
        init_parameter_lattice_values, init_dont_simulate_again,
        complex_visit_stmt, complex_visit_phi, create_components,
        update_complex_components, update_parameter_components,
        update_phi_components, update_all_vops, expand_complex_move): New.
        (extract_component): Handle INDIRECT_REF, COMPONENT_REF, ARRAY_REF,
        SSA_NAME.
        (update_complex_assignment): Use update_complex_components;
        handle updates of return_expr properly.
        (expand_complex_addition): Use complex lattice values.
        (expand_complex_multiplication): Likewise.
        (expand_complex_division): Likewise.
        (expand_complex_libcall): Use update_complex_components.
        (expand_complex_comparison): Use update_stmt.
        (expand_complex_operations_1): Use expand_complex_move, retrieve
        lattice values.
        (tree_lower_complex): Compute lattice values.
        (tree_lower_complex_O0): Duplicate from tree_lower_complex.
        (pass_lower_complex_O0): Rename from pass_lower_complex.
        (pass_lower_complex, gate_no_optimization): New.
        * tree-optimize.c (init_tree_optimization_passes): Update for
        complex pass changes.
        * tree-pass.h (pass_lower_complex_O0): Declare.

From-SVN: r100793
parent 31920d83
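For context, PR 20610 concerns mixed real/complex arithmetic. A minimal, hypothetical C example of the kind of code this change targets (not taken from the PR itself):

/* The operand 'x' is promoted to _Complex double with a zero imaginary
   part.  With the lattice tracking added below, the lowering pass can
   see that this operand is ONLY_REAL and skip the multiplications and
   additions that involve the zero imaginary component.  */
_Complex double
scale (double x, _Complex double z)
{
  return x * z;
}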
gimplify.c:
@@ -470,6 +470,9 @@ internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
t = lookup_tmp_var (val, is_formal);
if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
mod = build (MODIFY_EXPR, TREE_TYPE (t), t, val);
if (EXPR_HAS_LOCATION (val))
@@ -856,7 +859,18 @@ gimplify_bind_expr (tree *expr_p, tree temp, tree *pre_p)
/* Mark variables seen in this bind expr. */
for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
{
DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
/* Preliminarily mark non-addressed complex variables as eligible
for promotion to gimple registers. We'll transform their uses
as we find them. */
if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
&& !TREE_THIS_VOLATILE (t)
&& (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
&& !needs_to_live_in_memory (t))
DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
}
gimple_push_bind_expr (bind_expr);
gimplify_ctxp->save_stack = false;
@@ -3009,6 +3023,45 @@ gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
return ret;
}
/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
DECL_COMPLEX_GIMPLE_REG_P set. */
static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
{
enum tree_code code, ocode;
tree lhs, rhs, new_rhs, other, realpart, imagpart;
lhs = TREE_OPERAND (*expr_p, 0);
rhs = TREE_OPERAND (*expr_p, 1);
code = TREE_CODE (lhs);
lhs = TREE_OPERAND (lhs, 0);
ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
other = build1 (ocode, TREE_TYPE (rhs), lhs);
other = get_formal_tmp_var (other, pre_p);
realpart = code == REALPART_EXPR ? rhs : other;
imagpart = code == REALPART_EXPR ? other : rhs;
if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
else
new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
TREE_OPERAND (*expr_p, 0) = lhs;
TREE_OPERAND (*expr_p, 1) = new_rhs;
if (want_value)
{
append_to_statement_list (*expr_p, pre_p);
*expr_p = rhs;
}
return GS_ALL_DONE;
}
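/* A sketch of the rewrite above (hypothetical GIMPLE, not from the patch):
   for a variable z with DECL_COMPLEX_GIMPLE_REG_P set, the partial store
       __real__ z = r;
   becomes roughly
       tmp = __imag__ z;
       z = COMPLEX_EXPR <r, tmp>;
   so every assignment to z is a killing definition of the whole value.  */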
/* Gimplify the MODIFY_EXPR node pointed by EXPR_P.
modify_expr
@@ -3084,6 +3137,14 @@ gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
}
}
/* Transform partial stores to non-addressable complex variables into
total stores. This allows us to use real instead of virtual operands
for these variables, which improves optimization. */
if ((TREE_CODE (*to_p) == REALPART_EXPR
|| TREE_CODE (*to_p) == IMAGPART_EXPR)
&& is_gimple_reg (TREE_OPERAND (*to_p, 0)))
return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
{
/* If we've somehow already got an SSA_NAME on the LHS, then
@@ -4668,7 +4729,7 @@ gimplify_body (tree *body_p, tree fndecl, bool do_parms)
void
gimplify_function_tree (tree fndecl)
{
tree oldfn;
tree oldfn, parm, ret;
oldfn = current_function_decl;
current_function_decl = fndecl;
@@ -4676,6 +4737,22 @@ gimplify_function_tree (tree fndecl)
if (cfun == NULL)
allocate_struct_function (fndecl);
for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
{
/* Preliminarily mark non-addressed complex variables as eligible
for promotion to gimple registers. We'll transform their uses
as we find them. */
if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
&& !TREE_THIS_VOLATILE (parm)
&& !needs_to_live_in_memory (parm))
DECL_COMPLEX_GIMPLE_REG_P (parm) = 1;
}
ret = DECL_RESULT (fndecl);
if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
&& !needs_to_live_in_memory (ret))
DECL_COMPLEX_GIMPLE_REG_P (ret) = 1;
gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
/* If we're instrumenting function entry/exit, then prepend the call to
integrate.c:
@@ -107,6 +107,7 @@ copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
TREE_READONLY (copy) = TREE_READONLY (decl);
TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
}
else
{
tree-complex.c:
@@ -21,54 +21,424 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "expr.h"
#include "insn-codes.h"
#include "diagnostic.h"
#include "optabs.h"
#include "machmode.h"
#include "langhooks.h"
#include "real.h"
#include "flags.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "flags.h"
#include "ggc.h"
#include "tree-ssa-propagate.h"
/* For each complex ssa name, a lattice value. We're interested in finding
out whether a complex number is degenerate in some way, having only real
or only complex parts. */
typedef enum
{
UNINITIALIZED = 0,
ONLY_REAL = 1,
ONLY_IMAG = 2,
VARYING = 3
} complex_lattice_t;
#define PAIR(a, b) ((a) << 2 | (b))
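/* For example, with ONLY_REAL == 1 and ONLY_IMAG == 2,
   PAIR (ONLY_REAL, ONLY_IMAG) == (1 << 2 | 2) == 6; the PAIR of the two
   operand lattice values is used as a switch label in the expanders
   below.  */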
DEF_VEC_I(complex_lattice_t);
DEF_VEC_ALLOC_I(complex_lattice_t, heap);
static VEC(complex_lattice_t, heap) *complex_lattice_values;
/* For each complex variable, a pair of variables for the components. */
static VEC(tree, heap) *complex_variable_components;
/* Return true if T is not a zero constant. In the case of real values,
we're only interested in +0.0. */
static int
some_nonzerop (tree t)
{
int zerop = false;
if (TREE_CODE (t) == REAL_CST)
zerop = REAL_VALUES_IDENTICAL (TREE_REAL_CST (t), dconst0);
else if (TREE_CODE (t) == INTEGER_CST)
zerop = integer_zerop (t);
return !zerop;
}
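/* Note that some_nonzerop treats -0.0 as nonzero: REAL_VALUES_IDENTICAL
   only matches +0.0, so a component that may be a negative zero is
   (presumably deliberately) not treated as absent.  */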
/* Compute a lattice value from T. It may be a gimple_val, or, as a
special exception, a COMPLEX_EXPR. */
static complex_lattice_t
find_lattice_value (tree t)
{
tree real, imag;
int r, i;
complex_lattice_t ret;
switch (TREE_CODE (t))
{
case SSA_NAME:
return VEC_index (complex_lattice_t, complex_lattice_values,
SSA_NAME_VERSION (t));
case COMPLEX_CST:
real = TREE_REALPART (t);
imag = TREE_IMAGPART (t);
break;
case COMPLEX_EXPR:
real = TREE_OPERAND (t, 0);
imag = TREE_OPERAND (t, 1);
break;
default:
gcc_unreachable ();
}
r = some_nonzerop (real);
i = some_nonzerop (imag);
ret = r*ONLY_REAL + i*ONLY_IMAG;
/* ??? On occasion we could do better than mapping 0+0i to real, but we
certainly don't want to leave it UNINITIALIZED, which eventually gets
mapped to VARYING. */
if (ret == UNINITIALIZED)
ret = ONLY_REAL;
return ret;
}
/* Determine if LHS is something for which we're interested in seeing
simulation results. */
static bool
is_complex_reg (tree lhs)
{
return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
}
/* Mark the incoming parameters to the function as VARYING. */
static void
init_parameter_lattice_values (void)
{
tree parm;
for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = TREE_CHAIN (parm))
if (is_complex_reg (parm) && var_ann (parm) != NULL)
{
tree ssa_name = default_def (parm);
VEC_replace (complex_lattice_t, complex_lattice_values,
SSA_NAME_VERSION (ssa_name), VARYING);
}
}
/* Initialize DONT_SIMULATE_AGAIN for each stmt and phi. Return false if
we found no statements we want to simulate, and thus there's nothing for
the entire pass to do. */
static bool
init_dont_simulate_again (void)
{
basic_block bb;
block_stmt_iterator bsi;
tree phi;
bool saw_a_complex_value = false;
FOR_EACH_BB (bb)
{
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
DONT_SIMULATE_AGAIN (phi) = !is_complex_reg (PHI_RESULT (phi));
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
tree stmt = bsi_stmt (bsi);
bool dsa = true;
if (TREE_CODE (stmt) == MODIFY_EXPR
&& is_complex_reg (TREE_OPERAND (stmt, 0)))
{
dsa = false;
saw_a_complex_value = true;
}
DONT_SIMULATE_AGAIN (stmt) = dsa;
}
}
return saw_a_complex_value;
}
/* Evaluate statement STMT against the complex lattice defined above. */
static enum ssa_prop_result
complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
tree *result_p)
{
complex_lattice_t new_l, old_l, op1_l, op2_l;
unsigned int ver;
tree lhs, rhs;
/* These conditions should be satisfied due to the initial filter
set up in init_dont_simulate_again. */
gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);
lhs = TREE_OPERAND (stmt, 0);
rhs = TREE_OPERAND (stmt, 1);
gcc_assert (TREE_CODE (lhs) == SSA_NAME);
gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
*result_p = lhs;
ver = SSA_NAME_VERSION (lhs);
old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
switch (TREE_CODE (rhs))
{
case SSA_NAME:
case COMPLEX_EXPR:
case COMPLEX_CST:
new_l = find_lattice_value (rhs);
break;
case PLUS_EXPR:
case MINUS_EXPR:
op1_l = find_lattice_value (TREE_OPERAND (rhs, 0));
op2_l = find_lattice_value (TREE_OPERAND (rhs, 1));
/* We've set up the lattice values such that IOR neatly
models addition. */
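/* E.g. ONLY_REAL | ONLY_REAL == ONLY_REAL, ONLY_IMAG | ONLY_IMAG == ONLY_IMAG,
   ONLY_REAL | ONLY_IMAG == VARYING, and UNINITIALIZED is the identity.  */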
new_l = op1_l | op2_l;
break;
case MULT_EXPR:
case RDIV_EXPR:
case TRUNC_DIV_EXPR:
case CEIL_DIV_EXPR:
case FLOOR_DIV_EXPR:
case ROUND_DIV_EXPR:
op1_l = find_lattice_value (TREE_OPERAND (rhs, 0));
op2_l = find_lattice_value (TREE_OPERAND (rhs, 1));
/* Obviously, if either varies, so does the result. */
if (op1_l == VARYING || op2_l == VARYING)
new_l = VARYING;
/* Don't prematurely promote variables if we've not yet seen
their inputs. */
else if (op1_l == UNINITIALIZED)
new_l = op2_l;
else if (op2_l == UNINITIALIZED)
new_l = op1_l;
else
{
/* At this point both numbers have only one component. If the
numbers are of opposite kind, the result is imaginary,
otherwise the result is real. The add/subtract translates
the real/imag from/to 0/1; the ^ performs the comparison. */
new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;
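/* Worked example: ONLY_REAL * ONLY_IMAG yields
   ((1 - 1) ^ (2 - 1)) + 1 == 2 == ONLY_IMAG, while ONLY_IMAG * ONLY_IMAG
   yields ((2 - 1) ^ (2 - 1)) + 1 == 1 == ONLY_REAL.  */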
/* Don't allow the lattice value to flip-flop indefinitely. */
new_l |= old_l;
}
break;
case NEGATE_EXPR:
case CONJ_EXPR:
new_l = find_lattice_value (TREE_OPERAND (rhs, 0));
break;
default:
new_l = VARYING;
break;
}
/* If nothing changed this round, let the propagator know. */
if (new_l == old_l)
return SSA_PROP_NOT_INTERESTING;
VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}
/* Evaluate a PHI node against the complex lattice defined above. */
static enum ssa_prop_result
complex_visit_phi (tree phi)
{
complex_lattice_t new_l, old_l;
unsigned int ver;
tree lhs;
int i;
lhs = PHI_RESULT (phi);
/* This condition should be satisfied due to the initial filter
set up in init_dont_simulate_again. */
gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
/* We've set up the lattice values such that IOR neatly models PHI meet. */
new_l = UNINITIALIZED;
for (i = PHI_NUM_ARGS (phi) - 1; i >= 0; --i)
new_l |= find_lattice_value (PHI_ARG_DEF (phi, i));
ver = SSA_NAME_VERSION (lhs);
old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
if (new_l == old_l)
return SSA_PROP_NOT_INTERESTING;
VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}
/* For each referenced complex gimple register, set up a pair of registers
to hold the components of the complex value. */
static void
create_components (void)
{
size_t k, n;
n = num_referenced_vars;
complex_variable_components = VEC_alloc (tree, heap, 2*n);
VEC_safe_grow (tree, heap, complex_variable_components, 2*n);
for (k = 0; k < n; ++k)
{
tree var = referenced_var (k);
tree r = NULL, i = NULL;
if (var != NULL
&& TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
&& is_gimple_reg (var))
{
tree inner_type = TREE_TYPE (TREE_TYPE (var));
r = make_rename_temp (inner_type, "CR");
i = make_rename_temp (inner_type, "CI");
DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (var);
DECL_SOURCE_LOCATION (i) = DECL_SOURCE_LOCATION (var);
DECL_ARTIFICIAL (r) = 1;
DECL_ARTIFICIAL (i) = 1;
if (DECL_NAME (var) && !DECL_IGNORED_P (var))
{
const char *name = IDENTIFIER_POINTER (DECL_NAME (var));
DECL_NAME (r) = get_identifier (ACONCAT ((name, "$real", NULL)));
DECL_NAME (i) = get_identifier (ACONCAT ((name, "$imag", NULL)));
SET_DECL_DEBUG_EXPR (r, build1 (REALPART_EXPR, inner_type, var));
SET_DECL_DEBUG_EXPR (i, build1 (IMAGPART_EXPR, inner_type, var));
DECL_DEBUG_EXPR_IS_FROM (r) = 1;
DECL_DEBUG_EXPR_IS_FROM (i) = 1;
DECL_IGNORED_P (r) = 0;
DECL_IGNORED_P (i) = 0;
TREE_NO_WARNING (r) = TREE_NO_WARNING (var);
TREE_NO_WARNING (i) = TREE_NO_WARNING (var);
}
else
{
DECL_IGNORED_P (r) = 1;
DECL_IGNORED_P (i) = 1;
TREE_NO_WARNING (r) = 1;
TREE_NO_WARNING (i) = 1;
}
}
VEC_replace (tree, complex_variable_components, 2*k, r);
VEC_replace (tree, complex_variable_components, 2*k + 1, i);
}
}
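/* Example of the naming scheme above: a user variable "z" of type
   _Complex double gets two artificial double temporaries named "z$real"
   and "z$imag", whose DECL_DEBUG_EXPRs point back at REALPART_EXPR <z>
   and IMAGPART_EXPR <z> so debug information can still describe z.  */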
/* Extract the real or imaginary part of a complex variable or constant.
Make sure that it's a proper gimple_val and gimplify it if not.
Emit any new code before BSI. */
static tree
extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p)
extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
bool gimple_p)
{
tree ret, inner_type;
inner_type = TREE_TYPE (TREE_TYPE (t));
switch (TREE_CODE (t))
{
case COMPLEX_CST:
ret = (imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t));
break;
return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);
case COMPLEX_EXPR:
ret = TREE_OPERAND (t, imagpart_p);
break;
return TREE_OPERAND (t, imagpart_p);
case VAR_DECL:
case PARM_DECL:
ret = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
inner_type, t);
break;
case INDIRECT_REF:
case COMPONENT_REF:
case ARRAY_REF:
{
tree inner_type = TREE_TYPE (TREE_TYPE (t));
t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
inner_type, unshare_expr (t));
if (gimple_p)
t = gimplify_val (bsi, inner_type, t);
return t;
}
case SSA_NAME:
{
tree def = SSA_NAME_DEF_STMT (t);
if (TREE_CODE (def) == MODIFY_EXPR)
{
def = TREE_OPERAND (def, 1);
if (TREE_CODE (def) == COMPLEX_CST)
return imagpart_p ? TREE_IMAGPART (def) : TREE_REALPART (def);
if (TREE_CODE (def) == COMPLEX_EXPR)
{
def = TREE_OPERAND (def, imagpart_p);
if (TREE_CONSTANT (def))
return def;
}
}
return VEC_index (tree, complex_variable_components,
var_ann (SSA_NAME_VAR (t))->uid * 2 + imagpart_p);
}
default:
gcc_unreachable ();
}
}
/* Update the complex components of the ssa name on the lhs of STMT. */
return gimplify_val (bsi, inner_type, ret);
static void
update_complex_components (block_stmt_iterator *bsi, tree stmt, tree r, tree i)
{
unsigned int uid = var_ann (SSA_NAME_VAR (TREE_OPERAND (stmt, 0)))->uid;
tree v, x;
v = VEC_index (tree, complex_variable_components, 2*uid);
x = build2 (MODIFY_EXPR, TREE_TYPE (v), v, r);
SET_EXPR_LOCUS (x, EXPR_LOCUS (stmt));
TREE_BLOCK (x) = TREE_BLOCK (stmt);
bsi_insert_after (bsi, x, BSI_NEW_STMT);
v = VEC_index (tree, complex_variable_components, 2*uid + 1);
x = build2 (MODIFY_EXPR, TREE_TYPE (v), v, i);
SET_EXPR_LOCUS (x, EXPR_LOCUS (stmt));
TREE_BLOCK (x) = TREE_BLOCK (stmt);
bsi_insert_after (bsi, x, BSI_NEW_STMT);
}
/* Update an assignment to a complex variable in place. */
@@ -76,15 +446,161 @@ extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p)
static void
update_complex_assignment (block_stmt_iterator *bsi, tree r, tree i)
{
tree stmt = bsi_stmt (*bsi);
tree stmt, mod;
tree type;
mod = stmt = bsi_stmt (*bsi);
if (TREE_CODE (stmt) == RETURN_EXPR)
stmt = TREE_OPERAND (stmt, 0);
mod = TREE_OPERAND (mod, 0);
else if (in_ssa_p)
update_complex_components (bsi, stmt, r, i);
type = TREE_TYPE (TREE_OPERAND (stmt, 1));
TREE_OPERAND (stmt, 1) = build (COMPLEX_EXPR, type, r, i);
mark_stmt_modified (stmt);
type = TREE_TYPE (TREE_OPERAND (mod, 1));
TREE_OPERAND (mod, 1) = build (COMPLEX_EXPR, type, r, i);
update_stmt (stmt);
}
/* Generate code at the entry point of the function to initialize the
component variables for a complex parameter. */
static void
update_parameter_components (void)
{
edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
tree parm;
for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = TREE_CHAIN (parm))
{
tree type = TREE_TYPE (parm);
tree ssa_name, x, y;
unsigned int uid;
if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
continue;
type = TREE_TYPE (type);
ssa_name = default_def (parm);
uid = var_ann (parm)->uid;
x = VEC_index (tree, complex_variable_components, 2*uid);
y = build1 (REALPART_EXPR, type, ssa_name);
bsi_insert_on_edge (entry_edge, build2 (MODIFY_EXPR, type, x, y));
x = VEC_index (tree, complex_variable_components, 2*uid + 1);
y = build1 (IMAGPART_EXPR, type, ssa_name);
bsi_insert_on_edge (entry_edge, build2 (MODIFY_EXPR, type, x, y));
}
}
/* Generate code to set the component variables of a complex variable
to match the PHI statements in block BB. */
static void
update_phi_components (basic_block bb)
{
tree phi;
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
if (is_complex_reg (PHI_RESULT (phi)))
{
unsigned int i, n, uid;
tree real, imag, type;
uid = var_ann (SSA_NAME_VAR (PHI_RESULT (phi)))->uid;
real = VEC_index (tree, complex_variable_components, 2*uid);
imag = VEC_index (tree, complex_variable_components, 2*uid + 1);
type = TREE_TYPE (real);
for (i = 0, n = PHI_NUM_ARGS (phi); i < n; ++i)
{
edge e = PHI_ARG_EDGE (phi, i);
tree arg = PHI_ARG_DEF (phi, i);
tree x;
x = extract_component (NULL, arg, 0, false);
if (real != x)
bsi_insert_on_edge (e, build2 (MODIFY_EXPR, type, real, x));
x = extract_component (NULL, arg, 1, false);
if (imag != x)
bsi_insert_on_edge (e, build2 (MODIFY_EXPR, type, imag, x));
}
}
}
/* Mark each virtual op in STMT for ssa update. */
static void
update_all_vops (tree stmt)
{
ssa_op_iter iter;
tree sym;
FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
{
if (TREE_CODE (sym) == SSA_NAME)
sym = SSA_NAME_VAR (sym);
mark_sym_for_renaming (sym);
}
}
/* Expand a complex move to scalars. */
static void
expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
tree lhs, tree rhs)
{
tree inner_type = TREE_TYPE (type);
tree r, i;
if (TREE_CODE (lhs) == SSA_NAME)
{
if (TREE_CODE (rhs) == CALL_EXPR || TREE_SIDE_EFFECTS (rhs))
{
r = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
i = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
update_complex_components (bsi, stmt, r, i);
}
else
{
update_all_vops (bsi_stmt (*bsi));
r = extract_component (bsi, rhs, 0, true);
i = extract_component (bsi, rhs, 1, true);
update_complex_assignment (bsi, r, i);
}
}
else if (TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
{
tree x;
r = extract_component (bsi, rhs, 0, false);
i = extract_component (bsi, rhs, 1, false);
x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
x = build2 (MODIFY_EXPR, inner_type, x, r);
bsi_insert_before (bsi, x, BSI_SAME_STMT);
if (stmt == bsi_stmt (*bsi))
{
x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
TREE_OPERAND (stmt, 0) = x;
TREE_OPERAND (stmt, 1) = i;
TREE_TYPE (stmt) = inner_type;
}
else
{
x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
x = build2 (MODIFY_EXPR, inner_type, x, i);
bsi_insert_before (bsi, x, BSI_SAME_STMT);
stmt = bsi_stmt (*bsi);
gcc_assert (TREE_CODE (stmt) == RETURN_EXPR);
TREE_OPERAND (stmt, 0) = lhs;
}
update_all_vops (stmt);
update_stmt (stmt);
}
}
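/* Rough illustration of expand_complex_move above (hypothetical GIMPLE):
   a store through memory such as
       *p = z;
   where z is a complex gimple register becomes two component stores,
       REALPART_EXPR <*p> = z$real;
       IMAGPART_EXPR <*p> = z$imag;
   with the existing statement reused for the second store.  */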
/* Expand complex addition to scalars:
@@ -95,12 +611,72 @@ update_complex_assignment (block_stmt_iterator *bsi, tree r, tree i)
static void
expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
tree ar, tree ai, tree br, tree bi,
enum tree_code code)
enum tree_code code,
complex_lattice_t al, complex_lattice_t bl)
{
tree rr, ri;
rr = gimplify_build2 (bsi, code, inner_type, ar, br);
ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
switch (PAIR (al, bl))
{
case PAIR (ONLY_REAL, ONLY_REAL):
rr = gimplify_build2 (bsi, code, inner_type, ar, br);
ri = ai;
break;
case PAIR (ONLY_REAL, ONLY_IMAG):
rr = ar;
if (code == MINUS_EXPR)
ri = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, bi);
else
ri = bi;
break;
case PAIR (ONLY_IMAG, ONLY_REAL):
if (code == MINUS_EXPR)
rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ar, br);
else
rr = br;
ri = ai;
break;
case PAIR (ONLY_IMAG, ONLY_IMAG):
rr = ar;
ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
break;
case PAIR (VARYING, ONLY_REAL):
rr = gimplify_build2 (bsi, code, inner_type, ar, br);
ri = ai;
break;
case PAIR (VARYING, ONLY_IMAG):
rr = ar;
ri = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, bi);
break;
case PAIR (ONLY_REAL, VARYING):
if (code == MINUS_EXPR)
goto general;
rr = gimplify_build2 (bsi, code, inner_type, ar, br);
ri = bi;
break;
case PAIR (ONLY_IMAG, VARYING):
if (code == MINUS_EXPR)
goto general;
rr = br;
ri = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, bi);
break;
case PAIR (VARYING, VARYING):
general:
rr = gimplify_build2 (bsi, code, inner_type, ar, br);
ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
break;
default:
gcc_unreachable ();
}
update_complex_assignment (bsi, rr, ri);
}
@@ -137,6 +713,14 @@ expand_complex_libcall (block_stmt_iterator *bsi, tree ar, tree ai,
TREE_OPERAND (stmt, 1)
= build3 (CALL_EXPR, type, build_fold_addr_expr (fn), args, NULL);
update_stmt (stmt);
if (in_ssa_p)
{
tree lhs = TREE_OPERAND (stmt, 0);
update_complex_components (bsi, stmt,
build1 (REALPART_EXPR, type, lhs),
build1 (IMAGPART_EXPR, type, lhs));
}
}
/* Expand complex multiplication to scalars:
@@ -145,29 +729,81 @@ expand_complex_libcall (block_stmt_iterator *bsi, tree ar, tree ai,
static void
expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
tree ar, tree ai, tree br, tree bi)
tree ar, tree ai, tree br, tree bi,
complex_lattice_t al, complex_lattice_t bl)
{
tree t1, t2, t3, t4, rr, ri;
tree rr, ri;
if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
if (al < bl)
{
expand_complex_libcall (bsi, ar, ai, br, bi, MULT_EXPR);
return;
complex_lattice_t tl;
rr = ar, ar = br, br = rr;
ri = ai, ai = bi, bi = ri;
tl = al, al = bl, bl = tl;
}
t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
t3 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
switch (PAIR (al, bl))
{
case PAIR (ONLY_REAL, ONLY_REAL):
rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
ri = ai;
break;
/* Avoid expanding redundant multiplication for the common
case of squaring a complex number. */
if (ar == br && ai == bi)
t4 = t3;
else
t4 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
case PAIR (ONLY_IMAG, ONLY_REAL):
rr = ar;
if (TREE_CODE (ai) == REAL_CST
&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
ri = br;
else
ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
break;
rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, t2);
ri = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t3, t4);
case PAIR (ONLY_IMAG, ONLY_IMAG):
rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, rr);
ri = ar;
break;
case PAIR (VARYING, ONLY_REAL):
rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
break;
case PAIR (VARYING, ONLY_IMAG):
rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, rr);
ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
break;
case PAIR (VARYING, VARYING):
if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
{
expand_complex_libcall (bsi, ar, ai, br, bi, MULT_EXPR);
return;
}
else
{
tree t1, t2, t3, t4;
t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
t3 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
/* Avoid expanding redundant multiplication for the common
case of squaring a complex number. */
if (ar == br && ai == bi)
t4 = t3;
else
t4 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, t2);
ri = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t3, t4);
}
break;
default:
gcc_unreachable ();
}
update_complex_assignment (bsi, rr, ri);
}
@@ -349,31 +985,77 @@ expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
static void
expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
tree ar, tree ai, tree br, tree bi,
enum tree_code code)
enum tree_code code,
complex_lattice_t al, complex_lattice_t bl)
{
switch (flag_complex_method)
tree rr, ri;
switch (PAIR (al, bl))
{
case 0:
/* straightforward implementation of complex divide acceptable. */
expand_complex_div_straight (bsi, inner_type, ar, ai, br, bi, code);
case PAIR (ONLY_REAL, ONLY_REAL):
rr = gimplify_build2 (bsi, code, inner_type, ar, br);
ri = ai;
break;
case 2:
if (SCALAR_FLOAT_TYPE_P (inner_type))
{
expand_complex_libcall (bsi, ar, ai, br, bi, code);
return;
}
/* FALLTHRU */
case PAIR (ONLY_REAL, ONLY_IMAG):
rr = ai;
ri = gimplify_build2 (bsi, code, inner_type, ar, bi);
ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ri);
break;
case PAIR (ONLY_IMAG, ONLY_REAL):
rr = ar;
ri = gimplify_build2 (bsi, code, inner_type, ai, br);
break;
case 1:
/* wide ranges of inputs must work for complex divide. */
expand_complex_div_wide (bsi, inner_type, ar, ai, br, bi, code);
case PAIR (ONLY_IMAG, ONLY_IMAG):
rr = gimplify_build2 (bsi, code, inner_type, ai, bi);
ri = ar;
break;
case PAIR (VARYING, ONLY_REAL):
rr = gimplify_build2 (bsi, code, inner_type, ar, br);
ri = gimplify_build2 (bsi, code, inner_type, ai, br);
break;
case PAIR (VARYING, ONLY_IMAG):
rr = gimplify_build2 (bsi, code, inner_type, ai, bi);
ri = gimplify_build2 (bsi, code, inner_type, ar, bi);
ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ri);
case PAIR (ONLY_REAL, VARYING):
case PAIR (ONLY_IMAG, VARYING):
case PAIR (VARYING, VARYING):
switch (flag_complex_method)
{
case 0:
/* straightforward implementation of complex divide acceptable. */
expand_complex_div_straight (bsi, inner_type, ar, ai, br, bi, code);
break;
case 2:
if (SCALAR_FLOAT_TYPE_P (inner_type))
{
expand_complex_libcall (bsi, ar, ai, br, bi, code);
break;
}
/* FALLTHRU */
case 1:
/* wide ranges of inputs must work for complex divide. */
expand_complex_div_wide (bsi, inner_type, ar, ai, br, bi, code);
break;
default:
gcc_unreachable ();
}
return;
default:
gcc_unreachable ();
}
update_complex_assignment (bsi, rr, ri);
}
/* Expand complex negation to scalars:
@@ -439,7 +1121,7 @@ expand_complex_comparison (block_stmt_iterator *bsi, tree ar, tree ai,
gcc_unreachable ();
}
mark_stmt_modified (stmt);
update_stmt (stmt);
}
/* Process one statement. If we identify a complex operation, expand it. */
@@ -450,6 +1132,7 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
tree stmt = bsi_stmt (*bsi);
tree rhs, type, inner_type;
tree ac, ar, ai, bc, br, bi;
complex_lattice_t al, bl;
enum tree_code code;
switch (TREE_CODE (stmt))
@@ -503,14 +1186,30 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
break;
default:
{
tree lhs = TREE_OPERAND (stmt, 0);
tree rhs = TREE_OPERAND (stmt, 1);
if (TREE_CODE (type) == COMPLEX_TYPE)
expand_complex_move (bsi, stmt, type, lhs, rhs);
else if ((TREE_CODE (rhs) == REALPART_EXPR
|| TREE_CODE (rhs) == IMAGPART_EXPR)
&& TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
{
TREE_OPERAND (stmt, 1)
= extract_component (bsi, TREE_OPERAND (rhs, 0),
TREE_CODE (rhs) == IMAGPART_EXPR, false);
update_stmt (stmt);
}
}
return;
}
/* Extract the components of the two complex values. Make sure and
handle the common case of the same value used twice specially. */
ac = TREE_OPERAND (rhs, 0);
ar = extract_component (bsi, ac, 0);
ai = extract_component (bsi, ac, 1);
ar = extract_component (bsi, ac, 0, true);
ai = extract_component (bsi, ac, 1, true);
if (TREE_CODE_CLASS (code) == tcc_unary)
bc = br = bi = NULL;
@@ -521,20 +1220,40 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
br = ar, bi = ai;
else
{
br = extract_component (bsi, bc, 0);
bi = extract_component (bsi, bc, 1);
br = extract_component (bsi, bc, 0, true);
bi = extract_component (bsi, bc, 1, true);
}
}
if (in_ssa_p)
{
al = find_lattice_value (ac);
if (al == UNINITIALIZED)
al = VARYING;
if (TREE_CODE_CLASS (code) == tcc_unary)
bl = UNINITIALIZED;
else if (ac == bc)
bl = al;
else
{
bl = find_lattice_value (bc);
if (bl == UNINITIALIZED)
bl = VARYING;
}
}
else
al = bl = VARYING;
switch (code)
{
case PLUS_EXPR:
case MINUS_EXPR:
expand_complex_addition (bsi, inner_type, ar, ai, br, bi, code);
expand_complex_addition (bsi, inner_type, ar, ai, br, bi, code, al, bl);
break;
case MULT_EXPR:
expand_complex_multiplication (bsi, inner_type, ar, ai, br, bi);
expand_complex_multiplication (bsi, inner_type, ar, ai, br, bi, al, bl);
break;
case TRUNC_DIV_EXPR:
@@ -542,7 +1261,7 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
case FLOOR_DIV_EXPR:
case ROUND_DIV_EXPR:
case RDIV_EXPR:
expand_complex_division (bsi, inner_type, ar, ai, br, bi, code);
expand_complex_division (bsi, inner_type, ar, ai, br, bi, code, al, bl);
break;
case NEGATE_EXPR:
@@ -561,25 +1280,48 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
default:
gcc_unreachable ();
}
update_stmt_if_modified (stmt);
}
/* Entry point for complex operation lowering during optimization. */
static void
tree_lower_complex (void)
{
int old_last_basic_block = last_basic_block;
int old_last_basic_block;
block_stmt_iterator bsi;
basic_block bb;
if (!init_dont_simulate_again ())
return;
complex_lattice_values = VEC_alloc (complex_lattice_t, heap, num_ssa_names);
VEC_safe_grow (complex_lattice_t, heap,
complex_lattice_values, num_ssa_names);
memset (VEC_address (complex_lattice_t, complex_lattice_values), 0,
num_ssa_names * sizeof(complex_lattice_t));
init_parameter_lattice_values ();
ssa_propagate (complex_visit_stmt, complex_visit_phi);
create_components ();
update_parameter_components ();
old_last_basic_block = last_basic_block;
FOR_EACH_BB (bb)
{
if (bb->index >= old_last_basic_block)
continue;
update_phi_components (bb);
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
expand_complex_operations_1 (&bsi);
}
}
bsi_commit_edge_inserts ();
VEC_free (tree, heap, complex_variable_components);
VEC_free (complex_lattice_t, heap, complex_lattice_values);
}
struct tree_opt_pass pass_lower_complex =
{
@@ -590,6 +1332,50 @@ struct tree_opt_pass pass_lower_complex =
NULL, /* next */
0, /* static_pass_number */
0, /* tv_id */
PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_ggc_collect
| TODO_update_ssa
| TODO_verify_stmts, /* todo_flags_finish */
0 /* letter */
};
/* Entry point for complex operation lowering without optimization. */
static void
tree_lower_complex_O0 (void)
{
int old_last_basic_block = last_basic_block;
block_stmt_iterator bsi;
basic_block bb;
FOR_EACH_BB (bb)
{
if (bb->index >= old_last_basic_block)
continue;
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
expand_complex_operations_1 (&bsi);
}
}
static bool
gate_no_optimization (void)
{
return optimize == 0;
}
struct tree_opt_pass pass_lower_complex_O0 =
{
"cplxlower0", /* name */
gate_no_optimization, /* gate */
tree_lower_complex_O0, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
0, /* tv_id */
PROP_cfg, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
tree-gimple.c:
@@ -260,12 +260,10 @@ is_gimple_id (tree t)
bool
is_gimple_reg_type (tree type)
{
return (!AGGREGATE_TYPE_P (type)
&& TREE_CODE (type) != COMPLEX_TYPE);
return !AGGREGATE_TYPE_P (type);
}
/* Return true if T is a scalar register variable. */
/* Return true if T is a non-aggregate register variable. */
bool
is_gimple_reg (tree t)
@@ -275,6 +273,7 @@ is_gimple_reg (tree t)
if (!is_gimple_variable (t))
return false;
if (!is_gimple_reg_type (TREE_TYPE (t)))
return false;
@@ -301,6 +300,11 @@ is_gimple_reg (tree t)
if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
return false;
/* Complex values must have been put into ssa form. That is, no
assignments to the individual components. */
if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
return DECL_COMPLEX_GIMPLE_REG_P (t);
return true;
}
tree-optimize.c:
@@ -375,7 +375,7 @@ init_tree_optimization_passes (void)
NEXT_PASS (pass_lower_cf);
NEXT_PASS (pass_lower_eh);
NEXT_PASS (pass_build_cfg);
NEXT_PASS (pass_lower_complex);
NEXT_PASS (pass_lower_complex_O0);
NEXT_PASS (pass_lower_vector);
NEXT_PASS (pass_warn_function_return);
NEXT_PASS (pass_tree_profile);
@@ -417,6 +417,7 @@ init_tree_optimization_passes (void)
NEXT_PASS (pass_profile);
NEXT_PASS (pass_ch);
NEXT_PASS (pass_stdarg);
NEXT_PASS (pass_lower_complex);
NEXT_PASS (pass_sra);
/* FIXME: SRA may generate arbitrary gimple code, exposing new
aliased and call-clobbered variables. As mentioned below,
tree-pass.h:
@@ -192,6 +192,7 @@ extern struct tree_opt_pass pass_may_alias;
extern struct tree_opt_pass pass_split_crit_edges;
extern struct tree_opt_pass pass_pre;
extern struct tree_opt_pass pass_profile;
extern struct tree_opt_pass pass_lower_complex_O0;
extern struct tree_opt_pass pass_lower_complex;
extern struct tree_opt_pass pass_lower_vector;
extern struct tree_opt_pass pass_lower_vector_ssa;
tree.h:
@@ -2353,6 +2353,14 @@ extern void decl_value_expr_insert (tree, tree);
#define DECL_GIMPLE_FORMAL_TEMP_P(DECL) \
DECL_CHECK (DECL)->decl.gimple_formal_temp
/* For function local variables of COMPLEX type, indicates that the
variable is not aliased, and that all modifications to the variable
have been adjusted so that they are killing assignments. Thus the
variable may now be treated as a GIMPLE register, and use real
instead of virtual ops in SSA form. */
#define DECL_COMPLEX_GIMPLE_REG_P(DECL) \
DECL_CHECK (DECL)->decl.gimple_reg_flag
/* Enumerate visibility settings. */
#ifndef SYMBOL_VISIBILITY_DEFINED
#define SYMBOL_VISIBILITY_DEFINED
@@ -2424,8 +2432,9 @@ struct tree_decl GTY(())
unsigned returns_twice_flag : 1;
unsigned seen_in_bind_expr : 1;
unsigned novops_flag : 1;
unsigned has_value_expr:1;
/* 8 unused bits. */
unsigned has_value_expr : 1;
unsigned gimple_reg_flag : 1;
/* 7 unused bits. */
union tree_decl_u1 {
/* In a FUNCTION_DECL for which DECL_BUILT_IN holds, this is