Commit 4ca79136 by Richard Henderson Committed by Richard Henderson

expr.c (TARGET_MEM_FUNCTIONS): Transform to boolean.

        * expr.c (TARGET_MEM_FUNCTIONS): Transform to boolean.
        (emit_block_move): Split out subroutines.
        (emit_block_move_via_movstr): New.
        (emit_block_move_via_libcall): New.  Emit bcopy via normal call also.
        (emit_block_move_libcall_fn): New.  Construct function prototype for
        bcopy as well.
        (clear_storage): Split out subroutines.
        (clear_storage_via_clrstr): New.
        (clear_storage_via_libcall): New. Emit bzero as a normal call also.
        (clear_storage_libcall_fn): New.  Construct function prototype for
        bzero as well.
        (emit_push_insn): Use emit_block_move.
        (expand_assignment): Booleanize TARGET_MEM_FUNCTIONS.
        (store_constructor): Likewise.

From-SVN: r56464
parent 81eec873
2002-08-20 Richard Henderson <rth@redhat.com>
* expr.c (TARGET_MEM_FUNCTIONS): Transform to boolean.
(emit_block_move): Split out subroutines.
(emit_block_move_via_movstr): New.
(emit_block_move_via_libcall): New. Emit bcopy via normal call also.
(emit_block_move_libcall_fn): New. Construct function prototype for
bcopy as well.
(clear_storage): Split out subroutines.
(clear_storage_via_clrstr): New.
(clear_storage_via_libcall): New. Emit bzero as a normal call also.
(clear_storage_libcall_fn): New. Construct function prototype for
bzero as well.
(emit_push_insn): Use emit_block_move.
(expand_assignment): Booleanize TARGET_MEM_FUNCTIONS.
(store_constructor): Likewise.
2002-08-19  Ziemowit Laski  <zlaski@apple.com>
* objc/objc-act.c (building_objc_message_expr): Rename to
......
...@@ -73,6 +73,15 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA ...@@ -73,6 +73,15 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#define CASE_VECTOR_PC_RELATIVE 0 #define CASE_VECTOR_PC_RELATIVE 0
#endif #endif
/* Convert defined/undefined to boolean. */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE /* If this is nonzero, we do not bother generating VOLATILE
around volatile memory references, and we are willing to around volatile memory references, and we are willing to
output indirect addresses. If cse is to follow, we reject output indirect addresses. If cse is to follow, we reject
...@@ -123,6 +132,9 @@ static unsigned HOST_WIDE_INT move_by_pieces_ninsns ...@@ -123,6 +132,9 @@ static unsigned HOST_WIDE_INT move_by_pieces_ninsns
unsigned int)); unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode, static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
struct move_by_pieces *)); struct move_by_pieces *));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT, static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
enum machine_mode)); enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT, static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
...@@ -132,6 +144,9 @@ static void store_by_pieces_1 PARAMS ((struct store_by_pieces *, ...@@ -132,6 +144,9 @@ static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...), static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
enum machine_mode, enum machine_mode,
struct store_by_pieces *)); struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx)); static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx)); static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree)); static int is_zeros_p PARAMS ((tree));
...@@ -1655,33 +1670,26 @@ move_by_pieces_1 (genfun, mode, data) ...@@ -1655,33 +1670,26 @@ move_by_pieces_1 (genfun, mode, data)
} }
} }
/* Emit code to move a block Y to a block X. /* Emit code to move a block Y to a block X. This may be done with
This may be done with string-move instructions, string-move instructions, with multiple scalar move instructions,
with multiple scalar move instructions, or with a library call. or with a library call.
Both X and Y must be MEM rtx's (perhaps inside VOLATILE) Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
with mode BLKmode.
SIZE is an rtx that says how long they are. SIZE is an rtx that says how long they are.
ALIGN is the maximum alignment we can assume they have. ALIGN is the maximum alignment we can assume they have.
Return the address of the new block, if memcpy is called and returns it, Return the address of the new block, if memcpy is called and returns it,
0 otherwise. */ 0 otherwise. */
static GTY(()) tree block_move_fn;
rtx rtx
emit_block_move (x, y, size) emit_block_move (x, y, size)
rtx x, y; rtx x, y, size;
rtx size;
{ {
rtx retval = 0; rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
tree call_expr, arg_list;
#endif
unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y)); unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
if (GET_MODE (x) != BLKmode) if (GET_MODE (x) != BLKmode)
abort (); abort ();
if (GET_MODE (y) != BLKmode) if (GET_MODE (y) != BLKmode)
abort (); abort ();
...@@ -1698,8 +1706,22 @@ emit_block_move (x, y, size) ...@@ -1698,8 +1706,22 @@ emit_block_move (x, y, size)
if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align)) if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
move_by_pieces (x, y, INTVAL (size), align); move_by_pieces (x, y, INTVAL (size), align);
else if (emit_block_move_via_movstr (x, y, size, align))
;
else else
{ retval = emit_block_move_via_libcall (x, y, size);
return retval;
}
/* A subroutine of emit_block_move. Expand a movstr pattern;
return true if successful. */
static bool
emit_block_move_via_movstr (x, y, size, align)
rtx x, y, size;
unsigned int align;
{
/* Try the most limited insn first, because there's no point /* Try the most limited insn first, because there's no point
including more than one in the machine description unless including more than one in the machine description unless
the more limited one has some advantage. */ the more limited one has some advantage. */
...@@ -1741,12 +1763,17 @@ emit_block_move (x, y, size) ...@@ -1741,12 +1763,17 @@ emit_block_move (x, y, size)
if (pred != 0 && ! (*pred) (op2, mode)) if (pred != 0 && ! (*pred) (op2, mode))
op2 = copy_to_mode_reg (mode, op2); op2 = copy_to_mode_reg (mode, op2);
/* ??? When called via emit_block_move_for_call, it'd be
nice if there were some way to inform the backend, so
that it doesn't fail the expansion because it thinks
emitting the libcall would be more efficient. */
pat = GEN_FCN ((int) code) (x, y, op2, opalign); pat = GEN_FCN ((int) code) (x, y, op2, opalign);
if (pat) if (pat)
{ {
emit_insn (pat); emit_insn (pat);
volatile_ok = 0; volatile_ok = 0;
return 0; return true;
} }
else else
delete_insns_since (last); delete_insns_since (last);
...@@ -1754,8 +1781,21 @@ emit_block_move (x, y, size) ...@@ -1754,8 +1781,21 @@ emit_block_move (x, y, size)
} }
volatile_ok = 0; volatile_ok = 0;
return false;
}
/* X, Y, or SIZE may have been passed through protect_from_queue. /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
Return the return value from memcpy, 0 otherwise. */
static rtx
emit_block_move_via_libcall (dst, src, size)
rtx dst, src, size;
{
tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
enum machine_mode size_mode;
rtx retval;
/* DST, SRC, or SIZE may have been passed through protect_from_queue.
It is unsafe to save the value generated by protect_from_queue It is unsafe to save the value generated by protect_from_queue
and reuse it later. Consider what happens if emit_queue is and reuse it later. Consider what happens if emit_queue is
...@@ -1775,86 +1815,109 @@ emit_block_move (x, y, size) ...@@ -1775,86 +1815,109 @@ emit_block_move (x, y, size)
we may need to have library calls call emit_queue in the future we may need to have library calls call emit_queue in the future
since failing to do so could cause problems for targets which since failing to do so could cause problems for targets which
define SMALL_REGISTER_CLASSES and pass arguments in registers. */ define SMALL_REGISTER_CLASSES and pass arguments in registers. */
x = copy_to_mode_reg (Pmode, XEXP (x, 0));
y = copy_to_mode_reg (Pmode, XEXP (y, 0));
#ifdef TARGET_MEM_FUNCTIONS dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
size = copy_to_mode_reg (TYPE_MODE (sizetype), size); src = copy_to_mode_reg (Pmode, XEXP (src, 0));
#else
size = convert_to_mode (TYPE_MODE (integer_type_node), size, if (TARGET_MEM_FUNCTIONS)
TREE_UNSIGNED (integer_type_node)); size_mode = TYPE_MODE (sizetype);
size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size); else
#endif size_mode = TYPE_MODE (unsigned_type_node);
size = convert_to_mode (size_mode, size, 1);
size = copy_to_mode_reg (size_mode, size);
#ifdef TARGET_MEM_FUNCTIONS
/* It is incorrect to use the libcall calling conventions to call /* It is incorrect to use the libcall calling conventions to call
memcpy in this context. memcpy in this context. This could be a user call to memcpy and
the user may wish to examine the return value from memcpy. For
targets where libcalls and normal calls have different conventions
for returning pointers, we could end up generating incorrect code.
This could be a user call to memcpy and the user may wish to For convenience, we generate the call to bcopy this way as well. */
examine the return value from memcpy.
For targets where libcalls and normal calls have different conventions dst_tree = make_tree (ptr_type_node, dst);
for returning pointers, we could end up generating incorrect code. src_tree = make_tree (ptr_type_node, src);
if (TARGET_MEM_FUNCTIONS)
size_tree = make_tree (sizetype, size);
else
size_tree = make_tree (unsigned_type_node, size);
So instead of using a libcall sequence we build up a suitable fn = emit_block_move_libcall_fn (true);
CALL_EXPR and expand the call in the normal fashion. */ arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
if (block_move_fn == NULL_TREE) if (TARGET_MEM_FUNCTIONS)
{ {
tree fntype; arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
/* This was copied from except.c, I don't know if all this is }
necessary in this context or not. */ else
block_move_fn = get_identifier ("memcpy"); {
fntype = build_pointer_type (void_type_node); arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
fntype = build_function_type (fntype, NULL_TREE); arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
block_move_fn = build_decl (FUNCTION_DECL, block_move_fn, fntype); }
DECL_EXTERNAL (block_move_fn) = 1;
TREE_PUBLIC (block_move_fn) = 1;
DECL_ARTIFICIAL (block_move_fn) = 1;
TREE_NOTHROW (block_move_fn) = 1;
make_decl_rtl (block_move_fn, NULL);
assemble_external (block_move_fn);
}
/* We need to make an argument list for the function call.
memcpy has three arguments, the first two are void * addresses and
the last is a size_t byte count for the copy. */
arg_list
= build_tree_list (NULL_TREE,
make_tree (build_pointer_type (void_type_node), x));
TREE_CHAIN (arg_list)
= build_tree_list (NULL_TREE,
make_tree (build_pointer_type (void_type_node), y));
TREE_CHAIN (TREE_CHAIN (arg_list))
= build_tree_list (NULL_TREE, make_tree (sizetype, size));
TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
/* Now we have to build up the CALL_EXPR itself. */ /* Now we have to build up the CALL_EXPR itself. */
call_expr = build1 (ADDR_EXPR, call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
build_pointer_type (TREE_TYPE (block_move_fn)), call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
block_move_fn);
call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_move_fn)),
call_expr, arg_list, NULL_TREE); call_expr, arg_list, NULL_TREE);
TREE_SIDE_EFFECTS (call_expr) = 1; TREE_SIDE_EFFECTS (call_expr) = 1;
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
emit_library_call (bcopy_libfunc, LCT_NORMAL,
VOIDmode, 3, y, Pmode, x, Pmode,
convert_to_mode (TYPE_MODE (integer_type_node), size,
TREE_UNSIGNED (integer_type_node)),
TYPE_MODE (integer_type_node));
#endif
/* If we are initializing a readonly value, show the above call /* If we are initializing a readonly value, show the above call
clobbered it. Otherwise, a load from it may erroneously be hoisted clobbered it. Otherwise, a load from it may erroneously be
from a loop. */ hoisted from a loop. */
if (RTX_UNCHANGING_P (x)) if (RTX_UNCHANGING_P (dst))
emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of emit_block_move_via_libcall. Create the tree node
for the function we use for block copies. The first time FOR_CALL
is true, we call assemble_external. */
static GTY(()) tree block_move_fn;
static tree
emit_block_move_libcall_fn (for_call)
int for_call;
{
static bool emitted_extern;
tree fn = block_move_fn, args;
if (!fn)
{
if (TARGET_MEM_FUNCTIONS)
{
fn = get_identifier ("memcpy");
args = build_function_type_list (ptr_type_node, ptr_type_node,
const_ptr_type_node, sizetype,
NULL_TREE);
}
else
{
fn = get_identifier ("bcopy");
args = build_function_type_list (void_type_node, const_ptr_type_node,
ptr_type_node, unsigned_type_node,
NULL_TREE);
} }
return retval; fn = build_decl (FUNCTION_DECL, fn, args);
DECL_EXTERNAL (fn) = 1;
TREE_PUBLIC (fn) = 1;
DECL_ARTIFICIAL (fn) = 1;
TREE_NOTHROW (fn) = 1;
block_move_fn = fn;
}
if (for_call && !emitted_extern)
{
emitted_extern = true;
make_decl_rtl (fn, NULL);
assemble_external (fn);
}
return fn;
} }
/* Copy all or part of a value X into registers starting at REGNO. /* Copy all or part of a value X into registers starting at REGNO.
...@@ -2624,15 +2687,11 @@ store_by_pieces_2 (genfun, mode, data) ...@@ -2624,15 +2687,11 @@ store_by_pieces_2 (genfun, mode, data)
/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
its length in bytes. */ its length in bytes. */
static GTY(()) tree block_clear_fn;
rtx rtx
clear_storage (object, size) clear_storage (object, size)
rtx object; rtx object;
rtx size; rtx size;
{ {
#ifdef TARGET_MEM_FUNCTIONS
tree call_expr, arg_list;
#endif
rtx retval = 0; rtx retval = 0;
unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object) unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
: GET_MODE_ALIGNMENT (GET_MODE (object))); : GET_MODE_ALIGNMENT (GET_MODE (object)));
...@@ -2641,7 +2700,7 @@ clear_storage (object, size) ...@@ -2641,7 +2700,7 @@ clear_storage (object, size)
just move a zero. Otherwise, do this a piece at a time. */ just move a zero. Otherwise, do this a piece at a time. */
if (GET_MODE (object) != BLKmode if (GET_MODE (object) != BLKmode
&& GET_CODE (size) == CONST_INT && GET_CODE (size) == CONST_INT
&& GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size)) && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
emit_move_insn (object, CONST0_RTX (GET_MODE (object))); emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
else else
{ {
...@@ -2651,8 +2710,23 @@ clear_storage (object, size) ...@@ -2651,8 +2710,23 @@ clear_storage (object, size)
if (GET_CODE (size) == CONST_INT if (GET_CODE (size) == CONST_INT
&& CLEAR_BY_PIECES_P (INTVAL (size), align)) && CLEAR_BY_PIECES_P (INTVAL (size), align))
clear_by_pieces (object, INTVAL (size), align); clear_by_pieces (object, INTVAL (size), align);
else if (clear_storage_via_clrstr (object, size, align))
;
else else
{ retval = clear_storage_via_libcall (object, size);
}
return retval;
}
/* A subroutine of clear_storage. Expand a clrstr pattern;
return true if successful. */
static bool
clear_storage_via_clrstr (object, size, align)
rtx object, size;
unsigned int align;
{
/* Try the most limited insn first, because there's no point /* Try the most limited insn first, because there's no point
including more than one in the machine description unless including more than one in the machine description unless
the more limited one has some advantage. */ the more limited one has some advantage. */
...@@ -2693,13 +2767,27 @@ clear_storage (object, size) ...@@ -2693,13 +2767,27 @@ clear_storage (object, size)
if (pat) if (pat)
{ {
emit_insn (pat); emit_insn (pat);
return 0; return true;
} }
else else
delete_insns_since (last); delete_insns_since (last);
} }
} }
return false;
}
/* A subroutine of clear_storage. Expand a call to memset or bzero.
Return the return value of memset, 0 otherwise. */
static rtx
clear_storage_via_libcall (object, size)
rtx object, size;
{
tree call_expr, arg_list, fn, object_tree, size_tree;
enum machine_mode size_mode;
rtx retval;
/* OBJECT or SIZE may have been passed through protect_from_queue. /* OBJECT or SIZE may have been passed through protect_from_queue.
It is unsafe to save the value generated by protect_from_queue It is unsafe to save the value generated by protect_from_queue
...@@ -2720,88 +2808,99 @@ clear_storage (object, size) ...@@ -2720,88 +2808,99 @@ clear_storage (object, size)
we may need to have library calls call emit_queue in the future we may need to have library calls call emit_queue in the future
since failing to do so could cause problems for targets which since failing to do so could cause problems for targets which
define SMALL_REGISTER_CLASSES and pass arguments in registers. */ define SMALL_REGISTER_CLASSES and pass arguments in registers. */
object = copy_to_mode_reg (Pmode, XEXP (object, 0)); object = copy_to_mode_reg (Pmode, XEXP (object, 0));
#ifdef TARGET_MEM_FUNCTIONS if (TARGET_MEM_FUNCTIONS)
size = copy_to_mode_reg (TYPE_MODE (sizetype), size); size_mode = TYPE_MODE (sizetype);
#else else
size = convert_to_mode (TYPE_MODE (integer_type_node), size, size_mode = TYPE_MODE (unsigned_type_node);
TREE_UNSIGNED (integer_type_node)); size = convert_to_mode (size_mode, size, 1);
size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size); size = copy_to_mode_reg (size_mode, size);
#endif
#ifdef TARGET_MEM_FUNCTIONS
/* It is incorrect to use the libcall calling conventions to call /* It is incorrect to use the libcall calling conventions to call
memset in this context. memset in this context. This could be a user call to memset and
the user may wish to examine the return value from memset. For
This could be a user call to memset and the user may wish to targets where libcalls and normal calls have different conventions
examine the return value from memset. for returning pointers, we could end up generating incorrect code.
For targets where libcalls and normal calls have different For convenience, we generate the call to bzero this way as well. */
conventions for returning pointers, we could end up generating
incorrect code. object_tree = make_tree (ptr_type_node, object);
if (TARGET_MEM_FUNCTIONS)
So instead of using a libcall sequence we build up a suitable size_tree = make_tree (sizetype, size);
CALL_EXPR and expand the call in the normal fashion. */ else
if (block_clear_fn == NULL_TREE) size_tree = make_tree (unsigned_type_node, size);
{
tree fntype; fn = clear_storage_libcall_fn (true);
arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
/* This was copied from except.c, I don't know if all this is if (TARGET_MEM_FUNCTIONS)
necessary in this context or not. */ arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
block_clear_fn = get_identifier ("memset"); arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
fntype = build_pointer_type (void_type_node);
fntype = build_function_type (fntype, NULL_TREE);
block_clear_fn = build_decl (FUNCTION_DECL, block_clear_fn,
fntype);
DECL_EXTERNAL (block_clear_fn) = 1;
TREE_PUBLIC (block_clear_fn) = 1;
DECL_ARTIFICIAL (block_clear_fn) = 1;
TREE_NOTHROW (block_clear_fn) = 1;
make_decl_rtl (block_clear_fn, NULL);
assemble_external (block_clear_fn);
}
/* We need to make an argument list for the function call.
memset has three arguments, the first is a void * addresses, the
second an integer with the initialization value, the last is a
size_t byte count for the copy. */
arg_list
= build_tree_list (NULL_TREE,
make_tree (build_pointer_type (void_type_node),
object));
TREE_CHAIN (arg_list)
= build_tree_list (NULL_TREE,
make_tree (integer_type_node, const0_rtx));
TREE_CHAIN (TREE_CHAIN (arg_list))
= build_tree_list (NULL_TREE, make_tree (sizetype, size));
TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
/* Now we have to build up the CALL_EXPR itself. */ /* Now we have to build up the CALL_EXPR itself. */
call_expr = build1 (ADDR_EXPR, call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
build_pointer_type (TREE_TYPE (block_clear_fn)), call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
block_clear_fn);
call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_clear_fn)),
call_expr, arg_list, NULL_TREE); call_expr, arg_list, NULL_TREE);
TREE_SIDE_EFFECTS (call_expr) = 1; TREE_SIDE_EFFECTS (call_expr) = 1;
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
emit_library_call (bzero_libfunc, LCT_NORMAL,
VOIDmode, 2, object, Pmode, size,
TYPE_MODE (integer_type_node));
#endif
/* If we are initializing a readonly value, show the above call /* If we are initializing a readonly value, show the above call
clobbered it. Otherwise, a load from it may erroneously be clobbered it. Otherwise, a load from it may erroneously be
hoisted from a loop. */ hoisted from a loop. */
if (RTX_UNCHANGING_P (object)) if (RTX_UNCHANGING_P (object))
emit_insn (gen_rtx_CLOBBER (VOIDmode, object)); emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of clear_storage_via_libcall. Create the tree node
for the function we use for block clears. The first time FOR_CALL
is true, we call assemble_external. */
static GTY(()) tree block_clear_fn;
static tree
clear_storage_libcall_fn (for_call)
int for_call;
{
static bool emitted_extern;
tree fn = block_clear_fn, args;
if (!fn)
{
if (TARGET_MEM_FUNCTIONS)
{
fn = get_identifier ("memset");
args = build_function_type_list (ptr_type_node, ptr_type_node,
integer_type_node, sizetype,
NULL_TREE);
}
else
{
fn = get_identifier ("bzero");
args = build_function_type_list (void_type_node, ptr_type_node,
unsigned_type_node, NULL_TREE);
} }
fn = build_decl (FUNCTION_DECL, fn, args);
DECL_EXTERNAL (fn) = 1;
TREE_PUBLIC (fn) = 1;
DECL_ARTIFICIAL (fn) = 1;
TREE_NOTHROW (fn) = 1;
block_clear_fn = fn;
} }
return retval; if (for_call && !emitted_extern)
{
emitted_extern = true;
make_decl_rtl (fn, NULL);
assemble_external (fn);
}
return fn;
} }
/* Generate code to copy Y into X. /* Generate code to copy Y into X.
...@@ -3501,6 +3600,18 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, ...@@ -3501,6 +3600,18 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra,
args_addr, args_addr,
args_so_far), args_so_far),
skip)); skip));
if (!ACCUMULATE_OUTGOING_ARGS)
{
/* If the source is referenced relative to the stack pointer,
copy it to another register to stabilize it. We do not need
to do this if we know that we won't be changing sp. */
if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
|| reg_mentioned_p (virtual_outgoing_args_rtx, temp))
temp = copy_to_reg (temp);
}
target = gen_rtx_MEM (BLKmode, temp); target = gen_rtx_MEM (BLKmode, temp);
if (type != 0) if (type != 0)
...@@ -3515,86 +3626,12 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, ...@@ -3515,86 +3626,12 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra,
else else
set_mem_align (target, align); set_mem_align (target, align);
/* TEMP is the address of the block. Copy the data there. */
if (GET_CODE (size) == CONST_INT
&& MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
{
move_by_pieces (target, xinner, INTVAL (size), align);
goto ret;
}
else
{
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
enum machine_mode mode;
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
enum insn_code code = movstr_optab[(int) mode];
insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing
&& ((GET_CODE (size) == CONST_INT
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1)))
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
&& (!(pred = insn_data[(int) code].operand[0].predicate)
|| ((*pred) (target, BLKmode)))
&& (!(pred = insn_data[(int) code].operand[1].predicate)
|| ((*pred) (xinner, BLKmode)))
&& (!(pred = insn_data[(int) code].operand[3].predicate)
|| ((*pred) (opalign, VOIDmode))))
{
rtx op2 = convert_to_mode (mode, size, 1);
rtx last = get_last_insn ();
rtx pat;
pred = insn_data[(int) code].operand[2].predicate;
if (pred != 0 && ! (*pred) (op2, mode))
op2 = copy_to_mode_reg (mode, op2);
pat = GEN_FCN ((int) code) (target, xinner,
op2, opalign);
if (pat)
{
emit_insn (pat);
goto ret;
}
else
delete_insns_since (last);
}
}
}
if (!ACCUMULATE_OUTGOING_ARGS)
{
/* If the source is referenced relative to the stack pointer,
copy it to another register to stabilize it. We do not need
to do this if we know that we won't be changing sp. */
if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
|| reg_mentioned_p (virtual_outgoing_args_rtx, temp))
temp = copy_to_reg (temp);
}
/* Make inhibit_defer_pop nonzero around the library call /* Make inhibit_defer_pop nonzero around the library call
to force it to pop the bcopy-arguments right away. */ to force it to pop the bcopy-arguments right away. */
NO_DEFER_POP; NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
emit_library_call (memcpy_libfunc, LCT_NORMAL, emit_block_move (target, xinner, size);
VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
TYPE_MODE (sizetype));
#else
emit_library_call (bcopy_libfunc, LCT_NORMAL,
VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
convert_to_mode (TYPE_MODE (integer_type_node),
size,
TREE_UNSIGNED (integer_type_node)),
TYPE_MODE (integer_type_node));
#endif
OK_DEFER_POP; OK_DEFER_POP;
} }
} }
...@@ -3700,10 +3737,8 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, ...@@ -3700,10 +3737,8 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra,
emit_move_insn (dest, x); emit_move_insn (dest, x);
} }
} }
ret:
/* If part should go in registers, copy that part /* If part should go in registers, copy that part
into the appropriate registers. Do this now, at the end, into the appropriate registers. Do this now, at the end,
since mem-to-mem copies above may do function calls. */ since mem-to-mem copies above may do function calls. */
...@@ -3971,21 +4006,21 @@ expand_assignment (to, from, want_value, suggest_reg) ...@@ -3971,21 +4006,21 @@ expand_assignment (to, from, want_value, suggest_reg)
size = expr_size (from); size = expr_size (from);
from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0); from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
#ifdef TARGET_MEM_FUNCTIONS if (TARGET_MEM_FUNCTIONS)
emit_library_call (memmove_libfunc, LCT_NORMAL, emit_library_call (memmove_libfunc, LCT_NORMAL,
VOIDmode, 3, XEXP (to_rtx, 0), Pmode, VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
XEXP (from_rtx, 0), Pmode, XEXP (from_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype), convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)), size, TREE_UNSIGNED (sizetype)),
TYPE_MODE (sizetype)); TYPE_MODE (sizetype));
#else else
emit_library_call (bcopy_libfunc, LCT_NORMAL, emit_library_call (bcopy_libfunc, LCT_NORMAL,
VOIDmode, 3, XEXP (from_rtx, 0), Pmode, VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
XEXP (to_rtx, 0), Pmode, XEXP (to_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (integer_type_node), convert_to_mode (TYPE_MODE (integer_type_node),
size, TREE_UNSIGNED (integer_type_node)), size,
TREE_UNSIGNED (integer_type_node)),
TYPE_MODE (integer_type_node)); TYPE_MODE (integer_type_node));
#endif
preserve_temp_slots (to_rtx); preserve_temp_slots (to_rtx);
free_temp_slots (); free_temp_slots ();
...@@ -5048,9 +5083,7 @@ store_constructor (exp, target, cleared, size) ...@@ -5048,9 +5083,7 @@ store_constructor (exp, target, cleared, size)
tree startbit = TREE_PURPOSE (elt); tree startbit = TREE_PURPOSE (elt);
/* End of range of element, or element value. */ /* End of range of element, or element value. */
tree endbit = TREE_VALUE (elt); tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
HOST_WIDE_INT startb, endb; HOST_WIDE_INT startb, endb;
#endif
rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx; rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
bitlength_rtx = expand_expr (bitlength, bitlength_rtx = expand_expr (bitlength,
...@@ -5091,11 +5124,10 @@ store_constructor (exp, target, cleared, size) ...@@ -5091,11 +5124,10 @@ store_constructor (exp, target, cleared, size)
else else
abort (); abort ();
#ifdef TARGET_MEM_FUNCTIONS /* Optimization: If startbit and endbit are constants divisible
/* Optimization: If startbit and endbit are by BITS_PER_UNIT, call memset instead. */
constants divisible by BITS_PER_UNIT, if (TARGET_MEM_FUNCTIONS
call memset instead. */ && TREE_CODE (startbit) == INTEGER_CST
if (TREE_CODE (startbit) == INTEGER_CST
&& TREE_CODE (endbit) == INTEGER_CST && TREE_CODE (endbit) == INTEGER_CST
&& (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
&& (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0) && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
...@@ -5110,7 +5142,6 @@ store_constructor (exp, target, cleared, size) ...@@ -5110,7 +5142,6 @@ store_constructor (exp, target, cleared, size)
TYPE_MODE (sizetype)); TYPE_MODE (sizetype));
} }
else else
#endif
emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"), emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0), LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
Pmode, bitlength_rtx, TYPE_MODE (sizetype), Pmode, bitlength_rtx, TYPE_MODE (sizetype),
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment