Commit e576ddb5 by Kito Cheng, committed by Chung-Ju Wu

[NDS32] Add intrinsic functions for unaligned memory access.

gcc/
	* config/nds32/constants.md (unspec_element): New enum.
	* config/nds32/constraints.md (Umw): New constraint.
	* config/nds32/nds32-intrinsic.c: Add more builtin functions.
	* config/nds32/nds32-intrinsic.md: Likewise.
	* config/nds32/nds32-md-auxiliary.c (nds32_regno_to_enable4): New.
	(nds32_valid_smw_lwm_base_p): New.
	(nds32_output_smw_single_word): New.
	(nds32_output_lmw_single_word): New.
	(nds32_expand_unaligned_load): New.
	(nds32_expand_unaligned_store): New.
	* config/nds32/nds32-protos.h (nds32_valid_smw_lwm_base_p): Declare.
	(nds32_output_smw_single_word): Declare.
	(nds32_output_lmw_single_word): Declare.
	(nds32_expand_unaligned_load): Declare.
	(nds32_expand_unaligned_store): Declare.
	* config/nds32/nds32.h (nds32_builtins): Add NDS32_BUILTIN_UALOAD_HW,
	NDS32_BUILTIN_UALOAD_W, NDS32_BUILTIN_UALOAD_DW,
	NDS32_BUILTIN_UASTORE_HW, NDS32_BUILTIN_UASTORE_W,
	NDS32_BUILTIN_UASTORE_DW.
	* config/nds32/predicates.md (nds32_lmw_smw_base_operand): New
	predicate.

Co-Authored-By: Chung-Ju Wu <jasonwucj@gmail.com>
Co-Authored-By: Monk Chiang <sh.chiang04@gmail.com>

From-SVN: r258214
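For reference, each new builtin takes a pointer of the accessed width and, for the store forms, the value to write.  A minimal usage sketch, assuming the usual "__builtin_nds32_" prefix that the ADD_NDS32_BUILTIN macros attach to the names registered in nds32-intrinsic.c (the wrapper function names here are made up):

unsigned short
read_unaligned_hw (unsigned short *p)
{
  /* Halfword load from a possibly unaligned address.  */
  return __builtin_nds32_unaligned_load_hw (p);
}

void
write_unaligned_w (unsigned int *p, unsigned int v)
{
  /* Word store to a possibly unaligned address.  */
  __builtin_nds32_unaligned_store_w (p, v);
}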
parent 7a12ea32

gcc/ChangeLog
2018-03-03 Kito Cheng <kito.cheng@gmail.com>
Monk Chiang <sh.chiang04@gmail.com>
Chung-Ju Wu <jasonwucj@gmail.com>
* config/nds32/constants.md (unspec_element): New enum.
* config/nds32/constraints.md (Umw): New constraint.
* config/nds32/nds32-intrinsic.c: Add more builtin functions.
* config/nds32/nds32-intrinsic.md: Likewise.
* config/nds32/nds32-md-auxiliary.c (nds32_regno_to_enable4): New.
(nds32_valid_smw_lwm_base_p): New.
(nds32_output_smw_single_word): New.
(nds32_output_lmw_single_word): New.
(nds32_expand_unaligned_load): New.
(nds32_expand_unaligned_store): New.
* config/nds32/nds32-protos.h (nds32_valid_smw_lwm_base_p): Declare.
(nds32_output_smw_single_word): Declare.
(nds32_output_lmw_single_word): Declare.
(nds32_expand_unaligned_load): Declare.
(nds32_expand_unaligned_store): Declare.
* config/nds32/nds32.h (nds32_builtins): Add NDS32_BUILTIN_UALOAD_HW,
NDS32_BUILTIN_UALOAD_W, NDS32_BUILTIN_UALOAD_DW,
NDS32_BUILTIN_UASTORE_HW, NDS32_BUILTIN_UASTORE_W,
NDS32_BUILTIN_UASTORE_DW.
* config/nds32/predicates.md (nds32_lmw_smw_base_operand): New
predicate.
2018-03-03 Monk Chiang <sh.chiang04@gmail.com>
Kito Cheng <kito.cheng@gmail.com>
Chung-Ju Wu <jasonwucj@gmail.com>

gcc/config/nds32/constants.md
@@ -30,6 +30,16 @@
])
;; The unspec operation index.
(define_c_enum "unspec_element" [
UNSPEC_UALOAD_HW
UNSPEC_UALOAD_W
UNSPEC_UALOAD_DW
UNSPEC_UASTORE_HW
UNSPEC_UASTORE_W
UNSPEC_UASTORE_DW
])
;; The unspec_volatile operation index.
(define_c_enum "unspec_volatile_element" [
UNSPEC_VOLATILE_ISYNC

gcc/config/nds32/constraints.md
@@ -303,4 +303,10 @@
|| nds32_mem_format (op) == ADDRESS_FP_IMM7U)
&& (GET_MODE (op) == SImode)")))
(define_memory_constraint "Umw"
"Memory constraint for lwm/smw"
(and (match_code "mem")
(match_test "nds32_valid_smw_lwm_base_p (op)")))
;; ------------------------------------------------------------------------

gcc/config/nds32/nds32-intrinsic.c
@@ -241,6 +241,9 @@ struct builtin_description
/* Intrinsics that take just one argument.  */
static struct builtin_description bdesc_1arg[] =
{
NDS32_BUILTIN(unaligned_load_hw, "unaligned_load_hw", UALOAD_HW)
NDS32_BUILTIN(unaligned_loadsi, "unaligned_load_w", UALOAD_W)
NDS32_BUILTIN(unaligned_loaddi, "unaligned_load_dw", UALOAD_DW)
NDS32_NO_TARGET_BUILTIN(unspec_volatile_isync, "isync", ISYNC)
};

@@ -256,6 +259,10 @@ static struct builtin_description bdesc_2arg[] =
{
NDS32_NO_TARGET_BUILTIN(unspec_volatile_mtsr, "mtsr", MTSR)
NDS32_NO_TARGET_BUILTIN(unspec_volatile_mtusr, "mtusr", MTUSR)
NDS32_NO_TARGET_BUILTIN(unaligned_store_hw, "unaligned_store_hw", UASTORE_HW)
NDS32_NO_TARGET_BUILTIN(unaligned_storesi, "unaligned_store_w", UASTORE_W)
NDS32_NO_TARGET_BUILTIN(unaligned_storedi, "unaligned_store_dw", UASTORE_DW)
};

rtx

@@ -355,7 +362,9 @@ nds32_init_builtins_impl (void)
NDS32_BUILTIN_ ## CODE, BUILT_IN_MD, NULL, NULL_TREE)

/* The return and argument types can be found in tree.h.  */
tree ptr_ushort_type_node = build_pointer_type (short_unsigned_type_node);
tree ptr_uint_type_node = build_pointer_type (unsigned_type_node);
tree ptr_ulong_type_node = build_pointer_type (long_long_unsigned_type_node);

/* Cache.  */
ADD_NDS32_BUILTIN1 ("isync", void, ptr_uint, ISYNC);

@@ -370,4 +379,17 @@ nds32_init_builtins_impl (void)
/* Interrupt.  */
ADD_NDS32_BUILTIN0 ("setgie_en", void, SETGIE_EN);
ADD_NDS32_BUILTIN0 ("setgie_dis", void, SETGIE_DIS);
/* Unaligned Load/Store */
ADD_NDS32_BUILTIN1 ("unaligned_load_hw", short_unsigned, ptr_ushort,
UALOAD_HW);
ADD_NDS32_BUILTIN1 ("unaligned_load_w", unsigned, ptr_uint, UALOAD_W);
ADD_NDS32_BUILTIN1 ("unaligned_load_dw", long_long_unsigned, ptr_ulong,
UALOAD_DW);
ADD_NDS32_BUILTIN2 ("unaligned_store_hw", void, ptr_ushort, short_unsigned,
UASTORE_HW);
ADD_NDS32_BUILTIN2 ("unaligned_store_w", void, ptr_uint, unsigned, UASTORE_W);
ADD_NDS32_BUILTIN2 ("unaligned_store_dw", void, ptr_ulong, long_long_unsigned,
UASTORE_DW);
}

gcc/config/nds32/nds32-intrinsic.md
@@ -94,4 +94,150 @@
[(set_attr "type" "misc")]
)

;; Unaligned Load/Store
(define_expand "unaligned_load_hw"
[(set (match_operand:HI 0 "register_operand" "")
(unspec:HI [(mem:HI (match_operand:SI 1 "register_operand" ""))] UNSPEC_UALOAD_HW))]
""
{
operands[0] = simplify_gen_subreg (SImode, operands[0],
GET_MODE (operands[0]), 0);
if (TARGET_ISA_V3M)
{
nds32_expand_unaligned_load (operands, HImode);
}
else
{
emit_insn (gen_unaligned_load_w (operands[0],
gen_rtx_MEM (SImode, operands[1])));
if (WORDS_BIG_ENDIAN)
emit_insn (gen_lshrsi3 (operands[0], operands[0], GEN_INT(16)));
else
emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (0xffff)));
}
DONE;
})
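On targets other than V3M, the expander above performs one word-wide access through unaligned_load_w and then isolates the halfword.  A C-level sketch of that final mask/shift step (illustrative only, not the code GCC emits; "word" stands for the 32-bit value fetched from the halfword's address):

static unsigned int
extract_halfword (unsigned int word, int big_endian)
{
  /* Big endian: the halfword ends up in the upper 16 bits (the lshrsi3 above);
     little endian: it ends up in the lower 16 bits (the andsi3 above).  */
  return big_endian ? word >> 16 : word & 0xffff;
}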
(define_expand "unaligned_loadsi"
[(set (match_operand:SI 0 "register_operand" "=r")
(unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "r"))] UNSPEC_UALOAD_W))]
""
{
if (TARGET_ISA_V3M)
nds32_expand_unaligned_load (operands, SImode);
else
emit_insn (gen_unaligned_load_w (operands[0],
gen_rtx_MEM (SImode, (operands[1]))));
DONE;
})
(define_insn "unaligned_load_w"
[(set (match_operand:SI 0 "register_operand" "= r")
(unspec:SI [(match_operand:SI 1 "nds32_lmw_smw_base_operand" " Umw")] UNSPEC_UALOAD_W))]
""
{
return nds32_output_lmw_single_word (operands);
}
[(set_attr "type" "load")
(set_attr "length" "4")]
)
(define_expand "unaligned_loaddi"
[(set (match_operand:DI 0 "register_operand" "=r")
(unspec:DI [(mem:DI (match_operand:SI 1 "register_operand" "r"))] UNSPEC_UALOAD_DW))]
""
{
if (TARGET_ISA_V3M)
{
nds32_expand_unaligned_load (operands, DImode);
}
else
emit_insn (gen_unaligned_load_dw (operands[0], operands[1]));
DONE;
})
(define_insn "unaligned_load_dw"
[(set (match_operand:DI 0 "register_operand" "=r")
(unspec:DI [(mem:DI (match_operand:SI 1 "register_operand" "r"))] UNSPEC_UALOAD_DW))]
""
{
rtx otherops[3];
otherops[0] = gen_rtx_REG (SImode, REGNO (operands[0]));
otherops[1] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
otherops[2] = operands[1];
output_asm_insn ("lmw.bi\t%0, [%2], %1, 0", otherops);
return "";
}
[(set_attr "type" "load")
(set_attr "length" "4")]
)
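To illustrate the doubleword form: with the destination allocated to $r2/$r3 and the base address in $r4 (register numbers chosen arbitrarily), the output template above prints

  lmw.bi  $r2, [$r4], $r3, 0

i.e. the two consecutive hard registers REGNO (operands[0]) and REGNO (operands[0]) + 1 are loaded from the base address in a single instruction.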
(define_expand "unaligned_store_hw"
[(set (mem:SI (match_operand:SI 0 "register_operand" ""))
(unspec:HI [(match_operand:HI 1 "register_operand" "")] UNSPEC_UASTORE_HW))]
""
{
operands[1] = simplify_gen_subreg (SImode, operands[1],
GET_MODE (operands[1]), 0);
nds32_expand_unaligned_store (operands, HImode);
DONE;
})
(define_expand "unaligned_storesi"
[(set (mem:SI (match_operand:SI 0 "register_operand" "r"))
(unspec:SI [(match_operand:SI 1 "register_operand" "r")] UNSPEC_UASTORE_W))]
""
{
if (TARGET_ISA_V3M)
nds32_expand_unaligned_store (operands, SImode);
else
emit_insn (gen_unaligned_store_w (gen_rtx_MEM (SImode, operands[0]),
operands[1]));
DONE;
})
(define_insn "unaligned_store_w"
[(set (match_operand:SI 0 "nds32_lmw_smw_base_operand" "=Umw")
(unspec:SI [(match_operand:SI 1 "register_operand" " r")] UNSPEC_UASTORE_W))]
""
{
return nds32_output_smw_single_word (operands);
}
[(set_attr "type" "store")
(set_attr "length" "4")]
)
(define_expand "unaligned_storedi"
[(set (mem:DI (match_operand:SI 0 "register_operand" "r"))
(unspec:DI [(match_operand:DI 1 "register_operand" "r")] UNSPEC_UASTORE_DW))]
""
{
if (TARGET_ISA_V3M)
nds32_expand_unaligned_store (operands, DImode);
else
emit_insn (gen_unaligned_store_dw (operands[0], operands[1]));
DONE;
})
(define_insn "unaligned_store_dw"
[(set (mem:DI (match_operand:SI 0 "register_operand" "r"))
(unspec:DI [(match_operand:DI 1 "register_operand" "r")] UNSPEC_UASTORE_DW))]
""
{
rtx otherops[3];
otherops[0] = gen_rtx_REG (SImode, REGNO (operands[1]));
otherops[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
otherops[2] = operands[0];
output_asm_insn ("smw.bi\t%0, [%2], %1, 0", otherops);
return "";
}
[(set_attr "type" "store")
(set_attr "length" "4")]
)
;; ------------------------------------------------------------------------

gcc/config/nds32/nds32-md-auxiliary.c
@@ -36,9 +36,28 @@
#include "recog.h"
#include "output.h"
#include "tm-constrs.h"
#include "expr.h"

/* ------------------------------------------------------------------------ */
static int
nds32_regno_to_enable4 (unsigned regno)
{
switch (regno)
{
case 28: /* $r28/fp */
return 0x8;
case 29: /* $r29/gp */
return 0x4;
case 30: /* $r30/lp */
return 0x2;
case 31: /* $r31/sp */
return 0x1;
default:
gcc_unreachable ();
}
}
/* A helper function to return character based on byte size.  */
static char
nds32_byte_to_size (int byte)

@@ -849,4 +868,280 @@ nds32_output_casesi (rtx *operands)
return "jr\t%2";
}
/* Auxiliary functions for lwm/smw. */
bool
nds32_valid_smw_lwm_base_p (rtx op)
{
rtx base_addr;
if (!MEM_P (op))
return false;
base_addr = XEXP (op, 0);
if (REG_P (base_addr))
return true;
else
{
if (GET_CODE (base_addr) == POST_INC
&& REG_P (XEXP (base_addr, 0)))
return true;
}
return false;
}
/* ------------------------------------------------------------------------ */
const char *
nds32_output_smw_single_word (rtx *operands)
{
char buff[100];
unsigned regno;
int enable4;
bool update_base_p;
rtx base_addr = operands[0];
rtx base_reg;
rtx otherops[2];
if (REG_P (XEXP (base_addr, 0)))
{
update_base_p = false;
base_reg = XEXP (base_addr, 0);
}
else
{
update_base_p = true;
base_reg = XEXP (XEXP (base_addr, 0), 0);
}
const char *update_base = update_base_p ? "m" : "";
regno = REGNO (operands[1]);
otherops[0] = base_reg;
otherops[1] = operands[1];
if (regno >= 28)
{
enable4 = nds32_regno_to_enable4 (regno);
sprintf (buff, "smw.bi%s\t$sp, [%%0], $sp, %x", update_base, enable4);
}
else
{
sprintf (buff, "smw.bi%s\t%%1, [%%0], %%1", update_base);
}
output_asm_insn (buff, otherops);
return "";
}
const char *
nds32_output_lmw_single_word (rtx *operands)
{
char buff[100];
unsigned regno;
bool update_base_p;
int enable4;
rtx base_addr = operands[1];
rtx base_reg;
rtx otherops[2];
if (REG_P (XEXP (base_addr, 0)))
{
update_base_p = false;
base_reg = XEXP (base_addr, 0);
}
else
{
update_base_p = true;
base_reg = XEXP (XEXP (base_addr, 0), 0);
}
const char *update_base = update_base_p ? "m" : "";
regno = REGNO (operands[0]);
otherops[0] = operands[0];
otherops[1] = base_reg;
if (regno >= 28)
{
enable4 = nds32_regno_to_enable4 (regno);
sprintf (buff, "lmw.bi%s\t$sp, [%%1], $sp, %x", update_base, enable4);
}
else
{
sprintf (buff, "lmw.bi%s\t%%0, [%%1], %%0", update_base);
}
output_asm_insn (buff, otherops);
return "";
}
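Both output helpers print a single-word smw.bi/lmw.bi.  Registers below $r28 are passed through the begin/end register-pair fields, while $fp/$gp/$lp/$sp can only be named through the Enable4 nibble (bit 3 = $fp down to bit 0 = $sp, per nds32_regno_to_enable4 above).  Illustrative outputs (register numbers chosen arbitrarily):

/* operands[1] in $r3:  smw.bi   $r3, [$r2], $r3
   operands[1] in $lp:  smw.bi   $sp, [$r2], $sp, 2
   In the second form, printing $sp for the register pair appears to serve only
   as a placeholder so that just the Enable4-selected register is transferred.  */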
void
nds32_expand_unaligned_load (rtx *operands, enum machine_mode mode)
{
/* Initial memory offset. */
int offset = WORDS_BIG_ENDIAN ? GET_MODE_SIZE (mode) - 1 : 0;
int offset_adj = WORDS_BIG_ENDIAN ? -1 : 1;
/* Initial register shift byte. */
int shift = 0;
/* The first load byte instruction is not the same. */
int width = GET_MODE_SIZE (mode) - 1;
rtx mem[2];
rtx reg[2];
rtx sub_reg;
rtx temp_reg, temp_sub_reg;
int num_reg;
/* Generate a series of load byte instructions.
The first load byte instruction differs from the
following ones, like:
First:
lbi reg0, [mem]
zeh reg0, reg0
Second:
lbi temp_reg, [mem + offset]
sll temp_reg, (8 * shift)
ior reg0, temp_reg
lbi temp_reg, [mem + (offset + 1)]
sll temp_reg, (8 * (shift + 1))
ior reg0, temp_reg */
temp_reg = gen_reg_rtx (SImode);
temp_sub_reg = gen_lowpart (QImode, temp_reg);
if (mode == DImode)
{
/* Load doubleword; we need two registers to access it.  */
reg[0] = simplify_gen_subreg (SImode, operands[0],
GET_MODE (operands[0]), 0);
reg[1] = simplify_gen_subreg (SImode, operands[0],
GET_MODE (operands[0]), 4);
/* A register only holds 4 bytes.  */
width = GET_MODE_SIZE (SImode) - 1;
}
else
{
reg[0] = operands[0];
}
for (num_reg = (mode == DImode) ? 2 : 1; num_reg > 0; num_reg--)
{
sub_reg = gen_lowpart (QImode, reg[0]);
mem[0] = gen_rtx_MEM (QImode, plus_constant (Pmode, operands[1], offset));
/* Generating the first part instructions.
lbi reg0, [mem]
zeh reg0, reg0 */
emit_move_insn (sub_reg, mem[0]);
emit_insn (gen_zero_extendqisi2 (reg[0], sub_reg));
while (width > 0)
{
offset = offset + offset_adj;
shift++;
width--;
mem[1] = gen_rtx_MEM (QImode, plus_constant (Pmode,
operands[1],
offset));
/* Generating the second part instructions.
lbi temp_reg, [mem + offset]
sll temp_reg, (8 * shift)
ior reg0, temp_reg */
emit_move_insn (temp_sub_reg, mem[1]);
emit_insn (gen_ashlsi3 (temp_reg, temp_reg,
GEN_INT (shift * 8)));
emit_insn (gen_iorsi3 (reg[0], reg[0], temp_reg));
}
if (mode == DImode)
{
/* Continue with the second register of the destination.  */
reg[0] = reg[1];
shift = 0;
width = GET_MODE_SIZE (SImode) - 1;
offset = offset + offset_adj;
}
}
}
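For the little-endian SImode case, the emitted lbi/sll/ior sequence assembles the word byte by byte, equivalent to this portable C sketch (illustrative only; the big-endian path walks the bytes in the opposite order):

static unsigned int
unaligned_load_w_sketch (const unsigned char *p)
{
  unsigned int r;

  r = p[0];                          /* lbi + zero-extend   */
  r |= (unsigned int) p[1] << 8;     /* lbi, sll 8,  ior    */
  r |= (unsigned int) p[2] << 16;    /* lbi, sll 16, ior    */
  r |= (unsigned int) p[3] << 24;    /* lbi, sll 24, ior    */
  return r;
}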
void
nds32_expand_unaligned_store (rtx *operands, enum machine_mode mode)
{
/* Initial memory offset. */
int offset = WORDS_BIG_ENDIAN ? GET_MODE_SIZE (mode) - 1 : 0;
int offset_adj = WORDS_BIG_ENDIAN ? -1 : 1;
/* Initial register shift byte. */
int shift = 0;
/* The first store byte instruction is not the same.  */
int width = GET_MODE_SIZE (mode) - 1;
rtx mem[2];
rtx reg[2];
rtx sub_reg;
rtx temp_reg, temp_sub_reg;
int num_reg;
/* Generate a series of store byte instructions.
The first store byte instruction differs from the
following ones, like:
First:
sbi reg0, [mem + 0]
Second:
srli temp_reg, reg0, (8 * shift)
sbi temp_reg, [mem + offset] */
temp_reg = gen_reg_rtx (SImode);
temp_sub_reg = gen_lowpart (QImode, temp_reg);
if (mode == DImode)
{
/* Store doubleword; we need two registers to access it.  */
reg[0] = simplify_gen_subreg (SImode, operands[1],
GET_MODE (operands[1]), 0);
reg[1] = simplify_gen_subreg (SImode, operands[1],
GET_MODE (operands[1]), 4);
/* A register only holds 4 bytes.  */
width = GET_MODE_SIZE (SImode) - 1;
}
else
{
reg[0] = operands[1];
}
for (num_reg = (mode == DImode) ? 2 : 1; num_reg > 0; num_reg--)
{
sub_reg = gen_lowpart (QImode, reg[0]);
mem[0] = gen_rtx_MEM (QImode, plus_constant (Pmode, operands[0], offset));
/* Generating the first part instructions.
sbi reg0, [mem + 0] */
emit_move_insn (mem[0], sub_reg);
while (width > 0)
{
offset = offset + offset_adj;
shift++;
width--;
mem[1] = gen_rtx_MEM (QImode, plus_constant (Pmode,
operands[0],
offset));
/* Generating the second part instructions.
srli temp_reg, reg0, (8 * shift)
sbi temp_reg, [mem + offset] */
emit_insn (gen_lshrsi3 (temp_reg, reg[0],
GEN_INT (shift * 8)));
emit_move_insn (mem[1], temp_sub_reg);
}
if (mode == DImode)
{
/* Continue with the second register of the source value.  */
reg[0] = reg[1];
shift = 0;
width = GET_MODE_SIZE (SImode) - 1;
offset = offset + offset_adj;
}
}
}
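Likewise, for the little-endian SImode case the emitted sbi/srli sequence behaves like this C sketch (illustrative only; big endian reverses the byte order):

static void
unaligned_store_w_sketch (unsigned char *p, unsigned int v)
{
  p[0] = v & 0xff;           /* sbi of the low byte   */
  p[1] = (v >> 8) & 0xff;    /* srli 8,  sbi          */
  p[2] = (v >> 16) & 0xff;   /* srli 16, sbi          */
  p[3] = (v >> 24) & 0xff;   /* srli 24, sbi          */
}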
gcc/config/nds32/nds32-protos.h
@@ -60,12 +60,24 @@ extern void nds32_expand_epilogue_v3pop (bool);
extern bool nds32_ls_333_p (rtx, rtx, rtx, machine_mode);

/* Auxiliary functions for lwm/smw.  */
extern bool nds32_valid_smw_lwm_base_p (rtx);

/* Auxiliary functions for expanding rtl used in nds32-multiple.md.  */
extern rtx nds32_expand_load_multiple (int, int, rtx, rtx);
extern rtx nds32_expand_store_multiple (int, int, rtx, rtx);
extern int nds32_expand_movmemqi (rtx, rtx, rtx, rtx);
/* Auxiliary function for expanding an unaligned load.  */
extern void nds32_expand_unaligned_load (rtx *, enum machine_mode);

/* Auxiliary function for expanding an unaligned store.  */
extern void nds32_expand_unaligned_store (rtx *, enum machine_mode);
/* Auxiliary functions for multiple load/store predicate checking.  */
extern bool nds32_valid_multiple_load_store (rtx, bool);

@@ -106,6 +118,8 @@ extern const char *nds32_output_16bit_load (rtx *, int);
extern const char *nds32_output_32bit_store (rtx *, int);
extern const char *nds32_output_32bit_load (rtx *, int);
extern const char *nds32_output_32bit_load_s (rtx *, int);
extern const char *nds32_output_smw_single_word (rtx *);
extern const char *nds32_output_lmw_single_word (rtx *);

/* Auxiliary functions to output stack push/pop instruction.  */
gcc/config/nds32/nds32.h
@@ -346,6 +346,12 @@ enum nds32_builtins
NDS32_BUILTIN_MTUSR,
NDS32_BUILTIN_SETGIE_EN,
NDS32_BUILTIN_SETGIE_DIS,
NDS32_BUILTIN_UALOAD_HW,
NDS32_BUILTIN_UALOAD_W,
NDS32_BUILTIN_UALOAD_DW,
NDS32_BUILTIN_UASTORE_HW,
NDS32_BUILTIN_UASTORE_W,
NDS32_BUILTIN_UASTORE_DW,
NDS32_BUILTIN_COUNT
};

gcc/config/nds32/predicates.md
@@ -57,6 +57,10 @@
return true;
})
(define_predicate "nds32_lmw_smw_base_operand"
(and (match_code "mem")
(match_test "nds32_valid_smw_lwm_base_p (op)")))
(define_special_predicate "nds32_load_multiple_operation"
(match_code "parallel")
{