Commit c5563e11 by Jim Wilson

(movesi_unaligned, movsi_ulw, movsi_usw): Delete.

(extv, extzv, insv, movsi_ulw, movsi_usw): New patterns.

From-SVN: r7413
parent a40ed31b
...@@ -2545,130 +2545,143 @@ move\\t%0,%z4\\n\\ ...@@ -2545,130 +2545,143 @@ move\\t%0,%z4\\n\\
;; ;;
;; .................... ;; ....................
;; unaligned word moves generated by the block moves. ;; Bit field extract patterns which use lwl/lwr.
;; I don't think these are used anymore. Ian Taylor 30 Sep 93 ;; ??? There should be DImode variants for 64 bit code, but the current
;; bitfield scheme can't handle that. We would need to add new optabs
;;(define_expand "movsi_unaligned" ;; in order to make that work.
;; [(set (match_operand:SI 0 "general_operand" "")
;; (unspec [(match_operand:SI 1 "general_operand" "")] 0))] ;; ??? There could be HImode variants for the ulh/ulhu/ush macros.
;; "" ;; It isn't clear whether this will give better code.
;; "
;;{ (define_expand "extv"
;; /* Handle stores. */ [(set (match_operand:SI 0 "register_operand" "")
;; if (GET_CODE (operands[0]) == MEM) (sign_extract:SI (match_operand:QI 1 "memory_operand" "")
;; { (match_operand:SI 2 "immediate_operand" "")
;; rtx reg = gen_reg_rtx (SImode); (match_operand:SI 3 "immediate_operand" "")))]
;; rtx insn = emit_insn (gen_movsi_ulw (reg, operands[1])); ""
;; rtx addr = XEXP (operands[0], 0); "
;; if (CONSTANT_P (addr)) {
;; REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUIV, addr, REG_NOTES (insn)); /* If this isn't a 32 bit field, and it doesn't start on a byte boundary
;; then fail. */
;; if (reg_or_0_operand (operands[1], SImode)) if (INTVAL (operands[2]) != 32 || (INTVAL (operands[3]) % 8) != 0)
;; DONE; FAIL;
;;
;; operands[1] = reg; /* This can happen for a 64 bit target, when extracting a value from
;; } a 64 bit union member. extract_bit_field doesn't verify that our
;; source matches the predicate, so we force it to be a MEM here. */
;; /* Generate appropriate load, store. If not a load or store, if (GET_CODE (operands[1]) != MEM)
;; do a normal movsi. */ FAIL;
;; if (GET_CODE (operands[0]) != MEM && GET_CODE (operands[1]) != MEM)
;; { /* Otherwise, emit a lwl/lwr pair to load the value. */
;; emit_insn (gen_movsi (operands[0], operands[1])); emit_insn (gen_movsi_ulw (operands[0], operands[1]));
;; DONE; DONE;
;; } }")
;;
;; /* Fall through and generate normal code. */ (define_expand "extzv"
;;}") [(set (match_operand:SI 0 "register_operand" "")
;; (zero_extract:SI (match_operand:QI 1 "memory_operand" "")
;;(define_insn "movsi_ulw" (match_operand:SI 2 "immediate_operand" "")
;; [(set (match_operand:SI 0 "register_operand" "=&d,&d,d,d") (match_operand:SI 3 "immediate_operand" "")))]
;; (unspec [(match_operand:SI 1 "general_operand" "R,o,dIKL,M")] 0))] ""
;; "" "
;; "* {
;;{ /* If this isn't a 32 bit field, and it doesn't start on a byte boundary
;; enum rtx_code code; then fail. */
;; char *ret; if (INTVAL (operands[2]) != 32 || (INTVAL (operands[3]) % 8) != 0)
;; rtx offset; FAIL;
;; rtx addr;
;; rtx mem_addr; /* This can happen for a 64 bit target, when extracting a value from
;; a 64 bit union member. extract_bit_field doesn't verify that our
;; if (which_alternative != 0) source matches the predicate, so we force it to be a MEM here. */
;; return mips_move_1word (operands, insn, FALSE); if (GET_CODE (operands[1]) != MEM)
;; FAIL;
;; if (TARGET_STATS)
;; mips_count_memory_refs (operands[1], 2); /* Otherwise, emit a lwl/lwr pair to load the value. */
;; emit_insn (gen_movsi_ulw (operands[0], operands[1]));
;; /* The stack/frame pointers are always aligned, so we can convert DONE;
;; to the faster lw if we are referencing an aligned stack location. */ }")
;;
;; offset = const0_rtx; (define_expand "insv"
;; addr = XEXP (operands[1], 0); [(set (zero_extract:SI (match_operand:QI 0 "memory_operand" "")
;; mem_addr = eliminate_constant_term (addr, &offset); (match_operand:SI 1 "immediate_operand" "")
;; (match_operand:SI 2 "immediate_operand" ""))
;; if ((INTVAL (offset) & (UNITS_PER_WORD-1)) == 0 (match_operand:SI 3 "register_operand" ""))]
;; && (mem_addr == stack_pointer_rtx || mem_addr == frame_pointer_rtx)) ""
;; ret = \"lw\\t%0,%1\"; "
;; {
;; else /* If this isn't a 32 bit field, and it doesn't start on a byte boundary
;; { then fail. */
;; ret = \"ulw\\t%0,%1\"; if (INTVAL (operands[1]) != 32 || (INTVAL (operands[2]) % 8) != 0)
;; if (TARGET_GAS) FAIL;
;; {
;; enum rtx_code code = GET_CODE (addr); /* This can happen for a 64 bit target, when storing into a 32 bit union
;; member. store_bit_field doesn't verify that our target matches the
;; if (code == CONST || code == SYMBOL_REF || code == LABEL_REF) predicate, so we force it to be a MEM here. */
;; { if (GET_CODE (operands[0]) != MEM)
;; operands[2] = gen_rtx (REG, SImode, GP_REG_FIRST + 1); FAIL;
;; ret = \"%[la\\t%2,%1\;ulw\\t%0,0(%2)%]\";
;; } /* Otherwise, emit a swl/swr pair to store the value. */
;; } emit_insn (gen_movsi_usw (operands[0], operands[3]));
;; } DONE;
;; }")
;; return mips_fill_delay_slot (ret, DELAY_LOAD, operands, insn);
;;}" ;; unaligned word moves generated by the bit field patterns
;; [(set_attr "type" "load,load,move,arith")
;; (set_attr "mode" "SI") (define_insn "movsi_ulw"
;; (set_attr "length" "2,4,1,2")]) [(set (match_operand:SI 0 "register_operand" "=&d,&d")
;; (unspec [(match_operand:QI 1 "general_operand" "R,o")] 0))]
;;(define_insn "movsi_usw" ""
;; [(set (match_operand:SI 0 "memory_operand" "=R,o") "*
;; (unspec [(match_operand:SI 1 "reg_or_0_operand" "dJ,dJ")] 0))] {
;; "" rtx offset = const0_rtx;
;; "* rtx addr = XEXP (operands[1], 0);
;;{ rtx mem_addr = eliminate_constant_term (addr, &offset);
;; rtx offset = const0_rtx; char *ret;
;; rtx addr = XEXP (operands[0], 0);
;; rtx mem_addr = eliminate_constant_term (addr, &offset); if (TARGET_STATS)
;; mips_count_memory_refs (operands[1], 2);
;; if (TARGET_STATS)
;; mips_count_memory_refs (operands[0], 2); /* The stack/frame pointers are always aligned, so we can convert
;; to the faster lw if we are referencing an aligned stack location. */
;; /* The stack/frame pointers are always aligned, so we can convert
;; to the faster sw if we are referencing an aligned stack location. */ if ((INTVAL (offset) & 3) == 0
;; && (mem_addr == stack_pointer_rtx || mem_addr == frame_pointer_rtx))
;; if ((INTVAL (offset) & (UNITS_PER_WORD-1)) == 0 ret = \"lw\\t%0,%1\";
;; && (mem_addr == stack_pointer_rtx || mem_addr == frame_pointer_rtx)) else
;; return \"sw\\t%1,%0\"; ret = \"ulw\\t%0,%1\";
;;
;;
;; if (TARGET_GAS)
;; {
;; enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
;;
;; if (code == CONST || code == SYMBOL_REF || code == LABEL_REF)
;; {
;; operands[2] = gen_rtx (REG, SImode, GP_REG_FIRST + 1);
;; return \"%[la\\t%2,%0\;usw\\t%z1,0(%2)%]\";
;; }
;; }
;;
;; return \"usw\\t%z1,%0\";
;;}"
;; [(set_attr "type" "store")
;; (set_attr "mode" "SI")
;; (set_attr "length" "2,4")])
return mips_fill_delay_slot (ret, DELAY_LOAD, operands, insn);
}"
[(set_attr "type" "load,load")
(set_attr "mode" "SI")
(set_attr "length" "2,4")])
;; Unaligned store of an SImode value, normally emitted as an swl/swr
;; pair via the assembler's "usw" macro.  Operand 0 is the destination
;; memory word (QImode so that any byte alignment is accepted); operand 1
;; is the source register or constant zero (the "J" constraint).
;; Unspec code 1 keeps this store distinct from the movsi_ulw load
;; (unspec code 0).  Used by the insv bit-field expander.
(define_insn "movsi_usw"
[(set (match_operand:QI 0 "memory_operand" "=R,o")
(unspec [(match_operand:SI 1 "reg_or_0_operand" "dJ,dJ")] 1))]
""
"*
{
/* Split the destination address into a base plus constant offset.  */
rtx offset = const0_rtx;
rtx addr = XEXP (operands[0], 0);
rtx mem_addr = eliminate_constant_term (addr, &offset);
if (TARGET_STATS)
mips_count_memory_refs (operands[0], 2);
/* The stack/frame pointers are always aligned, so we can convert
to the faster sw if we are referencing an aligned stack location. */
if ((INTVAL (offset) & 3) == 0
&& (mem_addr == stack_pointer_rtx || mem_addr == frame_pointer_rtx))
return \"sw\\t%1,%0\";
/* %z1 prints $0 when operand 1 is constant zero.  */
return \"usw\\t%z1,%0\";
}"
[(set_attr "type" "store")
(set_attr "mode" "SI")
(set_attr "length" "2,4")])
;; 64-bit integer moves ;; 64-bit integer moves
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment