 recipes/gcc/gcc-4.2.2/902-avr32-fix-sync-insn-instructions.patch | 174 ++++++++++++++++++
 1 file changed, 174 insertions(+), 0 deletions(-)
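For reference, the sync_<atomic_insn>si / sync_old_<atomic_insn>si / sync_new_<atomic_insn>si expanders patched below are what GCC uses to open-code the __sync_* builtins on AVR32. A generic C illustration (not part of the patch, assuming GCC 4.2's usual expansion scheme) of which builtin maps to which pattern:

/* Illustration only: how the __sync builtins map onto the expanders
   patched below (result unused -> sync_<op>si, fetch-then-op ->
   sync_old_<op>si, op-then-fetch -> sync_new_<op>si).  */
int counter;

int
example (void)
{
  int old_val = __sync_fetch_and_add (&counter, 1);  /* sync_old_addsi */
  int new_val = __sync_add_and_fetch (&counter, 1);  /* sync_new_addsi */
  __sync_fetch_and_sub (&counter, 2);      /* result unused: sync_subsi */
  return old_val + new_val;
}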
diff --git a/recipes/gcc/gcc-4.2.2/902-avr32-fix-sync-insn-instructions.patch b/recipes/gcc/gcc-4.2.2/902-avr32-fix-sync-insn-instructions.patch
new file mode 100644
index 0000000000..aab1490c98
--- /dev/null
+++ b/recipes/gcc/gcc-4.2.2/902-avr32-fix-sync-insn-instructions.patch
@@ -0,0 +1,174 @@
+Index: a/gcc/config/avr32/predicates.md
+===================================================================
+--- a/gcc/config/avr32/predicates.md (revision 42108)
++++ b/gcc/config/avr32/predicates.md (working copy)
+@@ -333,6 +333,11 @@
+ && ((GET_CODE(XEXP(op,0)) == CONST_INT)
+ || (GET_CODE(XEXP(op,1)) == CONST_INT))")) ))
+
++;; A memory operand with a ks16 (signed 16-bit) offset
++(define_predicate "avr32_ks16_memory_operand"
++ (and (match_code "mem")
++ (match_test "avr32_ks16_address_operand (XEXP (op, 0), GET_MODE (XEXP (op, 0)))")))
++
+ ;; An immediate k11 address operand
+ (define_predicate "avr32_ks11_address_operand"
+ (and (match_operand 0 "address_operand")
+Index: a/gcc/config/avr32/sync.md
+===================================================================
+--- a/gcc/config/avr32/sync.md (revision 41409)
++++ b/gcc/config/avr32/sync.md (working copy)
+@@ -32,9 +32,14 @@
+ (define_code_attr atomic_insn [(plus "add") (minus "sub") (and "and") (ior "ior") (xor "xor")])
+
+ (define_insn "sync_loadsi"
+- [(set (match_operand:SI 0 "register_operand" "=r")
++ ; NB! Put an early clobber on the destination operand to
++ ; keep gcc from allocating the same register for the source
++ ; and destination. This prevents gcc from clobbering the
++ ; source operand, since these instructions are effectively
++ ; inside a loop.
++ [(set (match_operand:SI 0 "register_operand" "=&r")
+ (unspec_volatile:SI
+- [(match_operand:SI 1 "memory_operand" "RKs16")
++ [(match_operand:SI 1 "avr32_ks16_memory_operand" "RKs16")
+ (label_ref (match_operand 2 "" ""))]
+ VUNSPEC_SYNC_SET_LOCK_AND_LOAD) )]
+ ""
+@@ -46,7 +51,7 @@
+ )
+
+ (define_insn "sync_store_if_lock"
+- [(set (match_operand:SI 0 "memory_operand" "=RKs16")
++ [(set (match_operand:SI 0 "avr32_ks16_memory_operand" "=RKs16")
+ (unspec_volatile:SI
+ [(match_operand:SI 1 "register_operand" "r")
+ (label_ref (match_operand 2 "" ""))]
+@@ -62,7 +67,7 @@
+ (define_expand "sync_<atomic_insn>si"
+ [(set (match_dup 2)
+ (unspec_volatile:SI
+- [(match_operand:SI 0 "memory_operand" "")
++ [(match_operand:SI 0 "avr32_ks16_memory_operand" "")
+ (match_dup 3)]
+ VUNSPEC_SYNC_SET_LOCK_AND_LOAD))
+ (set (match_dup 2)
+@@ -72,11 +77,33 @@
+ (unspec_volatile:SI
+ [(match_dup 2)
+ (match_dup 3)]
+- VUNSPEC_SYNC_STORE_IF_LOCK) )]
++ VUNSPEC_SYNC_STORE_IF_LOCK) )
++ (use (match_dup 1))
++ (use (match_dup 4))]
+ ""
+ {
++ rtx *mem_expr = &operands[0];
++ rtx ptr_reg;
++ if ( !avr32_ks16_memory_operand (*mem_expr, GET_MODE (*mem_expr)) )
++ {
++ ptr_reg = force_reg (Pmode, XEXP (*mem_expr, 0));
++ XEXP (*mem_expr, 0) = ptr_reg;
++ }
++ else
++ {
++ rtx address = XEXP (*mem_expr, 0);
++ if ( REG_P (address) )
++ ptr_reg = address;
++ else if ( REG_P (XEXP (address, 0)) )
++ ptr_reg = XEXP (address, 0);
++ else
++ ptr_reg = XEXP (address, 1);
++ }
++
+ operands[2] = gen_reg_rtx (SImode);
+ operands[3] = gen_rtx_LABEL_REF(Pmode, gen_label_rtx ());
++ operands[4] = ptr_reg;
++
+ }
+ )
+
+@@ -85,7 +112,7 @@
+ (define_expand "sync_old_<atomic_insn>si"
+ [(set (match_operand:SI 0 "register_operand" "")
+ (unspec_volatile:SI
+- [(match_operand:SI 1 "memory_operand" "")
++ [(match_operand:SI 1 "avr32_ks16_memory_operand" "")
+ (match_dup 4)]
+ VUNSPEC_SYNC_SET_LOCK_AND_LOAD))
+ (set (match_dup 3)
+@@ -95,18 +122,39 @@
+ (unspec_volatile:SI
+ [(match_dup 3)
+ (match_dup 4)]
+- VUNSPEC_SYNC_STORE_IF_LOCK) )]
++ VUNSPEC_SYNC_STORE_IF_LOCK) )
++ (use (match_dup 2))
++ (use (match_dup 5))]
+ ""
+ {
++ rtx *mem_expr = &operands[1];
++ rtx ptr_reg;
++ if ( !avr32_ks16_memory_operand (*mem_expr, GET_MODE (*mem_expr)) )
++ {
++ ptr_reg = force_reg (Pmode, XEXP (*mem_expr, 0));
++ XEXP (*mem_expr, 0) = ptr_reg;
++ }
++ else
++ {
++ rtx address = XEXP (*mem_expr, 0);
++ if ( REG_P (address) )
++ ptr_reg = address;
++ else if ( REG_P (XEXP (address, 0)) )
++ ptr_reg = XEXP (address, 0);
++ else
++ ptr_reg = XEXP (address, 1);
++ }
++
+ operands[3] = gen_reg_rtx (SImode);
+ operands[4] = gen_rtx_LABEL_REF(Pmode, gen_label_rtx ());
++ operands[5] = ptr_reg;
+ }
+ )
+
+ (define_expand "sync_new_<atomic_insn>si"
+ [(set (match_operand:SI 0 "register_operand" "")
+ (unspec_volatile:SI
+- [(match_operand:SI 1 "memory_operand" "")
++ [(match_operand:SI 1 "avr32_ks16_memory_operand" "")
+ (match_dup 3)]
+ VUNSPEC_SYNC_SET_LOCK_AND_LOAD))
+ (set (match_dup 0)
+@@ -116,10 +164,31 @@
+ (unspec_volatile:SI
+ [(match_dup 0)
+ (match_dup 3)]
+- VUNSPEC_SYNC_STORE_IF_LOCK) )]
++ VUNSPEC_SYNC_STORE_IF_LOCK) )
++ (use (match_dup 2))
++ (use (match_dup 4))]
+ ""
+ {
++ rtx *mem_expr = &operands[1];
++ rtx ptr_reg;
++ if ( !avr32_ks16_memory_operand (*mem_expr, GET_MODE (*mem_expr)) )
++ {
++ ptr_reg = force_reg (Pmode, XEXP (*mem_expr, 0));
++ XEXP (*mem_expr, 0) = ptr_reg;
++ }
++ else
++ {
++ rtx address = XEXP (*mem_expr, 0);
++ if ( REG_P (address) )
++ ptr_reg = address;
++ else if ( REG_P (XEXP (address, 0)) )
++ ptr_reg = XEXP (address, 0);
++ else
++ ptr_reg = XEXP (address, 1);
++ }
++
+ operands[3] = gen_rtx_LABEL_REF(Pmode, gen_label_rtx ());
++ operands[4] = ptr_reg;
+ }
+ )
+
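The operand-preparation code that this patch adds to the sync_<atomic_insn>si, sync_old_<atomic_insn>si and sync_new_<atomic_insn>si expanders is duplicated verbatim three times. A minimal sketch of how the shared logic could be hoisted into one helper; the name avr32_sync_prepare_operand is hypothetical, but the calls (avr32_ks16_memory_operand, force_reg, XEXP, REG_P, GET_MODE, Pmode) are exactly the ones the patch itself uses:

/* Hypothetical helper: legitimize the memory operand of a sync
   expander and return the base pointer register that the pattern
   should mark with (use ...).  */
static rtx
avr32_sync_prepare_operand (rtx *mem_expr)
{
  rtx ptr_reg;

  if (!avr32_ks16_memory_operand (*mem_expr, GET_MODE (*mem_expr)))
    {
      /* Address does not fit a ks16 displacement: force it into a
         register so the RKs16 constraint can be satisfied.  */
      ptr_reg = force_reg (Pmode, XEXP (*mem_expr, 0));
      XEXP (*mem_expr, 0) = ptr_reg;
    }
  else
    {
      /* Address already fits: extract the base register from either
         a bare (reg) or a (plus (reg) (const_int)) address.  */
      rtx address = XEXP (*mem_expr, 0);

      if (REG_P (address))
        ptr_reg = address;
      else if (REG_P (XEXP (address, 0)))
        ptr_reg = XEXP (address, 0);
      else
        ptr_reg = XEXP (address, 1);
    }

  return ptr_reg;
}

Each expander's preparation statements would then shrink to, e.g. for sync_<atomic_insn>si:

  operands[2] = gen_reg_rtx (SImode);
  operands[3] = gen_rtx_LABEL_REF (Pmode, gen_label_rtx ());
  operands[4] = avr32_sync_prepare_operand (&operands[0]);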