From c58cc7d3796dcee6e93885c835ed04cb566abeb2 Mon Sep 17 00:00:00 2001 From: Koen Kooi Date: Thu, 17 Mar 2011 21:41:22 +0100 Subject: move layer into meta-oe in preparation for future splits As per TSC decision Signed-off-by: Koen Kooi --- .../gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99429.patch | 1257 ++++++++++++++++++++ 1 file changed, 1257 insertions(+) create mode 100644 meta-oe/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99429.patch (limited to 'meta-oe/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99429.patch') diff --git a/meta-oe/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99429.patch b/meta-oe/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99429.patch new file mode 100644 index 0000000000..63ba95e0e3 --- /dev/null +++ b/meta-oe/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99429.patch @@ -0,0 +1,1257 @@ +2010-11-09 Michael Hope + + Revert: + + Backport from mainline: + + 2010-07-15 Bernd Schmidt + + gcc/ + * postreload.c (last_label_ruid, first_index_reg, last_index_reg): + New static variables. + (reload_combine_recognize_pattern): New static function, broken out + of reload_combine. + (reload_combine): Use it. Only initialize first_index_reg and + last_index_reg once. + + 2010-07-17 Bernd Schmidt + + PR target/42235 + gcc/ + * postreload.c (reload_cse_move2add): Return bool, true if anything. + changed. All callers changed. + (move2add_use_add2_insn): Likewise. + (move2add_use_add3_insn): Likewise. + (reload_cse_regs): If reload_cse_move2add changed anything, rerun + reload_combine. + (RELOAD_COMBINE_MAX_USES): Bump to 16. + (last_jump_ruid): New static variable. + (struct reg_use): New members CONTAINING_MEM and RUID. + (reg_state): New members ALL_OFFSETS_MATCH and REAL_STORE_RUID. + (reload_combine_split_one_ruid, reload_combine_split_ruids, + reload_combine_purge_insn_uses, reload_combine_closest_single_use + reload_combine_purge_reg_uses_after_ruid, + reload_combine_recognize_const_pattern): New static functions. + (reload_combine_recognize_pattern): Verify that ALL_OFFSETS_MATCH + is true for our reg and that we have available index regs. + (reload_combine_note_use): New args RUID and CONTAINING_MEM. All + callers changed. Use them to initialize fields in struct reg_use. + (reload_combine): Initialize last_jump_ruid. Be careful when to + take PREV_INSN of the scanned insn. Update REAL_STORE_RUID fields. + Call reload_combine_recognize_const_pattern. + (reload_combine_note_store): Update REAL_STORE_RUID field. + + gcc/testsuite/ + * gcc.target/arm/pr42235.c: New test. + + 2010-07-19 Bernd Schmidt + + gcc/ + * postreload.c (reload_combine_closest_single_use): Ignore the + number of uses for DEBUG_INSNs. + (fixup_debug_insns): New static function. + (reload_combine_recognize_const_pattern): Use it. Don't let the + main loop be affected by DEBUG_INSNs. + Really disallow moving adds past a jump insn. + (reload_combine_recognize_pattern): Don't update use_ruid here. + (reload_combine_note_use): Do it here. + (reload_combine): Use control_flow_insn_p rather than JUMP_P. + + 2010-07-20 Bernd Schmidt + + gcc/ + * postreload.c (fixup_debug_insns): Remove arg REGNO. New args + FROM and TO. All callers changed. Don't look for tracked uses, + just scan the RTL for DEBUG_INSNs and substitute. + (reload_combine_recognize_pattern): Call fixup_debug_insns. + (reload_combine): Ignore DEBUG_INSNs. + + 2010-07-22 Bernd Schmidt + + PR bootstrap/44970 + PR middle-end/45009 + gcc/ + * postreload.c: Include "target.h". 
+ (reload_combine_closest_single_use): Don't take DEBUG_INSNs + into account. + (fixup_debug_insns): Don't copy the rtx. + (reload_combine_recognize_const_pattern): DEBUG_INSNs can't have uses. + Don't copy when replacing. Call fixup_debug_insns in the case where + we merged one add with another. + (reload_combine_recognize_pattern): Fail if there aren't any uses. + Try harder to determine whether we're picking a valid index register. + Don't set store_ruid for an insn we're going to scan in the + next iteration. + (reload_combine): Remove unused code. + (reload_combine_note_use): When updating use information for + an old insn, ignore a use that occurs after store_ruid. + * Makefile.in (postreload.o): Update dependencies. + + 2010-07-27 Bernd Schmidt + + gcc/ + * postreload.c (reload_combine_recognize_const_pattern): Move test + for limiting the insn movement to the right scope. + + 2010-07-27 Bernd Schmidt + + gcc/ + * postreload.c (try_replace_in_use): New static function. + (reload_combine_recognize_const_pattern): Use it here. Allow + substituting into a final add insn, and substituting into a memory + reference in an insn that sets the reg. + +=== modified file 'gcc/Makefile.in' +--- old/gcc/Makefile.in 2010-10-14 11:25:44 +0000 ++++ new/gcc/Makefile.in 2010-11-08 22:08:43 +0000 +@@ -3155,7 +3155,7 @@ + $(RTL_H) $(REAL_H) $(FLAGS_H) $(EXPR_H) $(OPTABS_H) reload.h $(REGS_H) \ + hard-reg-set.h insn-config.h $(BASIC_BLOCK_H) $(RECOG_H) output.h \ + $(FUNCTION_H) $(TOPLEV_H) cselib.h $(TM_P_H) $(EXCEPT_H) $(TREE_H) $(MACHMODE_H) \ +- $(OBSTACK_H) $(TARGET_H) $(TIMEVAR_H) $(TREE_PASS_H) $(DF_H) $(DBGCNT_H) ++ $(OBSTACK_H) $(TIMEVAR_H) $(TREE_PASS_H) $(DF_H) $(DBGCNT_H) + postreload-gcse.o : postreload-gcse.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \ + $(TM_H) $(RTL_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) insn-config.h \ + $(RECOG_H) $(EXPR_H) $(BASIC_BLOCK_H) $(FUNCTION_H) output.h $(TOPLEV_H) \ + +=== modified file 'gcc/postreload.c' +--- old/gcc/postreload.c 2010-10-14 11:32:02 +0000 ++++ new/gcc/postreload.c 2010-11-08 22:08:43 +0000 +@@ -44,7 +44,6 @@ + #include "toplev.h" + #include "except.h" + #include "tree.h" +-#include "target.h" + #include "timevar.h" + #include "tree-pass.h" + #include "df.h" +@@ -57,10 +56,10 @@ + static int reload_cse_simplify_operands (rtx, rtx); + + static void reload_combine (void); +-static void reload_combine_note_use (rtx *, rtx, int, rtx); ++static void reload_combine_note_use (rtx *, rtx); + static void reload_combine_note_store (rtx, const_rtx, void *); + +-static bool reload_cse_move2add (rtx); ++static void reload_cse_move2add (rtx); + static void move2add_note_store (rtx, const_rtx, void *); + + /* Call cse / combine like post-reload optimization phases. +@@ -68,16 +67,11 @@ + void + reload_cse_regs (rtx first ATTRIBUTE_UNUSED) + { +- bool moves_converted; + reload_cse_regs_1 (first); + reload_combine (); +- moves_converted = reload_cse_move2add (first); ++ reload_cse_move2add (first); + if (flag_expensive_optimizations) +- { +- if (moves_converted) +- reload_combine (); +- reload_cse_regs_1 (first); +- } ++ reload_cse_regs_1 (first); + } + + /* See whether a single set SET is a noop. */ +@@ -666,43 +660,30 @@ + + /* The maximum number of uses of a register we can keep track of to + replace them with reg+reg addressing. */ +-#define RELOAD_COMBINE_MAX_USES 16 ++#define RELOAD_COMBINE_MAX_USES 6 + +-/* Describes a recorded use of a register. */ +-struct reg_use +-{ +- /* The insn where a register has been used. 
*/ +- rtx insn; +- /* Points to the memory reference enclosing the use, if any, NULL_RTX +- otherwise. */ +- rtx containing_mem; +- /* Location of the register withing INSN. */ +- rtx *usep; +- /* The reverse uid of the insn. */ +- int ruid; +-}; ++/* INSN is the insn where a register has been used, and USEP points to the ++ location of the register within the rtl. */ ++struct reg_use { rtx insn, *usep; }; + + /* If the register is used in some unknown fashion, USE_INDEX is negative. + If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID +- indicates where it is first set or clobbered. ++ indicates where it becomes live again. + Otherwise, USE_INDEX is the index of the last encountered use of the +- register (which is first among these we have seen since we scan backwards). +- USE_RUID indicates the first encountered, i.e. last, of these uses. +- If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS +- with a constant offset; OFFSET contains this constant in that case. ++ register (which is first among these we have seen since we scan backwards), ++ OFFSET contains the constant offset that is added to the register in ++ all encountered uses, and USE_RUID indicates the first encountered, i.e. ++ last, of these uses. + STORE_RUID is always meaningful if we only want to use a value in a + register in a different place: it denotes the next insn in the insn +- stream (i.e. the last encountered) that sets or clobbers the register. +- REAL_STORE_RUID is similar, but clobbers are ignored when updating it. */ ++ stream (i.e. the last encountered) that sets or clobbers the register. */ + static struct + { + struct reg_use reg_use[RELOAD_COMBINE_MAX_USES]; ++ int use_index; + rtx offset; +- int use_index; + int store_ruid; +- int real_store_ruid; + int use_ruid; +- bool all_offsets_match; + } reg_state[FIRST_PSEUDO_REGISTER]; + + /* Reverse linear uid. This is increased in reload_combine while scanning +@@ -710,548 +691,42 @@ + and the store_ruid / use_ruid fields in reg_state. */ + static int reload_combine_ruid; + +-/* The RUID of the last label we encountered in reload_combine. */ +-static int last_label_ruid; +- +-/* The RUID of the last jump we encountered in reload_combine. */ +-static int last_jump_ruid; +- +-/* The register numbers of the first and last index register. A value of +- -1 in LAST_INDEX_REG indicates that we've previously computed these +- values and found no suitable index registers. */ +-static int first_index_reg = -1; +-static int last_index_reg; +- + #define LABEL_LIVE(LABEL) \ + (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno]) + +-/* Subroutine of reload_combine_split_ruids, called to fix up a single +- ruid pointed to by *PRUID if it is higher than SPLIT_RUID. */ +- +-static inline void +-reload_combine_split_one_ruid (int *pruid, int split_ruid) +-{ +- if (*pruid > split_ruid) +- (*pruid)++; +-} +- +-/* Called when we insert a new insn in a position we've already passed in +- the scan. Examine all our state, increasing all ruids that are higher +- than SPLIT_RUID by one in order to make room for a new insn. 
*/ +- +-static void +-reload_combine_split_ruids (int split_ruid) +-{ +- unsigned i; +- +- reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid); +- reload_combine_split_one_ruid (&last_label_ruid, split_ruid); +- reload_combine_split_one_ruid (&last_jump_ruid, split_ruid); +- +- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) +- { +- int j, idx = reg_state[i].use_index; +- reload_combine_split_one_ruid (®_state[i].use_ruid, split_ruid); +- reload_combine_split_one_ruid (®_state[i].store_ruid, split_ruid); +- reload_combine_split_one_ruid (®_state[i].real_store_ruid, +- split_ruid); +- if (idx < 0) +- continue; +- for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++) +- { +- reload_combine_split_one_ruid (®_state[i].reg_use[j].ruid, +- split_ruid); +- } +- } +-} +- +-/* Called when we are about to rescan a previously encountered insn with +- reload_combine_note_use after modifying some part of it. This clears all +- information about uses in that particular insn. */ +- +-static void +-reload_combine_purge_insn_uses (rtx insn) +-{ +- unsigned i; +- +- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) +- { +- int j, k, idx = reg_state[i].use_index; +- if (idx < 0) +- continue; +- j = k = RELOAD_COMBINE_MAX_USES; +- while (j-- > idx) +- { +- if (reg_state[i].reg_use[j].insn != insn) +- { +- k--; +- if (k != j) +- reg_state[i].reg_use[k] = reg_state[i].reg_use[j]; +- } +- } +- reg_state[i].use_index = k; +- } +-} +- +-/* Called when we need to forget about all uses of REGNO after an insn +- which is identified by RUID. */ +- +-static void +-reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid) +-{ +- int j, k, idx = reg_state[regno].use_index; +- if (idx < 0) +- return; +- j = k = RELOAD_COMBINE_MAX_USES; +- while (j-- > idx) +- { +- if (reg_state[regno].reg_use[j].ruid >= ruid) +- { +- k--; +- if (k != j) +- reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j]; +- } +- } +- reg_state[regno].use_index = k; +-} +- +-/* Find the use of REGNO with the ruid that is highest among those +- lower than RUID_LIMIT, and return it if it is the only use of this +- reg in the insn. Return NULL otherwise. */ +- +-static struct reg_use * +-reload_combine_closest_single_use (unsigned regno, int ruid_limit) +-{ +- int i, best_ruid = 0; +- int use_idx = reg_state[regno].use_index; +- struct reg_use *retval; +- +- if (use_idx < 0) +- return NULL; +- retval = NULL; +- for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++) +- { +- struct reg_use *use = reg_state[regno].reg_use + i; +- int this_ruid = use->ruid; +- if (this_ruid >= ruid_limit) +- continue; +- if (this_ruid > best_ruid) +- { +- best_ruid = this_ruid; +- retval = use; +- } +- else if (this_ruid == best_ruid) +- retval = NULL; +- } +- if (last_label_ruid >= best_ruid) +- return NULL; +- return retval; +-} +- +-/* After we've moved an add insn, fix up any debug insns that occur +- between the old location of the add and the new location. REG is +- the destination register of the add insn; REPLACEMENT is the +- SET_SRC of the add. FROM and TO specify the range in which we +- should make this change on debug insns. 
*/ +- +-static void +-fixup_debug_insns (rtx reg, rtx replacement, rtx from, rtx to) +-{ +- rtx insn; +- for (insn = from; insn != to; insn = NEXT_INSN (insn)) +- { +- rtx t; +- +- if (!DEBUG_INSN_P (insn)) +- continue; +- +- t = INSN_VAR_LOCATION_LOC (insn); +- t = simplify_replace_rtx (t, reg, replacement); +- validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0); +- } +-} +- +-/* Subroutine of reload_combine_recognize_const_pattern. Try to replace REG +- with SRC in the insn described by USE, taking costs into account. Return +- true if we made the replacement. */ +- +-static bool +-try_replace_in_use (struct reg_use *use, rtx reg, rtx src) +-{ +- rtx use_insn = use->insn; +- rtx mem = use->containing_mem; +- bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn)); +- +- if (mem != NULL_RTX) +- { +- addr_space_t as = MEM_ADDR_SPACE (mem); +- rtx oldaddr = XEXP (mem, 0); +- rtx newaddr = NULL_RTX; +- int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed); +- int new_cost; +- +- newaddr = simplify_replace_rtx (oldaddr, reg, src); +- if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as)) +- { +- XEXP (mem, 0) = newaddr; +- new_cost = address_cost (newaddr, GET_MODE (mem), as, speed); +- XEXP (mem, 0) = oldaddr; +- if (new_cost <= old_cost +- && validate_change (use_insn, +- &XEXP (mem, 0), newaddr, 0)) +- return true; +- } +- } +- else +- { +- rtx new_set = single_set (use_insn); +- if (new_set +- && REG_P (SET_DEST (new_set)) +- && GET_CODE (SET_SRC (new_set)) == PLUS +- && REG_P (XEXP (SET_SRC (new_set), 0)) +- && CONSTANT_P (XEXP (SET_SRC (new_set), 1))) +- { +- rtx new_src; +- int old_cost = rtx_cost (SET_SRC (new_set), SET, speed); +- +- gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg)); +- new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src); +- +- if (rtx_cost (new_src, SET, speed) <= old_cost +- && validate_change (use_insn, &SET_SRC (new_set), +- new_src, 0)) +- return true; +- } +- } +- return false; +-} +- +-/* Called by reload_combine when scanning INSN. This function tries to detect +- patterns where a constant is added to a register, and the result is used +- in an address. +- Return true if no further processing is needed on INSN; false if it wasn't +- recognized and should be handled normally. */ +- +-static bool +-reload_combine_recognize_const_pattern (rtx insn) +-{ +- int from_ruid = reload_combine_ruid; +- rtx set, pat, reg, src, addreg; +- unsigned int regno; +- struct reg_use *use; +- bool must_move_add; +- rtx add_moved_after_insn = NULL_RTX; +- int add_moved_after_ruid = 0; +- int clobbered_regno = -1; +- +- set = single_set (insn); +- if (set == NULL_RTX) +- return false; +- +- reg = SET_DEST (set); +- src = SET_SRC (set); +- if (!REG_P (reg) +- || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1 +- || GET_MODE (reg) != Pmode +- || reg == stack_pointer_rtx) +- return false; +- +- regno = REGNO (reg); +- +- /* We look for a REG1 = REG2 + CONSTANT insn, followed by either +- uses of REG1 inside an address, or inside another add insn. If +- possible and profitable, merge the addition into subsequent +- uses. */ +- if (GET_CODE (src) != PLUS +- || !REG_P (XEXP (src, 0)) +- || !CONSTANT_P (XEXP (src, 1))) +- return false; +- +- addreg = XEXP (src, 0); +- must_move_add = rtx_equal_p (reg, addreg); +- +- pat = PATTERN (insn); +- if (must_move_add && set != pat) +- { +- /* We have to be careful when moving the add; apart from the +- single_set there may also be clobbers. 
Recognize one special +- case, that of one clobber alongside the set (likely a clobber +- of the CC register). */ +- gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL); +- if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set +- || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER +- || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0))) +- return false; +- clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0)); +- } +- +- do +- { +- use = reload_combine_closest_single_use (regno, from_ruid); +- +- if (use) +- /* Start the search for the next use from here. */ +- from_ruid = use->ruid; +- +- if (use && GET_MODE (*use->usep) == Pmode) +- { +- bool delete_add = false; +- rtx use_insn = use->insn; +- int use_ruid = use->ruid; +- +- /* Avoid moving the add insn past a jump. */ +- if (must_move_add && use_ruid <= last_jump_ruid) +- break; +- +- /* If the add clobbers another hard reg in parallel, don't move +- it past a real set of this hard reg. */ +- if (must_move_add && clobbered_regno >= 0 +- && reg_state[clobbered_regno].real_store_ruid >= use_ruid) +- break; +- +- gcc_assert (reg_state[regno].store_ruid <= use_ruid); +- /* Avoid moving a use of ADDREG past a point where it is stored. */ +- if (reg_state[REGNO (addreg)].store_ruid > use_ruid) +- break; +- +- /* We also must not move the addition past an insn that sets +- the same register, unless we can combine two add insns. */ +- if (must_move_add && reg_state[regno].store_ruid == use_ruid) +- { +- if (use->containing_mem == NULL_RTX) +- delete_add = true; +- else +- break; +- } +- +- if (try_replace_in_use (use, reg, src)) +- { +- reload_combine_purge_insn_uses (use_insn); +- reload_combine_note_use (&PATTERN (use_insn), use_insn, +- use_ruid, NULL_RTX); +- +- if (delete_add) +- { +- fixup_debug_insns (reg, src, insn, use_insn); +- delete_insn (insn); +- return true; +- } +- if (must_move_add) +- { +- add_moved_after_insn = use_insn; +- add_moved_after_ruid = use_ruid; +- } +- continue; +- } +- } +- /* If we get here, we couldn't handle this use. */ +- if (must_move_add) +- break; +- } +- while (use); +- +- if (!must_move_add || add_moved_after_insn == NULL_RTX) +- /* Process the add normally. */ +- return false; +- +- fixup_debug_insns (reg, src, insn, add_moved_after_insn); +- +- reorder_insns (insn, insn, add_moved_after_insn); +- reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid); +- reload_combine_split_ruids (add_moved_after_ruid - 1); +- reload_combine_note_use (&PATTERN (insn), insn, +- add_moved_after_ruid, NULL_RTX); +- reg_state[regno].store_ruid = add_moved_after_ruid; +- +- return true; +-} +- +-/* Called by reload_combine when scanning INSN. Try to detect a pattern we +- can handle and improve. Return true if no further processing is needed on +- INSN; false if it wasn't recognized and should be handled normally. */ +- +-static bool +-reload_combine_recognize_pattern (rtx insn) +-{ +- rtx set, reg, src; +- unsigned int regno; +- +- set = single_set (insn); +- if (set == NULL_RTX) +- return false; +- +- reg = SET_DEST (set); +- src = SET_SRC (set); +- if (!REG_P (reg) +- || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1) +- return false; +- +- regno = REGNO (reg); +- +- /* Look for (set (REGX) (CONST_INT)) +- (set (REGX) (PLUS (REGX) (REGY))) +- ... +- ... (MEM (REGX)) ... +- and convert it to +- (set (REGZ) (CONST_INT)) +- ... +- ... (MEM (PLUS (REGZ) (REGY)))... . +- +- First, check that we have (set (REGX) (PLUS (REGX) (REGY))) +- and that we know all uses of REGX before it dies. 
+- Also, explicitly check that REGX != REGY; our life information +- does not yet show whether REGY changes in this insn. */ +- +- if (GET_CODE (src) == PLUS +- && reg_state[regno].all_offsets_match +- && last_index_reg != -1 +- && REG_P (XEXP (src, 1)) +- && rtx_equal_p (XEXP (src, 0), reg) +- && !rtx_equal_p (XEXP (src, 1), reg) +- && reg_state[regno].use_index >= 0 +- && reg_state[regno].use_index < RELOAD_COMBINE_MAX_USES +- && last_label_ruid < reg_state[regno].use_ruid) +- { +- rtx base = XEXP (src, 1); +- rtx prev = prev_nonnote_insn (insn); +- rtx prev_set = prev ? single_set (prev) : NULL_RTX; +- rtx index_reg = NULL_RTX; +- rtx reg_sum = NULL_RTX; +- int i; +- +- /* Now we need to set INDEX_REG to an index register (denoted as +- REGZ in the illustration above) and REG_SUM to the expression +- register+register that we want to use to substitute uses of REG +- (typically in MEMs) with. First check REG and BASE for being +- index registers; we can use them even if they are not dead. */ +- if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno) +- || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], +- REGNO (base))) +- { +- index_reg = reg; +- reg_sum = src; +- } +- else +- { +- /* Otherwise, look for a free index register. Since we have +- checked above that neither REG nor BASE are index registers, +- if we find anything at all, it will be different from these +- two registers. */ +- for (i = first_index_reg; i <= last_index_reg; i++) +- { +- if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i) +- && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES +- && reg_state[i].store_ruid <= reg_state[regno].use_ruid +- && (call_used_regs[i] || df_regs_ever_live_p (i)) +- && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM) +- && !fixed_regs[i] && !global_regs[i] +- && hard_regno_nregs[i][GET_MODE (reg)] == 1 +- && targetm.hard_regno_scratch_ok (i)) +- { +- index_reg = gen_rtx_REG (GET_MODE (reg), i); +- reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base); +- break; +- } +- } +- } +- +- /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that +- (REGY), i.e. BASE, is not clobbered before the last use we'll +- create. */ +- if (reg_sum +- && prev_set +- && CONST_INT_P (SET_SRC (prev_set)) +- && rtx_equal_p (SET_DEST (prev_set), reg) +- && (reg_state[REGNO (base)].store_ruid +- <= reg_state[regno].use_ruid)) +- { +- /* Change destination register and, if necessary, the constant +- value in PREV, the constant loading instruction. */ +- validate_change (prev, &SET_DEST (prev_set), index_reg, 1); +- if (reg_state[regno].offset != const0_rtx) +- validate_change (prev, +- &SET_SRC (prev_set), +- GEN_INT (INTVAL (SET_SRC (prev_set)) +- + INTVAL (reg_state[regno].offset)), +- 1); +- +- /* Now for every use of REG that we have recorded, replace REG +- with REG_SUM. */ +- for (i = reg_state[regno].use_index; +- i < RELOAD_COMBINE_MAX_USES; i++) +- validate_unshare_change (reg_state[regno].reg_use[i].insn, +- reg_state[regno].reg_use[i].usep, +- /* Each change must have its own +- replacement. */ +- reg_sum, 1); +- +- if (apply_change_group ()) +- { +- struct reg_use *lowest_ruid = NULL; +- +- /* For every new use of REG_SUM, we have to record the use +- of BASE therein, i.e. operand 1. 
*/ +- for (i = reg_state[regno].use_index; +- i < RELOAD_COMBINE_MAX_USES; i++) +- { +- struct reg_use *use = reg_state[regno].reg_use + i; +- reload_combine_note_use (&XEXP (*use->usep, 1), use->insn, +- use->ruid, use->containing_mem); +- if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid) +- lowest_ruid = use; +- } +- +- fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn); +- +- /* Delete the reg-reg addition. */ +- delete_insn (insn); +- +- if (reg_state[regno].offset != const0_rtx) +- /* Previous REG_EQUIV / REG_EQUAL notes for PREV +- are now invalid. */ +- remove_reg_equal_equiv_notes (prev); +- +- reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES; +- return true; +- } +- } +- } +- return false; +-} +- + static void + reload_combine (void) + { +- rtx insn, prev; ++ rtx insn, set; ++ int first_index_reg = -1; ++ int last_index_reg = 0; + int i; + basic_block bb; + unsigned int r; ++ int last_label_ruid; + int min_labelno, n_labels; + HARD_REG_SET ever_live_at_start, *label_live; + ++ /* If reg+reg can be used in offsetable memory addresses, the main chunk of ++ reload has already used it where appropriate, so there is no use in ++ trying to generate it now. */ ++ if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS) ++ return; ++ + /* To avoid wasting too much time later searching for an index register, + determine the minimum and maximum index register numbers. */ +- if (INDEX_REG_CLASS == NO_REGS) +- last_index_reg = -1; +- else if (first_index_reg == -1 && last_index_reg == 0) +- { +- for (r = 0; r < FIRST_PSEUDO_REGISTER; r++) +- if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r)) +- { +- if (first_index_reg == -1) +- first_index_reg = r; +- +- last_index_reg = r; +- } +- +- /* If no index register is available, we can quit now. Set LAST_INDEX_REG +- to -1 so we'll know to quit early the next time we get here. */ +- if (first_index_reg == -1) +- { +- last_index_reg = -1; +- return; +- } +- } ++ for (r = 0; r < FIRST_PSEUDO_REGISTER; r++) ++ if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r)) ++ { ++ if (first_index_reg == -1) ++ first_index_reg = r; ++ ++ last_index_reg = r; ++ } ++ ++ /* If no index register is available, we can quit now. */ ++ if (first_index_reg == -1) ++ return; + + /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime + information is a bit fuzzy immediately after reload, but it's +@@ -1278,23 +753,20 @@ + } + + /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */ +- last_label_ruid = last_jump_ruid = reload_combine_ruid = 0; ++ last_label_ruid = reload_combine_ruid = 0; + for (r = 0; r < FIRST_PSEUDO_REGISTER; r++) + { +- reg_state[r].store_ruid = 0; +- reg_state[r].real_store_ruid = 0; ++ reg_state[r].store_ruid = reload_combine_ruid; + if (fixed_regs[r]) + reg_state[r].use_index = -1; + else + reg_state[r].use_index = RELOAD_COMBINE_MAX_USES; + } + +- for (insn = get_last_insn (); insn; insn = prev) ++ for (insn = get_last_insn (); insn; insn = PREV_INSN (insn)) + { + rtx note; + +- prev = PREV_INSN (insn); +- + /* We cannot do our optimization across labels. Invalidating all the use + information we have would be costly, so we just note where the label + is and then later disable any optimization that would cross it. */ +@@ -1305,17 +777,141 @@ + if (! fixed_regs[r]) + reg_state[r].use_index = RELOAD_COMBINE_MAX_USES; + +- if (! NONDEBUG_INSN_P (insn)) ++ if (! 
INSN_P (insn)) + continue; + + reload_combine_ruid++; + +- if (control_flow_insn_p (insn)) +- last_jump_ruid = reload_combine_ruid; +- +- if (reload_combine_recognize_const_pattern (insn) +- || reload_combine_recognize_pattern (insn)) +- continue; ++ /* Look for (set (REGX) (CONST_INT)) ++ (set (REGX) (PLUS (REGX) (REGY))) ++ ... ++ ... (MEM (REGX)) ... ++ and convert it to ++ (set (REGZ) (CONST_INT)) ++ ... ++ ... (MEM (PLUS (REGZ) (REGY)))... . ++ ++ First, check that we have (set (REGX) (PLUS (REGX) (REGY))) ++ and that we know all uses of REGX before it dies. ++ Also, explicitly check that REGX != REGY; our life information ++ does not yet show whether REGY changes in this insn. */ ++ set = single_set (insn); ++ if (set != NULL_RTX ++ && REG_P (SET_DEST (set)) ++ && (hard_regno_nregs[REGNO (SET_DEST (set))] ++ [GET_MODE (SET_DEST (set))] ++ == 1) ++ && GET_CODE (SET_SRC (set)) == PLUS ++ && REG_P (XEXP (SET_SRC (set), 1)) ++ && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set)) ++ && !rtx_equal_p (XEXP (SET_SRC (set), 1), SET_DEST (set)) ++ && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid) ++ { ++ rtx reg = SET_DEST (set); ++ rtx plus = SET_SRC (set); ++ rtx base = XEXP (plus, 1); ++ rtx prev = prev_nonnote_nondebug_insn (insn); ++ rtx prev_set = prev ? single_set (prev) : NULL_RTX; ++ unsigned int regno = REGNO (reg); ++ rtx index_reg = NULL_RTX; ++ rtx reg_sum = NULL_RTX; ++ ++ /* Now we need to set INDEX_REG to an index register (denoted as ++ REGZ in the illustration above) and REG_SUM to the expression ++ register+register that we want to use to substitute uses of REG ++ (typically in MEMs) with. First check REG and BASE for being ++ index registers; we can use them even if they are not dead. */ ++ if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno) ++ || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], ++ REGNO (base))) ++ { ++ index_reg = reg; ++ reg_sum = plus; ++ } ++ else ++ { ++ /* Otherwise, look for a free index register. Since we have ++ checked above that neither REG nor BASE are index registers, ++ if we find anything at all, it will be different from these ++ two registers. */ ++ for (i = first_index_reg; i <= last_index_reg; i++) ++ { ++ if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], ++ i) ++ && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES ++ && reg_state[i].store_ruid <= reg_state[regno].use_ruid ++ && hard_regno_nregs[i][GET_MODE (reg)] == 1) ++ { ++ index_reg = gen_rtx_REG (GET_MODE (reg), i); ++ reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base); ++ break; ++ } ++ } ++ } ++ ++ /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that ++ (REGY), i.e. BASE, is not clobbered before the last use we'll ++ create. */ ++ if (reg_sum ++ && prev_set ++ && CONST_INT_P (SET_SRC (prev_set)) ++ && rtx_equal_p (SET_DEST (prev_set), reg) ++ && reg_state[regno].use_index >= 0 ++ && (reg_state[REGNO (base)].store_ruid ++ <= reg_state[regno].use_ruid)) ++ { ++ int i; ++ ++ /* Change destination register and, if necessary, the constant ++ value in PREV, the constant loading instruction. */ ++ validate_change (prev, &SET_DEST (prev_set), index_reg, 1); ++ if (reg_state[regno].offset != const0_rtx) ++ validate_change (prev, ++ &SET_SRC (prev_set), ++ GEN_INT (INTVAL (SET_SRC (prev_set)) ++ + INTVAL (reg_state[regno].offset)), ++ 1); ++ ++ /* Now for every use of REG that we have recorded, replace REG ++ with REG_SUM. 
*/ ++ for (i = reg_state[regno].use_index; ++ i < RELOAD_COMBINE_MAX_USES; i++) ++ validate_unshare_change (reg_state[regno].reg_use[i].insn, ++ reg_state[regno].reg_use[i].usep, ++ /* Each change must have its own ++ replacement. */ ++ reg_sum, 1); ++ ++ if (apply_change_group ()) ++ { ++ /* For every new use of REG_SUM, we have to record the use ++ of BASE therein, i.e. operand 1. */ ++ for (i = reg_state[regno].use_index; ++ i < RELOAD_COMBINE_MAX_USES; i++) ++ reload_combine_note_use ++ (&XEXP (*reg_state[regno].reg_use[i].usep, 1), ++ reg_state[regno].reg_use[i].insn); ++ ++ if (reg_state[REGNO (base)].use_ruid ++ > reg_state[regno].use_ruid) ++ reg_state[REGNO (base)].use_ruid ++ = reg_state[regno].use_ruid; ++ ++ /* Delete the reg-reg addition. */ ++ delete_insn (insn); ++ ++ if (reg_state[regno].offset != const0_rtx) ++ /* Previous REG_EQUIV / REG_EQUAL notes for PREV ++ are now invalid. */ ++ remove_reg_equal_equiv_notes (prev); ++ ++ reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES; ++ reg_state[REGNO (index_reg)].store_ruid ++ = reload_combine_ruid; ++ continue; ++ } ++ } ++ } + + note_stores (PATTERN (insn), reload_combine_note_store, NULL); + +@@ -1371,8 +967,7 @@ + reg_state[i].use_index = -1; + } + +- reload_combine_note_use (&PATTERN (insn), insn, +- reload_combine_ruid, NULL_RTX); ++ reload_combine_note_use (&PATTERN (insn), insn); + for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) + { + if (REG_NOTE_KIND (note) == REG_INC +@@ -1381,7 +976,6 @@ + int regno = REGNO (XEXP (note, 0)); + + reg_state[regno].store_ruid = reload_combine_ruid; +- reg_state[regno].real_store_ruid = reload_combine_ruid; + reg_state[regno].use_index = -1; + } + } +@@ -1391,8 +985,8 @@ + } + + /* Check if DST is a register or a subreg of a register; if it is, +- update store_ruid, real_store_ruid and use_index in the reg_state +- structure accordingly. Called via note_stores from reload_combine. */ ++ update reg_state[regno].store_ruid and reg_state[regno].use_index ++ accordingly. Called via note_stores from reload_combine. */ + + static void + reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED) +@@ -1416,14 +1010,14 @@ + /* note_stores might have stripped a STRICT_LOW_PART, so we have to be + careful with registers / register parts that are not full words. + Similarly for ZERO_EXTRACT. */ +- if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT ++ if (GET_CODE (set) != SET ++ || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT + || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART) + { + for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--) + { + reg_state[i].use_index = -1; + reg_state[i].store_ruid = reload_combine_ruid; +- reg_state[i].real_store_ruid = reload_combine_ruid; + } + } + else +@@ -1431,8 +1025,6 @@ + for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--) + { + reg_state[i].store_ruid = reload_combine_ruid; +- if (GET_CODE (set) == SET) +- reg_state[i].real_store_ruid = reload_combine_ruid; + reg_state[i].use_index = RELOAD_COMBINE_MAX_USES; + } + } +@@ -1443,7 +1035,7 @@ + *XP is the pattern of INSN, or a part of it. + Called from reload_combine, and recursively by itself. 
*/ + static void +-reload_combine_note_use (rtx *xp, rtx insn, int ruid, rtx containing_mem) ++reload_combine_note_use (rtx *xp, rtx insn) + { + rtx x = *xp; + enum rtx_code code = x->code; +@@ -1456,7 +1048,7 @@ + case SET: + if (REG_P (SET_DEST (x))) + { +- reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX); ++ reload_combine_note_use (&SET_SRC (x), insn); + return; + } + break; +@@ -1512,11 +1104,6 @@ + return; + } + +- /* We may be called to update uses in previously seen insns. +- Don't add uses beyond the last store we saw. */ +- if (ruid < reg_state[regno].store_ruid) +- return; +- + /* If this register is already used in some unknown fashion, we + can't do anything. + If we decrement the index from zero to -1, we can't store more +@@ -1525,34 +1112,29 @@ + if (use_index < 0) + return; + +- if (use_index == RELOAD_COMBINE_MAX_USES - 1) ++ if (use_index != RELOAD_COMBINE_MAX_USES - 1) ++ { ++ /* We have found another use for a register that is already ++ used later. Check if the offsets match; if not, mark the ++ register as used in an unknown fashion. */ ++ if (! rtx_equal_p (offset, reg_state[regno].offset)) ++ { ++ reg_state[regno].use_index = -1; ++ return; ++ } ++ } ++ else + { + /* This is the first use of this register we have seen since we + marked it as dead. */ + reg_state[regno].offset = offset; +- reg_state[regno].all_offsets_match = true; +- reg_state[regno].use_ruid = ruid; +- } +- else +- { +- if (reg_state[regno].use_ruid > ruid) +- reg_state[regno].use_ruid = ruid; +- +- if (! rtx_equal_p (offset, reg_state[regno].offset)) +- reg_state[regno].all_offsets_match = false; +- } +- ++ reg_state[regno].use_ruid = reload_combine_ruid; ++ } + reg_state[regno].reg_use[use_index].insn = insn; +- reg_state[regno].reg_use[use_index].ruid = ruid; +- reg_state[regno].reg_use[use_index].containing_mem = containing_mem; + reg_state[regno].reg_use[use_index].usep = xp; + return; + } + +- case MEM: +- containing_mem = x; +- break; +- + default: + break; + } +@@ -1562,12 +1144,11 @@ + for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) + { + if (fmt[i] == 'e') +- reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem); ++ reload_combine_note_use (&XEXP (x, i), insn); + else if (fmt[i] == 'E') + { + for (j = XVECLEN (x, i) - 1; j >= 0; j--) +- reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid, +- containing_mem); ++ reload_combine_note_use (&XVECEXP (x, i, j), insn); + } + } + } +@@ -1615,10 +1196,9 @@ + while REG is known to already have value (SYM + offset). + This function tries to change INSN into an add instruction + (set (REG) (plus (REG) (OFF - offset))) using the known value. +- It also updates the information about REG's known value. +- Return true if we made a change. */ ++ It also updates the information about REG's known value. */ + +-static bool ++static void + move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx insn) + { + rtx pat = PATTERN (insn); +@@ -1627,7 +1207,6 @@ + rtx new_src = gen_int_mode (INTVAL (off) - reg_offset[regno], + GET_MODE (reg)); + bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)); +- bool changed = false; + + /* (set (reg) (plus (reg) (const_int 0))) is not canonical; + use (set (reg) (reg)) instead. +@@ -1642,13 +1221,13 @@ + (reg)), would be discarded. Maybe we should + try a truncMN pattern? 
*/ + if (INTVAL (off) == reg_offset [regno]) +- changed = validate_change (insn, &SET_SRC (pat), reg, 0); ++ validate_change (insn, &SET_SRC (pat), reg, 0); + } + else if (rtx_cost (new_src, PLUS, speed) < rtx_cost (src, SET, speed) + && have_add2_insn (reg, new_src)) + { + rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src); +- changed = validate_change (insn, &SET_SRC (pat), tem, 0); ++ validate_change (insn, &SET_SRC (pat), tem, 0); + } + else if (sym == NULL_RTX && GET_MODE (reg) != BImode) + { +@@ -1673,9 +1252,8 @@ + gen_rtx_STRICT_LOW_PART (VOIDmode, + narrow_reg), + narrow_src); +- changed = validate_change (insn, &PATTERN (insn), +- new_set, 0); +- if (changed) ++ if (validate_change (insn, &PATTERN (insn), ++ new_set, 0)) + break; + } + } +@@ -1685,7 +1263,6 @@ + reg_mode[regno] = GET_MODE (reg); + reg_symbol_ref[regno] = sym; + reg_offset[regno] = INTVAL (off); +- return changed; + } + + +@@ -1695,10 +1272,9 @@ + value (SYM + offset) and change INSN into an add instruction + (set (REG) (plus (the found register) (OFF - offset))) if such + a register is found. It also updates the information about +- REG's known value. +- Return true iff we made a change. */ ++ REG's known value. */ + +-static bool ++static void + move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx insn) + { + rtx pat = PATTERN (insn); +@@ -1708,7 +1284,6 @@ + int min_regno; + bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)); + int i; +- bool changed = false; + + for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) + if (reg_set_luid[i] > move2add_last_label_luid +@@ -1753,25 +1328,20 @@ + GET_MODE (reg)); + tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src); + } +- if (validate_change (insn, &SET_SRC (pat), tem, 0)) +- changed = true; ++ validate_change (insn, &SET_SRC (pat), tem, 0); + } + reg_set_luid[regno] = move2add_luid; + reg_base_reg[regno] = -1; + reg_mode[regno] = GET_MODE (reg); + reg_symbol_ref[regno] = sym; + reg_offset[regno] = INTVAL (off); +- return changed; + } + +-/* Convert move insns with constant inputs to additions if they are cheaper. +- Return true if any changes were made. */ +-static bool ++static void + reload_cse_move2add (rtx first) + { + int i; + rtx insn; +- bool changed = false; + + for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--) + { +@@ -1832,7 +1402,7 @@ + && reg_base_reg[regno] < 0 + && reg_symbol_ref[regno] == NULL_RTX) + { +- changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn); ++ move2add_use_add2_insn (reg, NULL_RTX, src, insn); + continue; + } + +@@ -1893,7 +1463,6 @@ + } + if (success) + delete_insn (insn); +- changed |= success; + insn = next; + reg_mode[regno] = GET_MODE (reg); + reg_offset[regno] = +@@ -1939,12 +1508,12 @@ + && reg_base_reg[regno] < 0 + && reg_symbol_ref[regno] != NULL_RTX + && rtx_equal_p (sym, reg_symbol_ref[regno])) +- changed |= move2add_use_add2_insn (reg, sym, off, insn); ++ move2add_use_add2_insn (reg, sym, off, insn); + + /* Otherwise, we have to find a register whose value is sum + of sym and some constant value. */ + else +- changed |= move2add_use_add3_insn (reg, sym, off, insn); ++ move2add_use_add3_insn (reg, sym, off, insn); + + continue; + } +@@ -1999,7 +1568,6 @@ + } + } + } +- return changed; + } + + /* SET is a SET or CLOBBER that sets DST. DATA is the insn which + -- cgit 1.2.3-korg