/* Instruction scheduling pass.  This file computes dependencies between
   instructions.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com) Enhanced by,
   and currently maintained by, Jim Wilson (wilson@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
*/ 2590075Sobrien 2690075Sobrien#include "config.h" 2790075Sobrien#include "system.h" 28132718Skan#include "coretypes.h" 29132718Skan#include "tm.h" 3090075Sobrien#include "toplev.h" 3190075Sobrien#include "rtl.h" 3290075Sobrien#include "tm_p.h" 3390075Sobrien#include "hard-reg-set.h" 3490075Sobrien#include "regs.h" 3590075Sobrien#include "function.h" 3690075Sobrien#include "flags.h" 3790075Sobrien#include "insn-config.h" 3890075Sobrien#include "insn-attr.h" 3990075Sobrien#include "except.h" 4090075Sobrien#include "toplev.h" 4190075Sobrien#include "recog.h" 4290075Sobrien#include "sched-int.h" 4390075Sobrien#include "params.h" 4490075Sobrien#include "cselib.h" 45132718Skan#include "df.h" 4690075Sobrien 4790075Sobrien 4890075Sobrienstatic regset reg_pending_sets; 4990075Sobrienstatic regset reg_pending_clobbers; 5090075Sobrienstatic regset reg_pending_uses; 5190075Sobrien 52132718Skan/* The following enumeration values tell us what dependencies we 53132718Skan should use to implement the barrier. We use true-dependencies for 54132718Skan TRUE_BARRIER and anti-dependencies for MOVE_BARRIER. */ 55132718Skanenum reg_pending_barrier_mode 56132718Skan{ 57132718Skan NOT_A_BARRIER = 0, 58132718Skan MOVE_BARRIER, 59132718Skan TRUE_BARRIER 60132718Skan}; 61132718Skan 62132718Skanstatic enum reg_pending_barrier_mode reg_pending_barrier; 63132718Skan 6490075Sobrien/* To speed up the test for duplicate dependency links we keep a 6590075Sobrien record of dependencies created by add_dependence when the average 6690075Sobrien number of instructions in a basic block is very large. 6790075Sobrien 6890075Sobrien Studies have shown that there is typically around 5 instructions between 6990075Sobrien branches for typical C code. So we can make a guess that the average 7090075Sobrien basic block is approximately 5 instructions long; we will choose 100X 7190075Sobrien the average size as a very large basic block. 
7290075Sobrien 7390075Sobrien Each insn has associated bitmaps for its dependencies. Each bitmap 7490075Sobrien has enough entries to represent a dependency on any other insn in 7590075Sobrien the insn chain. All bitmap for true dependencies cache is 7690075Sobrien allocated then the rest two ones are also allocated. */ 77132718Skanstatic bitmap_head *true_dependency_cache; 78169689Skanstatic bitmap_head *output_dependency_cache; 79132718Skanstatic bitmap_head *anti_dependency_cache; 80169689Skanstatic bitmap_head *spec_dependency_cache; 81169689Skanstatic int cache_size; 8290075Sobrien 8390075Sobrien/* To speed up checking consistency of formed forward insn 8490075Sobrien dependencies we use the following cache. Another possible solution 8590075Sobrien could be switching off checking duplication of insns in forward 8690075Sobrien dependencies. */ 8790075Sobrien#ifdef ENABLE_CHECKING 88132718Skanstatic bitmap_head *forward_dependency_cache; 8990075Sobrien#endif 9090075Sobrien 91132718Skanstatic int deps_may_trap_p (rtx); 92169689Skanstatic void add_dependence_list (rtx, rtx, int, enum reg_note); 93169689Skanstatic void add_dependence_list_and_free (rtx, rtx *, int, enum reg_note); 94169689Skanstatic void delete_all_dependences (rtx); 95169689Skanstatic void fixup_sched_groups (rtx); 9690075Sobrien 97132718Skanstatic void flush_pending_lists (struct deps *, rtx, int, int); 98132718Skanstatic void sched_analyze_1 (struct deps *, rtx, rtx); 99132718Skanstatic void sched_analyze_2 (struct deps *, rtx, rtx); 100169689Skanstatic void sched_analyze_insn (struct deps *, rtx, rtx); 10190075Sobrien 102169689Skanstatic rtx sched_get_condition (rtx); 103132718Skanstatic int conditions_mutex_p (rtx, rtx); 104169689Skan 105169689Skanstatic enum DEPS_ADJUST_RESULT maybe_add_or_update_back_dep_1 (rtx, rtx, 106169689Skan enum reg_note, ds_t, rtx, rtx, rtx **); 107169689Skanstatic enum DEPS_ADJUST_RESULT add_or_update_back_dep_1 (rtx, rtx, 108169689Skan enum reg_note, ds_t, rtx, 
rtx, rtx **); 109169689Skanstatic void add_back_dep (rtx, rtx, enum reg_note, ds_t); 110169689Skan 111169689Skanstatic void adjust_add_sorted_back_dep (rtx, rtx, rtx *); 112169689Skanstatic void adjust_back_add_forw_dep (rtx, rtx *); 113169689Skanstatic void delete_forw_dep (rtx, rtx); 114169689Skanstatic dw_t estimate_dep_weak (rtx, rtx); 115169689Skan#ifdef INSN_SCHEDULING 116169689Skan#ifdef ENABLE_CHECKING 117169689Skanstatic void check_dep_status (enum reg_note, ds_t, bool); 118169689Skan#endif 119169689Skan#endif 12090075Sobrien 12190075Sobrien/* Return nonzero if a load of the memory reference MEM can cause a trap. */ 12290075Sobrien 12390075Sobrienstatic int 124132718Skandeps_may_trap_p (rtx mem) 12590075Sobrien{ 12690075Sobrien rtx addr = XEXP (mem, 0); 12790075Sobrien 128132718Skan if (REG_P (addr) && REGNO (addr) >= FIRST_PSEUDO_REGISTER) 129132718Skan { 130132718Skan rtx t = get_reg_known_value (REGNO (addr)); 131132718Skan if (t) 132132718Skan addr = t; 133132718Skan } 13490075Sobrien return rtx_addr_can_trap_p (addr); 13590075Sobrien} 13690075Sobrien 13790075Sobrien/* Return the INSN_LIST containing INSN in LIST, or NULL 13890075Sobrien if LIST does not contain INSN. */ 13990075Sobrien 14090075Sobrienrtx 141132718Skanfind_insn_list (rtx insn, rtx list) 14290075Sobrien{ 14390075Sobrien while (list) 14490075Sobrien { 14590075Sobrien if (XEXP (list, 0) == insn) 14690075Sobrien return list; 14790075Sobrien list = XEXP (list, 1); 14890075Sobrien } 14990075Sobrien return 0; 15090075Sobrien} 15190075Sobrien 15290075Sobrien/* Find the condition under which INSN is executed. 
*/ 15390075Sobrien 15490075Sobrienstatic rtx 155169689Skansched_get_condition (rtx insn) 15690075Sobrien{ 15790075Sobrien rtx pat = PATTERN (insn); 158169689Skan rtx src; 15990075Sobrien 16090075Sobrien if (pat == 0) 16190075Sobrien return 0; 162169689Skan 16390075Sobrien if (GET_CODE (pat) == COND_EXEC) 16490075Sobrien return COND_EXEC_TEST (pat); 165169689Skan 166169689Skan if (!any_condjump_p (insn) || !onlyjump_p (insn)) 16790075Sobrien return 0; 168169689Skan 169169689Skan src = SET_SRC (pc_set (insn)); 170169689Skan 171169689Skan if (XEXP (src, 2) == pc_rtx) 172169689Skan return XEXP (src, 0); 173169689Skan else if (XEXP (src, 1) == pc_rtx) 174169689Skan { 175169689Skan rtx cond = XEXP (src, 0); 176169689Skan enum rtx_code revcode = reversed_comparison_code (cond, insn); 177169689Skan 178169689Skan if (revcode == UNKNOWN) 179169689Skan return 0; 180169689Skan return gen_rtx_fmt_ee (revcode, GET_MODE (cond), XEXP (cond, 0), 181169689Skan XEXP (cond, 1)); 182169689Skan } 183169689Skan 184169689Skan return 0; 18590075Sobrien} 18690075Sobrien 187169689Skan 18890075Sobrien/* Return nonzero if conditions COND1 and COND2 can never be both true. */ 18990075Sobrien 19090075Sobrienstatic int 191132718Skanconditions_mutex_p (rtx cond1, rtx cond2) 19290075Sobrien{ 193169689Skan if (COMPARISON_P (cond1) 194169689Skan && COMPARISON_P (cond2) 195169689Skan && GET_CODE (cond1) == reversed_comparison_code (cond2, NULL) 19690075Sobrien && XEXP (cond1, 0) == XEXP (cond2, 0) 19790075Sobrien && XEXP (cond1, 1) == XEXP (cond2, 1)) 19890075Sobrien return 1; 19990075Sobrien return 0; 20090075Sobrien} 20190075Sobrien 202169689Skan/* Return true if insn1 and insn2 can never depend on one another because 203169689Skan the conditions under which they are executed are mutually exclusive. 
*/ 204169689Skanbool 205169689Skansched_insns_conditions_mutex_p (rtx insn1, rtx insn2) 20690075Sobrien{ 20790075Sobrien rtx cond1, cond2; 20890075Sobrien 20990075Sobrien /* flow.c doesn't handle conditional lifetimes entirely correctly; 21090075Sobrien calls mess up the conditional lifetimes. */ 211169689Skan if (!CALL_P (insn1) && !CALL_P (insn2)) 21290075Sobrien { 213169689Skan cond1 = sched_get_condition (insn1); 214169689Skan cond2 = sched_get_condition (insn2); 21590075Sobrien if (cond1 && cond2 21690075Sobrien && conditions_mutex_p (cond1, cond2) 21790075Sobrien /* Make sure first instruction doesn't affect condition of second 21890075Sobrien instruction if switched. */ 219169689Skan && !modified_in_p (cond1, insn2) 22090075Sobrien /* Make sure second instruction doesn't affect condition of first 22190075Sobrien instruction if switched. */ 222169689Skan && !modified_in_p (cond2, insn1)) 223169689Skan return true; 22490075Sobrien } 225169689Skan return false; 226169689Skan} 227169689Skan 228169689Skan/* Add ELEM wrapped in an INSN_LIST with reg note kind DEP_TYPE to the 229169689Skan LOG_LINKS of INSN, if it is not already there. DEP_TYPE indicates the 230169689Skan type of dependence that this link represents. DS, if nonzero, 231169689Skan indicates speculations, through which this dependence can be overcome. 232169689Skan MEM1 and MEM2, if non-null, corresponds to memory locations in case of 233169689Skan data speculation. The function returns a value indicating if an old entry 234169689Skan has been changed or a new entry has been added to insn's LOG_LINK. 235169689Skan In case of changed entry CHANGED_LINKPP sets to its address. 236169689Skan See also the definition of enum DEPS_ADJUST_RESULT in sched-int.h. 237169689Skan Actual manipulation of dependence data structures is performed in 238169689Skan add_or_update_back_dep_1. 
*/ 23990075Sobrien 240169689Skanstatic enum DEPS_ADJUST_RESULT 241169689Skanmaybe_add_or_update_back_dep_1 (rtx insn, rtx elem, enum reg_note dep_type, 242169689Skan ds_t ds, rtx mem1, rtx mem2, 243169689Skan rtx **changed_linkpp) 244169689Skan{ 245169689Skan gcc_assert (INSN_P (insn) && INSN_P (elem)); 246169689Skan 247169689Skan /* Don't depend an insn on itself. */ 248169689Skan if (insn == elem) 249169689Skan { 25090075Sobrien#ifdef INSN_SCHEDULING 251169689Skan if (current_sched_info->flags & DO_SPECULATION) 252169689Skan /* INSN has an internal dependence, which we can't overcome. */ 253169689Skan HAS_INTERNAL_DEP (insn) = 1; 25490075Sobrien#endif 255169689Skan return 0; 256169689Skan } 25790075Sobrien 258169689Skan return add_or_update_back_dep_1 (insn, elem, dep_type, 259169689Skan ds, mem1, mem2, changed_linkpp); 260169689Skan} 261169689Skan 262169689Skan/* This function has the same meaning of parameters and return values 263169689Skan as maybe_add_or_update_back_dep_1. The only difference between these 264169689Skan two functions is that INSN and ELEM are guaranteed not to be the same 265169689Skan in this one. */ 266169689Skanstatic enum DEPS_ADJUST_RESULT 267169689Skanadd_or_update_back_dep_1 (rtx insn, rtx elem, enum reg_note dep_type, 268169689Skan ds_t ds ATTRIBUTE_UNUSED, 269169689Skan rtx mem1 ATTRIBUTE_UNUSED, rtx mem2 ATTRIBUTE_UNUSED, 270169689Skan rtx **changed_linkpp ATTRIBUTE_UNUSED) 271169689Skan{ 272169689Skan bool maybe_present_p = true, present_p = false; 273169689Skan 274169689Skan gcc_assert (INSN_P (insn) && INSN_P (elem) && insn != elem); 275169689Skan 276169689Skan#ifdef INSN_SCHEDULING 277169689Skan 278169689Skan#ifdef ENABLE_CHECKING 279169689Skan check_dep_status (dep_type, ds, mem1 != NULL); 280169689Skan#endif 281169689Skan 28290075Sobrien /* If we already have a dependency for ELEM, then we do not need to 28390075Sobrien do anything. Avoiding the list walk below can cut compile times 28490075Sobrien dramatically for some code. 
*/ 28590075Sobrien if (true_dependency_cache != NULL) 28690075Sobrien { 287169689Skan enum reg_note present_dep_type; 288169689Skan 289169689Skan gcc_assert (output_dependency_cache); 290169689Skan gcc_assert (anti_dependency_cache); 291169689Skan if (!(current_sched_info->flags & USE_DEPS_LIST)) 292169689Skan { 293169689Skan if (bitmap_bit_p (&true_dependency_cache[INSN_LUID (insn)], 294169689Skan INSN_LUID (elem))) 295169689Skan present_dep_type = REG_DEP_TRUE; 296169689Skan else if (bitmap_bit_p (&output_dependency_cache[INSN_LUID (insn)], 297169689Skan INSN_LUID (elem))) 298169689Skan present_dep_type = REG_DEP_OUTPUT; 299169689Skan else if (bitmap_bit_p (&anti_dependency_cache[INSN_LUID (insn)], 300169689Skan INSN_LUID (elem))) 301169689Skan present_dep_type = REG_DEP_ANTI; 302169689Skan else 303169689Skan maybe_present_p = false; 30490075Sobrien 305169689Skan if (maybe_present_p) 306169689Skan { 307169689Skan if ((int) dep_type >= (int) present_dep_type) 308169689Skan return DEP_PRESENT; 309169689Skan 310169689Skan present_p = true; 311169689Skan } 312169689Skan } 313117395Skan else 314169689Skan { 315169689Skan ds_t present_dep_types = 0; 316169689Skan 317169689Skan if (bitmap_bit_p (&true_dependency_cache[INSN_LUID (insn)], 318169689Skan INSN_LUID (elem))) 319169689Skan present_dep_types |= DEP_TRUE; 320169689Skan if (bitmap_bit_p (&output_dependency_cache[INSN_LUID (insn)], 321169689Skan INSN_LUID (elem))) 322169689Skan present_dep_types |= DEP_OUTPUT; 323169689Skan if (bitmap_bit_p (&anti_dependency_cache[INSN_LUID (insn)], 324169689Skan INSN_LUID (elem))) 325169689Skan present_dep_types |= DEP_ANTI; 326169689Skan 327169689Skan if (present_dep_types) 328169689Skan { 329169689Skan if (!(current_sched_info->flags & DO_SPECULATION) 330169689Skan || !bitmap_bit_p (&spec_dependency_cache[INSN_LUID (insn)], 331169689Skan INSN_LUID (elem))) 332169689Skan { 333169689Skan if ((present_dep_types | (ds & DEP_TYPES)) 334169689Skan == present_dep_types) 335169689Skan 
/* We already have all these bits. */ 336169689Skan return DEP_PRESENT; 337169689Skan } 338169689Skan else 339169689Skan { 340169689Skan /* Only true dependencies can be data speculative and 341169689Skan only anti dependencies can be control speculative. */ 342169689Skan gcc_assert ((present_dep_types & (DEP_TRUE | DEP_ANTI)) 343169689Skan == present_dep_types); 344169689Skan 345169689Skan /* if (additional dep is SPECULATIVE) then 346169689Skan we should update DEP_STATUS 347169689Skan else 348169689Skan we should reset existing dep to non-speculative. */ 349169689Skan } 350169689Skan 351169689Skan present_p = true; 352169689Skan } 353169689Skan else 354169689Skan maybe_present_p = false; 355169689Skan } 35690075Sobrien } 35790075Sobrien#endif 35890075Sobrien 35990075Sobrien /* Check that we don't already have this dependence. */ 360169689Skan if (maybe_present_p) 361169689Skan { 362169689Skan rtx *linkp; 363169689Skan 364169689Skan for (linkp = &LOG_LINKS (insn); *linkp; linkp = &XEXP (*linkp, 1)) 365169689Skan { 366169689Skan rtx link = *linkp; 367169689Skan 368169689Skan gcc_assert (true_dependency_cache == 0 || present_p); 369169689Skan 370169689Skan if (XEXP (link, 0) == elem) 371169689Skan { 372169689Skan enum DEPS_ADJUST_RESULT changed_p = DEP_PRESENT; 373169689Skan 37490075Sobrien#ifdef INSN_SCHEDULING 375169689Skan if (current_sched_info->flags & USE_DEPS_LIST) 376169689Skan { 377169689Skan ds_t new_status = ds | DEP_STATUS (link); 378169689Skan 379169689Skan if (new_status & SPECULATIVE) 380169689Skan { 381169689Skan if (!(ds & SPECULATIVE) 382169689Skan || !(DEP_STATUS (link) & SPECULATIVE)) 383169689Skan /* Then this dep can't be speculative. 
*/ 384169689Skan { 385169689Skan new_status &= ~SPECULATIVE; 386169689Skan if (true_dependency_cache 387169689Skan && (DEP_STATUS (link) & SPECULATIVE)) 388169689Skan bitmap_clear_bit (&spec_dependency_cache 389169689Skan [INSN_LUID (insn)], 390169689Skan INSN_LUID (elem)); 391169689Skan } 392169689Skan else 393169689Skan { 394169689Skan /* Both are speculative. Merging probabilities. */ 395169689Skan if (mem1) 396169689Skan { 397169689Skan dw_t dw; 398169689Skan 399169689Skan dw = estimate_dep_weak (mem1, mem2); 400169689Skan ds = set_dep_weak (ds, BEGIN_DATA, dw); 401169689Skan } 402169689Skan 403169689Skan new_status = ds_merge (DEP_STATUS (link), ds); 404169689Skan } 405169689Skan } 406169689Skan 407169689Skan ds = new_status; 408169689Skan } 409169689Skan 410169689Skan /* Clear corresponding cache entry because type of the link 411169689Skan may have changed. Keep them if we use_deps_list. */ 412169689Skan if (true_dependency_cache != NULL 413169689Skan && !(current_sched_info->flags & USE_DEPS_LIST)) 414169689Skan { 415169689Skan enum reg_note kind = REG_NOTE_KIND (link); 416169689Skan 417169689Skan switch (kind) 418169689Skan { 419169689Skan case REG_DEP_OUTPUT: 420169689Skan bitmap_clear_bit (&output_dependency_cache 421169689Skan [INSN_LUID (insn)], INSN_LUID (elem)); 422169689Skan break; 423169689Skan case REG_DEP_ANTI: 424169689Skan bitmap_clear_bit (&anti_dependency_cache 425169689Skan [INSN_LUID (insn)], INSN_LUID (elem)); 426169689Skan break; 427169689Skan default: 428169689Skan gcc_unreachable (); 429169689Skan } 430169689Skan } 431169689Skan 432169689Skan if ((current_sched_info->flags & USE_DEPS_LIST) 433169689Skan && DEP_STATUS (link) != ds) 434169689Skan { 435169689Skan DEP_STATUS (link) = ds; 436169689Skan changed_p = DEP_CHANGED; 437169689Skan } 43890075Sobrien#endif 43990075Sobrien 440169689Skan /* If this is a more restrictive type of dependence than the 441169689Skan existing one, then change the existing dependence to this 442169689Skan 
type. */ 443169689Skan if ((int) dep_type < (int) REG_NOTE_KIND (link)) 444169689Skan { 445169689Skan PUT_REG_NOTE_KIND (link, dep_type); 446169689Skan changed_p = DEP_CHANGED; 447169689Skan } 448117395Skan 44990075Sobrien#ifdef INSN_SCHEDULING 450169689Skan /* If we are adding a dependency to INSN's LOG_LINKs, then 451169689Skan note that in the bitmap caches of dependency information. */ 452169689Skan if (true_dependency_cache != NULL) 453169689Skan { 454169689Skan if (!(current_sched_info->flags & USE_DEPS_LIST)) 455169689Skan { 456169689Skan if (REG_NOTE_KIND (link) == REG_DEP_TRUE) 457169689Skan bitmap_set_bit (&true_dependency_cache 458169689Skan [INSN_LUID (insn)], INSN_LUID (elem)); 459169689Skan else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT) 460169689Skan bitmap_set_bit (&output_dependency_cache 461169689Skan [INSN_LUID (insn)], INSN_LUID (elem)); 462169689Skan else if (REG_NOTE_KIND (link) == REG_DEP_ANTI) 463169689Skan bitmap_set_bit (&anti_dependency_cache 464169689Skan [INSN_LUID (insn)], INSN_LUID (elem)); 465169689Skan } 466169689Skan else 467169689Skan { 468169689Skan if (ds & DEP_TRUE) 469169689Skan bitmap_set_bit (&true_dependency_cache 470169689Skan [INSN_LUID (insn)], INSN_LUID (elem)); 471169689Skan if (ds & DEP_OUTPUT) 472169689Skan bitmap_set_bit (&output_dependency_cache 473169689Skan [INSN_LUID (insn)], INSN_LUID (elem)); 474169689Skan if (ds & DEP_ANTI) 475169689Skan bitmap_set_bit (&anti_dependency_cache 476169689Skan [INSN_LUID (insn)], INSN_LUID (elem)); 477169689Skan /* Note, that dep can become speculative only 478169689Skan at the moment of creation. Thus, we don't need to 479169689Skan check for it here. */ 480169689Skan } 481169689Skan } 482169689Skan 483169689Skan if (changed_linkpp && changed_p == DEP_CHANGED) 484169689Skan *changed_linkpp = linkp; 48590075Sobrien#endif 486169689Skan return changed_p; 487169689Skan } 488169689Skan } 489169689Skan /* We didn't find a dep. It shouldn't be present in the cache. 
*/ 490169689Skan gcc_assert (!present_p); 491169689Skan } 49290075Sobrien 493169689Skan /* Might want to check one level of transitivity to save conses. 494169689Skan This check should be done in maybe_add_or_update_back_dep_1. 495169689Skan Since we made it to add_or_update_back_dep_1, we must create 496169689Skan (or update) a link. */ 49790075Sobrien 498169689Skan if (mem1) 499169689Skan { 500169689Skan gcc_assert (current_sched_info->flags & DO_SPECULATION); 501169689Skan ds = set_dep_weak (ds, BEGIN_DATA, estimate_dep_weak (mem1, mem2)); 502169689Skan } 503169689Skan 504169689Skan add_back_dep (insn, elem, dep_type, ds); 505169689Skan 506169689Skan return DEP_CREATED; 507169689Skan} 508169689Skan 509169689Skan/* This function creates a link between INSN and ELEM under any 510169689Skan conditions. DS describes speculative status of the link. */ 511169689Skanstatic void 512169689Skanadd_back_dep (rtx insn, rtx elem, enum reg_note dep_type, ds_t ds) 513169689Skan{ 514169689Skan gcc_assert (INSN_P (insn) && INSN_P (elem) && insn != elem); 515169689Skan 516169689Skan if (current_sched_info->flags & USE_DEPS_LIST) 517169689Skan LOG_LINKS (insn) = alloc_DEPS_LIST (elem, LOG_LINKS (insn), ds); 518169689Skan else 519169689Skan LOG_LINKS (insn) = alloc_INSN_LIST (elem, LOG_LINKS (insn)); 520169689Skan 52190075Sobrien /* Insn dependency, not data dependency. */ 522169689Skan PUT_REG_NOTE_KIND (LOG_LINKS (insn), dep_type); 523169689Skan 524169689Skan#ifdef INSN_SCHEDULING 525169689Skan#ifdef ENABLE_CHECKING 526169689Skan check_dep_status (dep_type, ds, false); 527169689Skan#endif 52890075Sobrien 52990075Sobrien /* If we are adding a dependency to INSN's LOG_LINKs, then note that 53090075Sobrien in the bitmap caches of dependency information. 
*/ 53190075Sobrien if (true_dependency_cache != NULL) 53290075Sobrien { 533169689Skan if (!(current_sched_info->flags & USE_DEPS_LIST)) 534169689Skan { 535169689Skan if (dep_type == REG_DEP_TRUE) 536169689Skan bitmap_set_bit (&true_dependency_cache[INSN_LUID (insn)], 537169689Skan INSN_LUID (elem)); 538169689Skan else if (dep_type == REG_DEP_OUTPUT) 539169689Skan bitmap_set_bit (&output_dependency_cache[INSN_LUID (insn)], 540169689Skan INSN_LUID (elem)); 541169689Skan else if (dep_type == REG_DEP_ANTI) 542169689Skan bitmap_set_bit (&anti_dependency_cache[INSN_LUID (insn)], 543169689Skan INSN_LUID (elem)); 544169689Skan } 545169689Skan else 546169689Skan { 547169689Skan if (ds & DEP_TRUE) 548169689Skan bitmap_set_bit (&true_dependency_cache[INSN_LUID (insn)], 549169689Skan INSN_LUID (elem)); 550169689Skan if (ds & DEP_OUTPUT) 551169689Skan bitmap_set_bit (&output_dependency_cache[INSN_LUID (insn)], 552169689Skan INSN_LUID (elem)); 553169689Skan if (ds & DEP_ANTI) 554169689Skan bitmap_set_bit (&anti_dependency_cache[INSN_LUID (insn)], 555169689Skan INSN_LUID (elem)); 556169689Skan if (ds & SPECULATIVE) 557169689Skan { 558169689Skan gcc_assert (current_sched_info->flags & DO_SPECULATION); 559169689Skan bitmap_set_bit (&spec_dependency_cache[INSN_LUID (insn)], 560169689Skan INSN_LUID (elem)); 561169689Skan } 562169689Skan } 56390075Sobrien } 56490075Sobrien#endif 56590075Sobrien} 56690075Sobrien 56790075Sobrien/* A convenience wrapper to operate on an entire list. */ 56890075Sobrien 56990075Sobrienstatic void 570169689Skanadd_dependence_list (rtx insn, rtx list, int uncond, enum reg_note dep_type) 57190075Sobrien{ 57290075Sobrien for (; list; list = XEXP (list, 1)) 573169689Skan { 574169689Skan if (uncond || ! sched_insns_conditions_mutex_p (insn, XEXP (list, 0))) 575169689Skan add_dependence (insn, XEXP (list, 0), dep_type); 576169689Skan } 57790075Sobrien} 57890075Sobrien 57990075Sobrien/* Similar, but free *LISTP at the same time. 
*/ 58090075Sobrien 58190075Sobrienstatic void 582169689Skanadd_dependence_list_and_free (rtx insn, rtx *listp, int uncond, 583169689Skan enum reg_note dep_type) 58490075Sobrien{ 58590075Sobrien rtx list, next; 58690075Sobrien for (list = *listp, *listp = NULL; list ; list = next) 58790075Sobrien { 58890075Sobrien next = XEXP (list, 1); 589169689Skan if (uncond || ! sched_insns_conditions_mutex_p (insn, XEXP (list, 0))) 590169689Skan add_dependence (insn, XEXP (list, 0), dep_type); 59190075Sobrien free_INSN_LIST_node (list); 59290075Sobrien } 59390075Sobrien} 59490075Sobrien 595169689Skan/* Clear all dependencies for an insn. */ 59690075Sobrien 59790075Sobrienstatic void 598169689Skandelete_all_dependences (rtx insn) 59990075Sobrien{ 600169689Skan /* Clear caches, if they exist, as well as free the dependence. */ 60190075Sobrien 602169689Skan#ifdef INSN_SCHEDULING 603169689Skan if (true_dependency_cache != NULL) 604169689Skan { 605169689Skan bitmap_clear (&true_dependency_cache[INSN_LUID (insn)]); 606169689Skan bitmap_clear (&output_dependency_cache[INSN_LUID (insn)]); 607169689Skan bitmap_clear (&anti_dependency_cache[INSN_LUID (insn)]); 608169689Skan /* We don't have to clear forward_dependency_cache here, 609169689Skan because it is formed later. */ 610169689Skan if (current_sched_info->flags & DO_SPECULATION) 611169689Skan bitmap_clear (&spec_dependency_cache[INSN_LUID (insn)]); 612169689Skan } 613169689Skan#endif 61490075Sobrien 615169689Skan if (!(current_sched_info->flags & USE_DEPS_LIST)) 616169689Skan /* In this case LOG_LINKS are formed from the DEPS_LISTs, 617169689Skan not the INSN_LISTs. */ 618169689Skan free_INSN_LIST_list (&LOG_LINKS (insn)); 619169689Skan else 620169689Skan free_DEPS_LIST_list (&LOG_LINKS (insn)); 62190075Sobrien} 622169689Skan 623169689Skan/* All insns in a scheduling group except the first should only have 624169689Skan dependencies on the previous insn in the group. 
So we find the 625169689Skan first instruction in the scheduling group by walking the dependence 626169689Skan chains backwards. Then we add the dependencies for the group to 627169689Skan the previous nonnote insn. */ 628169689Skan 629169689Skanstatic void 630169689Skanfixup_sched_groups (rtx insn) 631169689Skan{ 632169689Skan rtx link, prev_nonnote; 633169689Skan 634169689Skan for (link = LOG_LINKS (insn); link ; link = XEXP (link, 1)) 635169689Skan { 636169689Skan rtx i = insn; 637169689Skan do 638169689Skan { 639169689Skan i = prev_nonnote_insn (i); 640169689Skan 641169689Skan if (XEXP (link, 0) == i) 642169689Skan goto next_link; 643169689Skan } while (SCHED_GROUP_P (i)); 644169689Skan if (! sched_insns_conditions_mutex_p (i, XEXP (link, 0))) 645169689Skan add_dependence (i, XEXP (link, 0), REG_NOTE_KIND (link)); 646169689Skan next_link:; 647169689Skan } 648169689Skan 649169689Skan delete_all_dependences (insn); 650169689Skan 651169689Skan prev_nonnote = prev_nonnote_insn (insn); 652169689Skan if (BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (prev_nonnote) 653169689Skan && ! sched_insns_conditions_mutex_p (insn, prev_nonnote)) 654169689Skan add_dependence (insn, prev_nonnote, REG_DEP_ANTI); 655169689Skan} 65690075Sobrien 65790075Sobrien/* Process an insn's memory dependencies. There are four kinds of 65890075Sobrien dependencies: 65990075Sobrien 66090075Sobrien (0) read dependence: read follows read 66190075Sobrien (1) true dependence: read follows write 662169689Skan (2) output dependence: write follows write 663169689Skan (3) anti dependence: write follows read 66490075Sobrien 66590075Sobrien We are careful to build only dependencies which actually exist, and 66690075Sobrien use transitivity to avoid building too many links. */ 66790075Sobrien 66890075Sobrien/* Add an INSN and MEM reference pair to a pending INSN_LIST and MEM_LIST. 
66990075Sobrien The MEM is a memory reference contained within INSN, which we are saving 67090075Sobrien so that we can do memory aliasing on it. */ 67190075Sobrien 672169689Skanstatic void 673132718Skanadd_insn_mem_dependence (struct deps *deps, rtx *insn_list, rtx *mem_list, 674132718Skan rtx insn, rtx mem) 67590075Sobrien{ 67690075Sobrien rtx link; 67790075Sobrien 67890075Sobrien link = alloc_INSN_LIST (insn, *insn_list); 67990075Sobrien *insn_list = link; 68090075Sobrien 68190075Sobrien if (current_sched_info->use_cselib) 68290075Sobrien { 68390075Sobrien mem = shallow_copy_rtx (mem); 68490075Sobrien XEXP (mem, 0) = cselib_subst_to_values (XEXP (mem, 0)); 68590075Sobrien } 686132718Skan link = alloc_EXPR_LIST (VOIDmode, canon_rtx (mem), *mem_list); 68790075Sobrien *mem_list = link; 68890075Sobrien 68990075Sobrien deps->pending_lists_length++; 69090075Sobrien} 69190075Sobrien 69290075Sobrien/* Make a dependency between every memory reference on the pending lists 69390075Sobrien and INSN, thus flushing the pending lists. FOR_READ is true if emitting 69490075Sobrien dependencies for a read operation, similarly with FOR_WRITE. */ 69590075Sobrien 69690075Sobrienstatic void 697132718Skanflush_pending_lists (struct deps *deps, rtx insn, int for_read, 698132718Skan int for_write) 69990075Sobrien{ 70090075Sobrien if (for_write) 70190075Sobrien { 702169689Skan add_dependence_list_and_free (insn, &deps->pending_read_insns, 1, 70390075Sobrien REG_DEP_ANTI); 70490075Sobrien free_EXPR_LIST_list (&deps->pending_read_mems); 70590075Sobrien } 70690075Sobrien 707169689Skan add_dependence_list_and_free (insn, &deps->pending_write_insns, 1, 70890075Sobrien for_read ? REG_DEP_ANTI : REG_DEP_OUTPUT); 70990075Sobrien free_EXPR_LIST_list (&deps->pending_write_mems); 71090075Sobrien deps->pending_lists_length = 0; 71190075Sobrien 712169689Skan add_dependence_list_and_free (insn, &deps->last_pending_memory_flush, 1, 71390075Sobrien for_read ? 
REG_DEP_ANTI : REG_DEP_OUTPUT); 71490075Sobrien deps->last_pending_memory_flush = alloc_INSN_LIST (insn, NULL_RTX); 71590075Sobrien deps->pending_flush_length = 1; 71690075Sobrien} 71790075Sobrien 718169689Skan/* Analyze a single reference to register (reg:MODE REGNO) in INSN. 719169689Skan The type of the reference is specified by REF and can be SET, 720169689Skan CLOBBER, PRE_DEC, POST_DEC, PRE_INC, POST_INC or USE. */ 721169689Skan 722169689Skanstatic void 723169689Skansched_analyze_reg (struct deps *deps, int regno, enum machine_mode mode, 724169689Skan enum rtx_code ref, rtx insn) 725169689Skan{ 726169689Skan /* A hard reg in a wide mode may really be multiple registers. 727169689Skan If so, mark all of them just like the first. */ 728169689Skan if (regno < FIRST_PSEUDO_REGISTER) 729169689Skan { 730169689Skan int i = hard_regno_nregs[regno][mode]; 731169689Skan if (ref == SET) 732169689Skan { 733169689Skan while (--i >= 0) 734169689Skan SET_REGNO_REG_SET (reg_pending_sets, regno + i); 735169689Skan } 736169689Skan else if (ref == USE) 737169689Skan { 738169689Skan while (--i >= 0) 739169689Skan SET_REGNO_REG_SET (reg_pending_uses, regno + i); 740169689Skan } 741169689Skan else 742169689Skan { 743169689Skan while (--i >= 0) 744169689Skan SET_REGNO_REG_SET (reg_pending_clobbers, regno + i); 745169689Skan } 746169689Skan } 747169689Skan 748169689Skan /* ??? Reload sometimes emits USEs and CLOBBERs of pseudos that 749169689Skan it does not reload. Ignore these as they have served their 750169689Skan purpose already. 
*/ 751169689Skan else if (regno >= deps->max_reg) 752169689Skan { 753169689Skan enum rtx_code code = GET_CODE (PATTERN (insn)); 754169689Skan gcc_assert (code == USE || code == CLOBBER); 755169689Skan } 756169689Skan 757169689Skan else 758169689Skan { 759169689Skan if (ref == SET) 760169689Skan SET_REGNO_REG_SET (reg_pending_sets, regno); 761169689Skan else if (ref == USE) 762169689Skan SET_REGNO_REG_SET (reg_pending_uses, regno); 763169689Skan else 764169689Skan SET_REGNO_REG_SET (reg_pending_clobbers, regno); 765169689Skan 766169689Skan /* Pseudos that are REG_EQUIV to something may be replaced 767169689Skan by that during reloading. We need only add dependencies for 768169689Skan the address in the REG_EQUIV note. */ 769169689Skan if (!reload_completed && get_reg_known_equiv_p (regno)) 770169689Skan { 771169689Skan rtx t = get_reg_known_value (regno); 772169689Skan if (MEM_P (t)) 773169689Skan sched_analyze_2 (deps, XEXP (t, 0), insn); 774169689Skan } 775169689Skan 776169689Skan /* Don't let it cross a call after scheduling if it doesn't 777169689Skan already cross one. */ 778169689Skan if (REG_N_CALLS_CROSSED (regno) == 0) 779169689Skan { 780169689Skan if (ref == USE) 781169689Skan deps->sched_before_next_call 782169689Skan = alloc_INSN_LIST (insn, deps->sched_before_next_call); 783169689Skan else 784169689Skan add_dependence_list (insn, deps->last_function_call, 1, 785169689Skan REG_DEP_ANTI); 786169689Skan } 787169689Skan } 788169689Skan} 789169689Skan 79090075Sobrien/* Analyze a single SET, CLOBBER, PRE_DEC, POST_DEC, PRE_INC or POST_INC 79190075Sobrien rtx, X, creating all dependencies generated by the write to the 79290075Sobrien destination of X, and reads of everything mentioned. 
*/ 79390075Sobrien 79490075Sobrienstatic void 795132718Skansched_analyze_1 (struct deps *deps, rtx x, rtx insn) 79690075Sobrien{ 79790075Sobrien rtx dest = XEXP (x, 0); 79890075Sobrien enum rtx_code code = GET_CODE (x); 79990075Sobrien 80090075Sobrien if (dest == 0) 80190075Sobrien return; 80290075Sobrien 80390075Sobrien if (GET_CODE (dest) == PARALLEL) 80490075Sobrien { 80590075Sobrien int i; 80690075Sobrien 80790075Sobrien for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) 80890075Sobrien if (XEXP (XVECEXP (dest, 0, i), 0) != 0) 80990075Sobrien sched_analyze_1 (deps, 81090075Sobrien gen_rtx_CLOBBER (VOIDmode, 81190075Sobrien XEXP (XVECEXP (dest, 0, i), 0)), 81290075Sobrien insn); 81390075Sobrien 81490075Sobrien if (GET_CODE (x) == SET) 81590075Sobrien sched_analyze_2 (deps, SET_SRC (x), insn); 81690075Sobrien return; 81790075Sobrien } 81890075Sobrien 81990075Sobrien while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SUBREG 820169689Skan || GET_CODE (dest) == ZERO_EXTRACT) 82190075Sobrien { 822132718Skan if (GET_CODE (dest) == STRICT_LOW_PART 823132718Skan || GET_CODE (dest) == ZERO_EXTRACT 824169689Skan || df_read_modify_subreg_p (dest)) 825132718Skan { 826132718Skan /* These both read and modify the result. We must handle 827132718Skan them as writes to get proper dependencies for following 828132718Skan instructions. We must handle them as reads to get proper 829132718Skan dependencies from this to previous instructions. 830132718Skan Thus we need to call sched_analyze_2. */ 831132718Skan 832132718Skan sched_analyze_2 (deps, XEXP (dest, 0), insn); 833132718Skan } 834169689Skan if (GET_CODE (dest) == ZERO_EXTRACT) 83590075Sobrien { 83690075Sobrien /* The second and third arguments are values read by this insn. 
*/ 83790075Sobrien sched_analyze_2 (deps, XEXP (dest, 1), insn); 83890075Sobrien sched_analyze_2 (deps, XEXP (dest, 2), insn); 83990075Sobrien } 84090075Sobrien dest = XEXP (dest, 0); 84190075Sobrien } 84290075Sobrien 843169689Skan if (REG_P (dest)) 84490075Sobrien { 845169689Skan int regno = REGNO (dest); 846169689Skan enum machine_mode mode = GET_MODE (dest); 84790075Sobrien 848169689Skan sched_analyze_reg (deps, regno, mode, code, insn); 849169689Skan 850169689Skan#ifdef STACK_REGS 851169689Skan /* Treat all writes to a stack register as modifying the TOS. */ 852169689Skan if (regno >= FIRST_STACK_REG && regno <= LAST_STACK_REG) 85390075Sobrien { 854169689Skan /* Avoid analyzing the same register twice. */ 855169689Skan if (regno != FIRST_STACK_REG) 856169689Skan sched_analyze_reg (deps, FIRST_STACK_REG, mode, code, insn); 857169689Skan sched_analyze_reg (deps, FIRST_STACK_REG, mode, USE, insn); 85890075Sobrien } 859169689Skan#endif 86090075Sobrien } 861169689Skan else if (MEM_P (dest)) 86290075Sobrien { 86390075Sobrien /* Writing memory. */ 86490075Sobrien rtx t = dest; 86590075Sobrien 86690075Sobrien if (current_sched_info->use_cselib) 86790075Sobrien { 86890075Sobrien t = shallow_copy_rtx (dest); 86990075Sobrien cselib_lookup (XEXP (t, 0), Pmode, 1); 87090075Sobrien XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0)); 87190075Sobrien } 872132718Skan t = canon_rtx (t); 87390075Sobrien 87490075Sobrien if (deps->pending_lists_length > MAX_PENDING_LIST_LENGTH) 87590075Sobrien { 87690075Sobrien /* Flush all pending reads and writes to prevent the pending lists 87790075Sobrien from getting any larger. Insn scheduling runs too slowly when 87890075Sobrien these lists get long. When compiling GCC with itself, 87990075Sobrien this flush occurs 8 times for sparc, and 10 times for m88k using 88090075Sobrien the default value of 32. 
*/ 88190075Sobrien flush_pending_lists (deps, insn, false, true); 88290075Sobrien } 88390075Sobrien else 88490075Sobrien { 88590075Sobrien rtx pending, pending_mem; 88690075Sobrien 88790075Sobrien pending = deps->pending_read_insns; 88890075Sobrien pending_mem = deps->pending_read_mems; 88990075Sobrien while (pending) 89090075Sobrien { 891169689Skan if (anti_dependence (XEXP (pending_mem, 0), t) 892169689Skan && ! sched_insns_conditions_mutex_p (insn, XEXP (pending, 0))) 89390075Sobrien add_dependence (insn, XEXP (pending, 0), REG_DEP_ANTI); 89490075Sobrien 89590075Sobrien pending = XEXP (pending, 1); 89690075Sobrien pending_mem = XEXP (pending_mem, 1); 89790075Sobrien } 89890075Sobrien 89990075Sobrien pending = deps->pending_write_insns; 90090075Sobrien pending_mem = deps->pending_write_mems; 90190075Sobrien while (pending) 90290075Sobrien { 903169689Skan if (output_dependence (XEXP (pending_mem, 0), t) 904169689Skan && ! sched_insns_conditions_mutex_p (insn, XEXP (pending, 0))) 90590075Sobrien add_dependence (insn, XEXP (pending, 0), REG_DEP_OUTPUT); 90690075Sobrien 90790075Sobrien pending = XEXP (pending, 1); 90890075Sobrien pending_mem = XEXP (pending_mem, 1); 90990075Sobrien } 91090075Sobrien 911169689Skan add_dependence_list (insn, deps->last_pending_memory_flush, 1, 91290075Sobrien REG_DEP_ANTI); 91390075Sobrien 91490075Sobrien add_insn_mem_dependence (deps, &deps->pending_write_insns, 91590075Sobrien &deps->pending_write_mems, insn, dest); 91690075Sobrien } 91790075Sobrien sched_analyze_2 (deps, XEXP (dest, 0), insn); 91890075Sobrien } 91990075Sobrien 92090075Sobrien /* Analyze reads. */ 92190075Sobrien if (GET_CODE (x) == SET) 92290075Sobrien sched_analyze_2 (deps, SET_SRC (x), insn); 92390075Sobrien} 92490075Sobrien 92590075Sobrien/* Analyze the uses of memory and registers in rtx X in INSN. 
*/ 92690075Sobrien 92790075Sobrienstatic void 928132718Skansched_analyze_2 (struct deps *deps, rtx x, rtx insn) 92990075Sobrien{ 93090075Sobrien int i; 93190075Sobrien int j; 93290075Sobrien enum rtx_code code; 93390075Sobrien const char *fmt; 93490075Sobrien 93590075Sobrien if (x == 0) 93690075Sobrien return; 93790075Sobrien 93890075Sobrien code = GET_CODE (x); 93990075Sobrien 94090075Sobrien switch (code) 94190075Sobrien { 94290075Sobrien case CONST_INT: 94390075Sobrien case CONST_DOUBLE: 94496263Sobrien case CONST_VECTOR: 94590075Sobrien case SYMBOL_REF: 94690075Sobrien case CONST: 94790075Sobrien case LABEL_REF: 94890075Sobrien /* Ignore constants. Note that we must handle CONST_DOUBLE here 94990075Sobrien because it may have a cc0_rtx in its CONST_DOUBLE_CHAIN field, but 95090075Sobrien this does not mean that this insn is using cc0. */ 95190075Sobrien return; 95290075Sobrien 95390075Sobrien#ifdef HAVE_cc0 95490075Sobrien case CC0: 95590075Sobrien /* User of CC0 depends on immediately preceding insn. */ 956169689Skan SCHED_GROUP_P (insn) = 1; 957132718Skan /* Don't move CC0 setter to another block (it can set up the 958132718Skan same flag for previous CC0 users which is safe). */ 959132718Skan CANT_MOVE (prev_nonnote_insn (insn)) = 1; 96090075Sobrien return; 96190075Sobrien#endif 96290075Sobrien 96390075Sobrien case REG: 96490075Sobrien { 96590075Sobrien int regno = REGNO (x); 966169689Skan enum machine_mode mode = GET_MODE (x); 96790075Sobrien 968169689Skan sched_analyze_reg (deps, regno, mode, USE, insn); 96990075Sobrien 970169689Skan#ifdef STACK_REGS 971169689Skan /* Treat all reads of a stack register as modifying the TOS. */ 972169689Skan if (regno >= FIRST_STACK_REG && regno <= LAST_STACK_REG) 973169689Skan { 974169689Skan /* Avoid analyzing the same register twice. 
*/ 975169689Skan if (regno != FIRST_STACK_REG) 976169689Skan sched_analyze_reg (deps, FIRST_STACK_REG, mode, USE, insn); 977169689Skan sched_analyze_reg (deps, FIRST_STACK_REG, mode, SET, insn); 978169689Skan } 979169689Skan#endif 98090075Sobrien return; 98190075Sobrien } 98290075Sobrien 98390075Sobrien case MEM: 98490075Sobrien { 98590075Sobrien /* Reading memory. */ 98690075Sobrien rtx u; 98790075Sobrien rtx pending, pending_mem; 98890075Sobrien rtx t = x; 98990075Sobrien 99090075Sobrien if (current_sched_info->use_cselib) 99190075Sobrien { 99290075Sobrien t = shallow_copy_rtx (t); 99390075Sobrien cselib_lookup (XEXP (t, 0), Pmode, 1); 99490075Sobrien XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0)); 99590075Sobrien } 996132718Skan t = canon_rtx (t); 99790075Sobrien pending = deps->pending_read_insns; 99890075Sobrien pending_mem = deps->pending_read_mems; 99990075Sobrien while (pending) 100090075Sobrien { 1001169689Skan if (read_dependence (XEXP (pending_mem, 0), t) 1002169689Skan && ! sched_insns_conditions_mutex_p (insn, XEXP (pending, 0))) 100390075Sobrien add_dependence (insn, XEXP (pending, 0), REG_DEP_ANTI); 100490075Sobrien 100590075Sobrien pending = XEXP (pending, 1); 100690075Sobrien pending_mem = XEXP (pending_mem, 1); 100790075Sobrien } 100890075Sobrien 100990075Sobrien pending = deps->pending_write_insns; 101090075Sobrien pending_mem = deps->pending_write_mems; 101190075Sobrien while (pending) 101290075Sobrien { 101390075Sobrien if (true_dependence (XEXP (pending_mem, 0), VOIDmode, 1014169689Skan t, rtx_varies_p) 1015169689Skan && ! 
sched_insns_conditions_mutex_p (insn, XEXP (pending, 0))) 1016169689Skan { 1017169689Skan if (current_sched_info->flags & DO_SPECULATION) 1018169689Skan maybe_add_or_update_back_dep_1 (insn, XEXP (pending, 0), 1019169689Skan REG_DEP_TRUE, 1020169689Skan BEGIN_DATA | DEP_TRUE, 1021169689Skan XEXP (pending_mem, 0), t, 0); 1022169689Skan else 1023169689Skan add_dependence (insn, XEXP (pending, 0), REG_DEP_TRUE); 1024169689Skan } 102590075Sobrien 102690075Sobrien pending = XEXP (pending, 1); 102790075Sobrien pending_mem = XEXP (pending_mem, 1); 102890075Sobrien } 102990075Sobrien 103090075Sobrien for (u = deps->last_pending_memory_flush; u; u = XEXP (u, 1)) 1031169689Skan if (! JUMP_P (XEXP (u, 0)) || deps_may_trap_p (x)) 103290075Sobrien add_dependence (insn, XEXP (u, 0), REG_DEP_ANTI); 103390075Sobrien 103490075Sobrien /* Always add these dependencies to pending_reads, since 103590075Sobrien this insn may be followed by a write. */ 103690075Sobrien add_insn_mem_dependence (deps, &deps->pending_read_insns, 103790075Sobrien &deps->pending_read_mems, insn, x); 103890075Sobrien 103990075Sobrien /* Take advantage of tail recursion here. */ 104090075Sobrien sched_analyze_2 (deps, XEXP (x, 0), insn); 104190075Sobrien return; 104290075Sobrien } 104390075Sobrien 104490075Sobrien /* Force pending stores to memory in case a trap handler needs them. */ 104590075Sobrien case TRAP_IF: 104690075Sobrien flush_pending_lists (deps, insn, true, false); 104790075Sobrien break; 104890075Sobrien 104990075Sobrien case ASM_OPERANDS: 105090075Sobrien case ASM_INPUT: 105190075Sobrien case UNSPEC_VOLATILE: 105290075Sobrien { 105390075Sobrien /* Traditional and volatile asm instructions must be considered to use 105490075Sobrien and clobber all hard registers, all pseudo-registers and all of 105590075Sobrien memory. So must TRAP_IF and UNSPEC_VOLATILE operations. 105690075Sobrien 105790075Sobrien Consider for instance a volatile asm that changes the fpu rounding 105890075Sobrien mode. 
An insn should not be moved across this even if it only uses 105990075Sobrien pseudo-regs because it might give an incorrectly rounded result. */ 106090075Sobrien if (code != ASM_OPERANDS || MEM_VOLATILE_P (x)) 1061132718Skan reg_pending_barrier = TRUE_BARRIER; 106290075Sobrien 106390075Sobrien /* For all ASM_OPERANDS, we must traverse the vector of input operands. 106490075Sobrien We can not just fall through here since then we would be confused 106590075Sobrien by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate 106690075Sobrien traditional asms unlike their normal usage. */ 106790075Sobrien 106890075Sobrien if (code == ASM_OPERANDS) 106990075Sobrien { 107090075Sobrien for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++) 107190075Sobrien sched_analyze_2 (deps, ASM_OPERANDS_INPUT (x, j), insn); 107290075Sobrien return; 107390075Sobrien } 107490075Sobrien break; 107590075Sobrien } 107690075Sobrien 107790075Sobrien case PRE_DEC: 107890075Sobrien case POST_DEC: 107990075Sobrien case PRE_INC: 108090075Sobrien case POST_INC: 108190075Sobrien /* These both read and modify the result. We must handle them as writes 108290075Sobrien to get proper dependencies for following instructions. We must handle 108390075Sobrien them as reads to get proper dependencies from this to previous 108490075Sobrien instructions. Thus we need to pass them to both sched_analyze_1 108590075Sobrien and sched_analyze_2. We must call sched_analyze_2 first in order 108690075Sobrien to get the proper antecedent for the read. 
*/ 108790075Sobrien sched_analyze_2 (deps, XEXP (x, 0), insn); 108890075Sobrien sched_analyze_1 (deps, x, insn); 108990075Sobrien return; 109090075Sobrien 109190075Sobrien case POST_MODIFY: 109290075Sobrien case PRE_MODIFY: 109390075Sobrien /* op0 = op0 + op1 */ 109490075Sobrien sched_analyze_2 (deps, XEXP (x, 0), insn); 109590075Sobrien sched_analyze_2 (deps, XEXP (x, 1), insn); 109690075Sobrien sched_analyze_1 (deps, x, insn); 109790075Sobrien return; 109890075Sobrien 109990075Sobrien default: 110090075Sobrien break; 110190075Sobrien } 110290075Sobrien 110390075Sobrien /* Other cases: walk the insn. */ 110490075Sobrien fmt = GET_RTX_FORMAT (code); 110590075Sobrien for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 110690075Sobrien { 110790075Sobrien if (fmt[i] == 'e') 110890075Sobrien sched_analyze_2 (deps, XEXP (x, i), insn); 110990075Sobrien else if (fmt[i] == 'E') 111090075Sobrien for (j = 0; j < XVECLEN (x, i); j++) 111190075Sobrien sched_analyze_2 (deps, XVECEXP (x, i, j), insn); 111290075Sobrien } 111390075Sobrien} 111490075Sobrien 111590075Sobrien/* Analyze an INSN with pattern X to find all dependencies. */ 111690075Sobrien 111790075Sobrienstatic void 1118169689Skansched_analyze_insn (struct deps *deps, rtx x, rtx insn) 111990075Sobrien{ 112090075Sobrien RTX_CODE code = GET_CODE (x); 112190075Sobrien rtx link; 1122169689Skan unsigned i; 1123169689Skan reg_set_iterator rsi; 112490075Sobrien 112590075Sobrien if (code == COND_EXEC) 112690075Sobrien { 112790075Sobrien sched_analyze_2 (deps, COND_EXEC_TEST (x), insn); 112890075Sobrien 112990075Sobrien /* ??? Should be recording conditions so we reduce the number of 113090075Sobrien false dependencies. 
*/ 113190075Sobrien x = COND_EXEC_CODE (x); 113290075Sobrien code = GET_CODE (x); 113390075Sobrien } 113490075Sobrien if (code == SET || code == CLOBBER) 1135104752Skan { 1136104752Skan sched_analyze_1 (deps, x, insn); 1137104752Skan 1138104752Skan /* Bare clobber insns are used for letting life analysis, reg-stack 1139104752Skan and others know that a value is dead. Depend on the last call 1140104752Skan instruction so that reg-stack won't get confused. */ 1141104752Skan if (code == CLOBBER) 1142169689Skan add_dependence_list (insn, deps->last_function_call, 1, REG_DEP_OUTPUT); 1143104752Skan } 114490075Sobrien else if (code == PARALLEL) 114590075Sobrien { 1146169689Skan for (i = XVECLEN (x, 0); i--;) 114790075Sobrien { 114890075Sobrien rtx sub = XVECEXP (x, 0, i); 114990075Sobrien code = GET_CODE (sub); 115090075Sobrien 115190075Sobrien if (code == COND_EXEC) 115290075Sobrien { 115390075Sobrien sched_analyze_2 (deps, COND_EXEC_TEST (sub), insn); 115490075Sobrien sub = COND_EXEC_CODE (sub); 115590075Sobrien code = GET_CODE (sub); 115690075Sobrien } 115790075Sobrien if (code == SET || code == CLOBBER) 115890075Sobrien sched_analyze_1 (deps, sub, insn); 115990075Sobrien else 116090075Sobrien sched_analyze_2 (deps, sub, insn); 116190075Sobrien } 116290075Sobrien } 116390075Sobrien else 116490075Sobrien sched_analyze_2 (deps, x, insn); 116590075Sobrien 116690075Sobrien /* Mark registers CLOBBERED or used by called function. 
*/ 1167169689Skan if (CALL_P (insn)) 116890075Sobrien { 116990075Sobrien for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) 117090075Sobrien { 117190075Sobrien if (GET_CODE (XEXP (link, 0)) == CLOBBER) 117290075Sobrien sched_analyze_1 (deps, XEXP (link, 0), insn); 117390075Sobrien else 117490075Sobrien sched_analyze_2 (deps, XEXP (link, 0), insn); 117590075Sobrien } 117690075Sobrien if (find_reg_note (insn, REG_SETJMP, NULL)) 1177132718Skan reg_pending_barrier = MOVE_BARRIER; 117890075Sobrien } 117990075Sobrien 1180169689Skan if (JUMP_P (insn)) 118190075Sobrien { 118290075Sobrien rtx next; 118390075Sobrien next = next_nonnote_insn (insn); 1184169689Skan if (next && BARRIER_P (next)) 1185132718Skan reg_pending_barrier = TRUE_BARRIER; 118690075Sobrien else 118790075Sobrien { 118890075Sobrien rtx pending, pending_mem; 1189119256Skan regset_head tmp_uses, tmp_sets; 1190119256Skan INIT_REG_SET (&tmp_uses); 1191119256Skan INIT_REG_SET (&tmp_sets); 119290075Sobrien 1193119256Skan (*current_sched_info->compute_jump_reg_dependencies) 1194119256Skan (insn, &deps->reg_conditional_sets, &tmp_uses, &tmp_sets); 1195132718Skan /* Make latency of jump equal to 0 by using anti-dependence. */ 1196169689Skan EXECUTE_IF_SET_IN_REG_SET (&tmp_uses, 0, i, rsi) 1197132718Skan { 1198132718Skan struct deps_reg *reg_last = &deps->reg_last[i]; 1199169689Skan add_dependence_list (insn, reg_last->sets, 0, REG_DEP_ANTI); 1200169689Skan add_dependence_list (insn, reg_last->clobbers, 0, REG_DEP_ANTI); 1201132718Skan reg_last->uses_length++; 1202132718Skan reg_last->uses = alloc_INSN_LIST (insn, reg_last->uses); 1203169689Skan } 1204119256Skan IOR_REG_SET (reg_pending_sets, &tmp_sets); 120590075Sobrien 1206119256Skan CLEAR_REG_SET (&tmp_uses); 1207119256Skan CLEAR_REG_SET (&tmp_sets); 1208119256Skan 120990075Sobrien /* All memory writes and volatile reads must happen before the 121090075Sobrien jump. 
Non-volatile reads must happen before the jump iff 121190075Sobrien the result is needed by the above register used mask. */ 121290075Sobrien 121390075Sobrien pending = deps->pending_write_insns; 121490075Sobrien pending_mem = deps->pending_write_mems; 121590075Sobrien while (pending) 121690075Sobrien { 1217169689Skan if (! sched_insns_conditions_mutex_p (insn, XEXP (pending, 0))) 1218169689Skan add_dependence (insn, XEXP (pending, 0), REG_DEP_OUTPUT); 121990075Sobrien pending = XEXP (pending, 1); 122090075Sobrien pending_mem = XEXP (pending_mem, 1); 122190075Sobrien } 122290075Sobrien 122390075Sobrien pending = deps->pending_read_insns; 122490075Sobrien pending_mem = deps->pending_read_mems; 122590075Sobrien while (pending) 122690075Sobrien { 1227169689Skan if (MEM_VOLATILE_P (XEXP (pending_mem, 0)) 1228169689Skan && ! sched_insns_conditions_mutex_p (insn, XEXP (pending, 0))) 122990075Sobrien add_dependence (insn, XEXP (pending, 0), REG_DEP_OUTPUT); 123090075Sobrien pending = XEXP (pending, 1); 123190075Sobrien pending_mem = XEXP (pending_mem, 1); 123290075Sobrien } 123390075Sobrien 1234169689Skan add_dependence_list (insn, deps->last_pending_memory_flush, 1, 123590075Sobrien REG_DEP_ANTI); 123690075Sobrien } 123790075Sobrien } 123890075Sobrien 123990075Sobrien /* If this instruction can throw an exception, then moving it changes 1240117395Skan where block boundaries fall. This is mighty confusing elsewhere. 1241169689Skan Therefore, prevent such an instruction from being moved. Same for 1242169689Skan non-jump instructions that define block boundaries. 1243169689Skan ??? Unclear whether this is still necessary in EBB mode. If not, 1244169689Skan add_branch_dependences should be adjusted for RGN mode instead. 
*/ 1245169689Skan if (((CALL_P (insn) || JUMP_P (insn)) && can_throw_internal (insn)) 1246169689Skan || (NONJUMP_INSN_P (insn) && control_flow_insn_p (insn))) 1247132718Skan reg_pending_barrier = MOVE_BARRIER; 124890075Sobrien 124990075Sobrien /* Add dependencies if a scheduling barrier was found. */ 125090075Sobrien if (reg_pending_barrier) 125190075Sobrien { 1252132718Skan /* In the case of barrier the most added dependencies are not 1253132718Skan real, so we use anti-dependence here. */ 1254169689Skan if (sched_get_condition (insn)) 125590075Sobrien { 1256169689Skan EXECUTE_IF_SET_IN_REG_SET (&deps->reg_last_in_use, 0, i, rsi) 125790075Sobrien { 125890075Sobrien struct deps_reg *reg_last = &deps->reg_last[i]; 1259169689Skan add_dependence_list (insn, reg_last->uses, 0, REG_DEP_ANTI); 1260132718Skan add_dependence_list 1261169689Skan (insn, reg_last->sets, 0, 1262169689Skan reg_pending_barrier == TRUE_BARRIER ? REG_DEP_TRUE : REG_DEP_ANTI); 1263132718Skan add_dependence_list 1264169689Skan (insn, reg_last->clobbers, 0, 1265169689Skan reg_pending_barrier == TRUE_BARRIER ? REG_DEP_TRUE : REG_DEP_ANTI); 1266169689Skan } 126790075Sobrien } 126890075Sobrien else 126990075Sobrien { 1270169689Skan EXECUTE_IF_SET_IN_REG_SET (&deps->reg_last_in_use, 0, i, rsi) 127190075Sobrien { 127290075Sobrien struct deps_reg *reg_last = &deps->reg_last[i]; 1273169689Skan add_dependence_list_and_free (insn, ®_last->uses, 0, 127490075Sobrien REG_DEP_ANTI); 1275132718Skan add_dependence_list_and_free 1276169689Skan (insn, ®_last->sets, 0, 1277169689Skan reg_pending_barrier == TRUE_BARRIER ? REG_DEP_TRUE : REG_DEP_ANTI); 1278132718Skan add_dependence_list_and_free 1279169689Skan (insn, ®_last->clobbers, 0, 1280169689Skan reg_pending_barrier == TRUE_BARRIER ? 
REG_DEP_TRUE : REG_DEP_ANTI); 128190075Sobrien reg_last->uses_length = 0; 128290075Sobrien reg_last->clobbers_length = 0; 1283169689Skan } 128490075Sobrien } 128590075Sobrien 1286169689Skan for (i = 0; i < (unsigned)deps->max_reg; i++) 128790075Sobrien { 128890075Sobrien struct deps_reg *reg_last = &deps->reg_last[i]; 128990075Sobrien reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets); 129090075Sobrien SET_REGNO_REG_SET (&deps->reg_last_in_use, i); 129190075Sobrien } 129290075Sobrien 129390075Sobrien flush_pending_lists (deps, insn, true, true); 1294119256Skan CLEAR_REG_SET (&deps->reg_conditional_sets); 1295132718Skan reg_pending_barrier = NOT_A_BARRIER; 129690075Sobrien } 129790075Sobrien else 129890075Sobrien { 129990075Sobrien /* If the current insn is conditional, we can't free any 130090075Sobrien of the lists. */ 1301169689Skan if (sched_get_condition (insn)) 130290075Sobrien { 1303169689Skan EXECUTE_IF_SET_IN_REG_SET (reg_pending_uses, 0, i, rsi) 130490075Sobrien { 130590075Sobrien struct deps_reg *reg_last = &deps->reg_last[i]; 1306169689Skan add_dependence_list (insn, reg_last->sets, 0, REG_DEP_TRUE); 1307169689Skan add_dependence_list (insn, reg_last->clobbers, 0, REG_DEP_TRUE); 130890075Sobrien reg_last->uses = alloc_INSN_LIST (insn, reg_last->uses); 130990075Sobrien reg_last->uses_length++; 1310169689Skan } 1311169689Skan EXECUTE_IF_SET_IN_REG_SET (reg_pending_clobbers, 0, i, rsi) 131290075Sobrien { 131390075Sobrien struct deps_reg *reg_last = &deps->reg_last[i]; 1314169689Skan add_dependence_list (insn, reg_last->sets, 0, REG_DEP_OUTPUT); 1315169689Skan add_dependence_list (insn, reg_last->uses, 0, REG_DEP_ANTI); 131690075Sobrien reg_last->clobbers = alloc_INSN_LIST (insn, reg_last->clobbers); 131790075Sobrien reg_last->clobbers_length++; 1318169689Skan } 1319169689Skan EXECUTE_IF_SET_IN_REG_SET (reg_pending_sets, 0, i, rsi) 132090075Sobrien { 132190075Sobrien struct deps_reg *reg_last = &deps->reg_last[i]; 1322169689Skan add_dependence_list 
(insn, reg_last->sets, 0, REG_DEP_OUTPUT); 1323169689Skan add_dependence_list (insn, reg_last->clobbers, 0, REG_DEP_OUTPUT); 1324169689Skan add_dependence_list (insn, reg_last->uses, 0, REG_DEP_ANTI); 132590075Sobrien reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets); 1326119256Skan SET_REGNO_REG_SET (&deps->reg_conditional_sets, i); 1327169689Skan } 132890075Sobrien } 132990075Sobrien else 133090075Sobrien { 1331169689Skan EXECUTE_IF_SET_IN_REG_SET (reg_pending_uses, 0, i, rsi) 133290075Sobrien { 133390075Sobrien struct deps_reg *reg_last = &deps->reg_last[i]; 1334169689Skan add_dependence_list (insn, reg_last->sets, 0, REG_DEP_TRUE); 1335169689Skan add_dependence_list (insn, reg_last->clobbers, 0, REG_DEP_TRUE); 133690075Sobrien reg_last->uses_length++; 133790075Sobrien reg_last->uses = alloc_INSN_LIST (insn, reg_last->uses); 1338169689Skan } 1339169689Skan EXECUTE_IF_SET_IN_REG_SET (reg_pending_clobbers, 0, i, rsi) 134090075Sobrien { 134190075Sobrien struct deps_reg *reg_last = &deps->reg_last[i]; 134290075Sobrien if (reg_last->uses_length > MAX_PENDING_LIST_LENGTH 134390075Sobrien || reg_last->clobbers_length > MAX_PENDING_LIST_LENGTH) 134490075Sobrien { 1345169689Skan add_dependence_list_and_free (insn, ®_last->sets, 0, 134690075Sobrien REG_DEP_OUTPUT); 1347169689Skan add_dependence_list_and_free (insn, ®_last->uses, 0, 134890075Sobrien REG_DEP_ANTI); 1349169689Skan add_dependence_list_and_free (insn, ®_last->clobbers, 0, 135090075Sobrien REG_DEP_OUTPUT); 1351103445Skan reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets); 135290075Sobrien reg_last->clobbers_length = 0; 135390075Sobrien reg_last->uses_length = 0; 135490075Sobrien } 135590075Sobrien else 135690075Sobrien { 1357169689Skan add_dependence_list (insn, reg_last->sets, 0, REG_DEP_OUTPUT); 1358169689Skan add_dependence_list (insn, reg_last->uses, 0, REG_DEP_ANTI); 135990075Sobrien } 136090075Sobrien reg_last->clobbers_length++; 136190075Sobrien reg_last->clobbers = alloc_INSN_LIST (insn, 
reg_last->clobbers); 1362169689Skan } 1363169689Skan EXECUTE_IF_SET_IN_REG_SET (reg_pending_sets, 0, i, rsi) 136490075Sobrien { 136590075Sobrien struct deps_reg *reg_last = &deps->reg_last[i]; 1366169689Skan add_dependence_list_and_free (insn, ®_last->sets, 0, 136790075Sobrien REG_DEP_OUTPUT); 1368169689Skan add_dependence_list_and_free (insn, ®_last->clobbers, 0, 136990075Sobrien REG_DEP_OUTPUT); 1370169689Skan add_dependence_list_and_free (insn, ®_last->uses, 0, 137190075Sobrien REG_DEP_ANTI); 137290075Sobrien reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets); 137390075Sobrien reg_last->uses_length = 0; 137490075Sobrien reg_last->clobbers_length = 0; 1375119256Skan CLEAR_REGNO_REG_SET (&deps->reg_conditional_sets, i); 1376169689Skan } 137790075Sobrien } 137890075Sobrien 137990075Sobrien IOR_REG_SET (&deps->reg_last_in_use, reg_pending_uses); 138090075Sobrien IOR_REG_SET (&deps->reg_last_in_use, reg_pending_clobbers); 138190075Sobrien IOR_REG_SET (&deps->reg_last_in_use, reg_pending_sets); 138290075Sobrien } 138390075Sobrien CLEAR_REG_SET (reg_pending_uses); 138490075Sobrien CLEAR_REG_SET (reg_pending_clobbers); 138590075Sobrien CLEAR_REG_SET (reg_pending_sets); 138690075Sobrien 1387102780Skan /* If we are currently in a libcall scheduling group, then mark the 1388102780Skan current insn as being in a scheduling group and that it can not 1389102780Skan be moved into a different basic block. */ 1390102780Skan 1391102780Skan if (deps->libcall_block_tail_insn) 1392102780Skan { 1393169689Skan SCHED_GROUP_P (insn) = 1; 1394102780Skan CANT_MOVE (insn) = 1; 1395102780Skan } 1396102780Skan 139790075Sobrien /* If a post-call group is still open, see if it should remain so. 139890075Sobrien This insn must be a simple move of a hard reg to a pseudo or 139990075Sobrien vice-versa. 
140090075Sobrien 140190075Sobrien We must avoid moving these insns for correctness on 140290075Sobrien SMALL_REGISTER_CLASS machines, and for special registers like 140390075Sobrien PIC_OFFSET_TABLE_REGNUM. For simplicity, extend this to all 140490075Sobrien hard regs for all targets. */ 140590075Sobrien 140690075Sobrien if (deps->in_post_call_group_p) 140790075Sobrien { 140890075Sobrien rtx tmp, set = single_set (insn); 140990075Sobrien int src_regno, dest_regno; 141090075Sobrien 141190075Sobrien if (set == NULL) 141290075Sobrien goto end_call_group; 141390075Sobrien 141490075Sobrien tmp = SET_DEST (set); 141590075Sobrien if (GET_CODE (tmp) == SUBREG) 141690075Sobrien tmp = SUBREG_REG (tmp); 1417169689Skan if (REG_P (tmp)) 141890075Sobrien dest_regno = REGNO (tmp); 141990075Sobrien else 142090075Sobrien goto end_call_group; 142190075Sobrien 142290075Sobrien tmp = SET_SRC (set); 142390075Sobrien if (GET_CODE (tmp) == SUBREG) 142490075Sobrien tmp = SUBREG_REG (tmp); 1425169689Skan if ((GET_CODE (tmp) == PLUS 1426169689Skan || GET_CODE (tmp) == MINUS) 1427169689Skan && REG_P (XEXP (tmp, 0)) 1428169689Skan && REGNO (XEXP (tmp, 0)) == STACK_POINTER_REGNUM 1429169689Skan && dest_regno == STACK_POINTER_REGNUM) 1430169689Skan src_regno = STACK_POINTER_REGNUM; 1431169689Skan else if (REG_P (tmp)) 143290075Sobrien src_regno = REGNO (tmp); 143390075Sobrien else 143490075Sobrien goto end_call_group; 143590075Sobrien 143690075Sobrien if (src_regno < FIRST_PSEUDO_REGISTER 143790075Sobrien || dest_regno < FIRST_PSEUDO_REGISTER) 143890075Sobrien { 1439169689Skan if (deps->in_post_call_group_p == post_call_initial) 1440169689Skan deps->in_post_call_group_p = post_call; 1441169689Skan 1442169689Skan SCHED_GROUP_P (insn) = 1; 144390075Sobrien CANT_MOVE (insn) = 1; 144490075Sobrien } 144590075Sobrien else 144690075Sobrien { 144790075Sobrien end_call_group: 1448169689Skan deps->in_post_call_group_p = not_post_call; 144990075Sobrien } 145090075Sobrien } 1451169689Skan 1452169689Skan /* 
Fixup the dependencies in the sched group. */ 1453169689Skan if (SCHED_GROUP_P (insn)) 1454169689Skan fixup_sched_groups (insn); 145590075Sobrien} 145690075Sobrien 145790075Sobrien/* Analyze every insn between HEAD and TAIL inclusive, creating LOG_LINKS 145890075Sobrien for every dependency. */ 145990075Sobrien 146090075Sobrienvoid 1461132718Skansched_analyze (struct deps *deps, rtx head, rtx tail) 146290075Sobrien{ 146390075Sobrien rtx insn; 146490075Sobrien 146590075Sobrien if (current_sched_info->use_cselib) 1466169689Skan cselib_init (true); 146790075Sobrien 1468169689Skan /* Before reload, if the previous block ended in a call, show that 1469169689Skan we are inside a post-call group, so as to keep the lifetimes of 1470169689Skan hard registers correct. */ 1471169689Skan if (! reload_completed && !LABEL_P (head)) 1472169689Skan { 1473169689Skan insn = prev_nonnote_insn (head); 1474169689Skan if (insn && CALL_P (insn)) 1475169689Skan deps->in_post_call_group_p = post_call_initial; 1476169689Skan } 147790075Sobrien for (insn = head;; insn = NEXT_INSN (insn)) 147890075Sobrien { 1479117395Skan rtx link, end_seq, r0, set; 1480102780Skan 1481169689Skan if (NONJUMP_INSN_P (insn) || JUMP_P (insn)) 148290075Sobrien { 148390075Sobrien /* Clear out the stale LOG_LINKS from flow. */ 148490075Sobrien free_INSN_LIST_list (&LOG_LINKS (insn)); 148590075Sobrien 148690075Sobrien /* Make each JUMP_INSN a scheduling barrier for memory 148790075Sobrien references. */ 1488169689Skan if (JUMP_P (insn)) 148990075Sobrien { 149090075Sobrien /* Keep the list a reasonable size. 
*/ 149190075Sobrien if (deps->pending_flush_length++ > MAX_PENDING_LIST_LENGTH) 149290075Sobrien flush_pending_lists (deps, insn, true, true); 149390075Sobrien else 149490075Sobrien deps->last_pending_memory_flush 149590075Sobrien = alloc_INSN_LIST (insn, deps->last_pending_memory_flush); 149690075Sobrien } 1497169689Skan sched_analyze_insn (deps, PATTERN (insn), insn); 149890075Sobrien } 1499169689Skan else if (CALL_P (insn)) 150090075Sobrien { 150190075Sobrien int i; 150290075Sobrien 150390075Sobrien CANT_MOVE (insn) = 1; 150490075Sobrien 150590075Sobrien /* Clear out the stale LOG_LINKS from flow. */ 150690075Sobrien free_INSN_LIST_list (&LOG_LINKS (insn)); 150790075Sobrien 150890075Sobrien if (find_reg_note (insn, REG_SETJMP, NULL)) 150990075Sobrien { 151090075Sobrien /* This is setjmp. Assume that all registers, not just 151190075Sobrien hard registers, may be clobbered by this call. */ 1512132718Skan reg_pending_barrier = MOVE_BARRIER; 151390075Sobrien } 151490075Sobrien else 151590075Sobrien { 151690075Sobrien for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 151790075Sobrien /* A call may read and modify global register variables. */ 151890075Sobrien if (global_regs[i]) 151990075Sobrien { 152090075Sobrien SET_REGNO_REG_SET (reg_pending_sets, i); 152190075Sobrien SET_REGNO_REG_SET (reg_pending_uses, i); 152290075Sobrien } 1523117395Skan /* Other call-clobbered hard regs may be clobbered. 1524117395Skan Since we only have a choice between 'might be clobbered' 1525117395Skan and 'definitely not clobbered', we must include all 1526117395Skan partly call-clobbered registers here. 
*/ 1527117395Skan else if (HARD_REGNO_CALL_PART_CLOBBERED (i, reg_raw_mode[i]) 1528117395Skan || TEST_HARD_REG_BIT (regs_invalidated_by_call, i)) 152990075Sobrien SET_REGNO_REG_SET (reg_pending_clobbers, i); 153090075Sobrien /* We don't know what set of fixed registers might be used 153190075Sobrien by the function, but it is certain that the stack pointer 153290075Sobrien is among them, but be conservative. */ 153390075Sobrien else if (fixed_regs[i]) 153490075Sobrien SET_REGNO_REG_SET (reg_pending_uses, i); 153590075Sobrien /* The frame pointer is normally not used by the function 153690075Sobrien itself, but by the debugger. */ 153790075Sobrien /* ??? MIPS o32 is an exception. It uses the frame pointer 153890075Sobrien in the macro expansion of jal but does not represent this 153990075Sobrien fact in the call_insn rtl. */ 154090075Sobrien else if (i == FRAME_POINTER_REGNUM 154190075Sobrien || (i == HARD_FRAME_POINTER_REGNUM 154290075Sobrien && (! reload_completed || frame_pointer_needed))) 154390075Sobrien SET_REGNO_REG_SET (reg_pending_uses, i); 154490075Sobrien } 154590075Sobrien 154690075Sobrien /* For each insn which shouldn't cross a call, add a dependence 154790075Sobrien between that insn and this call insn. */ 1548169689Skan add_dependence_list_and_free (insn, &deps->sched_before_next_call, 1, 154990075Sobrien REG_DEP_ANTI); 155090075Sobrien 1551169689Skan sched_analyze_insn (deps, PATTERN (insn), insn); 155290075Sobrien 155390075Sobrien /* In the absence of interprocedural alias analysis, we must flush 155490075Sobrien all pending reads and writes, and start new dependencies starting 155590075Sobrien from here. But only flush writes for constant calls (which may 155690075Sobrien be passed a pointer to something we haven't written yet). */ 155790075Sobrien flush_pending_lists (deps, insn, true, !CONST_OR_PURE_CALL_P (insn)); 155890075Sobrien 155990075Sobrien /* Remember the last function call for limiting lifetimes. 
*/ 156090075Sobrien free_INSN_LIST_list (&deps->last_function_call); 156190075Sobrien deps->last_function_call = alloc_INSN_LIST (insn, NULL_RTX); 156290075Sobrien 156390075Sobrien /* Before reload, begin a post-call group, so as to keep the 156490075Sobrien lifetimes of hard registers correct. */ 156590075Sobrien if (! reload_completed) 1566169689Skan deps->in_post_call_group_p = post_call; 156790075Sobrien } 156890075Sobrien 1569169689Skan /* EH_REGION insn notes can not appear until well after we complete 1570169689Skan scheduling. */ 1571169689Skan if (NOTE_P (insn)) 1572169689Skan gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_EH_REGION_BEG 1573169689Skan && NOTE_LINE_NUMBER (insn) != NOTE_INSN_EH_REGION_END); 157490075Sobrien 157590075Sobrien if (current_sched_info->use_cselib) 157690075Sobrien cselib_process_insn (insn); 1577102780Skan 1578102780Skan /* Now that we have completed handling INSN, check and see if it is 1579102780Skan a CLOBBER beginning a libcall block. If it is, record the 1580132718Skan end of the libcall sequence. 1581102780Skan 1582102780Skan We want to schedule libcall blocks as a unit before reload. While 1583102780Skan this restricts scheduling, it preserves the meaning of a libcall 1584102780Skan block. 1585102780Skan 1586102780Skan As a side effect, we may get better code due to decreased register 1587102780Skan pressure as well as less chance of a foreign insn appearing in 1588102780Skan a libcall block. */ 1589102780Skan if (!reload_completed 1590102780Skan /* Note we may have nested libcall sequences. We only care about 1591132718Skan the outermost libcall sequence. */ 1592102780Skan && deps->libcall_block_tail_insn == 0 1593102780Skan /* The sequence must start with a clobber of a register. 
*/ 1594169689Skan && NONJUMP_INSN_P (insn) 1595102780Skan && GET_CODE (PATTERN (insn)) == CLOBBER 1596169689Skan && (r0 = XEXP (PATTERN (insn), 0), REG_P (r0)) 1597169689Skan && REG_P (XEXP (PATTERN (insn), 0)) 1598102780Skan /* The CLOBBER must also have a REG_LIBCALL note attached. */ 1599102780Skan && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0 1600102780Skan && (end_seq = XEXP (link, 0)) != 0 1601102780Skan /* The insn referenced by the REG_LIBCALL note must be a 1602102780Skan simple nop copy with the same destination as the register 1603102780Skan mentioned in the clobber. */ 1604102780Skan && (set = single_set (end_seq)) != 0 1605102780Skan && SET_DEST (set) == r0 && SET_SRC (set) == r0 1606102780Skan /* And finally the insn referenced by the REG_LIBCALL must 1607102780Skan also contain a REG_EQUAL note and a REG_RETVAL note. */ 1608102780Skan && find_reg_note (end_seq, REG_EQUAL, NULL_RTX) != 0 1609102780Skan && find_reg_note (end_seq, REG_RETVAL, NULL_RTX) != 0) 1610102780Skan deps->libcall_block_tail_insn = XEXP (link, 0); 1611102780Skan 1612102780Skan /* If we have reached the end of a libcall block, then close the 1613102780Skan block. */ 1614102780Skan if (deps->libcall_block_tail_insn == insn) 1615102780Skan deps->libcall_block_tail_insn = 0; 1616102780Skan 161790075Sobrien if (insn == tail) 161890075Sobrien { 161990075Sobrien if (current_sched_info->use_cselib) 162090075Sobrien cselib_finish (); 162190075Sobrien return; 162290075Sobrien } 162390075Sobrien } 1624169689Skan gcc_unreachable (); 162590075Sobrien} 162690075Sobrien 1627132718Skan 1628132718Skan/* The following function adds forward dependence (FROM, TO) with 1629132718Skan given DEP_TYPE. The forward dependence should be not exist before. 
*/ 1630132718Skan 1631132718Skanvoid 1632169689Skanadd_forw_dep (rtx to, rtx link) 1633132718Skan{ 1634169689Skan rtx new_link, from; 1635132718Skan 1636169689Skan from = XEXP (link, 0); 1637169689Skan 1638132718Skan#ifdef ENABLE_CHECKING 1639132718Skan /* If add_dependence is working properly there should never 1640132718Skan be notes, deleted insns or duplicates in the backward 1641132718Skan links. Thus we need not check for them here. 1642132718Skan 1643132718Skan However, if we have enabled checking we might as well go 1644132718Skan ahead and verify that add_dependence worked properly. */ 1645169689Skan gcc_assert (INSN_P (from)); 1646169689Skan gcc_assert (!INSN_DELETED_P (from)); 1647169689Skan if (true_dependency_cache) 1648169689Skan { 1649169689Skan gcc_assert (!bitmap_bit_p (&forward_dependency_cache[INSN_LUID (from)], 1650169689Skan INSN_LUID (to))); 1651169689Skan bitmap_set_bit (&forward_dependency_cache[INSN_LUID (from)], 1652169689Skan INSN_LUID (to)); 1653169689Skan } 1654169689Skan else 1655169689Skan gcc_assert (!find_insn_list (to, INSN_DEPEND (from))); 1656132718Skan#endif 1657132718Skan 1658169689Skan if (!(current_sched_info->flags & USE_DEPS_LIST)) 1659169689Skan new_link = alloc_INSN_LIST (to, INSN_DEPEND (from)); 1660169689Skan else 1661169689Skan new_link = alloc_DEPS_LIST (to, INSN_DEPEND (from), DEP_STATUS (link)); 1662132718Skan 1663169689Skan PUT_REG_NOTE_KIND (new_link, REG_NOTE_KIND (link)); 1664132718Skan 1665132718Skan INSN_DEPEND (from) = new_link; 1666132718Skan INSN_DEP_COUNT (to) += 1; 1667132718Skan} 1668132718Skan 166990075Sobrien/* Examine insns in the range [ HEAD, TAIL ] and Use the backward 167090075Sobrien dependences from LOG_LINKS to build forward dependences in 167190075Sobrien INSN_DEPEND. 
*/ 167290075Sobrien 167390075Sobrienvoid 1674132718Skancompute_forward_dependences (rtx head, rtx tail) 167590075Sobrien{ 1676169689Skan rtx insn; 167790075Sobrien rtx next_tail; 167890075Sobrien 167990075Sobrien next_tail = NEXT_INSN (tail); 168090075Sobrien for (insn = head; insn != next_tail; insn = NEXT_INSN (insn)) 168190075Sobrien { 1682169689Skan rtx link; 1683169689Skan 168490075Sobrien if (! INSN_P (insn)) 168590075Sobrien continue; 1686169689Skan 1687169689Skan if (current_sched_info->flags & DO_SPECULATION) 1688169689Skan { 1689169689Skan rtx new = 0, link, next; 169090075Sobrien 1691169689Skan for (link = LOG_LINKS (insn); link; link = next) 1692169689Skan { 1693169689Skan next = XEXP (link, 1); 1694169689Skan adjust_add_sorted_back_dep (insn, link, &new); 1695169689Skan } 1696169689Skan 1697169689Skan LOG_LINKS (insn) = new; 1698169689Skan } 1699169689Skan 170090075Sobrien for (link = LOG_LINKS (insn); link; link = XEXP (link, 1)) 1701169689Skan add_forw_dep (insn, link); 170290075Sobrien } 170390075Sobrien} 170490075Sobrien 170590075Sobrien/* Initialize variables for region data dependence analysis. 170690075Sobrien n_bbs is the number of region blocks. */ 170790075Sobrien 170890075Sobrienvoid 1709132718Skaninit_deps (struct deps *deps) 171090075Sobrien{ 171190075Sobrien int max_reg = (reload_completed ? 
FIRST_PSEUDO_REGISTER : max_reg_num ()); 171290075Sobrien 171390075Sobrien deps->max_reg = max_reg; 1714169689Skan deps->reg_last = XCNEWVEC (struct deps_reg, max_reg); 171590075Sobrien INIT_REG_SET (&deps->reg_last_in_use); 1716119256Skan INIT_REG_SET (&deps->reg_conditional_sets); 171790075Sobrien 171890075Sobrien deps->pending_read_insns = 0; 171990075Sobrien deps->pending_read_mems = 0; 172090075Sobrien deps->pending_write_insns = 0; 172190075Sobrien deps->pending_write_mems = 0; 172290075Sobrien deps->pending_lists_length = 0; 172390075Sobrien deps->pending_flush_length = 0; 172490075Sobrien deps->last_pending_memory_flush = 0; 172590075Sobrien deps->last_function_call = 0; 172690075Sobrien deps->sched_before_next_call = 0; 1727169689Skan deps->in_post_call_group_p = not_post_call; 1728102780Skan deps->libcall_block_tail_insn = 0; 172990075Sobrien} 173090075Sobrien 173190075Sobrien/* Free insn lists found in DEPS. */ 173290075Sobrien 173390075Sobrienvoid 1734132718Skanfree_deps (struct deps *deps) 173590075Sobrien{ 1736169689Skan unsigned i; 1737169689Skan reg_set_iterator rsi; 173890075Sobrien 173990075Sobrien free_INSN_LIST_list (&deps->pending_read_insns); 174090075Sobrien free_EXPR_LIST_list (&deps->pending_read_mems); 174190075Sobrien free_INSN_LIST_list (&deps->pending_write_insns); 174290075Sobrien free_EXPR_LIST_list (&deps->pending_write_mems); 174390075Sobrien free_INSN_LIST_list (&deps->last_pending_memory_flush); 174490075Sobrien 174590075Sobrien /* Without the EXECUTE_IF_SET, this loop is executed max_reg * nr_regions 1746132718Skan times. For a testcase with 42000 regs and 8000 small basic blocks, 174790075Sobrien this loop accounted for nearly 60% (84 sec) of the total -O2 runtime. 
*/ 1748169689Skan EXECUTE_IF_SET_IN_REG_SET (&deps->reg_last_in_use, 0, i, rsi) 174990075Sobrien { 175090075Sobrien struct deps_reg *reg_last = &deps->reg_last[i]; 1751117395Skan if (reg_last->uses) 1752117395Skan free_INSN_LIST_list (®_last->uses); 1753117395Skan if (reg_last->sets) 1754117395Skan free_INSN_LIST_list (®_last->sets); 1755117395Skan if (reg_last->clobbers) 1756117395Skan free_INSN_LIST_list (®_last->clobbers); 1757169689Skan } 175890075Sobrien CLEAR_REG_SET (&deps->reg_last_in_use); 1759119256Skan CLEAR_REG_SET (&deps->reg_conditional_sets); 176090075Sobrien 176190075Sobrien free (deps->reg_last); 176290075Sobrien} 176390075Sobrien 176490075Sobrien/* If it is profitable to use them, initialize caches for tracking 1765132718Skan dependency information. LUID is the number of insns to be scheduled, 176690075Sobrien it is used in the estimate of profitability. */ 176790075Sobrien 176890075Sobrienvoid 1769132718Skaninit_dependency_caches (int luid) 177090075Sobrien{ 177190075Sobrien /* ?!? We could save some memory by computing a per-region luid mapping 177290075Sobrien which could reduce both the number of vectors in the cache and the size 177390075Sobrien of each vector. Instead we just avoid the cache entirely unless the 177490075Sobrien average number of instructions in a basic block is very high. See 177590075Sobrien the comment before the declaration of true_dependency_cache for 177690075Sobrien what we consider "very high". */ 177790075Sobrien if (luid / n_basic_blocks > 100 * 5) 177890075Sobrien { 1779169689Skan cache_size = 0; 1780169689Skan extend_dependency_caches (luid, true); 1781169689Skan } 1782169689Skan} 1783169689Skan 1784169689Skan/* Create or extend (depending on CREATE_P) dependency caches to 1785169689Skan size N. 
*/ 1786169689Skanvoid 1787169689Skanextend_dependency_caches (int n, bool create_p) 1788169689Skan{ 1789169689Skan if (create_p || true_dependency_cache) 1790169689Skan { 1791169689Skan int i, luid = cache_size + n; 1792169689Skan 1793169689Skan true_dependency_cache = XRESIZEVEC (bitmap_head, true_dependency_cache, 1794169689Skan luid); 1795169689Skan output_dependency_cache = XRESIZEVEC (bitmap_head, 1796169689Skan output_dependency_cache, luid); 1797169689Skan anti_dependency_cache = XRESIZEVEC (bitmap_head, anti_dependency_cache, 1798169689Skan luid); 179990075Sobrien#ifdef ENABLE_CHECKING 1800169689Skan forward_dependency_cache = XRESIZEVEC (bitmap_head, 1801169689Skan forward_dependency_cache, luid); 180290075Sobrien#endif 1803169689Skan if (current_sched_info->flags & DO_SPECULATION) 1804169689Skan spec_dependency_cache = XRESIZEVEC (bitmap_head, spec_dependency_cache, 1805169689Skan luid); 1806169689Skan 1807169689Skan for (i = cache_size; i < luid; i++) 1808132718Skan { 1809132718Skan bitmap_initialize (&true_dependency_cache[i], 0); 1810169689Skan bitmap_initialize (&output_dependency_cache[i], 0); 1811132718Skan bitmap_initialize (&anti_dependency_cache[i], 0); 1812132718Skan#ifdef ENABLE_CHECKING 1813132718Skan bitmap_initialize (&forward_dependency_cache[i], 0); 1814132718Skan#endif 1815169689Skan if (current_sched_info->flags & DO_SPECULATION) 1816169689Skan bitmap_initialize (&spec_dependency_cache[i], 0); 1817132718Skan } 1818132718Skan cache_size = luid; 181990075Sobrien } 182090075Sobrien} 182190075Sobrien 182290075Sobrien/* Free the caches allocated in init_dependency_caches. 
*/ 182390075Sobrien 182490075Sobrienvoid 1825132718Skanfree_dependency_caches (void) 182690075Sobrien{ 182790075Sobrien if (true_dependency_cache) 182890075Sobrien { 1829132718Skan int i; 1830132718Skan 1831132718Skan for (i = 0; i < cache_size; i++) 1832132718Skan { 1833132718Skan bitmap_clear (&true_dependency_cache[i]); 1834169689Skan bitmap_clear (&output_dependency_cache[i]); 1835132718Skan bitmap_clear (&anti_dependency_cache[i]); 1836132718Skan#ifdef ENABLE_CHECKING 1837132718Skan bitmap_clear (&forward_dependency_cache[i]); 1838132718Skan#endif 1839169689Skan if (current_sched_info->flags & DO_SPECULATION) 1840169689Skan bitmap_clear (&spec_dependency_cache[i]); 1841132718Skan } 1842132718Skan free (true_dependency_cache); 184390075Sobrien true_dependency_cache = NULL; 1844169689Skan free (output_dependency_cache); 1845169689Skan output_dependency_cache = NULL; 1846132718Skan free (anti_dependency_cache); 184790075Sobrien anti_dependency_cache = NULL; 184890075Sobrien#ifdef ENABLE_CHECKING 1849132718Skan free (forward_dependency_cache); 185090075Sobrien forward_dependency_cache = NULL; 185190075Sobrien#endif 1852169689Skan if (current_sched_info->flags & DO_SPECULATION) 1853169689Skan { 1854169689Skan free (spec_dependency_cache); 1855169689Skan spec_dependency_cache = NULL; 1856169689Skan } 185790075Sobrien } 185890075Sobrien} 185990075Sobrien 186090075Sobrien/* Initialize some global variables needed by the dependency analysis 186190075Sobrien code. */ 186290075Sobrien 186390075Sobrienvoid 1864132718Skaninit_deps_global (void) 186590075Sobrien{ 1866169689Skan reg_pending_sets = ALLOC_REG_SET (®_obstack); 1867169689Skan reg_pending_clobbers = ALLOC_REG_SET (®_obstack); 1868169689Skan reg_pending_uses = ALLOC_REG_SET (®_obstack); 1869132718Skan reg_pending_barrier = NOT_A_BARRIER; 187090075Sobrien} 187190075Sobrien 187290075Sobrien/* Free everything used by the dependency analysis code. 
*/ 187390075Sobrien 187490075Sobrienvoid 1875132718Skanfinish_deps_global (void) 187690075Sobrien{ 187790075Sobrien FREE_REG_SET (reg_pending_sets); 187890075Sobrien FREE_REG_SET (reg_pending_clobbers); 187990075Sobrien FREE_REG_SET (reg_pending_uses); 188090075Sobrien} 1881169689Skan 1882169689Skan/* Insert LINK into the dependence chain pointed to by LINKP and 1883169689Skan maintain the sort order. */ 1884169689Skanstatic void 1885169689Skanadjust_add_sorted_back_dep (rtx insn, rtx link, rtx *linkp) 1886169689Skan{ 1887169689Skan gcc_assert (current_sched_info->flags & DO_SPECULATION); 1888169689Skan 1889169689Skan /* If the insn cannot move speculatively, but the link is speculative, 1890169689Skan make it hard dependence. */ 1891169689Skan if (HAS_INTERNAL_DEP (insn) 1892169689Skan && (DEP_STATUS (link) & SPECULATIVE)) 1893169689Skan { 1894169689Skan DEP_STATUS (link) &= ~SPECULATIVE; 1895169689Skan 1896169689Skan if (true_dependency_cache) 1897169689Skan bitmap_clear_bit (&spec_dependency_cache[INSN_LUID (insn)], 1898169689Skan INSN_LUID (XEXP (link, 0))); 1899169689Skan } 1900169689Skan 1901169689Skan /* Non-speculative links go at the head of LOG_LINKS, followed by 1902169689Skan speculative links. */ 1903169689Skan if (DEP_STATUS (link) & SPECULATIVE) 1904169689Skan while (*linkp && !(DEP_STATUS (*linkp) & SPECULATIVE)) 1905169689Skan linkp = &XEXP (*linkp, 1); 1906169689Skan 1907169689Skan XEXP (link, 1) = *linkp; 1908169689Skan *linkp = link; 1909169689Skan} 1910169689Skan 1911169689Skan/* Move the dependence pointed to by LINKP to the back dependencies 1912169689Skan of INSN, and also add this dependence to the forward ones. All LOG_LINKS, 1913169689Skan except one pointed to by LINKP, must be sorted. 
*/ 1914169689Skanstatic void 1915169689Skanadjust_back_add_forw_dep (rtx insn, rtx *linkp) 1916169689Skan{ 1917169689Skan rtx link; 1918169689Skan 1919169689Skan gcc_assert (current_sched_info->flags & DO_SPECULATION); 1920169689Skan 1921169689Skan link = *linkp; 1922169689Skan *linkp = XEXP (*linkp, 1); 1923169689Skan 1924169689Skan adjust_add_sorted_back_dep (insn, link, &LOG_LINKS (insn)); 1925169689Skan add_forw_dep (insn, link); 1926169689Skan} 1927169689Skan 1928169689Skan/* Remove forward dependence ELEM from the DEPS_LIST of INSN. */ 1929169689Skanstatic void 1930169689Skandelete_forw_dep (rtx insn, rtx elem) 1931169689Skan{ 1932169689Skan gcc_assert (current_sched_info->flags & DO_SPECULATION); 1933169689Skan 1934169689Skan#ifdef ENABLE_CHECKING 1935169689Skan if (true_dependency_cache) 1936169689Skan bitmap_clear_bit (&forward_dependency_cache[INSN_LUID (elem)], 1937169689Skan INSN_LUID (insn)); 1938169689Skan#endif 1939169689Skan 1940169689Skan remove_free_DEPS_LIST_elem (insn, &INSN_DEPEND (elem)); 1941169689Skan INSN_DEP_COUNT (insn)--; 1942169689Skan} 1943169689Skan 1944169689Skan/* Estimate the weakness of dependence between MEM1 and MEM2. */ 1945169689Skanstatic dw_t 1946169689Skanestimate_dep_weak (rtx mem1, rtx mem2) 1947169689Skan{ 1948169689Skan rtx r1, r2; 1949169689Skan 1950169689Skan if (mem1 == mem2) 1951169689Skan /* MEMs are the same - don't speculate. */ 1952169689Skan return MIN_DEP_WEAK; 1953169689Skan 1954169689Skan r1 = XEXP (mem1, 0); 1955169689Skan r2 = XEXP (mem2, 0); 1956169689Skan 1957169689Skan if (r1 == r2 1958169689Skan || (REG_P (r1) && REG_P (r2) 1959169689Skan && REGNO (r1) == REGNO (r2))) 1960169689Skan /* Again, MEMs are the same. */ 1961169689Skan return MIN_DEP_WEAK; 1962169689Skan else if ((REG_P (r1) && !REG_P (r2)) 1963169689Skan || (!REG_P (r1) && REG_P (r2))) 1964169689Skan /* Different addressing modes - reason to be more speculative, 1965169689Skan than usual. 
*/ 1966169689Skan return NO_DEP_WEAK - (NO_DEP_WEAK - UNCERTAIN_DEP_WEAK) / 2; 1967169689Skan else 1968169689Skan /* We can't say anything about the dependence. */ 1969169689Skan return UNCERTAIN_DEP_WEAK; 1970169689Skan} 1971169689Skan 1972169689Skan/* Add or update backward dependence between INSN and ELEM with type DEP_TYPE. 1973169689Skan This function can handle same INSN and ELEM (INSN == ELEM). 1974169689Skan It is a convenience wrapper. */ 1975169689Skanvoid 1976169689Skanadd_dependence (rtx insn, rtx elem, enum reg_note dep_type) 1977169689Skan{ 1978169689Skan ds_t ds; 1979169689Skan 1980169689Skan if (dep_type == REG_DEP_TRUE) 1981169689Skan ds = DEP_TRUE; 1982169689Skan else if (dep_type == REG_DEP_OUTPUT) 1983169689Skan ds = DEP_OUTPUT; 1984169689Skan else if (dep_type == REG_DEP_ANTI) 1985169689Skan ds = DEP_ANTI; 1986169689Skan else 1987169689Skan gcc_unreachable (); 1988169689Skan 1989169689Skan maybe_add_or_update_back_dep_1 (insn, elem, dep_type, ds, 0, 0, 0); 1990169689Skan} 1991169689Skan 1992169689Skan/* Add or update backward dependence between INSN and ELEM 1993169689Skan with given type DEP_TYPE and dep_status DS. 1994169689Skan This function is a convenience wrapper. */ 1995169689Skanenum DEPS_ADJUST_RESULT 1996169689Skanadd_or_update_back_dep (rtx insn, rtx elem, enum reg_note dep_type, ds_t ds) 1997169689Skan{ 1998169689Skan return add_or_update_back_dep_1 (insn, elem, dep_type, ds, 0, 0, 0); 1999169689Skan} 2000169689Skan 2001169689Skan/* Add or update both backward and forward dependencies between INSN and ELEM 2002169689Skan with given type DEP_TYPE and dep_status DS. 
*/ 2003169689Skanvoid 2004169689Skanadd_or_update_back_forw_dep (rtx insn, rtx elem, enum reg_note dep_type, 2005169689Skan ds_t ds) 2006169689Skan{ 2007169689Skan enum DEPS_ADJUST_RESULT res; 2008169689Skan rtx *linkp; 2009169689Skan 2010169689Skan res = add_or_update_back_dep_1 (insn, elem, dep_type, ds, 0, 0, &linkp); 2011169689Skan 2012169689Skan if (res == DEP_CHANGED || res == DEP_CREATED) 2013169689Skan { 2014169689Skan if (res == DEP_CHANGED) 2015169689Skan delete_forw_dep (insn, elem); 2016169689Skan else if (res == DEP_CREATED) 2017169689Skan linkp = &LOG_LINKS (insn); 2018169689Skan 2019169689Skan adjust_back_add_forw_dep (insn, linkp); 2020169689Skan } 2021169689Skan} 2022169689Skan 2023169689Skan/* Add both backward and forward dependencies between INSN and ELEM 2024169689Skan with given type DEP_TYPE and dep_status DS. */ 2025169689Skanvoid 2026169689Skanadd_back_forw_dep (rtx insn, rtx elem, enum reg_note dep_type, ds_t ds) 2027169689Skan{ 2028169689Skan add_back_dep (insn, elem, dep_type, ds); 2029169689Skan adjust_back_add_forw_dep (insn, &LOG_LINKS (insn)); 2030169689Skan} 2031169689Skan 2032169689Skan/* Remove both backward and forward dependencies between INSN and ELEM. 
*/ 2033169689Skanvoid 2034169689Skandelete_back_forw_dep (rtx insn, rtx elem) 2035169689Skan{ 2036169689Skan gcc_assert (current_sched_info->flags & DO_SPECULATION); 2037169689Skan 2038169689Skan if (true_dependency_cache != NULL) 2039169689Skan { 2040169689Skan bitmap_clear_bit (&true_dependency_cache[INSN_LUID (insn)], 2041169689Skan INSN_LUID (elem)); 2042169689Skan bitmap_clear_bit (&anti_dependency_cache[INSN_LUID (insn)], 2043169689Skan INSN_LUID (elem)); 2044169689Skan bitmap_clear_bit (&output_dependency_cache[INSN_LUID (insn)], 2045169689Skan INSN_LUID (elem)); 2046169689Skan bitmap_clear_bit (&spec_dependency_cache[INSN_LUID (insn)], 2047169689Skan INSN_LUID (elem)); 2048169689Skan } 2049169689Skan 2050169689Skan remove_free_DEPS_LIST_elem (elem, &LOG_LINKS (insn)); 2051169689Skan delete_forw_dep (insn, elem); 2052169689Skan} 2053169689Skan 2054169689Skan/* Return weakness of speculative type TYPE in the dep_status DS. */ 2055169689Skandw_t 2056169689Skanget_dep_weak (ds_t ds, ds_t type) 2057169689Skan{ 2058169689Skan ds = ds & type; 2059169689Skan switch (type) 2060169689Skan { 2061169689Skan case BEGIN_DATA: ds >>= BEGIN_DATA_BITS_OFFSET; break; 2062169689Skan case BE_IN_DATA: ds >>= BE_IN_DATA_BITS_OFFSET; break; 2063169689Skan case BEGIN_CONTROL: ds >>= BEGIN_CONTROL_BITS_OFFSET; break; 2064169689Skan case BE_IN_CONTROL: ds >>= BE_IN_CONTROL_BITS_OFFSET; break; 2065169689Skan default: gcc_unreachable (); 2066169689Skan } 2067169689Skan 2068169689Skan gcc_assert (MIN_DEP_WEAK <= ds && ds <= MAX_DEP_WEAK); 2069169689Skan return (dw_t) ds; 2070169689Skan} 2071169689Skan 2072169689Skan/* Return the dep_status, which has the same parameters as DS, except for 2073169689Skan speculative type TYPE, that will have weakness DW. 
*/ 2074169689Skands_t 2075169689Skanset_dep_weak (ds_t ds, ds_t type, dw_t dw) 2076169689Skan{ 2077169689Skan gcc_assert (MIN_DEP_WEAK <= dw && dw <= MAX_DEP_WEAK); 2078169689Skan 2079169689Skan ds &= ~type; 2080169689Skan switch (type) 2081169689Skan { 2082169689Skan case BEGIN_DATA: ds |= ((ds_t) dw) << BEGIN_DATA_BITS_OFFSET; break; 2083169689Skan case BE_IN_DATA: ds |= ((ds_t) dw) << BE_IN_DATA_BITS_OFFSET; break; 2084169689Skan case BEGIN_CONTROL: ds |= ((ds_t) dw) << BEGIN_CONTROL_BITS_OFFSET; break; 2085169689Skan case BE_IN_CONTROL: ds |= ((ds_t) dw) << BE_IN_CONTROL_BITS_OFFSET; break; 2086169689Skan default: gcc_unreachable (); 2087169689Skan } 2088169689Skan return ds; 2089169689Skan} 2090169689Skan 2091169689Skan/* Return the join of two dep_statuses DS1 and DS2. */ 2092169689Skands_t 2093169689Skands_merge (ds_t ds1, ds_t ds2) 2094169689Skan{ 2095169689Skan ds_t ds, t; 2096169689Skan 2097169689Skan gcc_assert ((ds1 & SPECULATIVE) && (ds2 & SPECULATIVE)); 2098169689Skan 2099169689Skan ds = (ds1 & DEP_TYPES) | (ds2 & DEP_TYPES); 2100169689Skan 2101169689Skan t = FIRST_SPEC_TYPE; 2102169689Skan do 2103169689Skan { 2104169689Skan if ((ds1 & t) && !(ds2 & t)) 2105169689Skan ds |= ds1 & t; 2106169689Skan else if (!(ds1 & t) && (ds2 & t)) 2107169689Skan ds |= ds2 & t; 2108169689Skan else if ((ds1 & t) && (ds2 & t)) 2109169689Skan { 2110169689Skan ds_t dw; 2111169689Skan 2112169689Skan dw = ((ds_t) get_dep_weak (ds1, t)) * ((ds_t) get_dep_weak (ds2, t)); 2113169689Skan dw /= MAX_DEP_WEAK; 2114169689Skan if (dw < MIN_DEP_WEAK) 2115169689Skan dw = MIN_DEP_WEAK; 2116169689Skan 2117169689Skan ds = set_dep_weak (ds, t, (dw_t) dw); 2118169689Skan } 2119169689Skan 2120169689Skan if (t == LAST_SPEC_TYPE) 2121169689Skan break; 2122169689Skan t <<= SPEC_TYPE_SHIFT; 2123169689Skan } 2124169689Skan while (1); 2125169689Skan 2126169689Skan return ds; 2127169689Skan} 2128169689Skan 2129169689Skan#ifdef INSN_SCHEDULING 2130169689Skan#ifdef ENABLE_CHECKING 2131169689Skan/* 
Verify that dependence type and status are consistent. 2132169689Skan If RELAXED_P is true, then skip dep_weakness checks. */ 2133169689Skanstatic void 2134169689Skancheck_dep_status (enum reg_note dt, ds_t ds, bool relaxed_p) 2135169689Skan{ 2136169689Skan /* Check that dependence type contains the same bits as the status. */ 2137169689Skan if (dt == REG_DEP_TRUE) 2138169689Skan gcc_assert (ds & DEP_TRUE); 2139169689Skan else if (dt == REG_DEP_OUTPUT) 2140169689Skan gcc_assert ((ds & DEP_OUTPUT) 2141169689Skan && !(ds & DEP_TRUE)); 2142169689Skan else 2143169689Skan gcc_assert ((dt == REG_DEP_ANTI) 2144169689Skan && (ds & DEP_ANTI) 2145169689Skan && !(ds & (DEP_OUTPUT | DEP_TRUE))); 2146169689Skan 2147169689Skan /* HARD_DEP can not appear in dep_status of a link. */ 2148169689Skan gcc_assert (!(ds & HARD_DEP)); 2149169689Skan 2150169689Skan /* Check that dependence status is set correctly when speculation is not 2151169689Skan supported. */ 2152169689Skan if (!(current_sched_info->flags & DO_SPECULATION)) 2153169689Skan gcc_assert (!(ds & SPECULATIVE)); 2154169689Skan else if (ds & SPECULATIVE) 2155169689Skan { 2156169689Skan if (!relaxed_p) 2157169689Skan { 2158169689Skan ds_t type = FIRST_SPEC_TYPE; 2159169689Skan 2160169689Skan /* Check that dependence weakness is in proper range. */ 2161169689Skan do 2162169689Skan { 2163169689Skan if (ds & type) 2164169689Skan get_dep_weak (ds, type); 2165169689Skan 2166169689Skan if (type == LAST_SPEC_TYPE) 2167169689Skan break; 2168169689Skan type <<= SPEC_TYPE_SHIFT; 2169169689Skan } 2170169689Skan while (1); 2171169689Skan } 2172169689Skan 2173169689Skan if (ds & BEGIN_SPEC) 2174169689Skan { 2175169689Skan /* Only true dependence can be data speculative. 
*/ 2176169689Skan if (ds & BEGIN_DATA) 2177169689Skan gcc_assert (ds & DEP_TRUE); 2178169689Skan 2179169689Skan /* Control dependencies in the insn scheduler are represented by 2180169689Skan anti-dependencies, therefore only anti dependence can be 2181169689Skan control speculative. */ 2182169689Skan if (ds & BEGIN_CONTROL) 2183169689Skan gcc_assert (ds & DEP_ANTI); 2184169689Skan } 2185169689Skan else 2186169689Skan { 2187169689Skan /* Subsequent speculations should resolve true dependencies. */ 2188169689Skan gcc_assert ((ds & DEP_TYPES) == DEP_TRUE); 2189169689Skan } 2190169689Skan 2191169689Skan /* Check that true and anti dependencies can't have other speculative 2192169689Skan statuses. */ 2193169689Skan if (ds & DEP_TRUE) 2194169689Skan gcc_assert (ds & (BEGIN_DATA | BE_IN_SPEC)); 2195169689Skan /* An output dependence can't be speculative at all. */ 2196169689Skan gcc_assert (!(ds & DEP_OUTPUT)); 2197169689Skan if (ds & DEP_ANTI) 2198169689Skan gcc_assert (ds & BEGIN_CONTROL); 2199169689Skan } 2200169689Skan} 2201169689Skan#endif 2202169689Skan#endif 2203